from homeassistant.components.flo.const import DOMAIN as FLO_DOMAIN
from homeassistant.components.flo.switch import (
ATTR_REVERT_TO_MODE,
ATTR_SLEEP_MINUTES,
SERVICE_RUN_HEALTH_TEST,
SERVICE_SET_AWAY_MODE,
SERVICE_SET_HOME_MODE,
SERVICE_SET_SLEEP_MODE,
SYSTEM_MODE_HOME,
)
from homeassistant.const import ATTR_ENTITY_ID, CONF_PASSWORD, CONF_USERNAME
from homeassistant.setup import async_setup_component
from .common import TEST_PASSWORD, TEST_USER_ID
SWITCH_ENTITY_ID = "switch.shutoff_valve"
async def test_services(hass, config_entry, aioclient_mock_fixture, aioclient_mock):
"""Test Flo services."""
config_entry.add_to_hass(hass)
assert await async_setup_component(
hass, FLO_DOMAIN, {CONF_USERNAME: TEST_USER_ID, CONF_PASSWORD: TEST_PASSWORD}
)
await hass.async_block_till_done()
assert len(hass.data[FLO_DOMAIN][config_entry.entry_id]["devices"]) == 1
assert aioclient_mock.call_count == 4
await hass.services.async_call(
FLO_DOMAIN,
SERVICE_RUN_HEALTH_TEST,
{ATTR_ENTITY_ID: SWITCH_ENTITY_ID},
blocking=True,
)
await hass.async_block_till_done()
assert aioclient_mock.call_count == 5
await hass.services.async_call(
FLO_DOMAIN,
SERVICE_SET_AWAY_MODE,
{ATTR_ENTITY_ID: SWITCH_ENTITY_ID},
blocking=True,
)
await hass.async_block_till_done()
assert aioclient_mock.call_count == 6
await hass.services.async_call(
FLO_DOMAIN,
SERVICE_SET_HOME_MODE,
{ATTR_ENTITY_ID: SWITCH_ENTITY_ID},
blocking=True,
)
await hass.async_block_till_done()
assert aioclient_mock.call_count == 7
await hass.services.async_call(
FLO_DOMAIN,
SERVICE_SET_SLEEP_MODE,
{
ATTR_ENTITY_ID: SWITCH_ENTITY_ID,
ATTR_REVERT_TO_MODE: SYSTEM_MODE_HOME,
ATTR_SLEEP_MINUTES: 120,
},
blocking=True,
)
await hass.async_block_till_done()
assert aioclient_mock.call_count == 8
|
from .common import setup_ozw
async def test_lock(hass, lock_data, sent_messages, lock_msg, caplog):
"""Test lock."""
receive_message = await setup_ozw(hass, fixture=lock_data)
# Test loaded
state = hass.states.get("lock.danalock_v3_btze_locked")
assert state is not None
assert state.state == "unlocked"
# Test locking
await hass.services.async_call(
"lock", "lock", {"entity_id": "lock.danalock_v3_btze_locked"}, blocking=True
)
assert len(sent_messages) == 1
msg = sent_messages[0]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": True, "ValueIDKey": 173572112}
# Feedback on state
lock_msg.decode()
lock_msg.payload["Value"] = True
lock_msg.encode()
receive_message(lock_msg)
await hass.async_block_till_done()
state = hass.states.get("lock.danalock_v3_btze_locked")
assert state is not None
assert state.state == "locked"
# Test unlocking
await hass.services.async_call(
"lock", "unlock", {"entity_id": "lock.danalock_v3_btze_locked"}, blocking=True
)
assert len(sent_messages) == 2
msg = sent_messages[1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": False, "ValueIDKey": 173572112}
# Test set_usercode
await hass.services.async_call(
"ozw",
"set_usercode",
{
"entity_id": "lock.danalock_v3_btze_locked",
"usercode": 123456,
"code_slot": 1,
},
blocking=True,
)
assert len(sent_messages) == 3
msg = sent_messages[2]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": "123456", "ValueIDKey": 281475150299159}
# Test clear_usercode
await hass.services.async_call(
"ozw",
"clear_usercode",
{"entity_id": "lock.danalock_v3_btze_locked", "code_slot": 1},
blocking=True,
)
assert len(sent_messages) == 5
msg = sent_messages[4]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 1, "ValueIDKey": 72057594219905046}
# Test set_usercode invalid length
await hass.services.async_call(
"ozw",
"set_usercode",
{
"entity_id": "lock.danalock_v3_btze_locked",
"usercode": "123",
"code_slot": 1,
},
blocking=True,
)
assert len(sent_messages) == 5
assert "User code must be at least 4 digits" in caplog.text
|
from onvif.exceptions import ONVIFError
from zeep.exceptions import Fault
from homeassistant import config_entries, data_entry_flow
from homeassistant.components.onvif import config_flow
from tests.async_mock import AsyncMock, MagicMock, patch
from tests.common import MockConfigEntry
URN = "urn:uuid:123456789"
NAME = "TestCamera"
HOST = "1.2.3.4"
PORT = 80
USERNAME = "admin"
PASSWORD = "12345"
MAC = "aa:bb:cc:dd:ee"
SERIAL_NUMBER = "ABCDEFGHIJK"
DISCOVERY = [
{
"EPR": URN,
config_flow.CONF_NAME: NAME,
config_flow.CONF_HOST: HOST,
config_flow.CONF_PORT: PORT,
"MAC": MAC,
},
{
"EPR": "urn:uuid:987654321",
config_flow.CONF_NAME: "TestCamera2",
config_flow.CONF_HOST: "5.6.7.8",
config_flow.CONF_PORT: PORT,
"MAC": "ee:dd:cc:bb:aa",
},
]
def setup_mock_onvif_camera(
mock_onvif_camera,
with_h264=True,
two_profiles=False,
with_interfaces=True,
with_serial=True,
):
"""Prepare mock onvif.ONVIFCamera."""
devicemgmt = MagicMock()
device_info = MagicMock()
device_info.SerialNumber = SERIAL_NUMBER if with_serial else None
devicemgmt.GetDeviceInformation = AsyncMock(return_value=device_info)
interface = MagicMock()
interface.Enabled = True
interface.Info.HwAddress = MAC
devicemgmt.GetNetworkInterfaces = AsyncMock(
return_value=[interface] if with_interfaces else []
)
media_service = MagicMock()
profile1 = MagicMock()
profile1.VideoEncoderConfiguration.Encoding = "H264" if with_h264 else "MJPEG"
profile2 = MagicMock()
profile2.VideoEncoderConfiguration.Encoding = "H264" if two_profiles else "MJPEG"
media_service.GetProfiles = AsyncMock(return_value=[profile1, profile2])
mock_onvif_camera.update_xaddrs = AsyncMock(return_value=True)
mock_onvif_camera.create_devicemgmt_service = MagicMock(return_value=devicemgmt)
mock_onvif_camera.create_media_service = MagicMock(return_value=media_service)
mock_onvif_camera.close = AsyncMock(return_value=None)
def mock_constructor(
host,
port,
user,
passwd,
wsdl_dir,
encrypt=True,
no_cache=False,
adjust_time=False,
transport=None,
):
"""Fake the controller constructor."""
return mock_onvif_camera
mock_onvif_camera.side_effect = mock_constructor
def setup_mock_discovery(
mock_discovery, with_name=False, with_mac=False, two_devices=False
):
"""Prepare mock discovery result."""
services = []
for item in DISCOVERY:
service = MagicMock()
service.getXAddrs = MagicMock(
return_value=[
f"http://{item[config_flow.CONF_HOST]}:{item[config_flow.CONF_PORT]}/onvif/device_service"
]
)
service.getEPR = MagicMock(return_value=item["EPR"])
scopes = []
if with_name:
scope = MagicMock()
scope.getValue = MagicMock(
return_value=f"onvif://www.onvif.org/name/{item[config_flow.CONF_NAME]}"
)
scopes.append(scope)
if with_mac:
scope = MagicMock()
scope.getValue = MagicMock(
return_value=f"onvif://www.onvif.org/mac/{item['MAC']}"
)
scopes.append(scope)
service.getScopes = MagicMock(return_value=scopes)
services.append(service)
mock_discovery.return_value = services
def setup_mock_device(mock_device):
"""Prepare mock ONVIFDevice."""
mock_device.async_setup = AsyncMock(return_value=True)
def mock_constructor(hass, config):
"""Fake the controller constructor."""
return mock_device
mock_device.side_effect = mock_constructor
async def setup_onvif_integration(
hass,
config=None,
options=None,
unique_id=MAC,
entry_id="1",
source="user",
):
"""Create an ONVIF config entry."""
if not config:
config = {
config_flow.CONF_NAME: NAME,
config_flow.CONF_HOST: HOST,
config_flow.CONF_PORT: PORT,
config_flow.CONF_USERNAME: USERNAME,
config_flow.CONF_PASSWORD: PASSWORD,
}
config_entry = MockConfigEntry(
domain=config_flow.DOMAIN,
source=source,
data={**config},
connection_class=config_entries.CONN_CLASS_LOCAL_PUSH,
options=options or {},
entry_id=entry_id,
unique_id=unique_id,
)
config_entry.add_to_hass(hass)
with patch(
"homeassistant.components.onvif.config_flow.get_device"
) as mock_onvif_camera, patch(
"homeassistant.components.onvif.config_flow.wsdiscovery"
) as mock_discovery, patch(
"homeassistant.components.onvif.ONVIFDevice"
) as mock_device:
setup_mock_onvif_camera(mock_onvif_camera, two_profiles=True)
# no discovery
mock_discovery.return_value = []
setup_mock_device(mock_device)
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
return config_entry
async def test_flow_discovered_devices(hass):
"""Test that config flow works for discovered devices."""
result = await hass.config_entries.flow.async_init(
config_flow.DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
with patch(
"homeassistant.components.onvif.config_flow.get_device"
) as mock_onvif_camera, patch(
"homeassistant.components.onvif.config_flow.wsdiscovery"
) as mock_discovery, patch(
"homeassistant.components.onvif.ONVIFDevice"
) as mock_device:
setup_mock_onvif_camera(mock_onvif_camera)
setup_mock_discovery(mock_discovery)
setup_mock_device(mock_device)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "device"
assert len(result["data_schema"].schema[config_flow.CONF_HOST].container) == 3
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={config_flow.CONF_HOST: f"{URN} ({HOST})"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "auth"
with patch(
"homeassistant.components.onvif.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.onvif.async_setup_entry", return_value=True
) as mock_setup_entry:
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
config_flow.CONF_USERNAME: USERNAME,
config_flow.CONF_PASSWORD: PASSWORD,
},
)
await hass.async_block_till_done()
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == f"{URN} - {MAC}"
assert result["data"] == {
config_flow.CONF_NAME: URN,
config_flow.CONF_HOST: HOST,
config_flow.CONF_PORT: PORT,
config_flow.CONF_USERNAME: USERNAME,
config_flow.CONF_PASSWORD: PASSWORD,
}
async def test_flow_discovered_devices_ignore_configured_manual_input(hass):
"""Test that config flow discovery ignores configured devices."""
await setup_onvif_integration(hass)
result = await hass.config_entries.flow.async_init(
config_flow.DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
with patch(
"homeassistant.components.onvif.config_flow.get_device"
) as mock_onvif_camera, patch(
"homeassistant.components.onvif.config_flow.wsdiscovery"
) as mock_discovery, patch(
"homeassistant.components.onvif.ONVIFDevice"
) as mock_device:
setup_mock_onvif_camera(mock_onvif_camera)
setup_mock_discovery(mock_discovery, with_mac=True)
setup_mock_device(mock_device)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "device"
assert len(result["data_schema"].schema[config_flow.CONF_HOST].container) == 2
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={config_flow.CONF_HOST: config_flow.CONF_MANUAL_INPUT},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "manual_input"
async def test_flow_discovery_ignore_existing_and_abort(hass):
"""Test that config flow discovery ignores setup devices."""
await setup_onvif_integration(hass)
await setup_onvif_integration(
hass,
config={
config_flow.CONF_NAME: DISCOVERY[1]["EPR"],
config_flow.CONF_HOST: DISCOVERY[1][config_flow.CONF_HOST],
config_flow.CONF_PORT: DISCOVERY[1][config_flow.CONF_PORT],
config_flow.CONF_USERNAME: "",
config_flow.CONF_PASSWORD: "",
},
unique_id=DISCOVERY[1]["MAC"],
entry_id="2",
)
result = await hass.config_entries.flow.async_init(
config_flow.DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
with patch(
"homeassistant.components.onvif.config_flow.get_device"
) as mock_onvif_camera, patch(
"homeassistant.components.onvif.config_flow.wsdiscovery"
) as mock_discovery, patch(
"homeassistant.components.onvif.ONVIFDevice"
) as mock_device:
setup_mock_onvif_camera(mock_onvif_camera)
setup_mock_discovery(mock_discovery, with_name=True, with_mac=True)
setup_mock_device(mock_device)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)
# It should skip to manual entry if the only devices are already configured
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "manual_input"
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
config_flow.CONF_NAME: NAME,
config_flow.CONF_HOST: HOST,
config_flow.CONF_PORT: PORT,
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "auth"
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
config_flow.CONF_USERNAME: USERNAME,
config_flow.CONF_PASSWORD: PASSWORD,
},
)
# It should abort if already configured and entered manually
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
async def test_flow_manual_entry(hass):
"""Test that config flow works for discovered devices."""
result = await hass.config_entries.flow.async_init(
config_flow.DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
with patch(
"homeassistant.components.onvif.config_flow.get_device"
) as mock_onvif_camera, patch(
"homeassistant.components.onvif.config_flow.wsdiscovery"
) as mock_discovery, patch(
"homeassistant.components.onvif.ONVIFDevice"
) as mock_device:
setup_mock_onvif_camera(mock_onvif_camera, two_profiles=True)
# no discovery
mock_discovery.return_value = []
setup_mock_device(mock_device)
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "manual_input"
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
config_flow.CONF_NAME: NAME,
config_flow.CONF_HOST: HOST,
config_flow.CONF_PORT: PORT,
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "auth"
with patch(
"homeassistant.components.onvif.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.onvif.async_setup_entry", return_value=True
) as mock_setup_entry:
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
config_flow.CONF_USERNAME: USERNAME,
config_flow.CONF_PASSWORD: PASSWORD,
},
)
await hass.async_block_till_done()
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == f"{NAME} - {MAC}"
assert result["data"] == {
config_flow.CONF_NAME: NAME,
config_flow.CONF_HOST: HOST,
config_flow.CONF_PORT: PORT,
config_flow.CONF_USERNAME: USERNAME,
config_flow.CONF_PASSWORD: PASSWORD,
}
async def test_flow_import_no_mac(hass):
"""Test that config flow uses Serial Number when no MAC available."""
with patch(
"homeassistant.components.onvif.config_flow.get_device"
) as mock_onvif_camera, patch(
"homeassistant.components.onvif.ONVIFDevice"
) as mock_device, patch(
"homeassistant.components.onvif.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.onvif.async_setup_entry", return_value=True
) as mock_setup_entry:
setup_mock_onvif_camera(mock_onvif_camera, with_interfaces=False)
setup_mock_device(mock_device)
result = await hass.config_entries.flow.async_init(
config_flow.DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={
config_flow.CONF_NAME: NAME,
config_flow.CONF_HOST: HOST,
config_flow.CONF_PORT: PORT,
config_flow.CONF_USERNAME: USERNAME,
config_flow.CONF_PASSWORD: PASSWORD,
},
)
await hass.async_block_till_done()
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == f"{NAME} - {SERIAL_NUMBER}"
assert result["data"] == {
config_flow.CONF_NAME: NAME,
config_flow.CONF_HOST: HOST,
config_flow.CONF_PORT: PORT,
config_flow.CONF_USERNAME: USERNAME,
config_flow.CONF_PASSWORD: PASSWORD,
}
async def test_flow_import_no_mac_or_serial(hass):
"""Test that config flow fails when no MAC or Serial Number available."""
with patch(
"homeassistant.components.onvif.config_flow.get_device"
) as mock_onvif_camera:
setup_mock_onvif_camera(
mock_onvif_camera, with_interfaces=False, with_serial=False
)
result = await hass.config_entries.flow.async_init(
config_flow.DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={
config_flow.CONF_NAME: NAME,
config_flow.CONF_HOST: HOST,
config_flow.CONF_PORT: PORT,
config_flow.CONF_USERNAME: USERNAME,
config_flow.CONF_PASSWORD: PASSWORD,
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "no_mac"
async def test_flow_import_no_h264(hass):
"""Test that config flow fails when no MAC available."""
with patch(
"homeassistant.components.onvif.config_flow.get_device"
) as mock_onvif_camera:
setup_mock_onvif_camera(mock_onvif_camera, with_h264=False)
result = await hass.config_entries.flow.async_init(
config_flow.DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={
config_flow.CONF_NAME: NAME,
config_flow.CONF_HOST: HOST,
config_flow.CONF_PORT: PORT,
config_flow.CONF_USERNAME: USERNAME,
config_flow.CONF_PASSWORD: PASSWORD,
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "no_h264"
async def test_flow_import_onvif_api_error(hass):
"""Test that config flow fails when ONVIF API fails."""
with patch(
"homeassistant.components.onvif.config_flow.get_device"
) as mock_onvif_camera:
setup_mock_onvif_camera(mock_onvif_camera)
mock_onvif_camera.create_devicemgmt_service = MagicMock(
side_effect=ONVIFError("Could not get device mgmt service")
)
result = await hass.config_entries.flow.async_init(
config_flow.DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={
config_flow.CONF_NAME: NAME,
config_flow.CONF_HOST: HOST,
config_flow.CONF_PORT: PORT,
config_flow.CONF_USERNAME: USERNAME,
config_flow.CONF_PASSWORD: PASSWORD,
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "onvif_error"
async def test_flow_import_onvif_auth_error(hass):
"""Test that config flow fails when ONVIF API fails."""
with patch(
"homeassistant.components.onvif.config_flow.get_device"
) as mock_onvif_camera:
setup_mock_onvif_camera(mock_onvif_camera)
mock_onvif_camera.create_devicemgmt_service = MagicMock(
side_effect=Fault("Auth Error")
)
result = await hass.config_entries.flow.async_init(
config_flow.DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={
config_flow.CONF_NAME: NAME,
config_flow.CONF_HOST: HOST,
config_flow.CONF_PORT: PORT,
config_flow.CONF_USERNAME: USERNAME,
config_flow.CONF_PASSWORD: PASSWORD,
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "auth"
assert result["errors"]["base"] == "cannot_connect"
async def test_option_flow(hass):
"""Test config flow options."""
entry = await setup_onvif_integration(hass)
result = await hass.config_entries.options.async_init(entry.entry_id)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "onvif_devices"
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={
config_flow.CONF_EXTRA_ARGUMENTS: "",
config_flow.CONF_RTSP_TRANSPORT: config_flow.RTSP_TRANS_PROTOCOLS[1],
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["data"] == {
config_flow.CONF_EXTRA_ARGUMENTS: "",
config_flow.CONF_RTSP_TRANSPORT: config_flow.RTSP_TRANS_PROTOCOLS[1],
}
|
import asyncio
import logging
import aiohttp
import async_timeout
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_HOST, CONF_PIN, CONF_TIMEOUT, PERCENTAGE
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
CONF_ALLOW_UNREACHABLE = "allow_unreachable"
DEFAULT_TIMEOUT = 5
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PIN): vol.All(vol.Coerce(str), vol.Match(r"\d{4}")),
vol.Optional(CONF_ALLOW_UNREACHABLE, default=True): cv.boolean,
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
}
)
ERROR_STATE = [
"blade-blocked",
"repositioning-error",
"wire-bounced",
"blade-blocked",
"outside-wire",
"mower-lifted",
"alarm-6",
"upside-down",
"alarm-8",
"collision-sensor-blocked",
"mower-tilted",
"charge-error",
"battery-error",
]
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Worx Landroid sensors."""
for typ in ("battery", "state"):
async_add_entities([WorxLandroidSensor(typ, config)])
class WorxLandroidSensor(Entity):
"""Implementation of a Worx Landroid sensor."""
def __init__(self, sensor, config):
"""Initialize a Worx Landroid sensor."""
self._state = None
self.sensor = sensor
self.host = config.get(CONF_HOST)
self.pin = config.get(CONF_PIN)
self.timeout = config.get(CONF_TIMEOUT)
self.allow_unreachable = config.get(CONF_ALLOW_UNREACHABLE)
self.url = f"http://{self.host}/jsondata.cgi"
@property
def name(self):
"""Return the name of the sensor."""
return f"worxlandroid-{self.sensor}"
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement of the sensor."""
if self.sensor == "battery":
return PERCENTAGE
return None
async def async_update(self):
"""Update the sensor data from the mower."""
connection_error = False
try:
session = async_get_clientsession(self.hass)
with async_timeout.timeout(self.timeout):
auth = aiohttp.helpers.BasicAuth("admin", self.pin)
mower_response = await session.get(self.url, auth=auth)
except (asyncio.TimeoutError, aiohttp.ClientError):
if self.allow_unreachable is False:
_LOGGER.error("Error connecting to mower at %s", self.url)
connection_error = True
# connection error
if connection_error is True and self.allow_unreachable is False:
if self.sensor == "error":
self._state = "yes"
elif self.sensor == "state":
self._state = "connection-error"
# connection success
elif connection_error is False:
# set the expected content type to be text/html
            # since the mower incorrectly returns it...
data = await mower_response.json(content_type="text/html")
# sensor battery
if self.sensor == "battery":
self._state = data["perc_batt"]
# sensor error
elif self.sensor == "error":
self._state = "no" if self.get_error(data) is None else "yes"
# sensor state
elif self.sensor == "state":
self._state = self.get_state(data)
else:
if self.sensor == "error":
self._state = "no"
@staticmethod
def get_error(obj):
"""Get the mower error."""
for i, err in enumerate(obj["allarmi"]):
if i != 2: # ignore wire bounce errors
if err == 1:
return ERROR_STATE[i]
return None
def get_state(self, obj):
"""Get the state of the mower."""
state = self.get_error(obj)
if state is None:
if obj["batteryChargerState"] == "charging":
return obj["batteryChargerState"]
return obj["state"]
return state
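
# Editorial sketch (not part of the original platform): a hypothetical
# /jsondata.cgi payload built only from the keys that async_update(),
# get_error() and get_state() read above. All values are invented.
_EXAMPLE_PAYLOAD = {
    "perc_batt": 87,                    # battery sensor value
    "allarmi": [0] * len(ERROR_STATE),  # no alarm flags raised
    "batteryChargerState": "charging",  # reported while docked
    "state": "home",                    # raw mower state
}


def _example_state_mapping():
    """Show how the helpers above interpret the hypothetical payload."""
    sensor = WorxLandroidSensor("state", {CONF_HOST: "192.0.2.1", CONF_PIN: "0000"})
    # With every alarm flag at 0, get_error() returns None, so get_state()
    # falls back to the charger state ("charging" here).
    return sensor.get_state(_EXAMPLE_PAYLOAD)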
|
import os
from stash.tests.stashtest import StashTestCase
class Sha1sumTests(StashTestCase):
"""tests for the sha1sum command."""
def setUp(self):
"""setup the tests"""
self.cwd = self.get_data_path()
StashTestCase.setUp(self)
def get_data_path(self):
"""return the data/ sibling path"""
return os.path.abspath(os.path.join(os.path.dirname(__file__), "data"))
def test_help(self):
"""test sha1sum --help"""
output = self.run_command("sha1sum --help", exitcode=0)
# check for code words in output
self.assertIn("sha1sum", output)
self.assertIn("-h", output)
self.assertIn("-c", output)
def test_filehash(self):
"""tests the hashes of the files in data/"""
fp = self.get_data_path()
for fn in os.listdir(fp):
if "." in fn:
# file used for something else
continue
expected_hash = fn
fullp = os.path.join(fp, fn)
output = self.run_command("sha1sum " + fullp, exitcode=0)
result = output.split(" ")[0]
self.assertEqual(result, expected_hash)
def test_checkhash(self):
"""test sha1sum -c"""
output = self.run_command("sha1sum -c results.sha1sum", exitcode=0)
self.assertIn("Pass", output)
self.assertNotIn("Fail", output)
def test_checkhash_fail(self):
"""test failure sha1sum -c with invalid data"""
output = self.run_command("sha1sum -c wrong_results.sha1sum", exitcode=1)
self.assertIn("Pass", output) # some files should have the correct hash
self.assertIn("Fail", output)
def test_hash_stdin_implicit(self):
"""test hashing of stdin without arg"""
output = self.run_command("echo test | sha1sum", exitcode=0).replace("\n", "")
expected = "4e1243bd22c66e76c2ba9eddc1f91394e57f9f83"
self.assertEqual(output, expected)
def test_hash_stdin_explicit(self):
"""test hashing of stdin with '-' arg"""
output = self.run_command("echo test | sha1sum -", exitcode=0).replace("\n", "")
expected = "4e1243bd22c66e76c2ba9eddc1f91394e57f9f83"
self.assertEqual(output, expected)
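
# Editorial aside, not part of the test suite: the digest asserted in the two
# stdin tests above can be reproduced directly with hashlib, because
# `echo test` writes "test" followed by a newline.
def _expected_stdin_digest():
    import hashlib
    return hashlib.sha1(b"test\n").hexdigest()
    # -> "4e1243bd22c66e76c2ba9eddc1f91394e57f9f83"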
|
import os
# os.environ['TF_CPP_MIN_LOG_LEVEL']='2'
import tensorflow as tf
import numpy as np
from yellowfin import YFOptimizer
from tensorflow.python.ops import variables
import time
n_dim = 1000000
n_iter = 50
def tune_everything(x0squared, C, T, gmin, gmax):
# First tune based on dynamic range
  if C == 0:
    dr = gmax / gmin
    mustar = ((np.sqrt(dr) - 1) / (np.sqrt(dr) + 1))**2
    alpha_star = (1 + np.sqrt(mustar))**2 / gmax
    return alpha_star, mustar
dist_to_opt = x0squared
grad_var = C
max_curv = gmax
min_curv = gmin
const_fact = dist_to_opt * min_curv**2 / 2 / grad_var
coef = [-1, 3, -(3 + const_fact), 1]
roots = np.roots(coef)
roots = roots[np.real(roots) > 0]
roots = roots[np.real(roots) < 1]
root = roots[np.argmin(np.imag(roots) ) ]
assert root > 0 and root < 1 and np.absolute(root.imag) < 1e-6
dr = max_curv / min_curv
assert max_curv >= min_curv
mu = max( ( (np.sqrt(dr) - 1) / (np.sqrt(dr) + 1) )**2, root**2)
lr_min = (1 - np.sqrt(mu) )**2 / min_curv
lr_max = (1 + np.sqrt(mu) )**2 / max_curv
alpha_star = lr_min
mustar = mu
return alpha_star, mustar
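
# Editorial sketch: a standalone call to tune_everything() with made-up
# curvature/variance statistics, just to illustrate the signature and the
# sanity of the returned step size and momentum. Not part of the test suite.
def _tune_everything_example():
  x0squared, C, T = 1.0, 0.1, 1       # invented statistics
  gmin, gmax = 1.0, 100.0             # invented curvature range
  lr, mu = tune_everything(x0squared, C, T, gmin, gmax)
  assert lr > 0.0 and 0.0 <= mu < 1.0
  return lr, mu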
def test_measurement():
opt = YFOptimizer(zero_debias=False)
w = tf.Variable(np.ones([n_dim, ] ), dtype=tf.float32, name="w", trainable=True)
b = tf.Variable(np.ones([1, ], dtype=np.float32), dtype=tf.float32, name="b", trainable=True)
x = tf.constant(np.ones([n_dim, ], dtype=np.float32), dtype=tf.float32)
loss = tf.multiply(w, x) + b
tvars = tf.trainable_variables()
w_grad_val = tf.placeholder(tf.float32, shape=(n_dim, ) )
b_grad_val = tf.placeholder(tf.float32, shape=(1, ) )
apply_op = opt.apply_gradients(zip([w_grad_val, b_grad_val], tvars) )
init_op = tf.global_variables_initializer()
with tf.Session() as sess:
sess.run(init_op)
target_h_max = 0.0
target_h_min = 0.0
g_norm_squared_avg = 0.0
g_norm_avg = 0.0
g_avg = 0.0
target_dist = 0.0
for i in range(n_iter):
feed_dict = {w_grad_val: (i + 1) * np.ones( [n_dim, ], dtype=np.float32),
b_grad_val: (i + 1) * np.ones( [1, ], dtype=np.float32) }
res = sess.run( [opt._curv_win, opt._h_max, opt._h_min, opt._grad_var, opt._dist_to_opt_avg, apply_op], feed_dict=feed_dict)
g_norm_squared_avg = 0.999 * g_norm_squared_avg \
+ 0.001 * np.sum(( (i + 1)*np.ones( [n_dim + 1, ] ) )**2)
g_norm_avg = 0.999 * g_norm_avg \
+ 0.001 * np.linalg.norm( (i + 1)*np.ones( [n_dim + 1, ] ) )
g_avg = 0.999 * g_avg + 0.001 * (i + 1)
target_h_max = 0.999 * target_h_max + 0.001 * (i + 1)**2*(n_dim + 1)
target_h_min = 0.999 * target_h_min + 0.001 * max(1, i + 2 - 20)**2*(n_dim + 1)
target_var = g_norm_squared_avg - g_avg**2 * (n_dim + 1)
target_dist = 0.999 * target_dist + 0.001 * g_norm_avg / g_norm_squared_avg
# print "iter ", i, " h max ", res[1], target_h_max, " h min ", res[2], target_h_min, \
# " var ", res[3], target_var, " dist ", res[4], target_dist
assert np.abs(target_h_max - res[1] ) < np.abs(target_h_max) * 1e-3
assert np.abs(target_h_min - res[2] ) < np.abs(target_h_min) * 1e-3
assert np.abs(target_var - res[3] ) < np.abs(res[3] ) * 1e-3
assert np.abs(target_dist - res[4] ) < np.abs(res[4] ) * 1e-3
print("sync measurement test passed!")
def test_lr_mu():
opt = YFOptimizer(zero_debias=False)
w = tf.Variable(np.ones([n_dim, ] ), dtype=tf.float32, name="w", trainable=True)
b = tf.Variable(np.ones([1, ], dtype=np.float32), dtype=tf.float32, name="b", trainable=True)
x = tf.constant(np.ones([n_dim, ], dtype=np.float32), dtype=tf.float32)
loss = tf.multiply(w, x) + b
tvars = tf.trainable_variables()
w_grad_val = tf.Variable(np.zeros( [n_dim, ] ), dtype=tf.float32, trainable=False)
b_grad_val = tf.Variable(np.zeros([1, ] ), dtype=tf.float32, trainable=False)
apply_op = opt.apply_gradients(zip([w_grad_val, b_grad_val], tvars) )
init_op = tf.global_variables_initializer()
with tf.Session() as sess:
sess.run(init_op)
target_h_max = 0.0
target_h_min = 0.0
g_norm_squared_avg = 0.0
g_norm_avg = 0.0
g_avg = 0.0
target_dist = 0.0
target_lr = 0.1
target_mu = 0.0
for i in range(n_iter):
sess.run(tf.assign(w_grad_val, (i + 1) * np.ones( [n_dim, ], dtype=np.float32) ) )
sess.run(tf.assign(b_grad_val, (i + 1) * np.ones( [1, ], dtype=np.float32) ) )
res = sess.run( [opt._curv_win, opt._h_max, opt._h_min, opt._grad_var, opt._dist_to_opt_avg,
opt._lr_var, opt._mu_var, apply_op] )
res[5] = opt._lr_var.eval()
res[6] = opt._mu_var.eval()
g_norm_squared_avg = 0.999 * g_norm_squared_avg \
+ 0.001 * np.sum(( (i + 1)*np.ones( [n_dim + 1, ] ) )**2)
g_norm_avg = 0.999 * g_norm_avg \
+ 0.001 * np.linalg.norm( (i + 1)*np.ones( [n_dim + 1, ] ) )
g_avg = 0.999 * g_avg + 0.001 * (i + 1)
target_h_max = 0.999 * target_h_max + 0.001 * (i + 1)**2*(n_dim + 1)
target_h_min = 0.999 * target_h_min + 0.001 * max(1, i + 2 - 20)**2*(n_dim + 1)
target_var = g_norm_squared_avg - g_avg**2 * (n_dim + 1)
target_dist = 0.999 * target_dist + 0.001 * g_norm_avg / g_norm_squared_avg
if i > 0:
lr, mu = tune_everything(target_dist**2, target_var, 1, target_h_min, target_h_max)
target_lr = 0.999 * target_lr + 0.001 * lr
target_mu = 0.999 * target_mu + 0.001 * mu
# print "iter ", i, " h max ", res[1], target_h_max, " h min ", res[2], target_h_min, \
# " var ", res[3], target_var, " dist ", res[4], target_dist
# print "iter ", i, " lr ", res[5], target_lr, " mu ", res[6], target_mu
assert np.abs(target_h_max - res[1] ) < np.abs(target_h_max) * 1e-3
assert np.abs(target_h_min - res[2] ) < np.abs(target_h_min) * 1e-3
assert np.abs(target_var - res[3] ) < np.abs(res[3] ) * 1e-3
assert np.abs(target_dist - res[4] ) < np.abs(res[4] ) * 1e-3
assert target_lr == 0.0 or np.abs(target_lr - res[5] ) < np.abs(res[5] ) * 1e-3
assert target_mu == 0.0 or np.abs(target_mu - res[6] ) < np.abs(res[6] ) * 5e-3
print("lr and mu computing test passed!")
if __name__ == "__main__":
# test gpu mode
with tf.variable_scope("test_sync_measurement"):
start = time.time()
test_measurement()
end = time.time()
print("GPU measurement test done in ", (end - start)/float(n_iter), " s/iter!")
with tf.variable_scope("test_sync_lr_mu"):
start = time.time()
test_lr_mu()
end = time.time()
print("GPU lr and mu test done in ", (end - start)/float(n_iter), " s/iter!")
# test cpu mode
with tf.variable_scope("test_sync_measurement_cpu"), tf.device("cpu:0"):
start = time.time()
test_measurement()
end = time.time()
print("CPU measurement test done in ", (end - start)/float(n_iter), " s/iter!")
with tf.variable_scope("test_sync_lr_mu_cpu"), tf.device("cpu:0"):
start = time.time()
test_lr_mu()
end = time.time()
print("CPU lr and mu test done in ", (end - start)/float(n_iter), " s/iter!")
|
import asyncio
from collections.abc import Mapping
from copy import deepcopy
import voluptuous as vol
from homeassistant.components.notify import (
ATTR_DATA,
ATTR_MESSAGE,
DOMAIN,
PLATFORM_SCHEMA,
BaseNotificationService,
)
from homeassistant.const import ATTR_SERVICE
import homeassistant.helpers.config_validation as cv
# mypy: allow-untyped-calls, allow-untyped-defs, no-check-untyped-defs
CONF_SERVICES = "services"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_SERVICES): vol.All(
cv.ensure_list,
[{vol.Required(ATTR_SERVICE): cv.slug, vol.Optional(ATTR_DATA): dict}],
)
}
)
def update(input_dict, update_source):
"""Deep update a dictionary.
Async friendly.
"""
for key, val in update_source.items():
if isinstance(val, Mapping):
recurse = update(input_dict.get(key, {}), val)
input_dict[key] = recurse
else:
input_dict[key] = update_source[key]
return input_dict
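
# Editorial sketch: the deep merge above lets a per-service "data" mapping
# override individual keys of the shared payload without clobbering its
# siblings. The dictionaries below are invented for illustration.
def _update_example():
    base = {"message": "hi", "data": {"priority": 1, "tag": "a"}}
    update(base, {"data": {"tag": "b"}})
    assert base == {"message": "hi", "data": {"priority": 1, "tag": "b"}}
    return base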
async def async_get_service(hass, config, discovery_info=None):
"""Get the Group notification service."""
return GroupNotifyPlatform(hass, config.get(CONF_SERVICES))
class GroupNotifyPlatform(BaseNotificationService):
"""Implement the notification service for the group notify platform."""
def __init__(self, hass, entities):
"""Initialize the service."""
self.hass = hass
self.entities = entities
async def async_send_message(self, message="", **kwargs):
"""Send message to all entities in the group."""
payload = {ATTR_MESSAGE: message}
payload.update({key: val for key, val in kwargs.items() if val})
tasks = []
for entity in self.entities:
            sending_payload = deepcopy(payload)
if entity.get(ATTR_DATA) is not None:
update(sending_payload, entity.get(ATTR_DATA))
tasks.append(
self.hass.services.async_call(
DOMAIN, entity.get(ATTR_SERVICE), sending_payload
)
)
if tasks:
await asyncio.wait(tasks)
|
import atexit
import inspect
import itertools
import os
import platform
import re
import sys
import traceback
from coverage import env
from coverage.backward import code_object
from coverage.disposition import FileDisposition, disposition_init
from coverage.files import TreeMatcher, FnmatchMatcher, ModuleMatcher
from coverage.files import prep_patterns, find_python_files, canonical_filename
from coverage.misc import CoverageException
from coverage.python import source_for_file, source_for_morf
# Pypy has some unusual stuff in the "stdlib". Consider those locations
# when deciding where the stdlib is. These modules are not used for anything,
# they are modules importable from the pypy lib directories, so that we can
# find those directories.
_structseq = _pypy_irc_topic = None
if env.PYPY:
try:
import _structseq
except ImportError:
pass
try:
import _pypy_irc_topic
except ImportError:
pass
def canonical_path(morf, directory=False):
"""Return the canonical path of the module or file `morf`.
If the module is a package, then return its directory. If it is a
module, then return its file, unless `directory` is True, in which
case return its enclosing directory.
"""
morf_path = canonical_filename(source_for_morf(morf))
if morf_path.endswith("__init__.py") or directory:
morf_path = os.path.split(morf_path)[0]
return morf_path
def name_for_module(filename, frame):
"""Get the name of the module for a filename and frame.
For configurability's sake, we allow __main__ modules to be matched by
their importable name.
If loaded via runpy (aka -m), we can usually recover the "original"
full dotted module name, otherwise, we resort to interpreting the
file name to get the module's name. In the case that the module name
can't be determined, None is returned.
"""
module_globals = frame.f_globals if frame is not None else {}
if module_globals is None: # pragma: only ironpython
# IronPython doesn't provide globals: https://github.com/IronLanguages/main/issues/1296
module_globals = {}
dunder_name = module_globals.get('__name__', None)
if isinstance(dunder_name, str) and dunder_name != '__main__':
# This is the usual case: an imported module.
return dunder_name
loader = module_globals.get('__loader__', None)
for attrname in ('fullname', 'name'): # attribute renamed in py3.2
if hasattr(loader, attrname):
fullname = getattr(loader, attrname)
else:
continue
if isinstance(fullname, str) and fullname != '__main__':
# Module loaded via: runpy -m
return fullname
# Script as first argument to Python command line.
inspectedname = inspect.getmodulename(filename)
if inspectedname is not None:
return inspectedname
else:
return dunder_name
def module_is_namespace(mod):
"""Is the module object `mod` a PEP420 namespace module?"""
return hasattr(mod, '__path__') and getattr(mod, '__file__', None) is None
def module_has_file(mod):
"""Does the module object `mod` have an existing __file__ ?"""
mod__file__ = getattr(mod, '__file__', None)
if mod__file__ is None:
return False
return os.path.exists(mod__file__)
class InOrOut(object):
"""Machinery for determining what files to measure."""
def __init__(self, warn, debug):
self.warn = warn
self.debug = debug
# The matchers for should_trace.
self.source_match = None
self.source_pkgs_match = None
self.pylib_paths = self.cover_paths = None
self.pylib_match = self.cover_match = None
self.include_match = self.omit_match = None
self.plugins = []
self.disp_class = FileDisposition
# The source argument can be directories or package names.
self.source = []
self.source_pkgs = []
self.source_pkgs_unmatched = []
self.omit = self.include = None
def configure(self, config):
"""Apply the configuration to get ready for decision-time."""
self.source_pkgs.extend(config.source_pkgs)
for src in config.source or []:
if os.path.isdir(src):
self.source.append(canonical_filename(src))
else:
self.source_pkgs.append(src)
self.source_pkgs_unmatched = self.source_pkgs[:]
self.omit = prep_patterns(config.run_omit)
self.include = prep_patterns(config.run_include)
# The directories for files considered "installed with the interpreter".
self.pylib_paths = set()
if not config.cover_pylib:
# Look at where some standard modules are located. That's the
# indication for "installed with the interpreter". In some
# environments (virtualenv, for example), these modules may be
# spread across a few locations. Look at all the candidate modules
# we've imported, and take all the different ones.
for m in (atexit, inspect, os, platform, _pypy_irc_topic, re, _structseq, traceback):
if m is not None and hasattr(m, "__file__"):
self.pylib_paths.add(canonical_path(m, directory=True))
if _structseq and not hasattr(_structseq, '__file__'):
# PyPy 2.4 has no __file__ in the builtin modules, but the code
# objects still have the file names. So dig into one to find
# the path to exclude. The "filename" might be synthetic,
# don't be fooled by those.
structseq_file = code_object(_structseq.structseq_new).co_filename
if not structseq_file.startswith("<"):
self.pylib_paths.add(canonical_path(structseq_file))
# To avoid tracing the coverage.py code itself, we skip anything
# located where we are.
self.cover_paths = [canonical_path(__file__, directory=True)]
if env.TESTING:
# Don't include our own test code.
self.cover_paths.append(os.path.join(self.cover_paths[0], "tests"))
# When testing, we use PyContracts, which should be considered
# part of coverage.py, and it uses six. Exclude those directories
# just as we exclude ourselves.
import contracts
import six
for mod in [contracts, six]:
self.cover_paths.append(canonical_path(mod))
def debug(msg):
if self.debug:
self.debug.write(msg)
# Create the matchers we need for should_trace
if self.source or self.source_pkgs:
against = []
if self.source:
self.source_match = TreeMatcher(self.source)
against.append("trees {!r}".format(self.source_match))
if self.source_pkgs:
self.source_pkgs_match = ModuleMatcher(self.source_pkgs)
against.append("modules {!r}".format(self.source_pkgs_match))
debug("Source matching against " + " and ".join(against))
else:
if self.cover_paths:
self.cover_match = TreeMatcher(self.cover_paths)
debug("Coverage code matching: {!r}".format(self.cover_match))
if self.pylib_paths:
self.pylib_match = TreeMatcher(self.pylib_paths)
debug("Python stdlib matching: {!r}".format(self.pylib_match))
if self.include:
self.include_match = FnmatchMatcher(self.include)
debug("Include matching: {!r}".format(self.include_match))
if self.omit:
self.omit_match = FnmatchMatcher(self.omit)
debug("Omit matching: {!r}".format(self.omit_match))
def should_trace(self, filename, frame=None):
"""Decide whether to trace execution in `filename`, with a reason.
This function is called from the trace function. As each new file name
is encountered, this function determines whether it is traced or not.
Returns a FileDisposition object.
"""
original_filename = filename
disp = disposition_init(self.disp_class, filename)
def nope(disp, reason):
"""Simple helper to make it easy to return NO."""
disp.trace = False
disp.reason = reason
return disp
if frame is not None:
# Compiled Python files have two file names: frame.f_code.co_filename is
# the file name at the time the .pyc was compiled. The second name is
# __file__, which is where the .pyc was actually loaded from. Since
# .pyc files can be moved after compilation (for example, by being
# installed), we look for __file__ in the frame and prefer it to the
# co_filename value.
dunder_file = frame.f_globals and frame.f_globals.get('__file__')
if dunder_file:
filename = source_for_file(dunder_file)
if original_filename and not original_filename.startswith('<'):
orig = os.path.basename(original_filename)
if orig != os.path.basename(filename):
# Files shouldn't be renamed when moved. This happens when
# exec'ing code. If it seems like something is wrong with
# the frame's file name, then just use the original.
filename = original_filename
if not filename:
# Empty string is pretty useless.
return nope(disp, "empty string isn't a file name")
if filename.startswith('memory:'):
return nope(disp, "memory isn't traceable")
if filename.startswith('<'):
# Lots of non-file execution is represented with artificial
# file names like "<string>", "<doctest readme.txt[0]>", or
# "<exec_function>". Don't ever trace these executions, since we
# can't do anything with the data later anyway.
return nope(disp, "not a real file name")
# pyexpat does a dumb thing, calling the trace function explicitly from
# C code with a C file name.
if re.search(r"[/\\]Modules[/\\]pyexpat.c", filename):
return nope(disp, "pyexpat lies about itself")
# Jython reports the .class file to the tracer, use the source file.
if filename.endswith("$py.class"):
filename = filename[:-9] + ".py"
canonical = canonical_filename(filename)
disp.canonical_filename = canonical
# Try the plugins, see if they have an opinion about the file.
plugin = None
for plugin in self.plugins.file_tracers:
if not plugin._coverage_enabled:
continue
try:
file_tracer = plugin.file_tracer(canonical)
if file_tracer is not None:
file_tracer._coverage_plugin = plugin
disp.trace = True
disp.file_tracer = file_tracer
if file_tracer.has_dynamic_source_filename():
disp.has_dynamic_filename = True
else:
disp.source_filename = canonical_filename(
file_tracer.source_filename()
)
break
except Exception:
self.warn(
"Disabling plug-in %r due to an exception:" % (plugin._coverage_plugin_name)
)
traceback.print_exc()
plugin._coverage_enabled = False
continue
else:
# No plugin wanted it: it's Python.
disp.trace = True
disp.source_filename = canonical
if not disp.has_dynamic_filename:
if not disp.source_filename:
raise CoverageException(
"Plugin %r didn't set source_filename for %r" %
(plugin, disp.original_filename)
)
reason = self.check_include_omit_etc(disp.source_filename, frame)
if reason:
nope(disp, reason)
return disp
def check_include_omit_etc(self, filename, frame):
"""Check a file name against the include, omit, etc, rules.
Returns a string or None. String means, don't trace, and is the reason
why. None means no reason found to not trace.
"""
modulename = name_for_module(filename, frame)
# If the user specified source or include, then that's authoritative
# about the outer bound of what to measure and we don't have to apply
# any canned exclusions. If they didn't, then we have to exclude the
# stdlib and coverage.py directories.
if self.source_match or self.source_pkgs_match:
extra = ""
ok = False
if self.source_pkgs_match:
if self.source_pkgs_match.match(modulename):
ok = True
if modulename in self.source_pkgs_unmatched:
self.source_pkgs_unmatched.remove(modulename)
else:
extra = "module {!r} ".format(modulename)
if not ok and self.source_match:
if self.source_match.match(filename):
ok = True
if not ok:
return extra + "falls outside the --source spec"
elif self.include_match:
if not self.include_match.match(filename):
return "falls outside the --include trees"
else:
# If we aren't supposed to trace installed code, then check if this
# is near the Python standard library and skip it if so.
if self.pylib_match and self.pylib_match.match(filename):
return "is in the stdlib"
# We exclude the coverage.py code itself, since a little of it
# will be measured otherwise.
if self.cover_match and self.cover_match.match(filename):
return "is part of coverage.py"
# Check the file against the omit pattern.
if self.omit_match and self.omit_match.match(filename):
return "is inside an --omit pattern"
# No point tracing a file we can't later write to SQLite.
try:
filename.encode("utf8")
except UnicodeEncodeError:
return "non-encodable filename"
# No reason found to skip this file.
return None
def warn_conflicting_settings(self):
"""Warn if there are settings that conflict."""
if self.include:
if self.source or self.source_pkgs:
self.warn("--include is ignored because --source is set", slug="include-ignored")
def warn_already_imported_files(self):
"""Warn if files have already been imported that we will be measuring."""
if self.include or self.source or self.source_pkgs:
warned = set()
for mod in list(sys.modules.values()):
filename = getattr(mod, "__file__", None)
if filename is None:
continue
if filename in warned:
continue
disp = self.should_trace(filename)
if disp.trace:
msg = "Already imported a file that will be measured: {}".format(filename)
self.warn(msg, slug="already-imported")
warned.add(filename)
def warn_unimported_source(self):
"""Warn about source packages that were of interest, but never traced."""
for pkg in self.source_pkgs_unmatched:
self._warn_about_unmeasured_code(pkg)
def _warn_about_unmeasured_code(self, pkg):
"""Warn about a package or module that we never traced.
`pkg` is a string, the name of the package or module.
"""
mod = sys.modules.get(pkg)
if mod is None:
self.warn("Module %s was never imported." % pkg, slug="module-not-imported")
return
if module_is_namespace(mod):
# A namespace package. It's OK for this not to have been traced,
# since there is no code directly in it.
return
if not module_has_file(mod):
self.warn("Module %s has no Python source." % pkg, slug="module-not-python")
return
# The module was in sys.modules, and seems like a module with code, but
# we never measured it. I guess that means it was imported before
# coverage even started.
self.warn(
"Module %s was previously imported, but not measured" % pkg,
slug="module-not-measured",
)
def find_possibly_unexecuted_files(self):
"""Find files in the areas of interest that might be untraced.
Yields pairs: file path, and responsible plug-in name.
"""
for pkg in self.source_pkgs:
            if pkg not in sys.modules or not module_has_file(sys.modules[pkg]):
continue
pkg_file = source_for_file(sys.modules[pkg].__file__)
for ret in self._find_executable_files(canonical_path(pkg_file)):
yield ret
for src in self.source:
for ret in self._find_executable_files(src):
yield ret
def _find_plugin_files(self, src_dir):
"""Get executable files from the plugins."""
for plugin in self.plugins.file_tracers:
for x_file in plugin.find_executable_files(src_dir):
yield x_file, plugin._coverage_plugin_name
def _find_executable_files(self, src_dir):
"""Find executable files in `src_dir`.
Search for files in `src_dir` that can be executed because they
are probably importable. Don't include ones that have been omitted
by the configuration.
Yield the file path, and the plugin name that handles the file.
"""
py_files = ((py_file, None) for py_file in find_python_files(src_dir))
plugin_files = self._find_plugin_files(src_dir)
for file_path, plugin_name in itertools.chain(py_files, plugin_files):
file_path = canonical_filename(file_path)
if self.omit_match and self.omit_match.match(file_path):
# Turns out this file was omitted, so don't pull it back
# in as unexecuted.
continue
yield file_path, plugin_name
def sys_info(self):
"""Our information for Coverage.sys_info.
Returns a list of (key, value) pairs.
"""
info = [
('cover_paths', self.cover_paths),
('pylib_paths', self.pylib_paths),
]
matcher_names = [
'source_match', 'source_pkgs_match',
'include_match', 'omit_match',
'cover_match', 'pylib_match',
]
for matcher_name in matcher_names:
matcher = getattr(self, matcher_name)
if matcher:
matcher_info = matcher.info()
else:
matcher_info = '-none-'
info.append((matcher_name, matcher_info))
return info
|
from urllib.parse import urlparse
from django.contrib.auth.models import AnonymousUser
from django.db import models
from django.http.request import HttpRequest
from post_office import mail
from post_office.models import EmailTemplate
from shop.conf import app_settings
from shop.models.order import BaseOrder
from shop.models.notification import Notification
from shop.serializers.delivery import DeliverySerializer
from shop.serializers.order import OrderDetailSerializer
from shop.signals import email_queued
class EmulateHttpRequest(HttpRequest):
"""
    Use this class to emulate an HttpRequest object when templates must be rendered
    asynchronously, for instance when an email must be generated from an Order object.
"""
def __init__(self, customer, stored_request):
super().__init__()
parsedurl = urlparse(stored_request.get('absolute_base_uri'))
self.path = self.path_info = parsedurl.path
self.environ = {}
self.META['PATH_INFO'] = parsedurl.path
self.META['SCRIPT_NAME'] = ''
self.META['HTTP_HOST'] = parsedurl.netloc
self.META['HTTP_X_FORWARDED_PROTO'] = parsedurl.scheme
self.META['QUERY_STRING'] = parsedurl.query
self.META['HTTP_USER_AGENT'] = stored_request.get('user_agent')
self.META['REMOTE_ADDR'] = stored_request.get('remote_ip')
self.method = 'GET'
self.LANGUAGE_CODE = self.COOKIES['django_language'] = stored_request.get('language')
self.customer = customer
self.user = customer.is_anonymous and AnonymousUser or customer.user
self.current_page = None
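
# Editorial note (not part of django-SHOP): the keys that EmulateHttpRequest
# reads from `stored_request` above, with invented placeholder values.
_EXAMPLE_STORED_REQUEST = {
    'absolute_base_uri': 'https://shop.example.com/checkout/',
    'user_agent': 'Mozilla/5.0',
    'remote_ip': '203.0.113.7',
    'language': 'en',
}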
def transition_change_notification(order):
"""
    This function shall be called after an Order object has performed a transition change.
"""
if not isinstance(order, BaseOrder):
raise TypeError("Object order must inherit from class BaseOrder")
emails_in_queue = False
for notification in Notification.objects.filter(transition_target=order.status):
recipient = notification.get_recipient(order)
if recipient is None:
continue
        # emulate a request object which behaves similarly to the one used when the customer submitted the order
emulated_request = EmulateHttpRequest(order.customer, order.stored_request)
customer_serializer = app_settings.CUSTOMER_SERIALIZER(order.customer)
render_context = {'request': emulated_request, 'render_label': 'email'}
order_serializer = OrderDetailSerializer(order, context=render_context)
language = order.stored_request.get('language')
context = {
'customer': customer_serializer.data,
'order': order_serializer.data,
'ABSOLUTE_BASE_URI': emulated_request.build_absolute_uri().rstrip('/'),
'render_language': language,
}
try:
latest_delivery = order.delivery_set.latest()
context['latest_delivery'] = DeliverySerializer(latest_delivery, context=render_context).data
except (AttributeError, models.ObjectDoesNotExist):
pass
try:
template = notification.mail_template.translated_templates.get(language=language)
except EmailTemplate.DoesNotExist:
template = notification.mail_template
attachments = {}
for notiatt in notification.notificationattachment_set.all():
attachments[notiatt.attachment.original_filename] = notiatt.attachment.file.file
mail.send(recipient, template=template, context=context,
attachments=attachments, render_on_delivery=True)
emails_in_queue = True
if emails_in_queue:
email_queued()
|
import os
class Etree(object):
"""Etree wrapper using lxml.etree or standard xml.etree"""
def __init__(self):
"""Create the wrapper"""
from xml.etree import ElementTree as _py_etree
self._py_etree = _py_etree
try:
from lxml import etree as _lxml_etree
self._lxml_etree = _lxml_etree
except ImportError:
self._lxml_etree = None
if os.getenv('NO_LXML', None):
self._etree = self._py_etree
else:
self._etree = self._lxml_etree or self._py_etree
self.lxml = self._etree is self._lxml_etree
def __getattribute__(self, attr):
"""Retrieve attr from current active etree implementation"""
if (attr not in object.__getattribute__(self, '__dict__')
and attr not in Etree.__dict__):
return object.__getattribute__(self._etree, attr)
return object.__getattribute__(self, attr)
def to_lxml(self):
"""Force lxml.etree to be used"""
self._etree = self._lxml_etree
self.lxml = True
def to_etree(self):
"""Force xml.etree to be used"""
self._etree = self._py_etree
self.lxml = False
etree = Etree()
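
# Editorial sketch: typical use of the module-level singleton. Whichever
# backend ended up selected, the ElementTree-compatible API is reachable
# directly on `etree` through the __getattribute__ delegation above.
def _etree_roundtrip_example():
    root = etree.fromstring('<root><child/></root>')
    assert etree.tostring(root).startswith(b'<root>')
    return etree.lxml  # True when lxml is installed and NO_LXML is unset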
|
from __future__ import division
from math import exp
import warnings
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import axes3d
import numpy as np
from numpy.linalg import inv
import scipy.stats
from scipy.spatial.distance import mahalanobis as scipy_mahalanobis
from filterpy.stats import norm_cdf, multivariate_gaussian, logpdf, mahalanobis
ITERS = 10000
def test_mahalanobis():
global a, b, S
# int test
a, b, S = 3, 1, 2
assert abs(mahalanobis(a, b, S) - scipy_mahalanobis(a, b, 1/S)) < 1.e-12
# int list
assert abs(mahalanobis([a], [b], [S]) - scipy_mahalanobis(a, b, 1/S)) < 1.e-12
assert abs(mahalanobis([a], b, S) - scipy_mahalanobis(a, b, 1/S)) < 1.e-12
# float
a, b, S = 3.123, 3.235235, .01234
assert abs(mahalanobis(a, b, S) - scipy_mahalanobis(a, b, 1/S)) < 1.e-12
assert abs(mahalanobis([a], [b], [S]) - scipy_mahalanobis(a, b, 1/S)) < 1.e-12
assert abs(mahalanobis([a], b, S) - scipy_mahalanobis(a, b, 1/S)) < 1.e-12
#float array
assert abs(mahalanobis(np.array([a]), b, S) - scipy_mahalanobis(a, b, 1/S)) < 1.e-12
#1d array
a = np.array([1., 2.])
b = np.array([1.4, 1.2])
S = np.array([[1., 2.], [2., 4.001]])
assert abs(mahalanobis(a, b, S) - scipy_mahalanobis(a, b, inv(S))) < 1.e-12
#2d array
a = np.array([[1., 2.]])
b = np.array([[1.4, 1.2]])
S = np.array([[1., 2.], [2., 4.001]])
assert abs(mahalanobis(a, b, S) - scipy_mahalanobis(a, b, inv(S))) < 1.e-12
assert abs(mahalanobis(a.T, b, S) - scipy_mahalanobis(a, b, inv(S))) < 1.e-12
assert abs(mahalanobis(a, b.T, S) - scipy_mahalanobis(a, b, inv(S))) < 1.e-12
assert abs(mahalanobis(a.T, b.T, S) - scipy_mahalanobis(a, b, inv(S))) < 1.e-12
    try:
        # mismatched shapes
        mahalanobis([1], b, S)
    except ValueError:
        pass
    except Exception as exc:
        raise AssertionError("raised exception other than ValueError") from exc
    else:
        raise AssertionError("didn't catch vectors of different lengths")
# okay, now check for numerical accuracy
for _ in range(ITERS):
N = np.random.randint(1, 20)
a = np.random.randn(N)
b = np.random.randn(N)
S = np.random.randn(N, N)
S = np.dot(S, S.T) #ensure positive semi-definite
assert abs(mahalanobis(a, b, S) - scipy_mahalanobis(a, b, inv(S))) < 1.e-12
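
# Editorial sketch: for scalars the Mahalanobis distance reduces to
# |a - b| / sqrt(S), which is what the integer and float cases above exercise.
def _scalar_mahalanobis_example():
    a, b, S = 3.0, 1.0, 2.0
    expected = abs(a - b) / np.sqrt(S)
    assert abs(mahalanobis(a, b, S) - expected) < 1.e-12
    return expected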
def test_multivariate_gaussian():
with warnings.catch_warnings():
warnings.simplefilter("ignore")
# test that we treat lists and arrays the same
mean= (0, 0)
cov=[[1, .5], [.5, 1]]
a = [[multivariate_gaussian((i, j), mean, cov)
for i in (-1, 0, 1)]
for j in (-1, 0, 1)]
b = [[multivariate_gaussian((i, j), mean, np.asarray(cov))
for i in (-1, 0, 1)]
for j in (-1, 0, 1)]
assert np.allclose(a, b)
a = [[multivariate_gaussian((i, j), np.asarray(mean), cov)
for i in (-1, 0, 1)]
for j in (-1, 0, 1)]
assert np.allclose(a, b)
try:
multivariate_gaussian(1, 1, -1)
except:
pass
else:
assert False, "negative variances are meaningless"
# test that we get the same results as scipy.stats.multivariate_normal
xs = np.random.randn(1000)
mean = np.random.randn(1000)
var = np.random.random(1000) * 5
for x, m, v in zip(xs, mean, var):
assert abs(multivariate_gaussian(x, m, v) - scipy.stats.multivariate_normal(m, v).pdf(x)) < 1.e-12
def _is_inside_ellipse(x, y, ex, ey, orientation, width, height):
co = np.cos(orientation)
so = np.sin(orientation)
xx = x*co + y*so
yy = y*co - x*so
return (xx / width)**2 + (yy / height)**2 <= 1.
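
# Editorial sanity check for the helper above: with zero orientation a point
# at (width, 0) lies exactly on the boundary and counts as inside, while a
# point just past it does not. The numbers are arbitrary.
def _ellipse_boundary_example():
    assert _is_inside_ellipse(4.0, 0.0, 0, 0, orientation=0.0, width=4.0, height=2.0)
    assert not _is_inside_ellipse(4.1, 0.0, 0, 0, orientation=0.0, width=4.0, height=2.0)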
def do_plot_test():
import matplotlib.pyplot as plt
from numpy.random import multivariate_normal as mnormal
from filterpy.stats import covariance_ellipse, plot_covariance
p = np.array([[32, 15], [15., 40.]])
x, y = mnormal(mean=(0, 0), cov=p, size=5000).T
sd = 2
a, w, h = covariance_ellipse(p, sd)
print(np.degrees(a), w, h)
count = 0
color = []
for i in range(len(x)):
if _is_inside_ellipse(x[i], y[i], 0, 0, a, w, h):
color.append('b')
count += 1
else:
color.append('r')
plt.scatter(x, y, alpha=0.2, c=color)
plt.axis('equal')
plot_covariance(mean=(0., 0.),
cov=p,
std=[1,2,3],
alpha=0.3,
facecolor='none')
print(count / len(x))
def test_norm_cdf():
# test using the 68-95-99.7 rule
mu = 5
std = 3
var = std*std
std_1 = (norm_cdf((mu-std, mu+std), mu, var))
assert abs(std_1 - .6827) < .0001
std_1 = (norm_cdf((mu+std, mu-std), mu, std=std))
assert abs(std_1 - .6827) < .0001
std_1half = (norm_cdf((mu+std, mu), mu, var))
assert abs(std_1half - .6827/2) < .0001
std_2 = (norm_cdf((mu-2*std, mu+2*std), mu, var))
assert abs(std_2 - .9545) < .0001
std_3 = (norm_cdf((mu-3*std, mu+3*std), mu, var))
assert abs(std_3 - .9973) < .0001
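def _norm_cdf_rule_sketch():
    # Illustrative sketch, not part of the original test suite: the 68-95-99.7
    # constants asserted above can be reproduced with scipy.stats.norm.
    from scipy.stats import norm
    assert abs((norm.cdf(1) - norm.cdf(-1)) - .6827) < .0001
    assert abs((norm.cdf(2) - norm.cdf(-2)) - .9545) < .0001
    assert abs((norm.cdf(3) - norm.cdf(-3)) - .9973) < .0001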
def test_logpdf():
assert 3.9 < exp(logpdf(1, 1, .01)) < 4.
assert 3.9 < exp(logpdf([1], [1], .01)) < 4.
assert 3.9 < exp(logpdf([[1]], [[1]], .01)) < 4.
logpdf([1., 2], [1.1, 2], cov=np.array([[1., 2], [2, 5]]), allow_singular=False)
logpdf([1., 2], [1.1, 2], cov=np.array([[1., 2], [2, 5]]), allow_singular=True)
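def _logpdf_peak_sketch():
    # Illustrative sketch, not part of the original test suite: the 3.9..4.0
    # bounds above are the Gaussian peak height 1/sqrt(2*pi*var) for var=0.01,
    # which is roughly 3.989.
    assert 3.9 < 1.0 / np.sqrt(2 * np.pi * 0.01) < 4.0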
def covariance_3d_plot_test():
import matplotlib.pyplot as plt
from filterpy.stats import plot_3d_covariance
mu = [13456.3,2320,672.5]
C = np.array([[1.0, .03, .2],
[.03, 4.0, .0],
[.2, .0, 16.1]])
sample = np.random.multivariate_normal(mu, C, size=1000)
fig = plt.gcf()
ax = fig.add_subplot(111, projection='3d')
ax.scatter(xs=sample[:, 0], ys=sample[:, 1], zs=sample[:, 2], s=1)
plot_3d_covariance(mu, C, alpha=.4, std=3, limit_xyz=True, ax=ax)
if __name__ == "__main__":
    import matplotlib.pyplot as plt
    covariance_3d_plot_test()
    plt.figure()
    do_plot_test()
|
import configparser
import os
import sys
from gi.repository import GLib, GObject, Gtk, Pango
# This file started off as a Python translation of:
# * gedit/gedit/gedit-history-entry.c
# * libgnomeui/libgnomeui/gnome-file-entry.c
# roughly based on Colin Walters' Python translation of msgarea.py from Hotwire
MIN_ITEM_LEN = 3
HISTORY_ENTRY_HISTORY_LENGTH_DEFAULT = 10
def _remove_item(store, text):
if text is None:
return False
for row in store:
if row[1] == text:
store.remove(row.iter)
return True
return False
def _clamp_list_store(liststore, max_items):
try:
# -1 because TreePath counts from 0
it = liststore.get_iter(max_items - 1)
except ValueError:
return
valid = True
while valid:
valid = liststore.remove(it)
class HistoryCombo(Gtk.ComboBox):
__gtype_name__ = "HistoryCombo"
history_id = GObject.Property(
type=str,
nick="History ID",
blurb="Identifier associated with entry's history store",
default=None,
flags=GObject.ParamFlags.READWRITE,
)
history_length = GObject.Property(
type=int,
nick="History length",
blurb="Number of history items to display in the combo",
minimum=1, maximum=20,
default=HISTORY_ENTRY_HISTORY_LENGTH_DEFAULT,
)
def __init__(self, **kwargs):
super().__init__(**kwargs)
if sys.platform == "win32":
pref_dir = os.path.join(os.getenv("APPDATA"), "Meld")
else:
pref_dir = os.path.join(GLib.get_user_config_dir(), "meld")
if not os.path.exists(pref_dir):
os.makedirs(pref_dir)
self.history_file = os.path.join(pref_dir, "history.ini")
self.config = configparser.RawConfigParser()
if os.path.exists(self.history_file):
self.config.read(self.history_file, encoding='utf8')
self.set_model(Gtk.ListStore(str, str))
rentext = Gtk.CellRendererText()
rentext.props.width_chars = 60
rentext.props.ellipsize = Pango.EllipsizeMode.END
self.pack_start(rentext, True)
self.add_attribute(rentext, 'text', 0)
self.connect('notify::history-id',
lambda *args: self._load_history())
self.connect('notify::history-length',
lambda *args: self._load_history())
def prepend_history(self, text):
self._insert_history_item(text, True)
def append_history(self, text):
self._insert_history_item(text, False)
def clear(self):
self.get_model().clear()
self._save_history()
def _insert_history_item(self, text, prepend):
if not text or len(text) <= MIN_ITEM_LEN:
return
store = self.get_model()
if not _remove_item(store, text):
_clamp_list_store(store, self.props.history_length - 1)
row = (text.splitlines()[0], text)
if prepend:
store.insert(0, row)
else:
store.append(row)
self._save_history()
def _load_history(self):
section_key = self.props.history_id
if section_key is None or not self.config.has_section(section_key):
return
store = self.get_model()
store.clear()
messages = sorted(self.config.items(section_key))
for key, message in messages[:self.props.history_length - 1]:
message = message.encode('utf8')
message = message.decode('unicode-escape')
firstline = message.splitlines()[0]
store.append((firstline, message))
def _save_history(self):
section_key = self.props.history_id
if section_key is None:
return
self.config.remove_section(section_key)
self.config.add_section(section_key)
for i, row in enumerate(self.get_model()):
# This dance is to avoid newline, etc. issues in the ini file
message = row[1].encode('unicode-escape')
message = message.decode('utf8')
self.config.set(section_key, "item%d" % i, message)
with open(self.history_file, 'w', encoding='utf8') as f:
self.config.write(f)
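# Illustrative sketch, not part of the original module: _save_history() and
# _load_history() round-trip multi-line entries through unicode-escape so that
# each history item stays on a single line in history.ini, e.g.
#   'foo\nbar'.encode('unicode-escape').decode('utf8')  -> 'foo\\nbar'
#   'foo\\nbar'.encode('utf8').decode('unicode-escape') -> 'foo\nbar'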
|
import requests
import re
from requests.exceptions import ConnectionError
try:
from requests.exceptions import ReadTimeout as ReadTimeoutError
except ImportError:
try:
from requests.packages.urllib3.exceptions import ReadTimeoutError
except ImportError:
class ReadTimeoutError(Exception):
pass
# status codes that indicate request success
OK_CODES = [requests.codes.OK]
class RequestError(Exception):
"""Catch-all exception class for various connection and ACD server errors."""
class CODE(object):
CONN_EXCEPTION = 1000
FAILED_SUBREQUEST = 1002
INCOMPLETE_RESULT = 1003
REFRESH_FAILED = 1004
INVALID_TOKEN = 1005
codes = requests.codes
def __init__(self, status_code: int, msg: str):
self.status_code = status_code
if msg:
self.msg = msg
else:
self.msg = '[acd_api] no body received.'
def __str__(self):
return 'RequestError: ' + str(self.status_code) + ', ' + self.msg
def catch_conn_exception(func):
"""Request connection exception decorator
:raises RequestError"""
def decorated(*args, **kwargs):
try:
return func(*args, **kwargs)
except (ConnectionError, ReadTimeoutError) as e:
raise RequestError(RequestError.CODE.CONN_EXCEPTION, e.__str__())
return decorated
def is_valid_id(id: str) -> bool:
return bool(id) and len(id) == 22 and re.match('^[a-zA-Z0-9_-]*$', id)
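# Illustrative sketch, not part of the original module: catch_conn_exception
# converts low-level connection failures into a RequestError carrying the
# CONN_EXCEPTION status code.
def _conn_exception_sketch():
    @catch_conn_exception
    def flaky_request():
        raise ConnectionError('simulated connection failure')
    try:
        flaky_request()
        assert False, 'expected a RequestError'
    except RequestError as e:
        assert e.status_code == RequestError.CODE.CONN_EXCEPTION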
|
from datetime import timedelta
import logging
from xml.parsers.expat import ExpatError
import async_timeout
import voluptuous as vol
import xmltodict
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_API_KEY,
CONF_MONITORED_VARIABLES,
CONF_NAME,
DATA_GIGABYTES,
HTTP_OK,
PERCENTAGE,
)
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "Start.ca"
CONF_TOTAL_BANDWIDTH = "total_bandwidth"
MIN_TIME_BETWEEN_UPDATES = timedelta(hours=1)
REQUEST_TIMEOUT = 5 # seconds
SENSOR_TYPES = {
"usage": ["Usage Ratio", PERCENTAGE, "mdi:percent"],
"usage_gb": ["Usage", DATA_GIGABYTES, "mdi:download"],
"limit": ["Data limit", DATA_GIGABYTES, "mdi:download"],
"used_download": ["Used Download", DATA_GIGABYTES, "mdi:download"],
"used_upload": ["Used Upload", DATA_GIGABYTES, "mdi:upload"],
"used_total": ["Used Total", DATA_GIGABYTES, "mdi:download"],
"grace_download": ["Grace Download", DATA_GIGABYTES, "mdi:download"],
"grace_upload": ["Grace Upload", DATA_GIGABYTES, "mdi:upload"],
"grace_total": ["Grace Total", DATA_GIGABYTES, "mdi:download"],
"total_download": ["Total Download", DATA_GIGABYTES, "mdi:download"],
"total_upload": ["Total Upload", DATA_GIGABYTES, "mdi:download"],
"used_remaining": ["Remaining", DATA_GIGABYTES, "mdi:download"],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_MONITORED_VARIABLES): vol.All(
cv.ensure_list, [vol.In(SENSOR_TYPES)]
),
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_TOTAL_BANDWIDTH): cv.positive_int,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the sensor platform."""
websession = async_get_clientsession(hass)
apikey = config.get(CONF_API_KEY)
bandwidthcap = config.get(CONF_TOTAL_BANDWIDTH)
ts_data = StartcaData(hass.loop, websession, apikey, bandwidthcap)
ret = await ts_data.async_update()
if ret is False:
_LOGGER.error("Invalid Start.ca API key: %s", apikey)
return
name = config.get(CONF_NAME)
sensors = []
for variable in config[CONF_MONITORED_VARIABLES]:
sensors.append(StartcaSensor(ts_data, variable, name))
async_add_entities(sensors, True)
class StartcaSensor(Entity):
"""Representation of Start.ca Bandwidth sensor."""
def __init__(self, startcadata, sensor_type, name):
"""Initialize the sensor."""
self.client_name = name
self.type = sensor_type
self._name = SENSOR_TYPES[sensor_type][0]
self._unit_of_measurement = SENSOR_TYPES[sensor_type][1]
self._icon = SENSOR_TYPES[sensor_type][2]
self.startcadata = startcadata
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return f"{self.client_name} {self._name}"
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return self._icon
async def async_update(self):
"""Get the latest data from Start.ca and update the state."""
await self.startcadata.async_update()
if self.type in self.startcadata.data:
self._state = round(self.startcadata.data[self.type], 2)
class StartcaData:
"""Get data from Start.ca API."""
def __init__(self, loop, websession, api_key, bandwidth_cap):
"""Initialize the data object."""
self.loop = loop
self.websession = websession
self.api_key = api_key
self.bandwidth_cap = bandwidth_cap
# Set unlimited users to infinite, otherwise the cap.
self.data = (
{"limit": self.bandwidth_cap}
if self.bandwidth_cap > 0
else {"limit": float("inf")}
)
@staticmethod
def bytes_to_gb(value):
"""Convert from bytes to GB.
:param value: The value in bytes to convert to GB.
:return: Converted GB value
"""
return float(value) * 10 ** -9
@Throttle(MIN_TIME_BETWEEN_UPDATES)
async def async_update(self):
"""Get the Start.ca bandwidth data from the web service."""
_LOGGER.debug("Updating Start.ca usage data")
url = f"https://www.start.ca/support/usage/api?key={self.api_key}"
with async_timeout.timeout(REQUEST_TIMEOUT):
req = await self.websession.get(url)
if req.status != HTTP_OK:
_LOGGER.error("Request failed with status: %u", req.status)
return False
data = await req.text()
try:
xml_data = xmltodict.parse(data)
except ExpatError:
return False
used_dl = self.bytes_to_gb(xml_data["usage"]["used"]["download"])
used_ul = self.bytes_to_gb(xml_data["usage"]["used"]["upload"])
grace_dl = self.bytes_to_gb(xml_data["usage"]["grace"]["download"])
grace_ul = self.bytes_to_gb(xml_data["usage"]["grace"]["upload"])
total_dl = self.bytes_to_gb(xml_data["usage"]["total"]["download"])
total_ul = self.bytes_to_gb(xml_data["usage"]["total"]["upload"])
limit = self.data["limit"]
if self.bandwidth_cap > 0:
self.data["usage"] = 100 * used_dl / self.bandwidth_cap
else:
self.data["usage"] = 0
self.data["usage_gb"] = used_dl
self.data["used_download"] = used_dl
self.data["used_upload"] = used_ul
self.data["used_total"] = used_dl + used_ul
self.data["grace_download"] = grace_dl
self.data["grace_upload"] = grace_ul
self.data["grace_total"] = grace_dl + grace_ul
self.data["total_download"] = total_dl
self.data["total_upload"] = total_ul
self.data["used_remaining"] = limit - used_dl
return True
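# Illustrative sketch (an assumption inferred from the keys accessed above, not
# an official Start.ca schema): async_update() expects a payload shaped like
#
#   <usage>
#     <used><download>...</download><upload>...</upload></used>
#     <grace><download>...</download><upload>...</upload></grace>
#     <total><download>...</download><upload>...</upload></total>
#   </usage>
#
# with all values given in bytes, e.g.
#   xmltodict.parse(data)["usage"]["used"]["download"] -> '123456789'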
|
from decimal import Decimal
import pygal
from pygal.graph.map import BaseMap
from pygal.util import cut
def get_data(i):
"""Return sample test data for an index"""
return [[(-1, 1), (2, 0), (0, 4)], [(0, 1), (None, 2), (3, 2)],
[(-3, 3), (1, 3), (1, 1)], [(1, 1), (Decimal('1.'), 1),
(1, 1)], [(3, 2), (2, 1), (1., 1)]][i]
def adapt(chart, data):
"""Adapt data to chart type"""
if isinstance(chart, pygal.XY):
return data
data = cut(data)
if isinstance(chart, BaseMap):
return list(
map(lambda x: chart.__class__.x_labels[
int(x) % len(chart.__class__.x_labels)]
if x is not None else None, data))
return data
def make_data(chart, datas):
"""Add sample data to the test chart"""
for i, data in enumerate(datas):
chart.add(data[0], adapt(chart, data[1]), secondary=bool(i % 2))
return chart
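def _make_data_sketch():
    # Illustrative sketch, not part of the original helpers: how get_data,
    # adapt and make_data might be combined for a non-XY chart.
    chart = pygal.Line()
    chart.x_labels = ['a', 'b', 'c']
    make_data(chart, [('first', get_data(0)), ('second', get_data(1))])
    return chart.render()  # SVG output (bytes by default)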
|
import numpy as np
import os.path as op
from io import BytesIO
from ...annotations import Annotations
from .res4 import _read_res4
from .info import _convert_time
def _get_markers(fname):
def consume(fid, predicate): # just a consumer to move around conveniently
        while predicate(fid.readline()):
pass
def parse_marker(string): # XXX: there should be a nicer way to do that
data = np.genfromtxt(
BytesIO(string.encode()), dtype=[('trial', int), ('sync', float)])
return int(data['trial']), float(data['sync'])
markers = dict()
with open(fname) as fid:
consume(fid, lambda l: not l.startswith('NUMBER OF MARKERS:'))
num_of_markers = int(fid.readline())
for _ in range(num_of_markers):
consume(fid, lambda l: not l.startswith('NAME:'))
label = fid.readline().strip('\n')
consume(fid, lambda l: not l.startswith('NUMBER OF SAMPLES:'))
n_markers = int(fid.readline())
consume(fid, lambda l: not l.startswith('LIST OF SAMPLES:'))
next(fid) # skip the samples header
markers[label] = [
parse_marker(next(fid)) for _ in range(n_markers)
]
return markers
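# Illustrative sketch (an assumption inferred from the parser above, not a full
# CTF specification): _get_markers() expects MarkerFile.mrk to look roughly
# like
#
#   NUMBER OF MARKERS:
#   1
#   ...
#   NAME:
#   event_1
#   ...
#   NUMBER OF SAMPLES:
#   2
#   ...
#   LIST OF SAMPLES:
#   TRIAL NUMBER   TIME FROM SYNC POINT (in seconds)
#   0   1.500
#   1   0.250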
def _get_res4_info_needed_by_markers(directory):
"""Get required information from CTF res4 information file."""
# we only need a few values from res4. Maybe we can read them directly
# instead of parsing the entire res4 file.
res4 = _read_res4(directory)
total_offset_duration = res4['pre_trig_pts'] / res4['sfreq']
trial_duration = res4['nsamp'] / res4['sfreq']
meas_date = (_convert_time(res4['data_date'],
res4['data_time']), 0)
return total_offset_duration, trial_duration, meas_date
def _read_annotations_ctf(directory):
total_offset, trial_duration, meas_date \
= _get_res4_info_needed_by_markers(directory)
return _read_annotations_ctf_call(directory, total_offset, trial_duration,
meas_date)
def _read_annotations_ctf_call(directory, total_offset, trial_duration,
meas_date):
fname = op.join(directory, 'MarkerFile.mrk')
if not op.exists(fname):
return Annotations(list(), list(), list(), orig_time=meas_date)
else:
markers = _get_markers(fname)
onset = [synctime + (trialnum * trial_duration) + total_offset
for _, m in markers.items() for (trialnum, synctime) in m]
description = np.concatenate([
np.repeat(label, len(m)) for label, m in markers.items()
])
return Annotations(onset=onset, duration=np.zeros_like(onset),
description=description, orig_time=meas_date)
|
from copy import deepcopy
from pygal import formatters
from pygal.interpolate import INTERPOLATIONS
from pygal.style import DefaultStyle, Style
CONFIG_ITEMS = []
callable = type(lambda: 1)
class Key(object):
"""
Represents a config parameter.
    A config parameter has a name, a default value, a type,
    a category, a documentation string, an optional longer documentation
    and an optional subtype for list-style options.
    Most of this information is used in cabaret to auto-generate
    forms representing these options.
"""
_categories = []
def __init__(
self, default_value, type_, category, doc, subdoc="", subtype=None
):
"""Create a configuration key"""
self.value = default_value
self.type = type_
self.doc = doc
self.category = category
self.subdoc = subdoc
self.subtype = subtype
self.name = "Unbound"
if category not in self._categories:
self._categories.append(category)
CONFIG_ITEMS.append(self)
def __repr__(self):
"""
Make a documentation repr.
This is a hack to generate doc from inner doc
"""
return """
Type: %s%s
Default: %r
%s%s
""" % (
self.type.__name__, (' of %s' % self.subtype.__name__)
if self.subtype else '', self.value, self.doc,
(' %s' % self.subdoc) if self.subdoc else ''
)
@property
def is_boolean(self):
"""Return `True` if this parameter is a boolean"""
return self.type == bool
@property
def is_numeric(self):
"""Return `True` if this parameter is numeric (int or float)"""
return self.type in (int, float)
@property
def is_string(self):
"""Return `True` if this parameter is a string"""
return self.type == str
@property
def is_dict(self):
"""Return `True` if this parameter is a mapping"""
return self.type == dict
@property
def is_list(self):
"""Return `True` if this parameter is a list"""
return self.type == list
def coerce(self, value):
"""Cast a string into this key type"""
if self.type == Style:
return value
elif self.type == list:
return self.type(
map(self.subtype, map(lambda x: x.strip(), value.split(',')))
)
elif self.type == dict:
rv = {}
for pair in value.split(','):
key, val = pair.split(':')
key = key.strip()
val = val.strip()
try:
rv[key] = self.subtype(val)
except Exception:
rv[key] = val
return rv
return self.type(value)
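# Illustrative sketch, not part of pygal: how Key.coerce() casts the string
# values typically coming from a web form.
#   list key with subtype=int:    coerce("1, 2, 3")    -> [1, 2, 3]
#   dict key with subtype=float:  coerce("a: 1, b: x") -> {'a': 1.0, 'b': 'x'}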
class MetaConfig(type):
"""Config metaclass. Used to get the key name and set it on the value."""
def __new__(mcs, classname, bases, classdict):
"""Get the name of the key and set it on the key"""
for k, v in classdict.items():
if isinstance(v, Key):
v.name = k
return type.__new__(mcs, classname, bases, classdict)
class BaseConfig(MetaConfig('ConfigBase', (object, ), {})):
"""
    This class holds the common methods for configs.
    A config object can be instantiated with keyword arguments and
    updated on call with keyword arguments.
"""
def __init__(self, **kwargs):
"""Can be instanciated with config kwargs"""
for k in dir(self):
v = getattr(self, k)
if (k not in self.__dict__ and not k.startswith('_')
and not hasattr(v, '__call__')):
if isinstance(v, Key):
if v.is_list and v.value is not None:
v = list(v.value)
else:
v = v.value
setattr(self, k, v)
self._update(kwargs)
def __call__(self, **kwargs):
"""Can be updated with kwargs"""
self._update(kwargs)
def _update(self, kwargs):
"""Update the config with the given dictionary"""
from pygal.util import merge
dir_self_set = set(dir(self))
merge(
self.__dict__,
dict([(k, v) for (k, v) in kwargs.items()
if not k.startswith('_') and k in dir_self_set])
)
def to_dict(self):
"""Export a JSON serializable dictionary of the config"""
config = {}
for attr in dir(self):
if not attr.startswith('__'):
value = getattr(self, attr)
if hasattr(value, 'to_dict'):
config[attr] = value.to_dict()
elif not hasattr(value, '__call__'):
config[attr] = value
return config
def copy(self):
"""Copy this config object into another"""
return deepcopy(self)
class CommonConfig(BaseConfig):
"""Class holding options used in both chart and serie configuration"""
stroke = Key(
True, bool, "Look", "Line dots (set it to false to get a scatter plot)"
)
show_dots = Key(True, bool, "Look", "Set to false to remove dots")
show_only_major_dots = Key(
False, bool, "Look",
"Set to true to show only major dots according to their majored label"
)
dots_size = Key(2.5, float, "Look", "Radius of the dots")
fill = Key(False, bool, "Look", "Fill areas under lines")
stroke_style = Key(
None, dict, "Look", "Stroke style of serie element.",
"This is a dict which can contain a "
"'width', 'linejoin', 'linecap', 'dasharray' "
"and 'dashoffset'"
)
rounded_bars = Key(
None, int, "Look",
"Set this to the desired radius in px (for Bar-like charts)"
)
inner_radius = Key(
0, float, "Look", "Piechart inner radius (donut), must be <.9"
)
allow_interruptions = Key(
False, bool, "Look", "Break lines on None values"
)
formatter = Key(
None, callable, "Value",
"A function to convert raw value to strings for this chart or serie",
"Default to value_formatter in most charts, it depends on dual charts."
"(Can be overriden by value with the formatter metadata.)"
)
class Config(CommonConfig):
"""Class holding config values"""
style = Key(
DefaultStyle, Style, "Style", "Style holding values injected in css"
)
css = Key(
('file://style.css', 'file://graph.css'), list, "Style",
"List of css file",
"It can be any uri from file:///tmp/style.css to //domain/style.css",
str
)
classes = Key(('pygal-chart', ), list, "Style",
"Classes of the root svg node", str)
defs = Key([], list, "Misc", "Extraneous defs to be inserted in svg",
"Useful for adding gradients / patterns…", str)
# Look #
title = Key(
None, str, "Look", "Graph title.", "Leave it to None to disable title."
)
x_title = Key(
None, str, "Look", "Graph X-Axis title.",
"Leave it to None to disable X-Axis title."
)
y_title = Key(
None, str, "Look", "Graph Y-Axis title.",
"Leave it to None to disable Y-Axis title."
)
width = Key(800, int, "Look", "Graph width")
height = Key(600, int, "Look", "Graph height")
show_x_guides = Key(
False, bool, "Look", "Set to true to always show x guide lines"
)
show_y_guides = Key(
True, bool, "Look", "Set to false to hide y guide lines"
)
show_legend = Key(True, bool, "Look", "Set to false to remove legend")
legend_at_bottom = Key(
False, bool, "Look", "Set to true to position legend at bottom"
)
legend_at_bottom_columns = Key(
None, int, "Look", "Set to true to position legend at bottom"
)
legend_box_size = Key(12, int, "Look", "Size of legend boxes")
rounded_bars = Key(
None, int, "Look", "Set this to the desired radius in px"
)
stack_from_top = Key(
False, bool, "Look", "Stack from top to zero, this makes the stacked "
"data match the legend order"
)
spacing = Key(10, int, "Look", "Space between titles/legend/axes")
margin = Key(20, int, "Look", "Margin around chart")
margin_top = Key(None, int, "Look", "Margin around top of chart")
margin_right = Key(None, int, "Look", "Margin around right of chart")
margin_bottom = Key(None, int, "Look", "Margin around bottom of chart")
margin_left = Key(None, int, "Look", "Margin around left of chart")
tooltip_border_radius = Key(0, int, "Look", "Tooltip border radius")
tooltip_fancy_mode = Key(
True, bool, "Look", "Fancy tooltips",
"Print legend, x label in tooltip and use serie color for value."
)
inner_radius = Key(
0, float, "Look", "Piechart inner radius (donut), must be <.9"
)
half_pie = Key(False, bool, "Look", "Create a half-pie chart")
x_labels = Key(
None, list, "Label", "X labels, must have same len than data.",
"Leave it to None to disable x labels display.", str
)
x_labels_major = Key(
None,
list,
"Label",
"X labels that will be marked major.",
subtype=str
)
x_labels_major_every = Key(
None, int, "Label", "Mark every n-th x label as major."
)
x_labels_major_count = Key(
None, int, "Label", "Mark n evenly distributed labels as major."
)
show_x_labels = Key(True, bool, "Label", "Set to false to hide x-labels")
show_minor_x_labels = Key(
True, bool, "Label", "Set to false to hide x-labels not marked major"
)
y_labels = Key(
None, list, "Label", "You can specify explicit y labels",
"Must be a list of numbers", float
)
y_labels_major = Key(
None,
list,
"Label",
"Y labels that will be marked major. Default: auto",
subtype=str
)
y_labels_major_every = Key(
None, int, "Label", "Mark every n-th y label as major."
)
y_labels_major_count = Key(
None, int, "Label", "Mark n evenly distributed y labels as major."
)
show_minor_y_labels = Key(
True, bool, "Label", "Set to false to hide y-labels not marked major"
)
show_y_labels = Key(True, bool, "Label", "Set to false to hide y-labels")
x_label_rotation = Key(
0, int, "Label", "Specify x labels rotation angles", "in degrees"
)
y_label_rotation = Key(
0, int, "Label", "Specify y labels rotation angles", "in degrees"
)
missing_value_fill_truncation = Key(
"x", str, "Look",
"Filled series with missing x and/or y values at the end of a series "
"are closed at the first value with a missing "
"'x' (default), 'y' or 'either'"
)
# Value #
x_value_formatter = Key(
formatters.default, callable, "Value",
"A function to convert abscissa numeric value to strings "
"(used in XY and Date charts)"
)
value_formatter = Key(
formatters.default, callable, "Value",
"A function to convert ordinate numeric value to strings"
)
logarithmic = Key(
False, bool, "Value", "Display values in logarithmic scale"
)
interpolate = Key(
None, str, "Value", "Interpolation",
"May be %s" % ' or '.join(INTERPOLATIONS)
)
interpolation_precision = Key(
250, int, "Value", "Number of interpolated points between two values"
)
interpolation_parameters = Key(
{}, dict, "Value", "Various parameters for parametric interpolations",
"ie: For hermite interpolation, you can set the cardinal tension with"
"{'type': 'cardinal', 'c': .5}", int
)
box_mode = Key(
'extremes', str, "Value", "Sets the mode to be used. "
"(Currently only supported on box plot)", "May be %s" %
' or '.join(["1.5IQR", "extremes", "tukey", "stdev", "pstdev"])
)
order_min = Key(
None, int, "Value", "Minimum order of scale, defaults to None"
)
min_scale = Key(
4, int, "Value", "Minimum number of scale graduation for auto scaling"
)
max_scale = Key(
16, int, "Value", "Maximum number of scale graduation for auto scaling"
)
range = Key(
None, list, "Value", "Explicitly specify min and max of values",
"(ie: (0, 100))", int
)
secondary_range = Key(
None, list, "Value",
"Explicitly specify min and max of secondary values", "(ie: (0, 100))",
int
)
xrange = Key(
None, list, "Value", "Explicitly specify min and max of x values "
"(used in XY and Date charts)", "(ie: (0, 100))", int
)
include_x_axis = Key(False, bool, "Value", "Always include x axis")
zero = Key(
0, int, "Value", "Set the ordinate zero value",
"Useful for filling to another base than abscissa"
)
# Text #
no_data_text = Key(
"No data", str, "Text", "Text to display when no data is given"
)
print_values = Key(False, bool, "Text", "Display values as text over plot")
dynamic_print_values = Key(
False, bool, "Text", "Show values only on hover"
)
print_values_position = Key(
'center', str, "Text", "Customize position of `print_values`. "
"(For bars: `top`, `center` or `bottom`)"
)
print_zeroes = Key(True, bool, "Text", "Display zero values as well")
print_labels = Key(False, bool, "Text", "Display value labels")
truncate_legend = Key(
None, int, "Text", "Legend string length truncation threshold",
"None = auto, Negative for none"
)
truncate_label = Key(
None, int, "Text", "Label string length truncation threshold",
"None = auto, Negative for none"
)
# Misc #
js = Key(('//kozea.github.io/pygal.js/2.0.x/pygal-tooltips.min.js', ),
list, "Misc", "List of js file",
"It can be any uri from file:///tmp/ext.js to //domain/ext.js",
str)
disable_xml_declaration = Key(
False, bool, "Misc",
"Don't write xml declaration and return str instead of string",
"useful for writing output directly in html"
)
force_uri_protocol = Key(
'https', str, "Misc", "Default uri protocol",
"Default protocol for external files. "
"Can be set to None to use a // uri"
)
explicit_size = Key(
False, bool, "Misc", "Write width and height attributes"
)
pretty_print = Key(False, bool, "Misc", "Pretty print the svg")
strict = Key(
False, bool, "Misc", "If True don't try to adapt / filter wrong values"
)
no_prefix = Key(False, bool, "Misc", "Don't prefix css")
inverse_y_axis = Key(False, bool, "Misc", "Inverse Y axis direction")
class SerieConfig(CommonConfig):
"""Class holding serie config values"""
title = Key(
None, str, "Look", "Serie title.", "Leave it to None to disable title."
)
secondary = Key(
False, bool, "Misc", "Set it to put the serie in a second axis"
)
|
from collections import namedtuple
from homeassistant.components import weather
from homeassistant.components.weather import (
ATTR_FORECAST,
ATTR_FORECAST_CONDITION,
ATTR_FORECAST_PRECIPITATION_PROBABILITY,
ATTR_FORECAST_TEMP,
ATTR_FORECAST_TEMP_LOW,
ATTR_FORECAST_TIME,
ATTR_FORECAST_WIND_BEARING,
ATTR_FORECAST_WIND_SPEED,
ATTR_WEATHER_HUMIDITY,
ATTR_WEATHER_PRESSURE,
ATTR_WEATHER_TEMPERATURE,
ATTR_WEATHER_WIND_BEARING,
ATTR_WEATHER_WIND_SPEED,
DOMAIN as WEATHER_DOMAIN,
)
from homeassistant.setup import async_setup_component
from homeassistant.util.dt import now
from tests.async_mock import patch
from tests.common import MockConfigEntry
TEST_CONFIG = {
"name": "HomeTown",
"latitude": "40.00",
"longitude": "-8.00",
"mode": "daily",
}
class MockLocation:
"""Mock Location from pyipma."""
async def observation(self, api):
"""Mock Observation."""
Observation = namedtuple(
"Observation",
[
"accumulated_precipitation",
"humidity",
"pressure",
"radiation",
"temperature",
"wind_direction",
"wind_intensity_km",
],
)
return Observation(0.0, 71.0, 1000.0, 0.0, 18.0, "NW", 3.94)
async def forecast(self, api):
"""Mock Forecast."""
Forecast = namedtuple(
"Forecast",
[
"feels_like_temperature",
"forecast_date",
"forecasted_hours",
"humidity",
"max_temperature",
"min_temperature",
"precipitation_probability",
"temperature",
"update_date",
"weather_type",
"wind_direction",
"wind_strength",
],
)
return [
Forecast(
None,
"2020-01-15T00:00:00",
24,
None,
16.2,
10.6,
"100.0",
13.4,
"2020-01-15T07:51:00",
9,
"S",
"10",
),
Forecast(
"7.7",
now().utcnow().strftime("%Y-%m-%dT%H:%M:%S"),
1,
"86.9",
None,
None,
"80.0",
10.6,
"2020-01-15T07:51:00",
10,
"S",
"32.7",
),
]
@property
def name(self):
"""Mock location."""
return "HomeTown"
@property
def station_latitude(self):
"""Mock latitude."""
return 0
@property
def global_id_local(self):
"""Mock global identifier of the location."""
return 1130600
@property
def id_station(self):
"""Mock identifier of the station."""
return 1200545
@property
def station_longitude(self):
"""Mock longitude."""
return 0
async def test_setup_configuration(hass):
"""Test for successfully setting up the IPMA platform."""
with patch(
"homeassistant.components.ipma.weather.async_get_location",
return_value=MockLocation(),
):
assert await async_setup_component(
hass,
weather.DOMAIN,
{"weather": {"name": "HomeTown", "platform": "ipma", "mode": "hourly"}},
)
await hass.async_block_till_done()
state = hass.states.get("weather.hometown")
assert state.state == "rainy"
data = state.attributes
assert data.get(ATTR_WEATHER_TEMPERATURE) == 18.0
assert data.get(ATTR_WEATHER_HUMIDITY) == 71
assert data.get(ATTR_WEATHER_PRESSURE) == 1000.0
assert data.get(ATTR_WEATHER_WIND_SPEED) == 3.94
assert data.get(ATTR_WEATHER_WIND_BEARING) == "NW"
assert state.attributes.get("friendly_name") == "HomeTown"
async def test_setup_config_flow(hass):
"""Test for successfully setting up the IPMA platform."""
with patch(
"homeassistant.components.ipma.weather.async_get_location",
return_value=MockLocation(),
):
entry = MockConfigEntry(domain="ipma", data=TEST_CONFIG)
await hass.config_entries.async_forward_entry_setup(entry, WEATHER_DOMAIN)
await hass.async_block_till_done()
state = hass.states.get("weather.hometown")
assert state.state == "rainy"
data = state.attributes
assert data.get(ATTR_WEATHER_TEMPERATURE) == 18.0
assert data.get(ATTR_WEATHER_HUMIDITY) == 71
assert data.get(ATTR_WEATHER_PRESSURE) == 1000.0
assert data.get(ATTR_WEATHER_WIND_SPEED) == 3.94
assert data.get(ATTR_WEATHER_WIND_BEARING) == "NW"
assert state.attributes.get("friendly_name") == "HomeTown"
async def test_daily_forecast(hass):
"""Test for successfully getting daily forecast."""
with patch(
"homeassistant.components.ipma.weather.async_get_location",
return_value=MockLocation(),
):
assert await async_setup_component(
hass,
weather.DOMAIN,
{"weather": {"name": "HomeTown", "platform": "ipma", "mode": "daily"}},
)
await hass.async_block_till_done()
state = hass.states.get("weather.hometown")
assert state.state == "rainy"
forecast = state.attributes.get(ATTR_FORECAST)[0]
assert forecast.get(ATTR_FORECAST_TIME) == "2020-01-15T00:00:00"
assert forecast.get(ATTR_FORECAST_CONDITION) == "rainy"
assert forecast.get(ATTR_FORECAST_TEMP) == 16.2
assert forecast.get(ATTR_FORECAST_TEMP_LOW) == 10.6
assert forecast.get(ATTR_FORECAST_PRECIPITATION_PROBABILITY) == "100.0"
assert forecast.get(ATTR_FORECAST_WIND_SPEED) == "10"
assert forecast.get(ATTR_FORECAST_WIND_BEARING) == "S"
async def test_hourly_forecast(hass):
"""Test for successfully getting daily forecast."""
with patch(
"homeassistant.components.ipma.weather.async_get_location",
return_value=MockLocation(),
):
assert await async_setup_component(
hass,
weather.DOMAIN,
{"weather": {"name": "HomeTown", "platform": "ipma", "mode": "hourly"}},
)
await hass.async_block_till_done()
state = hass.states.get("weather.hometown")
assert state.state == "rainy"
forecast = state.attributes.get(ATTR_FORECAST)[0]
assert forecast.get(ATTR_FORECAST_CONDITION) == "rainy"
assert forecast.get(ATTR_FORECAST_TEMP) == 7.7
assert forecast.get(ATTR_FORECAST_PRECIPITATION_PROBABILITY) == 80.0
assert forecast.get(ATTR_FORECAST_WIND_SPEED) == "32.7"
assert forecast.get(ATTR_FORECAST_WIND_BEARING) == "S"
|
import pandas as pd
import pytest
import pytz
from qstrader.asset.universe.static import StaticUniverse
@pytest.mark.parametrize(
'assets,dt,expected',
[
(
['EQ:SPY', 'EQ:AGG'],
pd.Timestamp('2019-01-01 15:00:00', tz=pytz.utc),
['EQ:SPY', 'EQ:AGG']
),
(
['EQ:GLD', 'EQ:GSG', 'EQ:TLT'],
pd.Timestamp('2020-05-01 15:00:00', tz=pytz.utc),
['EQ:GLD', 'EQ:GSG', 'EQ:TLT']
)
]
)
def test_static_universe(assets, dt, expected):
"""
Checks that the StaticUniverse correctly returns the
list of assets for a particular datetime.
"""
universe = StaticUniverse(assets)
assert universe.get_assets(dt) == expected
|
import asyncio
import datetime
import logging
import aiohttp
import async_timeout
from homeassistant.components.camera import SUPPORT_STREAM, Camera
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.util.dt as dt_util
from .const import (
DOMAIN,
DOOR_STATION,
DOOR_STATION_EVENT_ENTITY_IDS,
DOOR_STATION_INFO,
)
from .entity import DoorBirdEntity
_LAST_VISITOR_INTERVAL = datetime.timedelta(minutes=2)
_LAST_MOTION_INTERVAL = datetime.timedelta(seconds=30)
_LIVE_INTERVAL = datetime.timedelta(seconds=45)
_LOGGER = logging.getLogger(__name__)
_TIMEOUT = 15 # seconds
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the DoorBird camera platform."""
config_entry_id = config_entry.entry_id
config_data = hass.data[DOMAIN][config_entry_id]
doorstation = config_data[DOOR_STATION]
doorstation_info = config_data[DOOR_STATION_INFO]
device = doorstation.device
async_add_entities(
[
DoorBirdCamera(
doorstation,
doorstation_info,
device.live_image_url,
"live",
f"{doorstation.name} Live",
doorstation.doorstation_events,
_LIVE_INTERVAL,
device.rtsp_live_video_url,
),
DoorBirdCamera(
doorstation,
doorstation_info,
device.history_image_url(1, "doorbell"),
"last_ring",
f"{doorstation.name} Last Ring",
[],
_LAST_VISITOR_INTERVAL,
),
DoorBirdCamera(
doorstation,
doorstation_info,
device.history_image_url(1, "motionsensor"),
"last_motion",
f"{doorstation.name} Last Motion",
[],
_LAST_MOTION_INTERVAL,
),
]
)
class DoorBirdCamera(DoorBirdEntity, Camera):
"""The camera on a DoorBird device."""
def __init__(
self,
doorstation,
doorstation_info,
url,
camera_id,
name,
doorstation_events,
interval=None,
stream_url=None,
):
"""Initialize the camera on a DoorBird device."""
super().__init__(doorstation, doorstation_info)
self._url = url
self._stream_url = stream_url
self._name = name
self._last_image = None
self._supported_features = SUPPORT_STREAM if self._stream_url else 0
        self._interval = interval or datetime.timedelta()
self._last_update = datetime.datetime.min
self._unique_id = f"{self._mac_addr}_{camera_id}"
self._doorstation_events = doorstation_events
async def stream_source(self):
"""Return the stream source."""
return self._stream_url
@property
def unique_id(self):
"""Camera Unique id."""
return self._unique_id
@property
def supported_features(self):
"""Return supported features."""
return self._supported_features
@property
def name(self):
"""Get the name of the camera."""
return self._name
async def async_camera_image(self):
"""Pull a still image from the camera."""
now = dt_util.utcnow()
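        # Serve the cached frame while the per-camera refresh interval has not
        # elapsed, so Home Assistant does not hammer the DoorBird image URLs.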
if self._last_image and now - self._last_update < self._interval:
return self._last_image
try:
websession = async_get_clientsession(self.hass)
with async_timeout.timeout(_TIMEOUT):
response = await websession.get(self._url)
self._last_image = await response.read()
self._last_update = now
return self._last_image
except asyncio.TimeoutError:
_LOGGER.error("DoorBird %s: Camera image timed out", self._name)
return self._last_image
except aiohttp.ClientError as error:
_LOGGER.error(
"DoorBird %s: Error getting camera image: %s", self._name, error
)
return self._last_image
async def async_added_to_hass(self):
"""Add callback after being added to hass.
Registers entity_id map for the logbook
"""
event_to_entity_id = self.hass.data[DOMAIN].setdefault(
DOOR_STATION_EVENT_ENTITY_IDS, {}
)
for event in self._doorstation_events:
event_to_entity_id[event] = self.entity_id
    async def async_will_remove_from_hass(self):
"""Unregister entity_id map for the logbook."""
event_to_entity_id = self.hass.data[DOMAIN][DOOR_STATION_EVENT_ENTITY_IDS]
for event in self._doorstation_events:
if event in event_to_entity_id:
del event_to_entity_id[event]
|
import spacy
from scattertext import SampleCorpora, word_similarity_explorer
from scattertext.CorpusFromPandas import CorpusFromPandas
def main():
nlp = spacy.load('en')
convention_df = SampleCorpora.ConventionData2012.get_data()
corpus = CorpusFromPandas(convention_df,
category_col='party',
text_col='text',
nlp=nlp).build()
html = word_similarity_explorer(corpus,
category='democrat',
category_name='Democratic',
not_category_name='Republican',
target_term='jobs',
minimum_term_frequency=5,
width_in_pixels=1000,
metadata=convention_df['speaker'],
alpha=0.01,
max_p_val=0.1,
save_svg_button=True)
    with open('./demo_similarity.html', 'wb') as out:
        out.write(html.encode('utf-8'))
    print('Open ./demo_similarity.html in Chrome or Firefox.')
if __name__ == '__main__':
main()
|
import unittest
import pandas as pd
import numpy as np
from pgmpy.estimators import ExhaustiveSearch, BDeuScore, BicScore
class TestBaseEstimator(unittest.TestCase):
def setUp(self):
self.rand_data = pd.DataFrame(
np.random.randint(0, 5, size=(5000, 2)), columns=list("AB")
)
self.rand_data["C"] = self.rand_data["B"]
self.est_rand = ExhaustiveSearch(self.rand_data)
self.est_rand_bdeu = ExhaustiveSearch(
self.rand_data, scoring_method=BDeuScore(self.rand_data)
)
self.est_rand_bic = ExhaustiveSearch(
self.rand_data, scoring_method=BicScore(self.rand_data)
)
# link to dataset: "https://www.kaggle.com/c/titanic/download/train.csv"
self.titanic_data = pd.read_csv(
"pgmpy/tests/test_estimators/testdata/titanic_train.csv"
)
self.titanic_data2 = self.titanic_data[["Survived", "Sex", "Pclass"]]
self.est_titanic = ExhaustiveSearch(self.titanic_data2)
def test_all_dags(self):
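        # 543 is the number of possible DAGs on four labelled nodes
        # (and 29281 on five, see the commented-out check below).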
self.assertEqual(len(list(self.est_rand.all_dags(["A", "B", "C", "D"]))), 543)
# self.assertEqual(len(list(self.est_rand.all_dags(nodes=range(5)))), 29281) # takes ~30s
abc_dags = set(
map(tuple, [sorted(dag.edges()) for dag in self.est_rand.all_dags()])
)
abc_dags_ref = set(
[
(("A", "B"), ("C", "A"), ("C", "B")),
(("A", "C"), ("B", "C")),
(("B", "A"), ("B", "C")),
(("C", "B"),),
(("A", "C"), ("B", "A")),
(("B", "C"), ("C", "A")),
(("A", "B"), ("B", "C")),
(("A", "C"), ("B", "A"), ("B", "C")),
(("A", "B"),),
(("A", "B"), ("C", "A")),
(("B", "A"), ("C", "A"), ("C", "B")),
(("A", "C"), ("C", "B")),
(("A", "B"), ("A", "C"), ("C", "B")),
(("B", "A"), ("C", "B")),
(("A", "B"), ("A", "C")),
(("C", "A"), ("C", "B")),
(("A", "B"), ("A", "C"), ("B", "C")),
(("C", "A"),),
(("B", "A"), ("B", "C"), ("C", "A")),
(("B", "A"),),
(("A", "B"), ("C", "B")),
(),
(("B", "A"), ("C", "A")),
(("A", "C"),),
(("B", "C"),),
]
)
self.assertSetEqual(abc_dags, abc_dags_ref)
def test_estimate_rand(self):
est = self.est_rand.estimate()
self.assertSetEqual(set(est.nodes()), set(["A", "B", "C"]))
self.assertTrue(
list(est.edges()) == [("B", "C")] or list(est.edges()) == [("C", "B")]
)
        est_bdeu = self.est_rand_bdeu.estimate()
self.assertTrue(
list(est_bdeu.edges()) == [("B", "C")]
or list(est_bdeu.edges()) == [("C", "B")]
)
        est_bic = self.est_rand_bic.estimate()
self.assertTrue(
list(est_bic.edges()) == [("B", "C")]
or list(est_bic.edges()) == [("C", "B")]
)
def test_estimate_titanic(self):
e1 = self.est_titanic.estimate()
self.assertSetEqual(
set(e1.edges()),
set([("Survived", "Pclass"), ("Sex", "Pclass"), ("Sex", "Survived")]),
)
def test_all_scores(self):
scores = self.est_titanic.all_scores()
scores_ref = [
(-2072.9132364404695, []),
(-2069.071694164769, [("Pclass", "Sex")]),
(-2069.0144197068785, [("Sex", "Pclass")]),
(-2025.869489762676, [("Survived", "Pclass")]),
(-2025.8559302273054, [("Pclass", "Survived")]),
(-2022.0279474869753, [("Pclass", "Sex"), ("Survived", "Pclass")]),
(-2022.0143879516047, [("Pclass", "Sex"), ("Pclass", "Survived")]),
(-2021.9571134937144, [("Pclass", "Survived"), ("Sex", "Pclass")]),
(-2017.5258065853768, [("Sex", "Pclass"), ("Survived", "Pclass")]),
(-1941.3075053892837, [("Survived", "Sex")]),
(-1941.2720031713893, [("Sex", "Survived")]),
(-1937.4304608956886, [("Pclass", "Sex"), ("Sex", "Survived")]),
(-1937.4086886556927, [("Sex", "Pclass"), ("Survived", "Sex")]),
(-1937.3731864377983, [("Sex", "Pclass"), ("Sex", "Survived")]),
(-1934.1344850608882, [("Pclass", "Sex"), ("Survived", "Sex")]),
(-1894.2637587114903, [("Survived", "Pclass"), ("Survived", "Sex")]),
(-1894.2501991761198, [("Pclass", "Survived"), ("Survived", "Sex")]),
(-1894.2282564935958, [("Sex", "Survived"), ("Survived", "Pclass")]),
(-1891.0630673606006, [("Pclass", "Survived"), ("Sex", "Survived")]),
(
-1887.2215250849,
[("Pclass", "Sex"), ("Pclass", "Survived"), ("Sex", "Survived")],
),
(
-1887.1642506270096,
[("Pclass", "Survived"), ("Sex", "Pclass"), ("Sex", "Survived")],
),
(
-1887.0907383830947,
[("Pclass", "Sex"), ("Survived", "Pclass"), ("Survived", "Sex")],
),
(
-1887.0771788477243,
[("Pclass", "Sex"), ("Pclass", "Survived"), ("Survived", "Sex")],
),
(
-1885.9200755341915,
[("Sex", "Pclass"), ("Survived", "Pclass"), ("Survived", "Sex")],
),
(
-1885.884573316297,
[("Sex", "Pclass"), ("Sex", "Survived"), ("Survived", "Pclass")],
),
]
self.assertEqual(
[sorted(model.edges()) for score, model in scores],
[edges for score, edges in scores_ref],
)
        # use assertAlmostEqual pointwise to avoid rounding issues
        # (map() is lazy in Python 3, so iterate explicitly to run the checks)
        for (score, _), (ref_score, _) in zip(scores, scores_ref):
            self.assertAlmostEqual(score, ref_score)
def tearDown(self):
del self.rand_data
del self.est_rand
del self.est_rand_bdeu
del self.est_rand_bic
del self.titanic_data
del self.est_titanic
|
from django.contrib.auth import get_user_model
from django.forms import widgets, Media
from django.forms.utils import ErrorDict
from django.utils.translation import gettext_lazy as _
from djng.forms import fields
from sass_processor.processor import sass_processor
from shop.forms.base import DialogForm, DialogModelForm, UniqueEmailValidationMixin
from shop.forms.widgets import CheckboxInput, RadioSelect, Select
from shop.models.address import ShippingAddressModel, BillingAddressModel
from shop.models.customer import CustomerModel
from shop.modifiers.pool import cart_modifiers_pool
class CustomerForm(DialogModelForm):
scope_prefix = 'customer'
legend = _("Customer's Details")
email = fields.EmailField(label=_("Email address"))
first_name = fields.CharField(label=_("First Name"))
last_name = fields.CharField(label=_("Last Name"))
class Meta:
model = CustomerModel
exclude = ['user', 'recognized', 'number', 'last_access']
custom_fields = ['email', 'first_name', 'last_name']
def __init__(self, initial=None, instance=None, *args, **kwargs):
initial = dict(initial) if initial else {}
assert instance is not None
initial.update(dict((f, getattr(instance, f)) for f in self.Meta.custom_fields))
super().__init__(initial=initial, instance=instance, *args, **kwargs)
@property
def media(self):
return Media(css={'all': [sass_processor('shop/css/customer.scss')]})
def save(self, commit=True):
for f in self.Meta.custom_fields:
setattr(self.instance, f, self.cleaned_data[f])
return super().save(commit)
@classmethod
def form_factory(cls, request, data, cart):
customer_form = cls(data=data, instance=request.customer)
if customer_form.is_valid():
customer_form.instance.recognize_as_registered(request, commit=False)
customer_form.save()
return customer_form
class GuestForm(UniqueEmailValidationMixin, DialogModelForm):
scope_prefix = 'guest'
form_name = 'customer_form' # Override form name to reuse template `customer-form.html`
legend = _("Customer's Email")
email = fields.EmailField(label=_("Email address"))
class Meta:
model = get_user_model() # since we only use the email field, use the User model directly
fields = ['email']
def __init__(self, initial=None, instance=None, *args, **kwargs):
if isinstance(instance, CustomerModel):
instance = instance.user
super().__init__(initial=initial, instance=instance, *args, **kwargs)
@classmethod
def form_factory(cls, request, data, cart):
customer_form = cls(data=data, instance=request.customer.user)
if customer_form.is_valid():
request.customer.recognize_as_guest(request, commit=False)
customer_form.save()
return customer_form
class AddressForm(DialogModelForm):
    # field to be superseded by a select widget
active_priority = fields.CharField(
required=False,
widget=widgets.HiddenInput(),
)
use_primary_address = fields.BooleanField(
label="use primary address", # label will be overridden by Shipping/Billing/AddressForm
required=False,
initial=True,
widget=CheckboxInput(),
)
plugin_fields = ['plugin_id', 'plugin_order', 'use_primary_address']
class Meta:
exclude = ['customer', 'priority']
def __init__(self, initial=None, instance=None, cart=None, *args, **kwargs):
self.cart = cart
self.multi_addr = kwargs.pop('multi_addr', False)
self.allow_use_primary = kwargs.pop('allow_use_primary', False)
self.populate_siblings_summary()
if instance:
initial = dict(initial or {}, active_priority=instance.priority)
if instance.address_type == 'shipping':
initial['use_primary_address'] = cart.shipping_address is None
else: # address_type == billing
initial['use_primary_address'] = cart.billing_address is None
super().__init__(initial=initial, instance=instance, *args, **kwargs)
@property
def media(self):
return Media(css={'all': [sass_processor('shop/css/address.scss')]})
@classmethod
def get_model(cls):
return cls.Meta.model
@classmethod
def form_factory(cls, request, data, cart):
"""
From the given request, update the database model.
If the form data is invalid, return an error dictionary to update the response.
"""
# search for the associated address DB instance or create a new one
current_address = cls.get_address(cart)
try:
active_priority = int(data.get('active_priority'))
except (ValueError, TypeError):
if data.get('use_primary_address'):
active_priority = 'nop'
else:
active_priority = data.get('active_priority', 'add')
active_address = cls.get_model().objects.get_fallback(customer=request.customer)
else:
filter_args = dict(customer=request.customer, priority=active_priority)
active_address = cls.get_model().objects.filter(**filter_args).first()
if active_priority == 'add':
# Add a newly filled address for the given customer
address_form = cls(data=data, cart=cart)
if address_form.is_valid():
# prevent adding the same address twice
all_field_names = [f.name for f in cls.get_model()._meta.get_fields()]
filter_args = dict((attr, val) for attr, val in address_form.data.items()
if attr in all_field_names and val)
filter_args.update(customer=request.customer)
try:
existing_address = cls.get_model().objects.get(**filter_args)
except cls.get_model().DoesNotExist:
next_address = address_form.save(commit=False)
if next_address:
next_address.customer = request.customer
next_address.priority = cls.get_model().objects.get_max_priority(request.customer) + 1
next_address.save()
address_form.data.update(active_priority=str(next_address.priority))
else:
address_form.data.update(active_priority='nop')
address_form.set_address(cart, next_address)
else:
address_form.set_address(cart, existing_address)
address_form.populate_siblings_summary()
elif active_address is None and not data.get('use_primary_address'):
# customer selected 'Add another address', hence create a new empty form
initial = dict((key, val) for key, val in data.items() if key in cls.plugin_fields)
address_form = cls(initial=initial)
address_form.data.update(address_form.get_initial_data())
address_form.data.update(active_priority='add')
elif current_address == active_address:
# an existing entity of AddressModel was edited
address_form = cls(data=data, instance=active_address, cart=cart)
if address_form.is_valid():
next_address = address_form.save()
address_form.set_address(cart, next_address)
else:
# an address with another priority was selected
initial = dict(data)
for attr in cls().get_initial_data().keys():
if hasattr(active_address, attr):
initial.update({attr: getattr(active_address, attr)})
initial.update(active_priority=str(active_address.priority))
address_form = cls(data=initial, instance=current_address, cart=cart)
address_form.set_address(cart, active_address)
return address_form
def populate_siblings_summary(self):
"""
Build a list of value-labels to populate the address choosing element
"""
self.siblings_summary = []
if self.cart is not None:
AddressModel = self.get_model()
addresses = AddressModel.objects.filter(customer=self.cart.customer).order_by('priority')
for number, addr in enumerate(addresses, 1):
self.siblings_summary.append({
'value': str(addr.priority),
'label': "{}. {}".format(number, addr.as_text().strip().replace('\n', ' – '))
})
def full_clean(self):
super().full_clean()
if self.is_bound and self['use_primary_address'].value():
# reset errors, since then the form is always regarded as valid
self._errors = ErrorDict()
def save(self, commit=True):
if not self['use_primary_address'].value():
return super().save(commit)
def get_response_data(self):
return dict(self.data, siblings_summary=self.siblings_summary)
def as_div(self):
# Intentionally rendered without field `use_primary_address`, this must be added
# on top of the form template manually
self.fields.pop('use_primary_address', None)
return super().as_div()
def as_text(self):
bound_field = self['use_primary_address']
if bound_field.value():
return bound_field.field.widget.choice_label
return super().as_text()
class ShippingAddressForm(AddressForm):
scope_prefix = 'shipping_address'
legend = _("Shipping Address")
class Meta(AddressForm.Meta):
model = ShippingAddressModel
widgets = {
'country': Select(attrs={'ng-change': 'updateSiblingAddress()'}),
}
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields['use_primary_address'].label = _("Use billing address for shipping")
self.fields['use_primary_address'].widget.choice_label = self.fields['use_primary_address'].label # Django < 1.11
@classmethod
def get_address(cls, cart):
return cart.shipping_address
def set_address(self, cart, instance):
cart.shipping_address = instance if not self['use_primary_address'].value() else None
class BillingAddressForm(AddressForm):
scope_prefix = 'billing_address'
legend = _("Billing Address")
class Meta(AddressForm.Meta):
model = BillingAddressModel
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields['use_primary_address'].label = _("Use shipping address for billing")
self.fields['use_primary_address'].widget.choice_label = self.fields['use_primary_address'].label # Django < 1.11
@classmethod
def get_address(cls, cart):
return cart.billing_address
def set_address(self, cart, instance):
cart.billing_address = instance if not self['use_primary_address'].value() else None
class PaymentMethodForm(DialogForm):
scope_prefix = 'payment_method'
payment_modifier = fields.ChoiceField(
label=_("Payment Method"),
widget=RadioSelect(attrs={'ng-change': 'updateMethod()'}),
)
def __init__(self, *args, **kwargs):
choices = [m.get_choice() for m in cart_modifiers_pool.get_payment_modifiers()
if not m.is_disabled(kwargs['cart'])]
self.base_fields['payment_modifier'].choices = choices
if len(choices) == 1:
            # if there is only one payment method available, always set it as default
try:
kwargs['initial']['payment_modifier'] = choices[0][0]
except KeyError:
pass
super().__init__(*args, **kwargs)
def has_choices(self):
return len(self.base_fields['payment_modifier'].choices) > 0
@classmethod
def form_factory(cls, request, data, cart):
cart.update(request)
payment_method_form = cls(data=data, cart=cart)
if payment_method_form.is_valid():
payment_data = data.get('payment_data') or {}
cart.extra.update(payment_method_form.cleaned_data, payment_extra_data=payment_data)
return payment_method_form
class ShippingMethodForm(DialogForm):
scope_prefix = 'shipping_method'
shipping_modifier = fields.ChoiceField(
label=_("Shipping Method"),
widget=RadioSelect(attrs={'ng-change': 'updateMethod()'}),
)
def __init__(self, *args, **kwargs):
choices = [m.get_choice() for m in cart_modifiers_pool.get_shipping_modifiers()
if not m.is_disabled(kwargs['cart'])]
self.base_fields['shipping_modifier'].choices = choices
if len(choices) == 1:
# with only one choice, initialize with it
try:
kwargs['initial']['shipping_modifier'] = choices[0][0]
except KeyError:
pass
super().__init__(*args, **kwargs)
def has_choices(self):
return len(self.base_fields['shipping_modifier'].choices) > 0
@classmethod
def form_factory(cls, request, data, cart):
cart.update(request)
shipping_method_form = cls(data=data, cart=cart)
if shipping_method_form.is_valid():
cart.extra.update(shipping_method_form.cleaned_data)
return shipping_method_form
class ExtraAnnotationForm(DialogForm):
scope_prefix = 'extra_annotation'
annotation = fields.CharField(
label=_("Extra annotation for this order"),
required=False,
widget=widgets.Textarea,
)
@classmethod
def form_factory(cls, request, data, cart):
extra_annotation_form = cls(data=data)
if extra_annotation_form.is_valid():
cart.extra.update(extra_annotation_form.cleaned_data)
return extra_annotation_form
class AcceptConditionForm(DialogForm):
scope_prefix = 'accept_condition'
accept = fields.BooleanField(
required=True,
widget=CheckboxInput(),
)
def __init__(self, data=None, initial=None, *args, **kwargs):
plugin_id = data and data.get('plugin_id') or initial and initial.get('plugin_id') or 'none'
scope_prefix = '{0}.plugin_{1}'.format(self.scope_prefix, plugin_id)
self.form_name = '{0}.plugin_{1}'.format(self.form_name, plugin_id)
super().__init__(data=data, initial=initial, scope_prefix=scope_prefix, *args, **kwargs)
@classmethod
def form_factory(cls, request, data, cart):
data = data or {'accept': False}
accept_form = cls(data=data)
return accept_form
|
import collections
from threading import Thread
import pyaudio
from . import snowboydetect
import time
import os
import logging
logging.basicConfig()
logger = logging.getLogger("kalliope")
TOP_DIR = os.path.dirname(os.path.abspath(__file__))
RESOURCE_FILE = os.path.join(TOP_DIR, "resources/common.res")
class SnowboyOpenAudioException(Exception):
pass
class RingBuffer(object):
"""Ring buffer to hold audio from PortAudio"""
def __init__(self, size = 4096):
self._buf = collections.deque(maxlen=size)
self.paused = False
def extend(self, data):
"""Adds data to the end of buffer"""
if not self.paused:
self._buf.extend(data)
def get(self):
"""Retrieves data from the beginning of buffer and clears it"""
tmp = bytes(bytearray(self._buf))
self._buf.clear()
return tmp
def pause(self):
self.paused = True
def unpause(self):
self.paused = False
class HotwordDetector(Thread):
"""
Snowboy decoder to detect whether a keyword specified by `decoder_model`
exists in a microphone input stream.
:param decoder_model: decoder model file path, a string or a list of strings
:param resource: resource file path.
    :param sensitivity: decoder sensitivity, a float or a list of floats.
                        The bigger the value, the more sensitive the
decoder. If an empty list is provided, then the
default sensitivity in the model will be used.
:param audio_gain: multiply input volume by this factor.
:param apply_frontend: applies the frontend processing algorithm if True.
"""
def __init__(self,
decoder_model,
resource=RESOURCE_FILE,
sensitivity=[],
audio_gain=1,
apply_frontend=False,
detected_callback=None,
interrupt_check=lambda: False):
super(HotwordDetector, self).__init__()
self.detected_callback = detected_callback
self.interrupt_check = interrupt_check
self.sleep_time = 0.03
self.kill_received = False
self.paused = False
def audio_callback(in_data, frame_count, time_info, status):
self.ring_buffer.extend(in_data)
play_data = chr(0) * len(in_data)
return play_data, pyaudio.paContinue
tm = type(decoder_model)
ts = type(sensitivity)
if tm is not list:
decoder_model = [decoder_model]
if ts is not list:
sensitivity = [sensitivity]
model_str = ",".join(decoder_model)
self.detector = snowboydetect.SnowboyDetect(
resource_filename=resource.encode(), model_str=model_str.encode())
self.detector.SetAudioGain(audio_gain)
self.detector.ApplyFrontend(apply_frontend)
self.num_hotwords = self.detector.NumHotwords()
        if len(sensitivity) > self.num_hotwords:  # more sensitivities than hotwords raises an AssertionError
            assert self.num_hotwords == len(sensitivity), \
                "number of hotwords in decoder_model (%d) and sensitivity " \
                "(%d) does not match" % (self.num_hotwords, len(sensitivity))
        if len(sensitivity) != self.num_hotwords:  # Some umdl models contain more than one keyword.
            sensitivity_match_hotwords = False  # If fewer sensitivities than hotwords were given, pad with a default of 0.5
while not sensitivity_match_hotwords:
sensitivity.append(0.5)
if len(sensitivity) == self.num_hotwords:
sensitivity_match_hotwords = True
if len(decoder_model) > 1 and len(sensitivity) == 1:
sensitivity = sensitivity*self.num_hotwords
sensitivity_str = ",".join([str(t) for t in sensitivity])
if len(sensitivity) != 0:
self.detector.SetSensitivity(sensitivity_str.encode())
self.ring_buffer = RingBuffer(
self.detector.NumChannels() * self.detector.SampleRate() * 5)
self.audio = pyaudio.PyAudio()
self.open_audio(audio_callback)
def open_audio(self, audio_callback, i=0):
try:
self.stream_in = self.audio.open(
input=True, output=False,
                format=self.audio.get_format_from_width(
                    self.detector.BitsPerSample() // 8),
channels=self.detector.NumChannels(),
rate=self.detector.SampleRate(),
frames_per_buffer=2048,
stream_callback=audio_callback)
except IOError as error:
logger.debug("IOError raised, i = %s (error: %s)"
% (i, repr(error)))
if i == 5:
# Let's give up...
                raise SnowboyOpenAudioException(
                    'Error while trying to open audio: %s' % repr(error))
i = i + 1
time.sleep(i)
self.open_audio(audio_callback, i)
def run(self):
"""
        Start the voice detector. Every `sleep_time` seconds it checks the
        audio buffer for triggering keywords. If one is detected, the
        corresponding function in `detected_callback` is called; this can be a
        single function (single model) or a list of callback functions
        (multiple models). Every loop it also calls `interrupt_check` -- if it
        returns True, the loop breaks and the method returns.
        `detected_callback`, `interrupt_check` and `sleep_time` are attributes
        set in the constructor rather than arguments to this method.
        :return: None
"""
if self.interrupt_check():
logger.debug("detect voice return")
return
tc = type(self.detected_callback)
if tc is not list:
self.detected_callback = [self.detected_callback]
if len(self.detected_callback) == 1 and self.num_hotwords > 1:
self.detected_callback *= self.num_hotwords
assert self.num_hotwords == len(self.detected_callback), \
"Error: hotwords in your models (%d) do not match the number of " \
"callbacks (%d)" % (self.num_hotwords, len(self.detected_callback))
logger.debug("detecting...")
while not self.kill_received:
if not self.paused:
if self.interrupt_check():
logger.debug("detect voice break")
break
data = self.ring_buffer.get()
if len(data) == 0:
time.sleep(self.sleep_time)
continue
ans = self.detector.RunDetection(data)
if ans == -1:
logger.warning("Error initializing streams or reading audio data")
elif ans > 0:
message = "Keyword %s detected" % ans
logger.debug(message)
callback = self.detected_callback[ans-1]
if callback is not None:
callback()
else:
# take a little break
                    time.sleep(self.sleep_time)
            else:
                # avoid a busy loop while the detector is paused
                time.sleep(self.sleep_time)
logger.debug("[Snowboy] process finished.")
def terminate(self):
"""
Terminate audio stream. Users cannot call start() again to detect.
:return: None
"""
self.stream_in.stop_stream()
self.stream_in.close()
self.audio.terminate()
logger.debug("[Snowboy] Audio stream cleaned.")
def pause(self):
self.paused = True
self.ring_buffer.pause()
def unpause(self):
self.paused = False
self.ring_buffer.unpause()
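# Illustrative usage sketch (not part of the original module). Assuming a
# personal model file such as "resources/kalliope.pmdl" exists and pyaudio can
# open the default input device, the detector is typically driven like this:
#
#     def on_hotword():
#         print("hotword detected")
#
#     detector = HotwordDetector("resources/kalliope.pmdl",
#                                sensitivity=0.5,
#                                detected_callback=on_hotword)
#     detector.daemon = True
#     detector.start()               # runs run() in a background thread
#     ...
#     detector.kill_received = True  # stop the detection loop
#     detector.terminate()           # close the audio stream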
|
from __future__ import division
import numpy as np
import PIL
import chainer
try:
import cv2
_cv2_available = True
except ImportError:
_cv2_available = False
def _rotate_cv2(img, angle, expand, fill, interpolation):
if interpolation == PIL.Image.NEAREST:
cv_interpolation = cv2.INTER_NEAREST
elif interpolation == PIL.Image.BILINEAR:
cv_interpolation = cv2.INTER_LINEAR
elif interpolation == PIL.Image.BICUBIC:
cv_interpolation = cv2.INTER_CUBIC
_, H, W = img.shape
affine_mat = cv2.getRotationMatrix2D((W / 2, H / 2), angle, 1)
# Logic borrowed from Pillow
if expand:
# calculate output size
yy = []
xx = []
for y, x in ((0, 0), (H, 0), (H, W), (0, W)):
yy.append(
affine_mat[1, 0] * x + affine_mat[1, 1] * y + affine_mat[1, 2])
xx.append(
affine_mat[0, 0] * x + affine_mat[0, 1] * y + affine_mat[0, 2])
out_H = int(np.ceil(max(yy)) - np.floor(min(yy)))
out_W = int(np.ceil(max(xx)) - np.floor(min(xx)))
affine_mat[1][2] += out_H / 2 - H / 2
affine_mat[0][2] += out_W / 2 - W / 2
else:
out_H = H
out_W = W
img = img.transpose((1, 2, 0))
img = cv2.warpAffine(
img, affine_mat, (out_W, out_H), flags=cv_interpolation,
borderValue=fill)
if img.ndim == 2:
img = img[:, :, None]
img = img.transpose((2, 0, 1))
return img
def _rotate_pil(img, angle, expand, fill, interpolation):
out = []
for ch in img:
ch = PIL.Image.fromarray(ch, mode='F')
out.append(np.array(
ch.rotate(
angle, expand=expand,
fillcolor=fill, resample=interpolation)))
out = np.stack(out)
if np.issubdtype(img.dtype, np.integer):
out = np.round(out)
return out.astype(img.dtype)
def rotate(img, angle, expand=True, fill=0, interpolation=PIL.Image.BILINEAR):
"""Rotate images by degrees.
The backend used by :func:`rotate` is configured by
:obj:`chainer.global_config.cv_rotate_backend`.
Two backends are supported: "cv2" and "PIL".
If this is :obj:`None`, "cv2" is used whenever "cv2" is installed,
and "PIL" is used when "cv2" is not installed.
Args:
        img (~numpy.ndarray): An array to be rotated. This is in
            CHW format.
        angle (float): Counter clock-wise rotation angle (degree).
        expand (bool): Whether the output shape is adapted or not.
            If :obj:`True`, the input image is contained completely in
            the output.
fill (float): The value used for pixels outside the boundaries.
interpolation (int): Determines sampling strategy. This is one of
:obj:`PIL.Image.NEAREST`, :obj:`PIL.Image.BILINEAR`,
:obj:`PIL.Image.BICUBIC`.
Bilinear interpolation is the default strategy.
Returns:
~numpy.ndarray:
returns an array :obj:`out_img` that is the result of rotation.
"""
if chainer.config.cv_rotate_backend is None:
if _cv2_available:
return _rotate_cv2(img, angle, expand, fill, interpolation)
else:
return _rotate_pil(img, angle, expand, fill, interpolation)
elif chainer.config.cv_rotate_backend == 'cv2':
if not _cv2_available:
raise ValueError('cv2 is not installed even though '
'chainer.config.cv_rotate_backend == \'cv2\'')
return _rotate_cv2(img, angle, expand, fill, interpolation)
elif chainer.config.cv_rotate_backend == 'PIL':
return _rotate_pil(img, angle, expand, fill, interpolation)
else:
raise ValueError('chainer.config.cv_rotate_backend should be '
'either "cv2" or "PIL".')
|
from typing import Any, Callable, Dict, List, Optional
import pyvera as veraApi
from homeassistant.components.scene import Scene
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity import Entity
from homeassistant.util import slugify
from .common import ControllerData, get_controller_data
from .const import VERA_ID_FORMAT
async def async_setup_entry(
hass: HomeAssistant,
entry: ConfigEntry,
async_add_entities: Callable[[List[Entity], bool], None],
) -> None:
"""Set up the sensor config entry."""
controller_data = get_controller_data(hass, entry)
async_add_entities(
[VeraScene(device, controller_data) for device in controller_data.scenes]
)
class VeraScene(Scene):
"""Representation of a Vera scene entity."""
def __init__(self, vera_scene: veraApi.VeraScene, controller_data: ControllerData):
"""Initialize the scene."""
self.vera_scene = vera_scene
self.controller = controller_data.controller
self._name = self.vera_scene.name
# Append device id to prevent name clashes in HA.
self.vera_id = VERA_ID_FORMAT.format(
slugify(vera_scene.name), vera_scene.scene_id
)
def update(self) -> None:
"""Update the scene status."""
self.vera_scene.refresh()
def activate(self, **kwargs: Any) -> None:
"""Activate the scene."""
self.vera_scene.activate()
@property
def name(self) -> str:
"""Return the name of the scene."""
return self._name
@property
def device_state_attributes(self) -> Optional[Dict[str, Any]]:
"""Return the state attributes of the scene."""
return {"vera_scene_id": self.vera_scene.vera_scene_id}
|
from datetime import datetime
from pytz import timezone
from homeassistant import data_entry_flow
from homeassistant.components.pvpc_hourly_pricing import ATTR_TARIFF, DOMAIN
from homeassistant.const import CONF_NAME
from homeassistant.helpers import entity_registry
from .conftest import check_valid_state
from tests.async_mock import patch
from tests.common import date_util
from tests.test_util.aiohttp import AiohttpClientMocker
async def test_config_flow(
hass, legacy_patchable_time, pvpc_aioclient_mock: AiohttpClientMocker
):
"""
Test config flow for pvpc_hourly_pricing.
- Create a new entry with tariff "normal"
- Check state and attributes
- Check abort when trying to config another with same tariff
- Check removal and add again to check state restoration
"""
hass.config.time_zone = timezone("Europe/Madrid")
mock_data = {"return_time": datetime(2019, 10, 26, 14, 0, tzinfo=date_util.UTC)}
def mock_now():
return mock_data["return_time"]
with patch("homeassistant.util.dt.utcnow", new=mock_now):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_NAME: "test", ATTR_TARIFF: "normal"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
await hass.async_block_till_done()
state = hass.states.get("sensor.test")
check_valid_state(state, tariff="normal")
assert pvpc_aioclient_mock.call_count == 1
# Check abort when configuring another with same tariff
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_NAME: "test", ATTR_TARIFF: "normal"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert pvpc_aioclient_mock.call_count == 1
# Check removal
registry = await entity_registry.async_get_registry(hass)
registry_entity = registry.async_get("sensor.test")
assert await hass.config_entries.async_remove(registry_entity.config_entry_id)
# and add it again with UI
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_NAME: "test", ATTR_TARIFF: "normal"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
await hass.async_block_till_done()
state = hass.states.get("sensor.test")
check_valid_state(state, tariff="normal")
assert pvpc_aioclient_mock.call_count == 2
|
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_SMOKE,
BinarySensorEntity,
)
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from . import (
CONF_OUTPUTS,
CONF_ZONE_NAME,
CONF_ZONE_TYPE,
CONF_ZONES,
DATA_SATEL,
SIGNAL_OUTPUTS_UPDATED,
SIGNAL_ZONES_UPDATED,
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Satel Integra binary sensor devices."""
if not discovery_info:
return
configured_zones = discovery_info[CONF_ZONES]
controller = hass.data[DATA_SATEL]
devices = []
for zone_num, device_config_data in configured_zones.items():
zone_type = device_config_data[CONF_ZONE_TYPE]
zone_name = device_config_data[CONF_ZONE_NAME]
device = SatelIntegraBinarySensor(
controller, zone_num, zone_name, zone_type, SIGNAL_ZONES_UPDATED
)
devices.append(device)
configured_outputs = discovery_info[CONF_OUTPUTS]
for zone_num, device_config_data in configured_outputs.items():
zone_type = device_config_data[CONF_ZONE_TYPE]
zone_name = device_config_data[CONF_ZONE_NAME]
device = SatelIntegraBinarySensor(
controller, zone_num, zone_name, zone_type, SIGNAL_OUTPUTS_UPDATED
)
devices.append(device)
async_add_entities(devices)
class SatelIntegraBinarySensor(BinarySensorEntity):
"""Representation of an Satel Integra binary sensor."""
def __init__(
self, controller, device_number, device_name, zone_type, react_to_signal
):
"""Initialize the binary_sensor."""
self._device_number = device_number
self._name = device_name
self._zone_type = zone_type
self._state = 0
self._react_to_signal = react_to_signal
self._satel = controller
async def async_added_to_hass(self):
"""Register callbacks."""
if self._react_to_signal == SIGNAL_OUTPUTS_UPDATED:
if self._device_number in self._satel.violated_outputs:
self._state = 1
else:
self._state = 0
else:
if self._device_number in self._satel.violated_zones:
self._state = 1
else:
self._state = 0
self.async_on_remove(
async_dispatcher_connect(
self.hass, self._react_to_signal, self._devices_updated
)
)
@property
def name(self):
"""Return the name of the entity."""
return self._name
@property
def icon(self):
"""Icon for device by its type."""
if self._zone_type == DEVICE_CLASS_SMOKE:
return "mdi:fire"
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def is_on(self):
"""Return true if sensor is on."""
return self._state == 1
@property
def device_class(self):
"""Return the class of this sensor, from DEVICE_CLASSES."""
return self._zone_type
@callback
def _devices_updated(self, zones):
"""Update the zone's state, if needed."""
if self._device_number in zones and self._state != zones[self._device_number]:
self._state = zones[self._device_number]
self.async_write_ha_state()
|
from homeassistant.components.binary_sensor import BinarySensorEntity
from homeassistant.const import CONF_NAME
from . import YetiEntity
from .const import BINARY_SENSOR_DICT, DATA_KEY_API, DATA_KEY_COORDINATOR, DOMAIN
PARALLEL_UPDATES = 0
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up the Goal Zero Yeti sensor."""
name = entry.data[CONF_NAME]
goalzero_data = hass.data[DOMAIN][entry.entry_id]
sensors = [
YetiBinarySensor(
goalzero_data[DATA_KEY_API],
goalzero_data[DATA_KEY_COORDINATOR],
name,
sensor_name,
entry.entry_id,
)
for sensor_name in BINARY_SENSOR_DICT
]
async_add_entities(sensors, True)
class YetiBinarySensor(YetiEntity, BinarySensorEntity):
"""Representation of a Goal Zero Yeti sensor."""
def __init__(self, api, coordinator, name, sensor_name, server_unique_id):
"""Initialize a Goal Zero Yeti sensor."""
super().__init__(api, coordinator, name, server_unique_id)
self._condition = sensor_name
variable_info = BINARY_SENSOR_DICT[sensor_name]
self._condition_name = variable_info[0]
self._icon = variable_info[2]
self._device_class = variable_info[1]
@property
def name(self):
"""Return the name of the sensor."""
return f"{self._name} {self._condition_name}"
@property
def unique_id(self):
"""Return the unique id of the sensor."""
return f"{self._server_unique_id}/{self._condition_name}"
@property
def is_on(self):
"""Return if the service is on."""
if self.api.data:
return self.api.data[self._condition] == 1
return False
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return self._icon
|
from absl import flags
from perfkitbenchmarker import linux_packages
from perfkitbenchmarker.linux_packages import speccpu
FLAGS = flags.FLAGS
flags.DEFINE_list('spec17_subset', ['intspeed', 'fpspeed', 'intrate', 'fprate'],
'Specify which speccpu2017 tests to run. Accepts a list of '
'benchmark suites (intspeed, fpspeed, intrate, fprate) '
'or individual benchmark names. Defaults to all suites.')
flags.DEFINE_integer('spec17_copies', None,
'Number of copies to run for rate tests. If not set '
'default to number of cpu cores using lscpu.')
flags.DEFINE_integer('spec17_threads', None,
'Number of threads to run for speed tests. If not set '
'default to number of cpu threads using lscpu.')
flags.DEFINE_boolean('spec17_fdo', False,
'Run with feedback directed optimization on peak. '
'Default to False.')
LLVM_TAR = 'clang+llvm-3.9.0-x86_64-linux-gnu-ubuntu-16.04.tar.xz'
LLVM_TAR_URL = 'https://releases.llvm.org/3.9.0/{0}'.format(LLVM_TAR)
OPENMP_TAR = 'libomp_20160808_oss.tgz'
OPENMP_TAR_URL = 'https://www.openmprtl.org/sites/default/files/{0}'.format(
OPENMP_TAR)
_PACKAGE_NAME = 'speccpu2017'
_MOUNT_DIR = 'cpu2017_mnt'
_SPECCPU2017_DIR = 'cpu2017'
_SPECCPU2017_TAR = 'speccpu2017.tgz'
_SPECCPU2017_ISO = 'cpu2017-1.1.0.iso'
_TAR_REQUIRED_MEMBERS = 'cpu2017', 'cpu2017/bin/runcpu'
_LOG_FORMAT = r'Est. (SPEC.*2017_.*_base)\s*(\S*)'
_DEFAULT_RUNSPEC_CONFIG = 'pkb-crosstool-llvm-linux-x86-fdo.cfg'
_DEFAULT_CLANG_FLAG = 'clang.xml'
PREPROVISIONED_DATA = {
_SPECCPU2017_TAR: None,
'cpu2017-gcc-x86.tgz': None, # x86-default
'cpu2017-optimized.tgz': None, # Optimized
'cpu2017-gcc-arm.tgz': None, # ARM-optimized
LLVM_TAR:
'e189a9e605ec035bfa1cfebf37374a92109b61291dc17c6f712398ecccb3498a',
OPENMP_TAR:
'a528f8949387ae8e2a05faa2f3a471fc4558142fac98bf5801659e695292e652'
}
PACKAGE_DATA_URL = {
LLVM_TAR: LLVM_TAR_URL,
OPENMP_TAR: OPENMP_TAR_URL
}
def GetSpecInstallConfig(scratch_dir):
"""Returns a SpecInstallConfigurations() for SPEC CPU 2017.
Args:
scratch_dir: The scratch directory on the VM that SPEC is installed on.
"""
install_config = speccpu.SpecInstallConfigurations()
install_config.package_name = _PACKAGE_NAME
install_config.base_mount_dir = _MOUNT_DIR
install_config.base_spec_dir = _SPECCPU2017_DIR
install_config.base_tar_file_path = (FLAGS.runspec_tar or _SPECCPU2017_TAR)
install_config.base_iso_file_path = _SPECCPU2017_ISO
install_config.base_clang_flag_file_path = _DEFAULT_CLANG_FLAG
install_config.required_members = _TAR_REQUIRED_MEMBERS
install_config.log_format = _LOG_FORMAT
install_config.runspec_config = (FLAGS.runspec_config or
_DEFAULT_RUNSPEC_CONFIG)
install_config.UpdateConfig(scratch_dir)
return install_config
def Install(vm):
"""Installs SPECCPU 2017."""
speccpu.InstallSPECCPU(vm, GetSpecInstallConfig(vm.GetScratchDir()))
vm.InstallPreprovisionedPackageData(_PACKAGE_NAME, [LLVM_TAR, OPENMP_TAR],
linux_packages.INSTALL_DIR)
vm.RemoteCommand('cd {0} && tar xf {1} && tar xf {2}'.format(
linux_packages.INSTALL_DIR, LLVM_TAR, OPENMP_TAR))
  # spec17 tarball comes pre-packaged with runner scripts for x86 architecture.
# But because we may have x86 or arm architecture machines, just rerun the
# install script to regenerate the runner scripts based on what spec detects
# to be the vm architecture.
vm.RemoteCommand('echo yes | {0}/cpu2017/install.sh'.format(
vm.GetScratchDir()))
def AptInstall(vm):
vm.InstallPackages('libjemalloc1 libjemalloc-dev libomp-dev')
Install(vm)
|
from datetime import timedelta
import io
from homeassistant.config import async_process_ha_core_config
from homeassistant.setup import async_setup_component
from homeassistant.util import dt as dt_util
from tests.common import async_fire_time_changed
async def test_bad_posting(hass, aiohttp_client):
"""Test that posting to wrong api endpoint fails."""
await async_process_ha_core_config(
hass,
{"external_url": "http://example.com"},
)
await async_setup_component(
hass,
"camera",
{
"camera": {
"platform": "push",
"name": "config_test",
"webhook_id": "camera.config_test",
}
},
)
await hass.async_block_till_done()
assert hass.states.get("camera.config_test") is not None
client = await aiohttp_client(hass.http.app)
# missing file
async with client.post("/api/webhook/camera.config_test") as resp:
assert resp.status == 200 # webhooks always return 200
camera_state = hass.states.get("camera.config_test")
assert camera_state.state == "idle" # no file supplied we are still idle
async def test_posting_url(hass, aiohttp_client):
"""Test that posting to api endpoint works."""
await async_process_ha_core_config(
hass,
{"external_url": "http://example.com"},
)
await async_setup_component(
hass,
"camera",
{
"camera": {
"platform": "push",
"name": "config_test",
"webhook_id": "camera.config_test",
}
},
)
await hass.async_block_till_done()
client = await aiohttp_client(hass.http.app)
files = {"image": io.BytesIO(b"fake")}
# initial state
camera_state = hass.states.get("camera.config_test")
assert camera_state.state == "idle"
# post image
resp = await client.post("/api/webhook/camera.config_test", data=files)
assert resp.status == 200
# state recording
camera_state = hass.states.get("camera.config_test")
assert camera_state.state == "recording"
# await timeout
shifted_time = dt_util.utcnow() + timedelta(seconds=15)
async_fire_time_changed(hass, shifted_time)
await hass.async_block_till_done()
# back to initial state
camera_state = hass.states.get("camera.config_test")
assert camera_state.state == "idle"
|
from django.http import Http404, HttpResponse
from django.shortcuts import get_object_or_404, redirect
from django.urls import reverse
from django.utils.html import escape
from django.utils.safestring import mark_safe
from django.views.decorators.cache import cache_control
from django.views.decorators.vary import vary_on_cookie
from weblate.lang.models import Language
from weblate.trans.forms import EngageForm
from weblate.trans.models import Component
from weblate.trans.util import render
from weblate.trans.widgets import WIDGETS, SiteOpenGraphWidget
from weblate.utils.site import get_site_url
from weblate.utils.stats import ProjectLanguage
from weblate.utils.views import get_component, get_project, try_set_language
def widgets_sorter(widget):
"""Provide better ordering of widgets."""
return WIDGETS[widget].order
def widgets(request, project):
obj = get_project(request, project)
# Parse possible language selection
form = EngageForm(request.user, obj, request.GET)
lang = None
component = None
if form.is_valid():
if form.cleaned_data["lang"]:
lang = Language.objects.get(code=form.cleaned_data["lang"]).code
if form.cleaned_data["component"]:
component = Component.objects.get(
slug=form.cleaned_data["component"], project=obj
).slug
kwargs = {"project": obj.slug}
if lang is not None:
kwargs["lang"] = lang
engage_url = get_site_url(reverse("engage", kwargs=kwargs))
engage_link = mark_safe(
'<a href="{0}" id="engage-link">{0}</a>'.format(escape(engage_url))
)
widget_base_url = get_site_url(reverse("widgets", kwargs={"project": obj.slug}))
widget_list = []
for widget_name in sorted(WIDGETS, key=widgets_sorter):
widget_class = WIDGETS[widget_name]
if not widget_class.show:
continue
color_list = []
for color in widget_class.colors:
kwargs = {
"project": obj.slug,
"widget": widget_name,
"color": color,
"extension": widget_class.extension,
}
if lang is not None:
kwargs["lang"] = lang
if component is not None:
kwargs["component"] = component
color_url = reverse("widget-image", kwargs=kwargs)
color_list.append({"name": color, "url": get_site_url(color_url)})
widget_list.append(
{"name": widget_name, "colors": color_list, "verbose": widget_class.verbose}
)
return render(
request,
"widgets.html",
{
"engage_url": engage_url,
"engage_link": engage_link,
"widget_list": widget_list,
"widget_base_url": widget_base_url,
"object": obj,
"project": obj,
"image_src": widget_list[0]["colors"][0]["url"],
"form": form,
},
)
@vary_on_cookie
@cache_control(max_age=3600)
def render_widget(
request,
project,
widget="287x66",
color=None,
lang=None,
component=None,
extension="png",
):
# We intentionally skip ACL here to allow widget sharing
if component is None:
obj = get_project(request, project, skip_acl=True)
elif component == "-":
project = get_project(request, project, skip_acl=True)
lang = get_object_or_404(Language, code=lang)
obj = ProjectLanguage(project, lang)
else:
obj = get_component(request, project, component, skip_acl=True)
# Handle language parameter
if lang is not None and isinstance(lang, str):
lang = Language.objects.fuzzy_get(code=lang, strict=True)
if lang is None:
raise Http404()
if "native" not in request.GET:
try_set_language(lang.code)
else:
try_set_language("en")
# Get widget class
try:
widget_class = WIDGETS[widget]
except KeyError:
raise Http404()
# Construct object
widget_obj = widget_class(obj, color, lang)
# Redirect widget
if hasattr(widget_obj, "redirect"):
return redirect(widget_obj.redirect(), permanent=True)
# Invalid extension
if extension != widget_obj.extension or color != widget_obj.color:
kwargs = {
"project": project,
"widget": widget,
"color": widget_obj.color,
"extension": widget_obj.extension,
}
if lang:
kwargs["lang"] = lang.code
return redirect("widget-image", permanent=True, **kwargs)
return redirect("widget-image", permanent=True, **kwargs)
# Render widget
response = HttpResponse(content_type=widget_obj.content_type)
widget_obj.render(response)
return response
@vary_on_cookie
@cache_control(max_age=3600)
def render_og(request):
# Construct object
widget_obj = SiteOpenGraphWidget()
# Render widget
response = HttpResponse(content_type=widget_obj.content_type)
widget_obj.render(response)
return response
|
from .environment import env
from .utils import silence_stderr
import os.path
from pylama.lint.extensions import LINTERS
try:
from pylama.lint.pylama_pylint import Linter
LINTERS['pylint'] = Linter()
except Exception: # noqa
pass
def code_check():
"""Run pylama and check current file.
:return bool:
"""
with silence_stderr():
from pylama.core import run
from pylama.config import parse_options
if not env.curbuf.name:
return env.stop()
linters = env.var('g:pymode_lint_checkers')
env.debug(linters)
    # Fixed in v0.9.3: these two parameters may be passed as strings.
    # DEPRECATE in v0.10.0: they need to be set as lists.
    if isinstance(env.var('g:pymode_lint_ignore'), str):
        raise ValueError('g:pymode_lint_ignore should have a list type')
    else:
        ignore = env.var('g:pymode_lint_ignore')
    if isinstance(env.var('g:pymode_lint_select'), str):
        raise ValueError('g:pymode_lint_select should have a list type')
    else:
        select = env.var('g:pymode_lint_select')
options = parse_options(
linters=linters, force=1,
ignore=ignore,
select=select,
)
env.debug(options)
for linter in linters:
opts = env.var('g:pymode_lint_options_%s' % linter, silence=True)
if opts:
options.linters_params[linter] = options.linters_params.get(
linter, {})
options.linters_params[linter].update(opts)
path = os.path.relpath(env.curbuf.name, env.curdir)
env.debug("Start code check: ", path)
if getattr(options, 'skip', None) and any(p.match(path) for p in options.skip): # noqa
env.message('Skip code checking.')
env.debug("Skipped")
return env.stop()
if env.options.get('debug'):
from pylama.core import LOGGER, logging
LOGGER.setLevel(logging.DEBUG)
errors = run(path, code='\n'.join(env.curbuf) + '\n', options=options)
env.debug("Find errors: ", len(errors))
sort_rules = env.var('g:pymode_lint_sort')
def __sort(e):
try:
return sort_rules.index(e.get('type'))
except ValueError:
return 999
if sort_rules:
env.debug("Find sorting: ", sort_rules)
errors = sorted(errors, key=__sort)
for e in errors:
e._info['bufnr'] = env.curbuf.number
if e._info['col'] is None:
e._info['col'] = 1
env.run('g:PymodeLocList.current().extend', [e._info for e in errors])
# pylama:ignore=W0212,E1103
|
import logging
from homeassistant.components.pi_hole.const import DOMAIN
from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER
from homeassistant.data_entry_flow import (
RESULT_TYPE_ABORT,
RESULT_TYPE_CREATE_ENTRY,
RESULT_TYPE_FORM,
)
from . import (
CONF_CONFIG_FLOW,
CONF_DATA,
NAME,
_create_mocked_hole,
_patch_config_flow_hole,
)
from tests.async_mock import patch
def _flow_next(hass, flow_id):
return next(
flow
for flow in hass.config_entries.flow.async_progress()
if flow["flow_id"] == flow_id
)
def _patch_setup():
return patch(
"homeassistant.components.pi_hole.async_setup_entry",
return_value=True,
)
async def test_flow_import(hass, caplog):
"""Test import flow."""
mocked_hole = _create_mocked_hole()
with _patch_config_flow_hole(mocked_hole), _patch_setup():
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=CONF_DATA
)
assert result["type"] == RESULT_TYPE_CREATE_ENTRY
assert result["title"] == NAME
assert result["data"] == CONF_DATA
# duplicated server
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=CONF_DATA
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_flow_import_invalid(hass, caplog):
"""Test import flow with invalid server."""
mocked_hole = _create_mocked_hole(True)
with _patch_config_flow_hole(mocked_hole), _patch_setup():
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=CONF_DATA
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "cannot_connect"
assert len([x for x in caplog.records if x.levelno == logging.ERROR]) == 1
async def test_flow_user(hass):
"""Test user initialized flow."""
mocked_hole = _create_mocked_hole()
with _patch_config_flow_hole(mocked_hole), _patch_setup():
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
)
assert result["type"] == RESULT_TYPE_FORM
assert result["step_id"] == "user"
assert result["errors"] == {}
_flow_next(hass, result["flow_id"])
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input=CONF_CONFIG_FLOW,
)
assert result["type"] == RESULT_TYPE_CREATE_ENTRY
assert result["title"] == NAME
assert result["data"] == CONF_DATA
# duplicated server
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data=CONF_CONFIG_FLOW,
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_flow_user_invalid(hass):
"""Test user initialized flow with invalid server."""
mocked_hole = _create_mocked_hole(True)
with _patch_config_flow_hole(mocked_hole), _patch_setup():
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data=CONF_CONFIG_FLOW
)
assert result["type"] == RESULT_TYPE_FORM
assert result["step_id"] == "user"
assert result["errors"] == {"base": "cannot_connect"}
|
from io import StringIO
from tokenize import generate_tokens
import json
def to_json(s):
"""Return a valid json string, given a jsarray string.
:param s: string of jsarray data
"""
out = []
for t in generate_tokens(StringIO(s).readline):
if out and any(((',' == t[1] == out[-1]), # double comma
(out[-1] == '[' and t[1] == ','), # comma opening array
)):
out.append('null')
out.append(t[1])
return ''.join(out)
def loads(s):
return json.loads(to_json(s))
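if __name__ == '__main__':
    # Illustrative examples (not part of the original module): empty slots in
    # a jsarray string become null in the JSON text and None after loading.
    print(to_json("[,1,,2]"))  # -> [null,1,null,2]
    print(loads("[,1,,2]"))    # -> [None, 1, None, 2]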
|
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import (
ATTR_STATE,
CONF_ALIAS,
CONF_HOST,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_RESOURCES,
CONF_USERNAME,
STATE_UNKNOWN,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import (
COORDINATOR,
DEFAULT_HOST,
DEFAULT_NAME,
DEFAULT_PORT,
DOMAIN,
KEY_STATUS,
KEY_STATUS_DISPLAY,
PYNUT_DATA,
PYNUT_FIRMWARE,
PYNUT_MANUFACTURER,
PYNUT_MODEL,
PYNUT_NAME,
PYNUT_UNIQUE_ID,
SENSOR_DEVICE_CLASS,
SENSOR_ICON,
SENSOR_NAME,
SENSOR_TYPES,
SENSOR_UNIT,
STATE_TYPES,
)
_LOGGER = logging.getLogger(__name__)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_ALIAS): cv.string,
vol.Optional(CONF_USERNAME): cv.string,
vol.Optional(CONF_PASSWORD): cv.string,
vol.Required(CONF_RESOURCES): vol.All(cv.ensure_list, [vol.In(SENSOR_TYPES)]),
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Import the platform into a config entry."""
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=config
)
)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the NUT sensors."""
pynut_data = hass.data[DOMAIN][config_entry.entry_id]
unique_id = pynut_data[PYNUT_UNIQUE_ID]
manufacturer = pynut_data[PYNUT_MANUFACTURER]
model = pynut_data[PYNUT_MODEL]
firmware = pynut_data[PYNUT_FIRMWARE]
name = pynut_data[PYNUT_NAME]
coordinator = pynut_data[COORDINATOR]
data = pynut_data[PYNUT_DATA]
status = data.status
entities = []
if CONF_RESOURCES in config_entry.options:
resources = config_entry.options[CONF_RESOURCES]
else:
resources = config_entry.data[CONF_RESOURCES]
for resource in resources:
sensor_type = resource.lower()
# Display status is a special case that falls back to the status value
# of the UPS instead.
if sensor_type in status or (
sensor_type == KEY_STATUS_DISPLAY and KEY_STATUS in status
):
entities.append(
NUTSensor(
coordinator,
data,
name.title(),
sensor_type,
unique_id,
manufacturer,
model,
firmware,
)
)
else:
_LOGGER.warning(
"Sensor type: %s does not appear in the NUT status "
"output, cannot add",
sensor_type,
)
async_add_entities(entities, True)
class NUTSensor(CoordinatorEntity):
"""Representation of a sensor entity for NUT status values."""
def __init__(
self,
coordinator,
data,
name,
sensor_type,
unique_id,
manufacturer,
model,
firmware,
):
"""Initialize the sensor."""
super().__init__(coordinator)
self._type = sensor_type
self._manufacturer = manufacturer
self._firmware = firmware
self._model = model
self._device_name = name
self._name = f"{name} {SENSOR_TYPES[sensor_type][SENSOR_NAME]}"
self._unit = SENSOR_TYPES[sensor_type][SENSOR_UNIT]
self._data = data
self._unique_id = unique_id
@property
def device_info(self):
"""Device info for the ups."""
if not self._unique_id:
return None
device_info = {
"identifiers": {(DOMAIN, self._unique_id)},
"name": self._device_name,
}
if self._model:
device_info["model"] = self._model
if self._manufacturer:
device_info["manufacturer"] = self._manufacturer
if self._firmware:
device_info["sw_version"] = self._firmware
return device_info
@property
def unique_id(self):
"""Sensor Unique id."""
if not self._unique_id:
return None
return f"{self._unique_id}_{self._type}"
@property
def name(self):
"""Return the name of the UPS sensor."""
return self._name
@property
def icon(self):
"""Icon to use in the frontend, if any."""
if SENSOR_TYPES[self._type][SENSOR_DEVICE_CLASS]:
# The UI will assign an icon
# if it has a class
return None
return SENSOR_TYPES[self._type][SENSOR_ICON]
@property
def device_class(self):
"""Device class of the sensor."""
return SENSOR_TYPES[self._type][SENSOR_DEVICE_CLASS]
@property
def state(self):
"""Return entity state from ups."""
if not self._data.status:
return None
if self._type == KEY_STATUS_DISPLAY:
return _format_display_state(self._data.status)
return self._data.status.get(self._type)
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit
@property
def device_state_attributes(self):
"""Return the sensor attributes."""
return {ATTR_STATE: _format_display_state(self._data.status)}
def _format_display_state(status):
"""Return UPS display state."""
if status is None:
return STATE_TYPES["OFF"]
try:
return " ".join(STATE_TYPES[state] for state in status[KEY_STATUS].split())
except KeyError:
return STATE_UNKNOWN
|
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_EXCLUDE
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from .const import DATA_CONFIG, IZONE
from .discovery import async_start_discovery_service, async_stop_discovery_service
CONFIG_SCHEMA = vol.Schema(
{
IZONE: vol.Schema(
{
vol.Optional(CONF_EXCLUDE, default=[]): vol.All(
cv.ensure_list, [cv.string]
)
}
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass: HomeAssistantType, config: ConfigType):
"""Register the iZone component config."""
conf = config.get(IZONE)
if not conf:
return True
hass.data[DATA_CONFIG] = conf
# Explicitly added in the config file, create a config entry.
hass.async_create_task(
hass.config_entries.flow.async_init(
IZONE, context={"source": config_entries.SOURCE_IMPORT}
)
)
return True
async def async_setup_entry(hass, entry):
"""Set up from a config entry."""
await async_start_discovery_service(hass)
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, "climate")
)
return True
async def async_unload_entry(hass, entry):
"""Unload the config entry and stop discovery process."""
await async_stop_discovery_service(hass)
await hass.config_entries.async_forward_entry_unload(entry, "climate")
return True
|
from django.db import migrations
from django.db.models import F
def update_source_unit(apps, schema_editor):
Unit = apps.get_model("trans", "Unit")
Project = apps.get_model("trans", "Project")
db_alias = schema_editor.connection.alias
source_units = Unit.objects.using(db_alias).filter(
translation__language=F("translation__component__source_language")
)
total = source_units.count()
processed = 0
for project in Project.objects.using(db_alias).iterator():
has_labels = project.label_set.exists()
for source in source_units.filter(
translation__component__project=project
).iterator():
processed += 1
if processed % 1000 == 0:
percent = int(100 * processed / total)
print(f"Updating source units {percent}% [{processed}/{total}]...")
# Filter matching translation units
translations = (
Unit.objects.using(db_alias)
.filter(
translation__component=source.translation.component,
id_hash=source.id_hash,
)
.exclude(pk=source.pk)
)
# Update source_unit attribute and wipe extra_flags and explanation
update = {"source_unit": source}
if source.extra_flags:
update["extra_flags"] = ""
if source.explanation:
update["explanation"] = ""
translations.update(**update)
            # Wipe label links from translation units
if has_labels and source.labels.exists():
Unit.labels.through.objects.using(db_alias).filter(
unit__in=translations
).delete()
if total:
print(f"Updating source units completed [{processed}/{total}]")
class Migration(migrations.Migration):
dependencies = [
("trans", "0102_unit_source_unit"),
]
operations = [migrations.RunPython(update_source_unit, elidable=True)]
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
slim = tf.contrib.slim
trunc_normal = lambda stddev: tf.truncated_normal_initializer(0.0, stddev)
def alexnet_v2_arg_scope(weight_decay=0.0005):
with slim.arg_scope([slim.conv2d, slim.fully_connected],
activation_fn=tf.nn.relu,
biases_initializer=tf.constant_initializer(0.1),
weights_regularizer=slim.l2_regularizer(weight_decay)):
with slim.arg_scope([slim.conv2d], padding='SAME'):
with slim.arg_scope([slim.max_pool2d], padding='VALID') as arg_sc:
return arg_sc
def alexnet_v2(inputs,
num_classes=1000,
is_training=True,
dropout_keep_prob=0.5,
spatial_squeeze=True,
scope='alexnet_v2'):
"""AlexNet version 2.
Described in: http://arxiv.org/pdf/1404.5997v2.pdf
Parameters from:
github.com/akrizhevsky/cuda-convnet2/blob/master/layers/
layers-imagenet-1gpu.cfg
Note: All the fully_connected layers have been transformed to conv2d layers.
To use in classification mode, resize input to 224x224. To use in fully
convolutional mode, set spatial_squeeze to false.
  The LRN layers have been removed and the initializers changed from
  random_normal_initializer to xavier_initializer.
Args:
inputs: a tensor of size [batch_size, height, width, channels].
num_classes: number of predicted classes.
is_training: whether or not the model is being trained.
dropout_keep_prob: the probability that activations are kept in the dropout
layers during training.
    spatial_squeeze: whether or not to squeeze the spatial dimensions of the
outputs. Useful to remove unnecessary dimensions for classification.
scope: Optional scope for the variables.
Returns:
the last op containing the log predictions and end_points dict.
"""
with tf.variable_scope(scope, 'alexnet_v2', [inputs]) as sc:
end_points_collection = sc.name + '_end_points'
# Collect outputs for conv2d, fully_connected and max_pool2d.
with slim.arg_scope([slim.conv2d, slim.fully_connected, slim.max_pool2d],
outputs_collections=[end_points_collection]):
net = slim.conv2d(inputs, 64, [11, 11], 4, padding='VALID',
scope='conv1')
net = slim.max_pool2d(net, [3, 3], 2, scope='pool1')
net = slim.conv2d(net, 192, [5, 5], scope='conv2')
net = slim.max_pool2d(net, [3, 3], 2, scope='pool2')
net = slim.conv2d(net, 384, [3, 3], scope='conv3')
net = slim.conv2d(net, 384, [3, 3], scope='conv4')
net = slim.conv2d(net, 256, [3, 3], scope='conv5')
net = slim.max_pool2d(net, [3, 3], 2, scope='pool5')
# Use conv2d instead of fully_connected layers.
with slim.arg_scope([slim.conv2d],
weights_initializer=trunc_normal(0.005),
biases_initializer=tf.constant_initializer(0.1)):
net = slim.conv2d(net, 4096, [5, 5], padding='VALID',
scope='fc6')
net = slim.dropout(net, dropout_keep_prob, is_training=is_training,
scope='dropout6')
net = slim.conv2d(net, 4096, [1, 1], scope='fc7')
net = slim.dropout(net, dropout_keep_prob, is_training=is_training,
scope='dropout7')
net = slim.conv2d(net, num_classes, [1, 1],
activation_fn=None,
normalizer_fn=None,
biases_initializer=tf.zeros_initializer(),
scope='fc8')
# Convert end_points_collection into a end_point dict.
end_points = slim.utils.convert_collection_to_dict(end_points_collection)
if spatial_squeeze:
net = tf.squeeze(net, [1, 2], name='fc8/squeezed')
end_points[sc.name + '/fc8'] = net
return net, end_points
alexnet_v2.default_image_size = 224
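# Illustrative usage sketch (not part of the original module), assuming a
# TF 1.x environment where tf.contrib.slim is available:
#
#     images = tf.placeholder(tf.float32, [None, 224, 224, 3])
#     with slim.arg_scope(alexnet_v2_arg_scope()):
#         logits, end_points = alexnet_v2(images, num_classes=1000,
#                                         is_training=False)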
|
import logging
from sense_energy import ASyncSenseable, SenseAuthenticationException
import voluptuous as vol
from homeassistant import config_entries, core
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, CONF_TIMEOUT
from .const import ACTIVE_UPDATE_RATE, DEFAULT_TIMEOUT, SENSE_TIMEOUT_EXCEPTIONS
from .const import DOMAIN  # pylint:disable=unused-import
_LOGGER = logging.getLogger(__name__)
DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_EMAIL): str,
vol.Required(CONF_PASSWORD): str,
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): vol.Coerce(int),
}
)
async def validate_input(hass: core.HomeAssistant, data):
"""Validate the user input allows us to connect.
Data has the keys from DATA_SCHEMA with values provided by the user.
"""
timeout = data[CONF_TIMEOUT]
gateway = ASyncSenseable(api_timeout=timeout, wss_timeout=timeout)
gateway.rate_limit = ACTIVE_UPDATE_RATE
await gateway.authenticate(data[CONF_EMAIL], data[CONF_PASSWORD])
# Return info that you want to store in the config entry.
return {"title": data[CONF_EMAIL]}
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Sense."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
async def async_step_user(self, user_input=None):
"""Handle the initial step."""
errors = {}
if user_input is not None:
try:
info = await validate_input(self.hass, user_input)
await self.async_set_unique_id(user_input[CONF_EMAIL])
return self.async_create_entry(title=info["title"], data=user_input)
except SENSE_TIMEOUT_EXCEPTIONS:
errors["base"] = "cannot_connect"
except SenseAuthenticationException:
errors["base"] = "invalid_auth"
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
return self.async_show_form(
step_id="user", data_schema=DATA_SCHEMA, errors=errors
)
async def async_step_import(self, user_input):
"""Handle import."""
await self.async_set_unique_id(user_input[CONF_EMAIL])
self._abort_if_unique_id_configured()
return await self.async_step_user(user_input)
|
from homeassistant.components.device_tracker import SOURCE_TYPE_GPS
from homeassistant.components.device_tracker.config_entry import TrackerEntity
from homeassistant.const import ATTR_LATITUDE, ATTR_LONGITUDE
from homeassistant.core import callback
from homeassistant.helpers import device_registry
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.restore_state import RestoreEntity
from . import DOMAIN as GF_DOMAIN, TRACKER_UPDATE
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Geofency config entry."""
@callback
def _receive_data(device, gps, location_name, attributes):
"""Fire HA event to set location."""
if device in hass.data[GF_DOMAIN]["devices"]:
return
hass.data[GF_DOMAIN]["devices"].add(device)
async_add_entities([GeofencyEntity(device, gps, location_name, attributes)])
hass.data[GF_DOMAIN]["unsub_device_tracker"][
config_entry.entry_id
] = async_dispatcher_connect(hass, TRACKER_UPDATE, _receive_data)
# Restore previously loaded devices
dev_reg = await device_registry.async_get_registry(hass)
dev_ids = {
identifier[1]
for device in dev_reg.devices.values()
for identifier in device.identifiers
if identifier[0] == GF_DOMAIN
}
if dev_ids:
hass.data[GF_DOMAIN]["devices"].update(dev_ids)
async_add_entities(GeofencyEntity(dev_id) for dev_id in dev_ids)
return True
class GeofencyEntity(TrackerEntity, RestoreEntity):
"""Represent a tracked device."""
def __init__(self, device, gps=None, location_name=None, attributes=None):
"""Set up Geofency entity."""
self._attributes = attributes or {}
self._name = device
self._location_name = location_name
self._gps = gps
self._unsub_dispatcher = None
self._unique_id = device
@property
def device_state_attributes(self):
"""Return device specific attributes."""
return self._attributes
@property
def latitude(self):
"""Return latitude value of the device."""
return self._gps[0]
@property
def longitude(self):
"""Return longitude value of the device."""
return self._gps[1]
@property
def location_name(self):
"""Return a location name for the current location of the device."""
return self._location_name
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def unique_id(self):
"""Return the unique ID."""
return self._unique_id
@property
def device_info(self):
"""Return the device info."""
return {"name": self._name, "identifiers": {(GF_DOMAIN, self._unique_id)}}
@property
def source_type(self):
"""Return the source type, eg gps or router, of the device."""
return SOURCE_TYPE_GPS
async def async_added_to_hass(self):
"""Register state update callback."""
await super().async_added_to_hass()
self._unsub_dispatcher = async_dispatcher_connect(
self.hass, TRACKER_UPDATE, self._async_receive_data
)
if self._attributes:
return
state = await self.async_get_last_state()
if state is None:
self._gps = (None, None)
return
attr = state.attributes
self._gps = (attr.get(ATTR_LATITUDE), attr.get(ATTR_LONGITUDE))
async def async_will_remove_from_hass(self):
"""Clean up after entity before removal."""
await super().async_will_remove_from_hass()
self._unsub_dispatcher()
self.hass.data[GF_DOMAIN]["devices"].remove(self._unique_id)
@callback
def _async_receive_data(self, device, gps, location_name, attributes):
"""Mark the device as seen."""
if device != self.name:
return
self._attributes.update(attributes)
self._location_name = location_name
self._gps = gps
self.async_write_ha_state()
|
import homeassistant.components.sleepiq.sensor as sleepiq
from homeassistant.setup import async_setup_component
from tests.async_mock import MagicMock
from tests.components.sleepiq.test_init import mock_responses
CONFIG = {"username": "foo", "password": "bar"}
async def test_setup(hass, requests_mock):
"""Test for successfully setting up the SleepIQ platform."""
mock_responses(requests_mock)
assert await async_setup_component(hass, "sleepiq", {"sleepiq": CONFIG})
device_mock = MagicMock()
sleepiq.setup_platform(hass, CONFIG, device_mock, MagicMock())
devices = device_mock.call_args[0][0]
assert 2 == len(devices)
left_side = devices[1]
assert "SleepNumber ILE Test1 SleepNumber" == left_side.name
assert 40 == left_side.state
right_side = devices[0]
assert "SleepNumber ILE Test2 SleepNumber" == right_side.name
assert 80 == right_side.state
async def test_setup_single(hass, requests_mock):
    """Test for successfully setting up the SleepIQ platform with a single bed."""
mock_responses(requests_mock, single=True)
assert await async_setup_component(hass, "sleepiq", {"sleepiq": CONFIG})
device_mock = MagicMock()
sleepiq.setup_platform(hass, CONFIG, device_mock, MagicMock())
devices = device_mock.call_args[0][0]
assert 1 == len(devices)
right_side = devices[0]
assert "SleepNumber ILE Test1 SleepNumber" == right_side.name
assert 40 == right_side.state
|
import logging
import mock
import unittest
import smart_open.ssh
def mock_ssh(func):
def wrapper(*args, **kwargs):
smart_open.ssh._SSH.clear()
return func(*args, **kwargs)
return mock.patch("paramiko.client.SSHClient.get_transport")(
mock.patch("paramiko.client.SSHClient.connect")(wrapper)
)
class SSHOpen(unittest.TestCase):
@mock_ssh
def test_open(self, mock_connect, get_transp_mock):
smart_open.open("ssh://user:pass@some-host/")
mock_connect.assert_called_with("some-host", 22, username="user", password="pass")
@mock_ssh
def test_percent_encoding(self, mock_connect, get_transp_mock):
smart_open.open("ssh://user%3a:pass%40@some-host/")
mock_connect.assert_called_with("some-host", 22, username="user:", password="pass@")
@mock_ssh
def test_open_without_password(self, mock_connect, get_transp_mock):
smart_open.open("ssh://user@some-host/")
mock_connect.assert_called_with("some-host", 22, username="user", password=None)
@mock_ssh
def test_open_with_transport_params(self, mock_connect, get_transp_mock):
smart_open.open(
"ssh://user:pass@some-host/",
transport_params={"connect_kwargs": {"username": "ubuntu", "password": "pwd"}},
)
mock_connect.assert_called_with("some-host", 22, username="ubuntu", password="pwd")
@mock_ssh
def test_open_with_key_filename(self, mock_connect, get_transp_mock):
smart_open.open(
"ssh://user@some-host/",
transport_params={"connect_kwargs": {"key_filename": "key"}},
)
mock_connect.assert_called_with("some-host", 22, username="user", key_filename="key")
if __name__ == "__main__":
logging.basicConfig(format="%(asctime)s : %(levelname)s : %(message)s", level=logging.DEBUG)
unittest.main()
|
from homeassistant.const import DEVICE_CLASS_BATTERY, PERCENTAGE
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .base import AcmedaBase
from .const import ACMEDA_HUB_UPDATE, DOMAIN
from .helpers import async_add_acmeda_entities
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Acmeda Rollers from a config entry."""
hub = hass.data[DOMAIN][config_entry.entry_id]
current = set()
@callback
def async_add_acmeda_sensors():
async_add_acmeda_entities(
hass, AcmedaBattery, config_entry, current, async_add_entities
)
hub.cleanup_callbacks.append(
async_dispatcher_connect(
hass,
ACMEDA_HUB_UPDATE.format(config_entry.entry_id),
async_add_acmeda_sensors,
)
)
class AcmedaBattery(AcmedaBase):
"""Representation of a Acmeda cover device."""
device_class = DEVICE_CLASS_BATTERY
unit_of_measurement = PERCENTAGE
@property
def name(self):
"""Return the name of roller."""
return f"{super().name} Battery"
@property
def state(self):
"""Return the state of the device."""
return self.roller.battery
|
import logging
# Initialize the repository
if not hasattr(logging, 'statistics'): logging.statistics = {}
# Initialize my namespace
mystats = logging.statistics.setdefault('My Stuff', {})
# Initialize my namespace's scalars and collections
mystats.update({
'Enabled': True,
'Start Time': time.time(),
'Important Events': 0,
'Events/Second': lambda s: (
(s['Important Events'] / (time.time() - s['Start Time']))),
})
...
for event in events:
...
# Collect stats
if mystats.get('Enabled', False):
mystats['Important Events'] += 1
To report statistics::
root.cpstats = cpstats.StatsPage()
To format statistics reports::
See 'Reporting', above.
"""
import logging
import os
import sys
import threading
import time
import cherrypy
from cherrypy._json import json
# ------------------------------- Statistics -------------------------------- #
if not hasattr(logging, 'statistics'):
logging.statistics = {}
def extrapolate_statistics(scope):
"""Return an extrapolated copy of the given scope."""
c = {}
for k, v in scope.copy().items():
if isinstance(v, dict):
v = extrapolate_statistics(v)
elif isinstance(v, (list, tuple)):
v = [extrapolate_statistics(record) for record in v]
elif hasattr(v, '__call__'):
v = v(scope)
c[k] = v
return c
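# Illustrative example (not part of the original module): callables stored in
# a scope are invoked with the scope itself, so derived values are computed at
# report time. A minimal sketch:
#
#     scope = {'Start Time': 0.0,
#              'Uptime': lambda s: time.time() - s['Start Time']}
#     extrapolate_statistics(scope)
#     # -> {'Start Time': 0.0, 'Uptime': <seconds since the epoch>}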
# -------------------- CherryPy Applications Statistics --------------------- #
appstats = logging.statistics.setdefault('CherryPy Applications', {})
appstats.update({
'Enabled': True,
'Bytes Read/Request': lambda s: (
s['Total Requests'] and
(s['Total Bytes Read'] / float(s['Total Requests'])) or
0.0
),
'Bytes Read/Second': lambda s: s['Total Bytes Read'] / s['Uptime'](s),
'Bytes Written/Request': lambda s: (
s['Total Requests'] and
(s['Total Bytes Written'] / float(s['Total Requests'])) or
0.0
),
'Bytes Written/Second': lambda s: (
s['Total Bytes Written'] / s['Uptime'](s)
),
'Current Time': lambda s: time.time(),
'Current Requests': 0,
'Requests/Second': lambda s: float(s['Total Requests']) / s['Uptime'](s),
'Server Version': cherrypy.__version__,
'Start Time': time.time(),
'Total Bytes Read': 0,
'Total Bytes Written': 0,
'Total Requests': 0,
'Total Time': 0,
'Uptime': lambda s: time.time() - s['Start Time'],
'Requests': {},
})
def proc_time(s):
return time.time() - s['Start Time']
class ByteCountWrapper(object):
"""Wraps a file-like object, counting the number of bytes read."""
def __init__(self, rfile):
self.rfile = rfile
self.bytes_read = 0
def read(self, size=-1):
data = self.rfile.read(size)
self.bytes_read += len(data)
return data
def readline(self, size=-1):
data = self.rfile.readline(size)
self.bytes_read += len(data)
return data
def readlines(self, sizehint=0):
# Shamelessly stolen from StringIO
total = 0
lines = []
line = self.readline()
while line:
lines.append(line)
total += len(line)
if 0 < sizehint <= total:
break
line = self.readline()
return lines
def close(self):
self.rfile.close()
def __iter__(self):
return self
def next(self):
data = self.rfile.next()
self.bytes_read += len(data)
return data
def average_uriset_time(s):
return s['Count'] and (s['Sum'] / s['Count']) or 0
def _get_threading_ident():
if sys.version_info >= (3, 3):
return threading.get_ident()
return threading._get_ident()
class StatsTool(cherrypy.Tool):
"""Record various information about the current request."""
def __init__(self):
cherrypy.Tool.__init__(self, 'on_end_request', self.record_stop)
def _setup(self):
"""Hook this tool into cherrypy.request.
The standard CherryPy request object will automatically call this
method when the tool is "turned on" in config.
"""
if appstats.get('Enabled', False):
cherrypy.Tool._setup(self)
self.record_start()
def record_start(self):
"""Record the beginning of a request."""
request = cherrypy.serving.request
if not hasattr(request.rfile, 'bytes_read'):
request.rfile = ByteCountWrapper(request.rfile)
request.body.fp = request.rfile
r = request.remote
appstats['Current Requests'] += 1
appstats['Total Requests'] += 1
appstats['Requests'][_get_threading_ident()] = {
'Bytes Read': None,
'Bytes Written': None,
# Use a lambda so the ip gets updated by tools.proxy later
'Client': lambda s: '%s:%s' % (r.ip, r.port),
'End Time': None,
'Processing Time': proc_time,
'Request-Line': request.request_line,
'Response Status': None,
'Start Time': time.time(),
}
def record_stop(
self, uriset=None, slow_queries=1.0, slow_queries_count=100,
debug=False, **kwargs):
"""Record the end of a request."""
resp = cherrypy.serving.response
w = appstats['Requests'][_get_threading_ident()]
r = cherrypy.request.rfile.bytes_read
w['Bytes Read'] = r
appstats['Total Bytes Read'] += r
if resp.stream:
w['Bytes Written'] = 'chunked'
else:
cl = int(resp.headers.get('Content-Length', 0))
w['Bytes Written'] = cl
appstats['Total Bytes Written'] += cl
w['Response Status'] = \
getattr(resp, 'output_status', resp.status).decode()
w['End Time'] = time.time()
p = w['End Time'] - w['Start Time']
w['Processing Time'] = p
appstats['Total Time'] += p
appstats['Current Requests'] -= 1
if debug:
cherrypy.log('Stats recorded: %s' % repr(w), 'TOOLS.CPSTATS')
if uriset:
rs = appstats.setdefault('URI Set Tracking', {})
r = rs.setdefault(uriset, {
'Min': None, 'Max': None, 'Count': 0, 'Sum': 0,
'Avg': average_uriset_time})
if r['Min'] is None or p < r['Min']:
r['Min'] = p
if r['Max'] is None or p > r['Max']:
r['Max'] = p
r['Count'] += 1
r['Sum'] += p
if slow_queries and p > slow_queries:
sq = appstats.setdefault('Slow Queries', [])
sq.append(w.copy())
if len(sq) > slow_queries_count:
sq.pop(0)
cherrypy.tools.cpstats = StatsTool()
# ---------------------- CherryPy Statistics Reporting ---------------------- #
thisdir = os.path.abspath(os.path.dirname(__file__))
missing = object()
def locale_date(v):
return time.strftime('%c', time.gmtime(v))
def iso_format(v):
return time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(v))
def pause_resume(ns):
def _pause_resume(enabled):
pause_disabled = ''
resume_disabled = ''
if enabled:
resume_disabled = 'disabled="disabled" '
else:
pause_disabled = 'disabled="disabled" '
return """
<form action="pause" method="POST" style="display:inline">
<input type="hidden" name="namespace" value="%s" />
<input type="submit" value="Pause" %s/>
</form>
<form action="resume" method="POST" style="display:inline">
<input type="hidden" name="namespace" value="%s" />
<input type="submit" value="Resume" %s/>
</form>
""" % (ns, pause_disabled, ns, resume_disabled)
return _pause_resume
class StatsPage(object):
formatting = {
'CherryPy Applications': {
'Enabled': pause_resume('CherryPy Applications'),
'Bytes Read/Request': '%.3f',
'Bytes Read/Second': '%.3f',
'Bytes Written/Request': '%.3f',
'Bytes Written/Second': '%.3f',
'Current Time': iso_format,
'Requests/Second': '%.3f',
'Start Time': iso_format,
'Total Time': '%.3f',
'Uptime': '%.3f',
'Slow Queries': {
'End Time': None,
'Processing Time': '%.3f',
'Start Time': iso_format,
},
'URI Set Tracking': {
'Avg': '%.3f',
'Max': '%.3f',
'Min': '%.3f',
'Sum': '%.3f',
},
'Requests': {
'Bytes Read': '%s',
'Bytes Written': '%s',
'End Time': None,
'Processing Time': '%.3f',
'Start Time': None,
},
},
'CherryPy WSGIServer': {
'Enabled': pause_resume('CherryPy WSGIServer'),
'Connections/second': '%.3f',
'Start time': iso_format,
},
}
@cherrypy.expose
def index(self):
# Transform the raw data into pretty output for HTML
yield """
<html>
<head>
<title>Statistics</title>
<style>
th, td {
padding: 0.25em 0.5em;
border: 1px solid #666699;
}
table {
border-collapse: collapse;
}
table.stats1 {
width: 100%;
}
table.stats1 th {
font-weight: bold;
text-align: right;
background-color: #CCD5DD;
}
table.stats2, h2 {
margin-left: 50px;
}
table.stats2 th {
font-weight: bold;
text-align: center;
background-color: #CCD5DD;
}
</style>
</head>
<body>
"""
for title, scalars, collections in self.get_namespaces():
yield """
<h1>%s</h1>
<table class='stats1'>
<tbody>
""" % title
for i, (key, value) in enumerate(scalars):
colnum = i % 3
if colnum == 0:
yield """
<tr>"""
yield (
"""
<th>%(key)s</th><td id='%(title)s-%(key)s'>%(value)s</td>""" %
vars()
)
if colnum == 2:
yield """
</tr>"""
if colnum == 0:
yield """
<th></th><td></td>
<th></th><td></td>
</tr>"""
elif colnum == 1:
yield """
<th></th><td></td>
</tr>"""
yield """
</tbody>
</table>"""
for subtitle, headers, subrows in collections:
yield """
<h2>%s</h2>
<table class='stats2'>
<thead>
<tr>""" % subtitle
for key in headers:
yield """
<th>%s</th>""" % key
yield """
</tr>
</thead>
<tbody>"""
for subrow in subrows:
yield """
<tr>"""
for value in subrow:
yield """
<td>%s</td>""" % value
yield """
</tr>"""
yield """
</tbody>
</table>"""
yield """
</body>
</html>
"""
def get_namespaces(self):
"""Yield (title, scalars, collections) for each namespace."""
s = extrapolate_statistics(logging.statistics)
for title, ns in sorted(s.items()):
scalars = []
collections = []
ns_fmt = self.formatting.get(title, {})
for k, v in sorted(ns.items()):
fmt = ns_fmt.get(k, {})
if isinstance(v, dict):
headers, subrows = self.get_dict_collection(v, fmt)
collections.append((k, ['ID'] + headers, subrows))
elif isinstance(v, (list, tuple)):
headers, subrows = self.get_list_collection(v, fmt)
collections.append((k, headers, subrows))
else:
format = ns_fmt.get(k, missing)
if format is None:
# Don't output this column.
continue
if hasattr(format, '__call__'):
v = format(v)
elif format is not missing:
v = format % v
scalars.append((k, v))
yield title, scalars, collections
def get_dict_collection(self, v, formatting):
"""Return ([headers], [rows]) for the given collection."""
# E.g., the 'Requests' dict.
headers = []
vals = v.values()
for record in vals:
for k3 in record:
format = formatting.get(k3, missing)
if format is None:
# Don't output this column.
continue
if k3 not in headers:
headers.append(k3)
headers.sort()
subrows = []
for k2, record in sorted(v.items()):
subrow = [k2]
for k3 in headers:
v3 = record.get(k3, '')
format = formatting.get(k3, missing)
if format is None:
# Don't output this column.
continue
if hasattr(format, '__call__'):
v3 = format(v3)
elif format is not missing:
v3 = format % v3
subrow.append(v3)
subrows.append(subrow)
return headers, subrows
def get_list_collection(self, v, formatting):
"""Return ([headers], [subrows]) for the given collection."""
# E.g., the 'Slow Queries' list.
headers = []
for record in v:
for k3 in record:
format = formatting.get(k3, missing)
if format is None:
# Don't output this column.
continue
if k3 not in headers:
headers.append(k3)
headers.sort()
subrows = []
for record in v:
subrow = []
for k3 in headers:
v3 = record.get(k3, '')
format = formatting.get(k3, missing)
if format is None:
# Don't output this column.
continue
if hasattr(format, '__call__'):
v3 = format(v3)
elif format is not missing:
v3 = format % v3
subrow.append(v3)
subrows.append(subrow)
return headers, subrows
if json is not None:
@cherrypy.expose
def data(self):
s = extrapolate_statistics(logging.statistics)
cherrypy.response.headers['Content-Type'] = 'application/json'
return json.dumps(s, sort_keys=True, indent=4).encode('utf-8')
@cherrypy.expose
def pause(self, namespace):
logging.statistics.get(namespace, {})['Enabled'] = False
raise cherrypy.HTTPRedirect('./')
pause.cp_config = {'tools.allow.on': True,
'tools.allow.methods': ['POST']}
@cherrypy.expose
def resume(self, namespace):
logging.statistics.get(namespace, {})['Enabled'] = True
raise cherrypy.HTTPRedirect('./')
resume.cp_config = {'tools.allow.on': True,
'tools.allow.methods': ['POST']}
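# Illustrative wiring sketch (assumed, following the usual CherryPy tool
# conventions; not taken verbatim from this module): turn the tool on for an
# application and mount the report page somewhere reachable. `Root` here is a
# hypothetical application class.
#
#     cherrypy.tree.mount(Root(), '/', {'/': {'tools.cpstats.on': True}})
#     cherrypy.tree.mount(StatsPage(), '/cpstats')
#     cherrypy.engine.start()
#     cherrypy.engine.block()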
|
import numpy as np
def assert_is_point(point, visible=None, size=None, n_point=None):
"""Checks if points satisfy the format.
This function checks if given points satisfy the format and
raises an :class:`AssertionError` when the points violate the convention.
Args:
point (~numpy.ndarray): Points to be checked.
visible (~numpy.ndarray): Visibility of the points.
If this is :obj:`None`, all points are regarded as visible.
size (tuple of ints): The size of an image.
If this argument is specified,
the coordinates of visible points are checked to be
within the image.
n_point (int): If specified, the number of points in each object is
expected to be :obj:`n_point`.
"""
for i, pnt in enumerate(point):
assert isinstance(pnt, np.ndarray), \
'pnt must be a numpy.ndarray.'
assert pnt.dtype == np.float32, \
'The type of pnt must be numpy.float32.'
assert pnt.shape[1:] == (2,), \
'The shape of pnt must be (*, 2).'
if n_point is not None:
assert pnt.shape[0] == n_point, \
'The number of points should always be n_point'
if visible is not None:
assert len(point) == len(visible), \
'The length of point and visible should be the same.'
vsble = visible[i]
            assert isinstance(vsble, np.ndarray), \
                'visible must be a numpy.ndarray.'
assert vsble.dtype == np.bool, \
'The type of visible must be numpy.bool.'
assert vsble.ndim == 1, \
'The dimensionality of a visible must be one.'
assert vsble.shape[0] == pnt.shape[0], \
'The size of the first axis should be the same for ' \
'corresponding pnt and vsble.'
visible_pnt = pnt[vsble]
else:
visible_pnt = pnt
if size is not None:
assert (visible_pnt >= 0).all() and (visible_pnt <= size).all(),\
'The coordinates of visible points ' \
'should not exceed the size of image.'
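if __name__ == '__main__':
    # Minimal self-check sketch (illustrative values, not part of the original
    # module): two objects, three float32 points each, inside a (100, 100) image.
    point = [
        np.array([[10., 20.], [30., 40.], [50., 60.]], dtype=np.float32),
        np.array([[5., 5.], [95., 95.], [50., 50.]], dtype=np.float32),
    ]
    visible = [
        np.array([True, True, False]),
        np.array([True, False, True]),
    ]
    assert_is_point(point, visible=visible, size=(100, 100), n_point=3)
    print('point format OK')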
|
from homeassistant.components.kodi.const import DEFAULT_SSL
TEST_HOST = {
"host": "1.1.1.1",
"port": 8080,
"ssl": DEFAULT_SSL,
}
TEST_CREDENTIALS = {"username": "username", "password": "password"}
TEST_WS_PORT = {"ws_port": 9090}
UUID = "11111111-1111-1111-1111-111111111111"
TEST_DISCOVERY = {
"host": "1.1.1.1",
"port": 8080,
"hostname": "hostname.local.",
"type": "_xbmc-jsonrpc-h._tcp.local.",
"name": "hostname._xbmc-jsonrpc-h._tcp.local.",
"properties": {"uuid": UUID},
}
TEST_IMPORT = {
"name": "name",
"host": "1.1.1.1",
"port": 8080,
"ws_port": 9090,
"username": "username",
"password": "password",
"ssl": True,
"timeout": 7,
}
def get_kodi_connection(
host, port, ws_port, username, password, ssl=False, timeout=5, session=None
):
"""Get Kodi connection."""
if ws_port is None:
return MockConnection()
else:
return MockWSConnection()
class MockConnection:
"""A mock kodi connection."""
def __init__(self, connected=True):
"""Mock the Kodi connection."""
self._connected = connected
async def connect(self):
"""Mock connect."""
pass
@property
def connected(self):
"""Mock connected."""
return self._connected
@property
def can_subscribe(self):
"""Mock can_subscribe."""
return False
async def close(self):
"""Mock close."""
pass
@property
def server(self):
"""Mock server."""
return None
class MockWSConnection:
"""A mock kodi websocket connection."""
def __init__(self, connected=True):
"""Mock the websocket connection."""
self._connected = connected
async def connect(self):
"""Mock connect."""
pass
@property
def connected(self):
"""Mock connected."""
return self._connected
@property
def can_subscribe(self):
"""Mock can_subscribe."""
return False
async def close(self):
"""Mock close."""
pass
@property
def server(self):
"""Mock server."""
return None
|
from copy import deepcopy
from homeassistant.components.deconz import device_trigger
from homeassistant.components.deconz.const import DOMAIN as DECONZ_DOMAIN
from homeassistant.components.deconz.device_trigger import CONF_SUBTYPE
from homeassistant.components.deconz.gateway import get_gateway_from_config_entry
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.const import (
ATTR_BATTERY_LEVEL,
ATTR_ENTITY_ID,
CONF_DEVICE_ID,
CONF_DOMAIN,
CONF_PLATFORM,
CONF_TYPE,
)
from .test_gateway import DECONZ_WEB_REQUEST, setup_deconz_integration
from tests.common import assert_lists_same, async_get_device_automations
SENSORS = {
"1": {
"config": {
"alert": "none",
"battery": 60,
"group": "10",
"on": True,
"reachable": True,
},
"ep": 1,
"etag": "1b355c0b6d2af28febd7ca9165881952",
"manufacturername": "IKEA of Sweden",
"mode": 1,
"modelid": "TRADFRI on/off switch",
"name": "TRÅDFRI on/off switch ",
"state": {"buttonevent": 2002, "lastupdated": "2019-09-07T07:39:39"},
"swversion": "1.4.018",
CONF_TYPE: "ZHASwitch",
"uniqueid": "d0:cf:5e:ff:fe:71:a4:3a-01-1000",
}
}
async def test_get_triggers(hass):
"""Test triggers work."""
data = deepcopy(DECONZ_WEB_REQUEST)
data["sensors"] = deepcopy(SENSORS)
config_entry = await setup_deconz_integration(hass, get_state_response=data)
gateway = get_gateway_from_config_entry(hass, config_entry)
device_id = gateway.events[0].device_id
triggers = await async_get_device_automations(hass, "trigger", device_id)
expected_triggers = [
{
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DECONZ_DOMAIN,
CONF_PLATFORM: "device",
CONF_TYPE: device_trigger.CONF_SHORT_PRESS,
CONF_SUBTYPE: device_trigger.CONF_TURN_ON,
},
{
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DECONZ_DOMAIN,
CONF_PLATFORM: "device",
CONF_TYPE: device_trigger.CONF_LONG_PRESS,
CONF_SUBTYPE: device_trigger.CONF_TURN_ON,
},
{
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DECONZ_DOMAIN,
CONF_PLATFORM: "device",
CONF_TYPE: device_trigger.CONF_LONG_RELEASE,
CONF_SUBTYPE: device_trigger.CONF_TURN_ON,
},
{
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DECONZ_DOMAIN,
CONF_PLATFORM: "device",
CONF_TYPE: device_trigger.CONF_SHORT_PRESS,
CONF_SUBTYPE: device_trigger.CONF_TURN_OFF,
},
{
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DECONZ_DOMAIN,
CONF_PLATFORM: "device",
CONF_TYPE: device_trigger.CONF_LONG_PRESS,
CONF_SUBTYPE: device_trigger.CONF_TURN_OFF,
},
{
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DECONZ_DOMAIN,
CONF_PLATFORM: "device",
CONF_TYPE: device_trigger.CONF_LONG_RELEASE,
CONF_SUBTYPE: device_trigger.CONF_TURN_OFF,
},
{
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: SENSOR_DOMAIN,
ATTR_ENTITY_ID: "sensor.tradfri_on_off_switch_battery_level",
CONF_PLATFORM: "device",
CONF_TYPE: ATTR_BATTERY_LEVEL,
},
]
assert_lists_same(triggers, expected_triggers)
async def test_helper_successful(hass):
"""Verify trigger helper."""
data = deepcopy(DECONZ_WEB_REQUEST)
data["sensors"] = deepcopy(SENSORS)
config_entry = await setup_deconz_integration(hass, get_state_response=data)
gateway = get_gateway_from_config_entry(hass, config_entry)
device_id = gateway.events[0].device_id
deconz_event = device_trigger._get_deconz_event_from_device_id(hass, device_id)
assert deconz_event == gateway.events[0]
async def test_helper_no_match(hass):
"""Verify trigger helper returns None when no event could be matched."""
await setup_deconz_integration(hass)
deconz_event = device_trigger._get_deconz_event_from_device_id(hass, "mock-id")
assert deconz_event is None
async def test_helper_no_gateway_exist(hass):
"""Verify trigger helper returns None when no gateway exist."""
deconz_event = device_trigger._get_deconz_event_from_device_id(hass, "mock-id")
assert deconz_event is None
|
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_MOTION,
BinarySensorEntity,
)
from . import CONF_HOST, CONF_NAME, DATA_IP_WEBCAM, KEY_MAP, AndroidIPCamEntity
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the IP Webcam binary sensors."""
if discovery_info is None:
return
host = discovery_info[CONF_HOST]
name = discovery_info[CONF_NAME]
ipcam = hass.data[DATA_IP_WEBCAM][host]
async_add_entities([IPWebcamBinarySensor(name, host, ipcam, "motion_active")], True)
class IPWebcamBinarySensor(AndroidIPCamEntity, BinarySensorEntity):
"""Representation of an IP Webcam binary sensor."""
def __init__(self, name, host, ipcam, sensor):
"""Initialize the binary sensor."""
super().__init__(host, ipcam)
self._sensor = sensor
self._mapped_name = KEY_MAP.get(self._sensor, self._sensor)
self._name = f"{name} {self._mapped_name}"
self._state = None
self._unit = None
@property
def name(self):
"""Return the name of the binary sensor, if any."""
return self._name
@property
def is_on(self):
"""Return true if the binary sensor is on."""
return self._state
async def async_update(self):
"""Retrieve latest state."""
state, _ = self._ipcam.export_sensor(self._sensor)
self._state = state == 1.0
@property
def device_class(self):
"""Return the class of this device, from component DEVICE_CLASSES."""
return DEVICE_CLASS_MOTION
|
import logging
import voluptuous as vol
from zoneminder.monitor import MonitorState
from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchEntity
from homeassistant.const import CONF_COMMAND_OFF, CONF_COMMAND_ON
import homeassistant.helpers.config_validation as cv
from . import DOMAIN as ZONEMINDER_DOMAIN
_LOGGER = logging.getLogger(__name__)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_COMMAND_ON): cv.string,
vol.Required(CONF_COMMAND_OFF): cv.string,
}
)
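# Illustrative configuration.yaml entry (values assumed; command_on and
# command_off must be valid ZoneMinder monitor function names, e.g. "Modect",
# "Monitor", "Record" or "None"):
#
#     switch:
#       - platform: zoneminder
#         command_on: Modect
#         command_off: Monitor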
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the ZoneMinder switch platform."""
on_state = MonitorState(config.get(CONF_COMMAND_ON))
off_state = MonitorState(config.get(CONF_COMMAND_OFF))
switches = []
for zm_client in hass.data[ZONEMINDER_DOMAIN].values():
monitors = zm_client.get_monitors()
if not monitors:
_LOGGER.warning("Could not fetch monitors from ZoneMinder")
return
for monitor in monitors:
switches.append(ZMSwitchMonitors(monitor, on_state, off_state))
add_entities(switches)
class ZMSwitchMonitors(SwitchEntity):
"""Representation of a ZoneMinder switch."""
icon = "mdi:record-rec"
def __init__(self, monitor, on_state, off_state):
"""Initialize the switch."""
self._monitor = monitor
self._on_state = on_state
self._off_state = off_state
self._state = None
@property
def name(self):
"""Return the name of the switch."""
return f"{self._monitor.name} State"
def update(self):
"""Update the switch value."""
self._state = self._monitor.function == self._on_state
@property
def is_on(self):
"""Return True if entity is on."""
return self._state
def turn_on(self, **kwargs):
"""Turn the entity on."""
self._monitor.function = self._on_state
def turn_off(self, **kwargs):
"""Turn the entity off."""
self._monitor.function = self._off_state
|
from pymochad import device
import voluptuous as vol
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
PLATFORM_SCHEMA,
SUPPORT_BRIGHTNESS,
LightEntity,
)
from homeassistant.const import CONF_ADDRESS, CONF_DEVICES, CONF_NAME, CONF_PLATFORM
from homeassistant.helpers import config_validation as cv
from . import CONF_COMM_TYPE, DOMAIN, REQ_LOCK
CONF_BRIGHTNESS_LEVELS = "brightness_levels"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_PLATFORM): DOMAIN,
CONF_DEVICES: [
{
vol.Optional(CONF_NAME): cv.string,
vol.Required(CONF_ADDRESS): cv.x10_address,
vol.Optional(CONF_COMM_TYPE): cv.string,
vol.Optional(CONF_BRIGHTNESS_LEVELS, default=32): vol.All(
vol.Coerce(int), vol.In([32, 64, 256])
),
}
],
}
)
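# Illustrative configuration.yaml entry (addresses and names are assumed):
#
#     light:
#       - platform: mochad
#         devices:
#           - address: a1
#             name: x10_lamp
#             brightness_levels: 64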
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up X10 dimmers over a mochad controller."""
mochad_controller = hass.data[DOMAIN]
devs = config.get(CONF_DEVICES)
add_entities([MochadLight(hass, mochad_controller.ctrl, dev) for dev in devs])
return True
class MochadLight(LightEntity):
"""Representation of a X10 dimmer over Mochad."""
def __init__(self, hass, ctrl, dev):
"""Initialize a Mochad Light Device."""
self._controller = ctrl
self._address = dev[CONF_ADDRESS]
self._name = dev.get(CONF_NAME, f"x10_light_dev_{self._address}")
self._comm_type = dev.get(CONF_COMM_TYPE, "pl")
self.light = device.Device(ctrl, self._address, comm_type=self._comm_type)
self._brightness = 0
self._state = self._get_device_status()
self._brightness_levels = dev.get(CONF_BRIGHTNESS_LEVELS) - 1
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
return self._brightness
def _get_device_status(self):
"""Get the status of the light from mochad."""
with REQ_LOCK:
status = self.light.get_status().rstrip()
return status == "on"
@property
def name(self):
"""Return the display name of this light."""
return self._name
@property
def is_on(self):
"""Return true if the light is on."""
return self._state
@property
def supported_features(self):
"""Return supported features."""
return SUPPORT_BRIGHTNESS
@property
def assumed_state(self):
"""X10 devices are normally 1-way so we have to assume the state."""
return True
def _calculate_brightness_value(self, value):
return int(value * (float(self._brightness_levels) / 255.0))
def _adjust_brightness(self, brightness):
if self._brightness > brightness:
bdelta = self._brightness - brightness
mochad_brightness = self._calculate_brightness_value(bdelta)
self.light.send_cmd(f"dim {mochad_brightness}")
self._controller.read_data()
elif self._brightness < brightness:
bdelta = brightness - self._brightness
mochad_brightness = self._calculate_brightness_value(bdelta)
self.light.send_cmd(f"bright {mochad_brightness}")
self._controller.read_data()
def turn_on(self, **kwargs):
"""Send the command to turn the light on."""
brightness = kwargs.get(ATTR_BRIGHTNESS, 255)
with REQ_LOCK:
if self._brightness_levels > 32:
out_brightness = self._calculate_brightness_value(brightness)
self.light.send_cmd(f"xdim {out_brightness}")
self._controller.read_data()
else:
self.light.send_cmd("on")
self._controller.read_data()
                # There is no persistence for X10 modules, so a fresh on command
                # comes back at full brightness
if self._brightness == 0:
self._brightness = 255
self._adjust_brightness(brightness)
self._brightness = brightness
self._state = True
def turn_off(self, **kwargs):
"""Send the command to turn the light on."""
with REQ_LOCK:
self.light.send_cmd("off")
self._controller.read_data()
            # There is no persistence for X10 modules, so prepare to track that
            # a fresh on command will come back at full brightness
if self._brightness_levels == 31:
self._brightness = 0
self._state = False
|
import logging
from aiohttp import web
from homeassistant.const import URL_API
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_send
from .const import (
CONF_CLOUDHOOK_URL,
CONF_WEBHOOK_ID,
DOMAIN,
KEY_EXTERNAL_ID,
KEY_TYPE,
SIGNAL_RACHIO_CONTROLLER_UPDATE,
SIGNAL_RACHIO_RAIN_DELAY_UPDATE,
SIGNAL_RACHIO_RAIN_SENSOR_UPDATE,
SIGNAL_RACHIO_SCHEDULE_UPDATE,
SIGNAL_RACHIO_ZONE_UPDATE,
)
# Device webhook values
TYPE_CONTROLLER_STATUS = "DEVICE_STATUS"
SUBTYPE_OFFLINE = "OFFLINE"
SUBTYPE_ONLINE = "ONLINE"
SUBTYPE_OFFLINE_NOTIFICATION = "OFFLINE_NOTIFICATION"
SUBTYPE_COLD_REBOOT = "COLD_REBOOT"
SUBTYPE_SLEEP_MODE_ON = "SLEEP_MODE_ON"
SUBTYPE_SLEEP_MODE_OFF = "SLEEP_MODE_OFF"
SUBTYPE_BROWNOUT_VALVE = "BROWNOUT_VALVE"
# Rain delay values
TYPE_RAIN_DELAY_STATUS = "RAIN_DELAY"
SUBTYPE_RAIN_DELAY_ON = "RAIN_DELAY_ON"
SUBTYPE_RAIN_DELAY_OFF = "RAIN_DELAY_OFF"
# Rain sensor values
TYPE_RAIN_SENSOR_STATUS = "RAIN_SENSOR_DETECTION"
SUBTYPE_RAIN_SENSOR_DETECTION_ON = "RAIN_SENSOR_DETECTION_ON"
SUBTYPE_RAIN_SENSOR_DETECTION_OFF = "RAIN_SENSOR_DETECTION_OFF"
# Schedule webhook values
TYPE_SCHEDULE_STATUS = "SCHEDULE_STATUS"
SUBTYPE_SCHEDULE_STARTED = "SCHEDULE_STARTED"
SUBTYPE_SCHEDULE_STOPPED = "SCHEDULE_STOPPED"
SUBTYPE_SCHEDULE_COMPLETED = "SCHEDULE_COMPLETED"
SUBTYPE_WEATHER_NO_SKIP = "WEATHER_INTELLIGENCE_NO_SKIP"
SUBTYPE_WEATHER_SKIP = "WEATHER_INTELLIGENCE_SKIP"
SUBTYPE_WEATHER_CLIMATE_SKIP = "WEATHER_INTELLIGENCE_CLIMATE_SKIP"
SUBTYPE_WEATHER_FREEZE = "WEATHER_INTELLIGENCE_FREEZE"
# Zone webhook values
TYPE_ZONE_STATUS = "ZONE_STATUS"
SUBTYPE_ZONE_STARTED = "ZONE_STARTED"
SUBTYPE_ZONE_STOPPED = "ZONE_STOPPED"
SUBTYPE_ZONE_COMPLETED = "ZONE_COMPLETED"
SUBTYPE_ZONE_CYCLING = "ZONE_CYCLING"
SUBTYPE_ZONE_CYCLING_COMPLETED = "ZONE_CYCLING_COMPLETED"
SUBTYPE_ZONE_PAUSED = "ZONE_PAUSED"
# Webhook callbacks
LISTEN_EVENT_TYPES = [
"DEVICE_STATUS_EVENT",
"ZONE_STATUS_EVENT",
"RAIN_DELAY_EVENT",
"RAIN_SENSOR_DETECTION_EVENT",
"SCHEDULE_STATUS_EVENT",
]
WEBHOOK_CONST_ID = "homeassistant.rachio:"
WEBHOOK_PATH = URL_API + DOMAIN
SIGNAL_MAP = {
TYPE_CONTROLLER_STATUS: SIGNAL_RACHIO_CONTROLLER_UPDATE,
TYPE_RAIN_DELAY_STATUS: SIGNAL_RACHIO_RAIN_DELAY_UPDATE,
TYPE_RAIN_SENSOR_STATUS: SIGNAL_RACHIO_RAIN_SENSOR_UPDATE,
TYPE_SCHEDULE_STATUS: SIGNAL_RACHIO_SCHEDULE_UPDATE,
TYPE_ZONE_STATUS: SIGNAL_RACHIO_ZONE_UPDATE,
}
_LOGGER = logging.getLogger(__name__)
@callback
def async_register_webhook(hass, webhook_id, entry_id):
"""Register a webhook."""
async def _async_handle_rachio_webhook(hass, webhook_id, request):
"""Handle webhook calls from the server."""
data = await request.json()
try:
auth = data.get(KEY_EXTERNAL_ID, "").split(":")[1]
assert auth == hass.data[DOMAIN][entry_id].rachio.webhook_auth
except (AssertionError, IndexError):
return web.Response(status=web.HTTPForbidden.status_code)
update_type = data[KEY_TYPE]
if update_type in SIGNAL_MAP:
async_dispatcher_send(hass, SIGNAL_MAP[update_type], data)
return web.Response(status=web.HTTPNoContent.status_code)
hass.components.webhook.async_register(
DOMAIN, "Rachio", webhook_id, _async_handle_rachio_webhook
)
async def async_get_or_create_registered_webhook_id_and_url(hass, entry):
"""Generate webhook ID."""
config = entry.data.copy()
updated_config = False
webhook_url = None
webhook_id = config.get(CONF_WEBHOOK_ID)
if not webhook_id:
webhook_id = hass.components.webhook.async_generate_id()
config[CONF_WEBHOOK_ID] = webhook_id
updated_config = True
if hass.components.cloud.async_active_subscription():
cloudhook_url = config.get(CONF_CLOUDHOOK_URL)
if not cloudhook_url:
cloudhook_url = await hass.components.cloud.async_create_cloudhook(
webhook_id
)
config[CONF_CLOUDHOOK_URL] = cloudhook_url
updated_config = True
webhook_url = cloudhook_url
if not webhook_url:
webhook_url = hass.components.webhook.async_generate_url(webhook_id)
if updated_config:
hass.config_entries.async_update_entry(entry, data=config)
return webhook_id, webhook_url
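# Illustrative wiring sketch (assumed; the calling code lives elsewhere in the
# integration, e.g. its __init__.py, and is not part of this module):
#
#     webhook_id, webhook_url = await async_get_or_create_registered_webhook_id_and_url(
#         hass, entry)
#     async_register_webhook(hass, webhook_id, entry.entry_id)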
|
import os
import os.path
import html
import functools
import collections
from typing import MutableMapping
from PyQt5.QtCore import pyqtSignal, QUrl, QObject
from qutebrowser.utils import (message, usertypes, qtutils, urlutils,
standarddir, objreg, log)
from qutebrowser.api import cmdutils
from qutebrowser.misc import lineparser
class Error(Exception):
"""Base class for all errors in this module."""
class InvalidUrlError(Error):
"""Exception emitted when a URL is invalid."""
class DoesNotExistError(Error):
"""Exception emitted when a given URL does not exist."""
class AlreadyExistsError(Error):
"""Exception emitted when a given URL does already exist."""
class UrlMarkManager(QObject):
"""Base class for BookmarkManager and QuickmarkManager.
Attributes:
marks: An OrderedDict of all quickmarks/bookmarks.
_lineparser: The LineParser used for the marks
Signals:
changed: Emitted when anything changed.
"""
changed = pyqtSignal()
def __init__(self, parent=None):
"""Initialize and read quickmarks."""
super().__init__(parent)
self.marks: MutableMapping[str, str] = collections.OrderedDict()
self._init_lineparser()
for line in self._lineparser:
if not line.strip() or line.startswith('#'):
# Ignore empty or whitespace-only lines and comments.
continue
self._parse_line(line)
self._init_savemanager(objreg.get('save-manager'))
def _init_lineparser(self):
raise NotImplementedError
def _parse_line(self, line):
raise NotImplementedError
def _init_savemanager(self, _save_manager):
raise NotImplementedError
def save(self):
"""Save the marks to disk."""
self._lineparser.data = [' '.join(tpl) for tpl in self.marks.items()]
self._lineparser.save()
def delete(self, key):
"""Delete a quickmark/bookmark.
Args:
key: The key to delete (name for quickmarks, URL for bookmarks.)
"""
del self.marks[key]
self.changed.emit()
class QuickmarkManager(UrlMarkManager):
"""Manager for quickmarks.
    The primary key for quickmarks is their *name*; this means:
        - self.marks maps names to URLs.
        - changed gets emitted whenever a quickmark is added or deleted.
"""
def _init_lineparser(self):
self._lineparser = lineparser.LineParser(
standarddir.config(), 'quickmarks', parent=self)
def _init_savemanager(self, save_manager):
filename = os.path.join(standarddir.config(), 'quickmarks')
save_manager.add_saveable('quickmark-manager', self.save, self.changed,
filename=filename)
def _parse_line(self, line):
try:
key, url = line.rsplit(maxsplit=1)
except ValueError:
message.error("Invalid quickmark '{}'".format(line))
else:
self.marks[key] = url
def prompt_save(self, url):
"""Prompt for a new quickmark name to be added and add it.
Args:
url: The quickmark url as a QUrl.
"""
if not url.isValid():
urlutils.invalid_url_error(url, "save quickmark")
return
urlstr = url.toString(QUrl.RemovePassword | QUrl.FullyEncoded)
message.ask_async(
"Add quickmark:", usertypes.PromptMode.text,
functools.partial(self.quickmark_add, urlstr),
text="Please enter a quickmark name for<br/><b>{}</b>".format(
html.escape(url.toDisplayString())), url=urlstr)
@cmdutils.register(instance='quickmark-manager')
def quickmark_add(self, url, name):
"""Add a new quickmark.
You can view all saved quickmarks on the
link:qute://bookmarks[bookmarks page].
Args:
url: The url to add as quickmark.
name: The name for the new quickmark.
"""
# We don't raise cmdutils.CommandError here as this can be called async
# via prompt_save.
if not name:
message.error("Can't set mark with empty name!")
return
if not url:
message.error("Can't set mark with empty URL!")
return
def set_mark():
"""Really set the quickmark."""
self.marks[name] = url
self.changed.emit()
log.misc.debug("Added quickmark {} for {}".format(name, url))
if name in self.marks:
message.confirm_async(
title="Override existing quickmark?",
yes_action=set_mark, default=True, url=url)
else:
set_mark()
def get_by_qurl(self, url):
"""Look up a quickmark by QUrl, returning its name.
Takes O(n) time, where n is the number of quickmarks.
Use a name instead where possible.
"""
qtutils.ensure_valid(url)
urlstr = url.toString(QUrl.RemovePassword | QUrl.FullyEncoded)
try:
index = list(self.marks.values()).index(urlstr)
key = list(self.marks.keys())[index]
except ValueError:
raise DoesNotExistError(
"Quickmark for '{}' not found!".format(urlstr))
return key
def get(self, name):
"""Get the URL of the quickmark named name as a QUrl."""
if name not in self.marks:
raise DoesNotExistError(
"Quickmark '{}' does not exist!".format(name))
urlstr = self.marks[name]
try:
url = urlutils.fuzzy_url(urlstr, do_search=False)
except urlutils.InvalidUrlError as e:
raise InvalidUrlError(
"Invalid URL for quickmark {}: {}".format(name, str(e)))
return url
class BookmarkManager(UrlMarkManager):
"""Manager for bookmarks.
    The primary key for bookmarks is their *url*; this means:
        - self.marks maps URLs to titles.
        - changed gets emitted whenever a bookmark is added or deleted.
"""
def _init_lineparser(self):
bookmarks_directory = os.path.join(standarddir.config(), 'bookmarks')
os.makedirs(bookmarks_directory, exist_ok=True)
bookmarks_subdir = os.path.join('bookmarks', 'urls')
self._lineparser = lineparser.LineParser(
standarddir.config(), bookmarks_subdir, parent=self)
def _init_savemanager(self, save_manager):
filename = os.path.join(standarddir.config(), 'bookmarks', 'urls')
save_manager.add_saveable('bookmark-manager', self.save, self.changed,
filename=filename)
def _parse_line(self, line):
parts = line.split(maxsplit=1)
if len(parts) == 2:
self.marks[parts[0]] = parts[1]
elif len(parts) == 1:
self.marks[parts[0]] = ''
def add(self, url, title, *, toggle=False):
"""Add a new bookmark.
Args:
url: The url to add as bookmark.
title: The title for the new bookmark.
toggle: remove the bookmark instead of raising an error if it
already exists.
Return:
True if the bookmark was added, and False if it was
removed (only possible if toggle is True).
"""
if not url.isValid():
errstr = urlutils.get_errstring(url)
raise InvalidUrlError(errstr)
urlstr = url.toString(QUrl.RemovePassword | QUrl.FullyEncoded)
if urlstr in self.marks:
if toggle:
self.delete(urlstr)
return False
else:
raise AlreadyExistsError("Bookmark already exists!")
else:
self.marks[urlstr] = title
self.changed.emit()
return True
|
import argparse
import base64
import logging
import uuid
from pymongo import MongoClient
from arctic.arctic import Arctic
from .utils import do_db_auth
from ..hooks import get_mongodb_uri
logger = logging.getLogger(__name__)
def main():
usage = """arctic_create_user --host research [--db mongoose_user] [--write] user
Creates the user's personal Arctic mongo database
Or add a user to an existing Mongo Database.
"""
parser = argparse.ArgumentParser(usage=usage)
parser.add_argument("--host", default='localhost', help="Hostname, or clustername. Default: localhost")
parser.add_argument("--db", default=None, help="Database to add user on. Default: mongoose_<user>")
parser.add_argument("--password", default=None, help="Password. Default: random")
parser.add_argument("--write", action='store_true', default=False, help="Used for granting write access to someone else's DB")
parser.add_argument("users", nargs='+', help="Users to add.")
args = parser.parse_args()
c = MongoClient(get_mongodb_uri(args.host))
if not do_db_auth(args.host, c, args.db if args.db else 'admin'):
logger.error("Failed to authenticate to '%s'. Check your admin password!" % (args.host))
return
for user in args.users:
write_access = args.write
p = args.password
if p is None:
p = base64.b64encode(uuid.uuid4().bytes).replace(b'/', b'')[:12]
db = args.db
if not db:
# Users always have write access to their database
write_access = True
db = Arctic.DB_PREFIX + '_' + user
# Add the user to the database
c[db].add_user(user, p, read_only=not write_access)
logger.info("Granted: {user} [{permission}] to {db}".format(user=user,
permission='WRITE' if write_access else 'READ',
db=db))
logger.info("User creds: {db}/{user}/{password}".format(user=user,
db=db,
password=p,
))
if __name__ == '__main__':
main()
|
from pymyq.errors import InvalidCredentialsError, MyQError
from homeassistant import config_entries, setup
from homeassistant.components.myq.const import DOMAIN
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from tests.async_mock import patch
from tests.common import MockConfigEntry
async def test_form_user(hass):
"""Test we get the user form."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
with patch(
"homeassistant.components.myq.config_flow.pymyq.login",
return_value=True,
), patch(
"homeassistant.components.myq.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.myq.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"username": "test-username", "password": "test-password"},
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == "test-username"
assert result2["data"] == {
"username": "test-username",
"password": "test-password",
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_import(hass):
"""Test we can import."""
await setup.async_setup_component(hass, "persistent_notification", {})
with patch(
"homeassistant.components.myq.config_flow.pymyq.login",
return_value=True,
), patch(
"homeassistant.components.myq.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.myq.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={"username": "test-username", "password": "test-password"},
)
await hass.async_block_till_done()
assert result["type"] == "create_entry"
assert result["title"] == "test-username"
assert result["data"] == {
"username": "test-username",
"password": "test-password",
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_invalid_auth(hass):
"""Test we handle invalid auth."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"homeassistant.components.myq.config_flow.pymyq.login",
side_effect=InvalidCredentialsError,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"username": "test-username", "password": "test-password"},
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "invalid_auth"}
async def test_form_cannot_connect(hass):
"""Test we handle cannot connect error."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"homeassistant.components.myq.config_flow.pymyq.login",
side_effect=MyQError,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"username": "test-username", "password": "test-password"},
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "cannot_connect"}
async def test_form_homekit(hass):
"""Test that we abort from homekit if myq is already setup."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": "homekit"},
data={"properties": {"id": "AA:BB:CC:DD:EE:FF"}},
)
assert result["type"] == "form"
assert result["errors"] == {}
flow = next(
flow
for flow in hass.config_entries.flow.async_progress()
if flow["flow_id"] == result["flow_id"]
)
assert flow["context"]["unique_id"] == "AA:BB:CC:DD:EE:FF"
entry = MockConfigEntry(
domain=DOMAIN, data={CONF_USERNAME: "mock", CONF_PASSWORD: "mock"}
)
entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": "homekit"},
data={"properties": {"id": "AA:BB:CC:DD:EE:FF"}},
)
assert result["type"] == "abort"
|
from __future__ import absolute_import, division
from math import sqrt
import numpy as np
from filterpy.kalman import pretty_str
class LeastSquaresFilter(object):
"""Implements a Least Squares recursive filter. Formulation is per
Zarchan [1]_.
Filter may be of order 0 to 2. Order 0 assumes the value being tracked is
a constant, order 1 assumes that it moves in a line, and order 2 assumes
that it is tracking a second order polynomial.
Parameters
----------
dt : float
time step per update
order : int
order of filter 0..2
noise_sigma : float
        sigma (standard deviation) in x. This allows us to calculate the error
        of the filter; it does not influence the filter output.
Attributes
----------
n : int
step in the recursion. 0 prior to first call, 1 after the first call,
etc.
K : np.array
        Gains for the filter. K[0] is used for all orders, K[1] for orders 1
        and 2, and K[2] only for order 2.
x: np.array (order + 1, 1)
estimate(s) of the output. It is a vector containing the estimate x
and the derivatives of x: [x x' x''].T. It contains as many
derivatives as the order allows. That is, a zero order filter has
no derivatives, a first order has one derivative, and a second order
has two.
y : float
        residual (difference between the measurement and the projection of the
        previous estimate to the current time).
Examples
--------
.. code-block:: Python
from filterpy.leastsq import LeastSquaresFilter
lsq = LeastSquaresFilter(dt=0.1, order=1, noise_sigma=2.3)
while True:
z = sensor_reading() # get a measurement
x = lsq.update(z) # get the filtered estimate.
print('error: {}, velocity error: {}'.format(
lsq.error, lsq.derror))
References
----------
.. [1] Zarchan and Musoff. "Fundamentals of Kalman Filtering: A Practical
Approach." Third Edition. AIAA, 2009.
"""
def __init__(self, dt, order, noise_sigma=0.):
if order < 0 or order > 2:
raise ValueError('order must be between 0 and 2')
self.dt = dt
self.sigma = noise_sigma
self._order = order
self.reset()
def reset(self):
""" reset filter back to state at time of construction"""
self.n = 0 # nth step in the recursion
self.x = np.zeros(self._order + 1)
self.K = np.zeros(self._order + 1)
self.y = 0 # residual
def update(self, z):
""" Update filter with new measurement `z`
Returns
-------
x : np.array
estimate for this time step (same as self.x)
"""
self.n += 1
# rename for readability
n = self.n
dt = self.dt
x = self.x
K = self.K
y = self.y
if self._order == 0:
K[0] = 1. / n
            y = z - x[0]
x[0] += K[0] * y
elif self._order == 1:
K[0] = 2. * (2*n - 1) / (n*(n + 1))
K[1] = 6. / (n*(n + 1)*dt)
y = z - x[0] - (dt * x[1])
x[0] += (K[0] * y) + (dt * x[1])
x[1] += (K[1] * y)
else:
den = n * (n+1) * (n+2)
K[0] = 3. * (3*n**2 - 3*n + 2) / den
K[1] = 18. * (2*n-1) / (den*dt)
K[2] = 60. / (den*dt**2)
y = z - x[0] - (dt * x[1]) - (0.5 * dt**2 * x[2])
x[0] += (K[0] * y) + (x[1] * dt) + (.5 * dt**2 * x[2])
x[1] += (K[1] * y) + (x[2] * dt)
x[2] += (K[2] * y)
        self.y = y  # store the residual so the attribute documented above stays current
        return self.x
def errors(self):
"""
Computes and returns the error and standard deviation of the
filter at this time step.
Returns
-------
error : np.array size 1xorder+1
std : np.array size 1xorder+1
"""
n = self.n
dt = self.dt
order = self._order
sigma = self.sigma
error = np.zeros(order + 1)
std = np.zeros(order + 1)
if n == 0:
return (error, std)
if order == 0:
error[0] = sigma/sqrt(n)
std[0] = sigma/sqrt(n)
elif order == 1:
if n > 1:
error[0] = sigma * sqrt(2*(2*n-1) / (n*(n+1)))
error[1] = sigma * sqrt(12. / (n*(n*n-1)*dt*dt))
std[0] = sigma * sqrt((2*(2*n-1)) / (n*(n+1)))
std[1] = (sigma/dt) * sqrt(12. / (n*(n*n-1)))
elif order == 2:
dt2 = dt * dt
if n >= 3:
error[0] = sigma * sqrt(3*(3*n*n-3*n+2) / (n*(n+1)*(n+2)))
error[1] = sigma * sqrt(12*(16*n*n-30*n+11) /
(n*(n*n-1)*(n*n-4)*dt2))
error[2] = sigma * sqrt(720/(n*(n*n-1)*(n*n-4)*dt2*dt2))
std[0] = sigma * sqrt((3*(3*n*n - 3*n + 2)) / (n*(n+1)*(n+2)))
std[1] = (sigma/dt) * sqrt((12*(16*n*n - 30*n + 11)) /
(n*(n*n - 1)*(n*n - 4)))
std[2] = (sigma/dt2) * sqrt(720 / (n*(n*n-1)*(n*n-4)))
return error, std
def __repr__(self):
return '\n'.join([
'LeastSquaresFilter object',
pretty_str('dt', self.dt),
pretty_str('sigma', self.sigma),
pretty_str('_order', self._order),
pretty_str('x', self.x),
pretty_str('K', self.K)
])
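if __name__ == '__main__':
    # Minimal usage sketch (illustrative, not part of the original module):
    # track a noisy constant with a zero-order filter and inspect the
    # predicted estimation error.
    np.random.seed(0)
    lsq = LeastSquaresFilter(dt=1., order=0, noise_sigma=2.)
    x = None
    for _ in range(200):
        z = 10. + np.random.randn() * 2.
        x = lsq.update(z)
    error, std = lsq.errors()
    print('estimate: {:.3f}, predicted std: {:.3f}'.format(x[0], std[0]))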
|
from . import async_setup_auth
from tests.async_mock import patch
from tests.common import CLIENT_ID, CLIENT_REDIRECT_URI
async def test_fetch_auth_providers(hass, aiohttp_client):
"""Test fetching auth providers."""
client = await async_setup_auth(hass, aiohttp_client)
resp = await client.get("/auth/providers")
assert resp.status == 200
assert await resp.json() == [
{"name": "Example", "type": "insecure_example", "id": None}
]
async def test_fetch_auth_providers_onboarding(hass, aiohttp_client):
"""Test fetching auth providers."""
client = await async_setup_auth(hass, aiohttp_client)
with patch(
"homeassistant.components.onboarding.async_is_user_onboarded",
return_value=False,
):
resp = await client.get("/auth/providers")
assert resp.status == 400
assert await resp.json() == {
"message": "Onboarding not finished",
"code": "onboarding_required",
}
async def test_cannot_get_flows_in_progress(hass, aiohttp_client):
"""Test we cannot get flows in progress."""
client = await async_setup_auth(hass, aiohttp_client, [])
resp = await client.get("/auth/login_flow")
assert resp.status == 405
async def test_invalid_username_password(hass, aiohttp_client):
"""Test we cannot get flows in progress."""
client = await async_setup_auth(hass, aiohttp_client)
resp = await client.post(
"/auth/login_flow",
json={
"client_id": CLIENT_ID,
"handler": ["insecure_example", None],
"redirect_uri": CLIENT_REDIRECT_URI,
},
)
assert resp.status == 200
step = await resp.json()
# Incorrect username
resp = await client.post(
f"/auth/login_flow/{step['flow_id']}",
json={
"client_id": CLIENT_ID,
"username": "wrong-user",
"password": "test-pass",
},
)
assert resp.status == 200
step = await resp.json()
assert step["step_id"] == "init"
assert step["errors"]["base"] == "invalid_auth"
# Incorrect password
resp = await client.post(
f"/auth/login_flow/{step['flow_id']}",
json={
"client_id": CLIENT_ID,
"username": "test-user",
"password": "wrong-pass",
},
)
assert resp.status == 200
step = await resp.json()
assert step["step_id"] == "init"
assert step["errors"]["base"] == "invalid_auth"
async def test_login_exist_user(hass, aiohttp_client):
"""Test logging in with exist user."""
client = await async_setup_auth(hass, aiohttp_client, setup_api=True)
cred = await hass.auth.auth_providers[0].async_get_or_create_credentials(
{"username": "test-user"}
)
await hass.auth.async_get_or_create_user(cred)
resp = await client.post(
"/auth/login_flow",
json={
"client_id": CLIENT_ID,
"handler": ["insecure_example", None],
"redirect_uri": CLIENT_REDIRECT_URI,
},
)
assert resp.status == 200
step = await resp.json()
resp = await client.post(
f"/auth/login_flow/{step['flow_id']}",
json={"client_id": CLIENT_ID, "username": "test-user", "password": "test-pass"},
)
assert resp.status == 200
step = await resp.json()
assert step["type"] == "create_entry"
assert len(step["result"]) > 1
|
import contextlib
import os
import time
from itertools import chain
from tempfile import TemporaryDirectory
from radicale import pathutils, storage
from radicale.storage.multifilesystem.cache import CollectionCacheMixin
from radicale.storage.multifilesystem.create_collection import \
StorageCreateCollectionMixin
from radicale.storage.multifilesystem.delete import CollectionDeleteMixin
from radicale.storage.multifilesystem.discover import StorageDiscoverMixin
from radicale.storage.multifilesystem.get import CollectionGetMixin
from radicale.storage.multifilesystem.history import CollectionHistoryMixin
from radicale.storage.multifilesystem.lock import (CollectionLockMixin,
StorageLockMixin)
from radicale.storage.multifilesystem.meta import CollectionMetaMixin
from radicale.storage.multifilesystem.move import StorageMoveMixin
from radicale.storage.multifilesystem.sync import CollectionSyncMixin
from radicale.storage.multifilesystem.upload import CollectionUploadMixin
from radicale.storage.multifilesystem.verify import StorageVerifyMixin
class Collection(
CollectionCacheMixin, CollectionDeleteMixin, CollectionGetMixin,
CollectionHistoryMixin, CollectionLockMixin, CollectionMetaMixin,
CollectionSyncMixin, CollectionUploadMixin, storage.BaseCollection):
def __init__(self, storage_, path, filesystem_path=None):
self._storage = storage_
folder = self._storage._get_collection_root_folder()
# Path should already be sanitized
self._path = pathutils.strip_path(path)
self._encoding = self._storage.configuration.get("encoding", "stock")
if filesystem_path is None:
filesystem_path = pathutils.path_to_filesystem(folder, self.path)
self._filesystem_path = filesystem_path
self._etag_cache = None
super().__init__()
@property
def path(self):
return self._path
@contextlib.contextmanager
def _atomic_write(self, path, mode="w", newline=None):
parent_dir, name = os.path.split(path)
# Do not use mkstemp because it creates with permissions 0o600
with TemporaryDirectory(
prefix=".Radicale.tmp-", dir=parent_dir) as tmp_dir:
with open(os.path.join(tmp_dir, name), mode, newline=newline,
encoding=None if "b" in mode else self._encoding) as tmp:
yield tmp
tmp.flush()
self._storage._fsync(tmp)
os.replace(os.path.join(tmp_dir, name), path)
self._storage._sync_directory(parent_dir)
@property
def last_modified(self):
relevant_files = chain(
(self._filesystem_path,),
(self._props_path,) if os.path.exists(self._props_path) else (),
(os.path.join(self._filesystem_path, h) for h in self._list()))
last = max(map(os.path.getmtime, relevant_files))
return time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(last))
@property
def etag(self):
# reuse cached value if the storage is read-only
if self._storage._lock.locked == "w" or self._etag_cache is None:
self._etag_cache = super().etag
return self._etag_cache
class Storage(
StorageCreateCollectionMixin, StorageDiscoverMixin, StorageLockMixin,
StorageMoveMixin, StorageVerifyMixin, storage.BaseStorage):
_collection_class = Collection
def __init__(self, configuration):
super().__init__(configuration)
folder = configuration.get("storage", "filesystem_folder")
self._makedirs_synced(folder)
def _get_collection_root_folder(self):
filesystem_folder = self.configuration.get(
"storage", "filesystem_folder")
return os.path.join(filesystem_folder, "collection-root")
def _fsync(self, f):
if self.configuration.get("storage", "_filesystem_fsync"):
try:
pathutils.fsync(f.fileno())
except OSError as e:
raise RuntimeError("Fsync'ing file %r failed: %s" %
(f.name, e)) from e
def _sync_directory(self, path):
"""Sync directory to disk.
This only works on POSIX and does nothing on other systems.
"""
if not self.configuration.get("storage", "_filesystem_fsync"):
return
if os.name == "posix":
try:
fd = os.open(path, 0)
try:
pathutils.fsync(fd)
finally:
os.close(fd)
except OSError as e:
raise RuntimeError("Fsync'ing directory %r failed: %s" %
(path, e)) from e
def _makedirs_synced(self, filesystem_path):
"""Recursively create a directory and its parents in a sync'ed way.
This method acts silently when the folder already exists.
"""
if os.path.isdir(filesystem_path):
return
parent_filesystem_path = os.path.dirname(filesystem_path)
# Prevent infinite loop
if filesystem_path != parent_filesystem_path:
# Create parent dirs recursively
self._makedirs_synced(parent_filesystem_path)
# Possible race!
os.makedirs(filesystem_path, exist_ok=True)
self._sync_directory(parent_filesystem_path)
|
import logging
from basicmodem.basicmodem import BasicModem as bm
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_DEVICE,
CONF_NAME,
EVENT_HOMEASSISTANT_STOP,
STATE_IDLE,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "Modem CallerID"
ICON = "mdi:phone-classic"
DEFAULT_DEVICE = "/dev/ttyACM0"
STATE_RING = "ring"
STATE_CALLERID = "callerid"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_DEVICE, default=DEFAULT_DEVICE): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up modem caller ID sensor platform."""
name = config.get(CONF_NAME)
port = config.get(CONF_DEVICE)
modem = bm(port)
if modem.state == modem.STATE_FAILED:
_LOGGER.error("Unable to initialize modem")
return
add_entities([ModemCalleridSensor(hass, name, port, modem)])
class ModemCalleridSensor(Entity):
"""Implementation of USB modem caller ID sensor."""
def __init__(self, hass, name, port, modem):
"""Initialize the sensor."""
self._attributes = {"cid_time": 0, "cid_number": "", "cid_name": ""}
self._name = name
self.port = port
self.modem = modem
self._state = STATE_IDLE
modem.registercallback(self._incomingcallcallback)
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, self._stop_modem)
def set_state(self, state):
"""Set the state."""
self._state = state
def set_attributes(self, attributes):
"""Set the state attributes."""
self._attributes = attributes
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def icon(self):
"""Return icon."""
return ICON
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def device_state_attributes(self):
"""Return the state attributes."""
return self._attributes
def _stop_modem(self, event):
"""HA is shutting down, close modem port."""
if self.modem:
self.modem.close()
self.modem = None
def _incomingcallcallback(self, newstate):
"""Handle new states."""
if newstate == self.modem.STATE_RING:
if self.state == self.modem.STATE_IDLE:
att = {
"cid_time": self.modem.get_cidtime,
"cid_number": "",
"cid_name": "",
}
self.set_attributes(att)
self._state = STATE_RING
self.schedule_update_ha_state()
elif newstate == self.modem.STATE_CALLERID:
att = {
"cid_time": self.modem.get_cidtime,
"cid_number": self.modem.get_cidnumber,
"cid_name": self.modem.get_cidname,
}
self.set_attributes(att)
self._state = STATE_CALLERID
self.schedule_update_ha_state()
elif newstate == self.modem.STATE_IDLE:
self._state = STATE_IDLE
self.schedule_update_ha_state()
|
import numpy as np
from scipy import sparse
from ..utils import _validate_type, _check_option
from ..utils.check import int_like
def combine_adjacency(*structure):
"""Create a sparse binary adjacency/neighbors matrix.
Parameters
----------
*structure : list
The adjacency along each dimension. Each entry can be:
- ndarray or sparse matrix
A square binary adjacency matrix for the given dimension.
- int
The number of elements along the given dimension. A lattice
adjacency will be generated.
Returns
-------
adjacency : scipy.sparse.coo_matrix, shape (n_features, n_features)
The adjacency matrix.
"""
structure = list(structure)
for di, dim in enumerate(structure):
name = f'structure[{di}]'
_validate_type(dim, ('int-like', np.ndarray, sparse.spmatrix), name)
if isinstance(dim, int_like):
dim = int(dim)
# Don't add the diagonal, because we explicitly remove it later:
# dim = sparse.eye(dim, format='coo')
# dim += sparse.eye(dim.shape[0], k=1, format='coo')
# dim += sparse.eye(dim.shape[0], k=-1, format='coo')
ii, jj = np.arange(0, dim - 1), np.arange(1, dim)
edges = np.vstack([np.hstack([ii, jj]), np.hstack([jj, ii])])
dim = sparse.coo_matrix(
(np.ones(edges.shape[1]), edges), (dim, dim), float)
else:
_check_option(f'{name}.ndim', dim.ndim, [2])
if dim.shape[0] != dim.shape[1]:
raise ValueError(
f'{name} must be square, got shape {dim.shape}')
if not isinstance(dim, sparse.coo_matrix):
dim = sparse.coo_matrix(dim)
else:
dim = dim.copy()
dim.data[dim.row == dim.col] = 0. # remove diagonal, will add later
dim.eliminate_zeros()
if not (dim.data == 1).all():
raise ValueError('All adjacency values must be 0 or 1')
structure[di] = dim
# list of coo
assert all(isinstance(dim, sparse.coo_matrix) for dim in structure)
shape = np.array([d.shape[0] for d in structure], int)
n_others = np.array([np.prod(np.concatenate([shape[:di], shape[di + 1:]]))
for di in range(len(structure))], int)
n_each = np.array([dim.data.size for dim in structure], int) * n_others
n_off = n_each.sum() # off-diagonal terms
n_diag = np.prod(shape)
vertices = np.arange(n_diag).reshape(shape)
edges = np.empty((2, n_off + n_diag), int)
used = np.zeros(n_off, bool)
weights = np.empty(n_off + n_diag, float) # even though just 0/1
offset = 0
for di, dim in enumerate(structure):
s_l = [slice(None)] * len(shape)
s_r = [slice(None)] * len(shape)
s_l[di] = dim.row
s_r[di] = dim.col
assert dim.row.shape == dim.col.shape == dim.data.shape
sl = slice(offset, offset + n_each[di])
edges[:, sl] = [vertices[tuple(s_l)].ravel(),
vertices[tuple(s_r)].ravel()]
weights[sl] = np.tile(dim.data, n_others[di])
offset += n_each[di]
assert not used[sl].any()
used[sl] = True
assert used.all()
# Handle the diagonal separately at the end to avoid duplicate entries
edges[:, n_off:] = vertices.ravel()
weights[n_off:] = 1.
graph = sparse.coo_matrix((weights, edges),
(vertices.size, vertices.size))
return graph
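# Illustrative usage (values assumed; shown as a comment because this module is
# meant to be imported from the package and uses relative imports):
#
#     sensor_adj = np.eye(4, k=1) + np.eye(4, k=-1)  # chain of 4 sensors
#     adj = combine_adjacency(3, sensor_adj)         # 3 time points x 4 sensors
#     adj.shape                                      # -> (12, 12)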
|
import asyncio
from homeassistant import config_entries, setup
from homeassistant.components.hlk_sw16.const import DOMAIN
from tests.async_mock import patch
class MockSW16Client:
"""Class to mock the SW16Client client."""
def __init__(self, fail):
"""Initialise client with failure modes."""
self.fail = fail
self.disconnect_callback = None
self.in_transaction = False
self.active_transaction = None
async def setup(self):
"""Mock successful setup."""
fut = asyncio.Future()
fut.set_result(True)
return fut
async def status(self):
"""Mock status based on failure mode."""
self.in_transaction = True
self.active_transaction = asyncio.Future()
if self.fail:
if self.disconnect_callback:
self.disconnect_callback()
return await self.active_transaction
else:
self.active_transaction.set_result(True)
return self.active_transaction
def stop(self):
"""Mock client stop."""
self.in_transaction = False
self.active_transaction = None
async def create_mock_hlk_sw16_connection(fail):
"""Create a mock HLK-SW16 client."""
client = MockSW16Client(fail)
await client.setup()
return client
async def test_form(hass):
"""Test we get the form."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
conf = {
"host": "127.0.0.1",
"port": 8080,
}
mock_hlk_sw16_connection = await create_mock_hlk_sw16_connection(False)
with patch(
"homeassistant.components.hlk_sw16.config_flow.create_hlk_sw16_connection",
return_value=mock_hlk_sw16_connection,
), patch(
"homeassistant.components.hlk_sw16.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.hlk_sw16.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
conf,
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == "127.0.0.1:8080"
assert result2["data"] == {
"host": "127.0.0.1",
"port": 8080,
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
mock_hlk_sw16_connection = await create_mock_hlk_sw16_connection(False)
with patch(
"homeassistant.components.hlk_sw16.config_flow.create_hlk_sw16_connection",
return_value=mock_hlk_sw16_connection,
):
result3 = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result3["type"] == "form"
assert result3["errors"] == {}
result4 = await hass.config_entries.flow.async_configure(
result3["flow_id"],
conf,
)
assert result4["type"] == "form"
assert result4["errors"] == {"base": "already_configured"}
await hass.async_block_till_done()
async def test_import(hass):
"""Test we get the form."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_IMPORT}
)
assert result["type"] == "form"
assert result["errors"] == {}
conf = {
"host": "127.0.0.1",
"port": 8080,
}
mock_hlk_sw16_connection = await create_mock_hlk_sw16_connection(False)
with patch(
"homeassistant.components.hlk_sw16.config_flow.connect_client",
return_value=mock_hlk_sw16_connection,
), patch(
"homeassistant.components.hlk_sw16.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.hlk_sw16.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
conf,
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == "127.0.0.1:8080"
assert result2["data"] == {
"host": "127.0.0.1",
"port": 8080,
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_invalid_data(hass):
"""Test we handle invalid auth."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
mock_hlk_sw16_connection = await create_mock_hlk_sw16_connection(True)
conf = {
"host": "127.0.0.1",
"port": 8080,
}
with patch(
"homeassistant.components.hlk_sw16.config_flow.connect_client",
return_value=mock_hlk_sw16_connection,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
conf,
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "cannot_connect"}
async def test_form_cannot_connect(hass):
"""Test we handle cannot connect error."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
conf = {
"host": "127.0.0.1",
"port": 8080,
}
with patch(
"homeassistant.components.hlk_sw16.config_flow.connect_client",
side_effect=asyncio.TimeoutError,
return_value=None,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
conf,
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "cannot_connect"}
|
from django.db import migrations, models
can_use_postgres_fields = False
chapters_field = models.Field()  # Dummy field
data_field = models.Field()  # Dummy field so the module still imports when postgres fields are unavailable
try:
from django.contrib.postgres.fields import ArrayField, JSONField
chapters_field = ArrayField(base_field=models.CharField(max_length=100), default=list, size=None)
data_field = JSONField(null=True)
can_use_postgres_fields = True
except ImportError:
# We can't use ArrayField if psycopg2 is not installed
pass
class Migration(migrations.Migration):
dependencies = [
('core', '0003_withfloatfield'),
]
operations = []
pg_only_operations = [
migrations.CreateModel(
name='BookWithChapters',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100, verbose_name='Book name')),
('chapters', chapters_field),
('data', data_field)
],
),
]
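    # The Postgres-only CreateModel is kept out of ``operations`` and merged in at
    # apply() time below, so on backends without ArrayField/JSONField support this
    # migration is effectively a no-op for BookWithChapters.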
def apply(self, project_state, schema_editor, collect_sql=False):
if can_use_postgres_fields and schema_editor.connection.vendor.startswith("postgres"):
self.operations = self.operations + self.pg_only_operations
return super().apply(project_state, schema_editor, collect_sql)
|
try:
from unittest.mock import Mock, patch, MagicMock, create_autospec, sentinel, call
except ImportError:
from mock import Mock, patch, MagicMock, create_autospec, sentinel, call
from datetime import datetime as dt
import numpy as np
import pandas as pd
import pytest
from dateutil.rrule import rrule, DAILY
from mockextras import when
from pandas.util.testing import assert_frame_equal
from arctic.date import DateRange, mktz
from arctic.exceptions import OverlappingDataException
from arctic.exceptions import UnhandledDtypeException
from arctic.tickstore.tickstore import TickStore
from arctic.tickstore.toplevel import TopLevelTickStore, TickStoreLibrary
utc = mktz('UTC')
def test_raise_exception_if_daterange_is_not_provided():
store = TopLevelTickStore(Mock())
with pytest.raises(Exception) as e:
store._get_library_metadata(None)
assert "A date range must be provided" in str(e.value)
def test_raise_exception_if_date_range_does_not_contain_start_date():
store = TopLevelTickStore(Mock())
dr = DateRange(start=None, end=dt(2011, 1, 1))
with pytest.raises(Exception) as e:
store._get_library_metadata(dr)
assert "The date range {0} must contain a start and end date".format(dr) in str(e.value)
def test_raise_exception_if_date_range_does_not_contain_end_date():
store = TopLevelTickStore(Mock())
dr = DateRange(start=dt(2011, 1, 1), end=None)
with pytest.raises(Exception) as e:
store._get_library_metadata(dr)
assert "The date range {0} must contain a start and end date".format(dr) in str(e.value)
def test_raise_exception_if_date_range_does_not_contain_start_and_end_date():
store = TopLevelTickStore(Mock())
dr = DateRange(start=None, end=None)
with pytest.raises(Exception) as e:
store._get_library_metadata(dr)
assert "The date range {0} must contain a start and end date".format(dr) in str(e.value)
def test_raise_exception_and_log_an_error_if_an_invalid_library_name_is_added():
arctic_lib = MagicMock()
arctic_lib.arctic.__getitem__.side_effect = Exception()
store = TopLevelTickStore(arctic_lib)
with patch("arctic.tickstore.toplevel.logger") as mock_logger:
with pytest.raises(Exception):
store.add(None, "blah")
mock_logger.error.assert_called_once_with("Could not load library")
def test_raise_exception_if_date_range_overlaps():
self = create_autospec(TopLevelTickStore, _arctic_lib=MagicMock())
self._get_library_metadata.return_value = [TickStoreLibrary('lib1', None), ]
with pytest.raises(OverlappingDataException) as e:
TopLevelTickStore.add(self, DateRange(start=dt(2010, 1, 1), end=dt(2011, 1, 1, 23, 59, 59, 999000)), "blah")
assert "There are libraries that overlap with the date range:" in str(e.value)
@pytest.mark.parametrize(('start', 'end', 'expected_start', 'expected_end'),
[(dt(2010, 1, 1, tzinfo=mktz('UTC')), dt(2010, 12, 31, 23, 59, 59, 999000, tzinfo=mktz('UTC')),
dt(2010, 1, 1, tzinfo=mktz('UTC')), dt(2010, 12, 31, 23, 59, 59, 999000, tzinfo=mktz('UTC'))),
(dt(2010, 1, 1), dt(2010, 12, 31, 23, 59, 59, 999000), dt(2010, 1, 1, tzinfo=mktz('UTC')),
dt(2010, 12, 31, 23, 59, 59, 999000, tzinfo=mktz('UTC'))),
(dt(2009, 12, 31, 19, tzinfo=mktz('America/New_York')), dt(2010, 12, 31, 18, 59, 59, 999000, tzinfo=mktz('America/New_York')),
dt(2010, 1, 1, tzinfo=mktz('UTC')), dt(2010, 12, 31, 23, 59, 59, 999000, tzinfo=mktz('UTC')))
])
def test_add_library_to_collection_if_date_range_is_on_UTC_or_naive_day_boundaries(start, end, expected_start, expected_end):
self = create_autospec(TopLevelTickStore, _arctic_lib=MagicMock(), _collection=MagicMock())
self._get_library_metadata.return_value = []
TopLevelTickStore.add(self, DateRange(start=start, end=end), "blah")
self._collection.update_one.assert_called_once_with({'library_name': "blah"},
{'$set':
{'start': expected_start,
'end': expected_end}}, upsert=True)
@pytest.mark.parametrize(('start', 'end'),
[(dt(2010, 1, 1, 2, tzinfo=mktz('UTC')), dt(2011, 1, 1, tzinfo=mktz('UTC'))),
(dt(2010, 1, 1, tzinfo=mktz('UTC')), dt(2011, 1, 1, 2, tzinfo=mktz('UTC'))),
(dt(2010, 1, 1, 2, tzinfo=mktz('UTC')), dt(2011, 1, 1, 2, tzinfo=mktz('UTC'))),
(dt(2010, 1, 1, 2), dt(2011, 1, 1)),
(dt(2010, 1, 1), dt(2011, 1, 1, 2)),
(dt(2010, 1, 1, 2), dt(2011, 1, 1, 2)),
(dt(2009, 12, 31, 21, 10, tzinfo=mktz('America/New_York')), dt(2010, 12, 31, tzinfo=mktz('America/New_York'))),
(dt(2009, 12, 31, tzinfo=mktz('America/New_York')), dt(2010, 12, 31, tzinfo=mktz('America/New_York'))),
(dt(2009, 12, 31, 21, 10, tzinfo=mktz('America/New_York')), dt(2010, 12, 31, 9, 21, tzinfo=mktz('America/New_York')))
])
def test_raise_error_add_library_is_called_with_a_date_range_not_on_day_boundaries(start, end):
with pytest.raises(AssertionError) as e:
self = create_autospec(TopLevelTickStore, _arctic_lib=MagicMock(), _collection=MagicMock())
self._get_library_metadata.return_value = []
TopLevelTickStore.add(self, DateRange(start=start, end=end), "blah")
assert "Date range should fall on UTC day boundaries" in str(e.value)
@pytest.mark.parametrize(('start', 'end', 'expected_start_index', 'expected_end_index'),
[(dt(2010, 1, 1), dt(2010, 1, 5), 0, 3),
(dt(2010, 1, 1), dt(2010, 1, 6), 0, 3),
(dt(2010, 1, 1, 1), dt(2010, 1, 6), 1, 3),
(dt(2010, 1, 1, 1), dt(2010, 1, 4, 2), 1, 2),
(dt(2009, 1, 1), dt(2010, 1, 5), 0, 3),
])
def test_slice_pandas_dataframe(start, end, expected_start_index, expected_end_index):
top_level_tick_store = TopLevelTickStore(Mock())
dates = pd.date_range('20100101', periods=5, freq='2D')
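    # freq='2D' yields 2010-01-01, 01-03, 01-05, 01-07 and 01-09, so the parametrized
    # expected_start_index/expected_end_index are positional iloc bounds into this
    # every-other-day index.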
data = pd.DataFrame(np.random.randn(5, 4), index=dates, columns=list('ABCD'))
expected = data.iloc[expected_start_index:expected_end_index]
result = top_level_tick_store._slice(data, start, end)
    assert_frame_equal(expected, result)
@pytest.mark.parametrize(('start', 'end', 'expected_start_index', 'expected_end_index'),
[(dt(2010, 1, 1, tzinfo=utc), dt(2010, 1, 5, tzinfo=utc), 0, 3),
(dt(2010, 1, 1, tzinfo=utc), dt(2010, 1, 6, tzinfo=utc), 0, 3),
(dt(2010, 1, 1, 1, tzinfo=utc), dt(2010, 1, 6, tzinfo=utc), 1, 3),
(dt(2010, 1, 1, 1, tzinfo=utc), dt(2010, 1, 4, 2, tzinfo=utc), 1, 2),
(dt(2009, 1, 1, tzinfo=utc), dt(2010, 1, 5, tzinfo=utc), 0, 3),
])
def test_slice_list_of_dicts(start, end, expected_start_index, expected_end_index):
top_level_tick_store = TopLevelTickStore(Mock())
dates = list(rrule(DAILY, count=5, dtstart=dt(2010, 1, 1, tzinfo=utc), interval=2))
data = [{'index': date, 'A': val} for date, val in zip(dates, range(5))]
expected = data[expected_start_index:expected_end_index]
result = top_level_tick_store._slice(data, start, end)
assert expected == result
def test_write_pandas_data_to_right_libraries():
self = create_autospec(TopLevelTickStore, _arctic_lib=MagicMock(), _collection=MagicMock())
self._collection.find.return_value = [{'library_name': sentinel.libname1, 'start': sentinel.st1, 'end': sentinel.end1},
{'library_name': sentinel.libname2, 'start': sentinel.st2, 'end': sentinel.end2}]
slice1 = range(2)
slice2 = range(4)
when(self._slice).called_with(sentinel.data, sentinel.st1, sentinel.end1).then(slice1)
when(self._slice).called_with(sentinel.data, sentinel.st2, sentinel.end2).then(slice2)
mock_lib1 = Mock()
mock_lib2 = Mock()
when(self._arctic_lib.arctic.__getitem__).called_with(sentinel.libname1).then(mock_lib1)
when(self._arctic_lib.arctic.__getitem__).called_with(sentinel.libname2).then(mock_lib2)
with patch("arctic.tickstore.toplevel.to_dt") as patch_to_dt:
patch_to_dt.side_effect = [sentinel.st1, sentinel.end1, sentinel.st2, sentinel.end2]
TopLevelTickStore.write(self, 'blah', sentinel.data)
mock_lib1.write.assert_called_once_with('blah', slice1)
mock_lib2.write.assert_called_once_with('blah', slice2)
def test_read():
self = create_autospec(TopLevelTickStore)
tsl = TickStoreLibrary(create_autospec(TickStore), create_autospec(DateRange))
self._get_libraries.return_value = [tsl, tsl]
dr = create_autospec(DateRange)
with patch('pandas.concat') as concat:
res = TopLevelTickStore.read(self, sentinel.symbol, dr,
columns=sentinel.include_columns,
include_images=sentinel.include_images)
assert concat.call_args_list == [call([tsl.library.read.return_value,
tsl.library.read.return_value])]
assert res == concat.return_value
assert tsl.library.read.call_args_list == [call(sentinel.symbol, tsl.date_range.intersection.return_value,
sentinel.include_columns, include_images=sentinel.include_images),
call(sentinel.symbol, tsl.date_range.intersection.return_value,
sentinel.include_columns, include_images=sentinel.include_images)]
def test_slice_raises():
m = TopLevelTickStore(Mock())
with pytest.raises(UnhandledDtypeException) as e:
m._slice("abc", 1, 2)
assert("Can't persist type" in str(e.value))
|
from functools import partial
from itertools import chain
import json
import re
from typing import Dict
import voluptuous as vol
from voluptuous.humanize import humanize_error
import homeassistant.helpers.config_validation as cv
from homeassistant.util import slugify
from script.translations import upload
from .model import Config, Integration
UNDEFINED = 0
REQUIRED = 1
REMOVED = 2
RE_REFERENCE = r"\[\%key:(.+)\%\]"
REMOVED_TITLE_MSG = (
"config.title key has been moved out of config and into the root of strings.json. "
"Starting Home Assistant 0.109 you only need to define this key in the root "
"if the title needs to be different than the name of your integration in the "
"manifest."
)
MOVED_TRANSLATIONS_DIRECTORY_MSG = (
"The '.translations' directory has been moved, the new name is 'translations', "
"starting with Home Assistant 0.112 your translations will no longer "
"load if you do not move/rename this "
)
def check_translations_directory_name(integration: Integration) -> None:
"""Check that the correct name is used for the translations directory."""
legacy_translations = integration.path / ".translations"
translations = integration.path / "translations"
if translations.is_dir():
# No action required
return
if legacy_translations.is_dir():
integration.add_error("translations", MOVED_TRANSLATIONS_DIRECTORY_MSG)
def find_references(strings, prefix, found):
"""Find references."""
for key, value in strings.items():
if isinstance(value, dict):
find_references(value, f"{prefix}::{key}", found)
continue
match = re.match(RE_REFERENCE, value)
if match:
found.append({"source": f"{prefix}::{key}", "ref": match.groups()[0]})
def removed_title_validator(config, integration, value):
"""Mark removed title."""
if not config.specific_integrations:
raise vol.Invalid(REMOVED_TITLE_MSG)
# Don't mark it as an error yet for custom components to allow backwards compat.
integration.add_warning("translations", REMOVED_TITLE_MSG)
return value
def lowercase_validator(value):
"""Validate value is lowercase."""
if value.lower() != value:
raise vol.Invalid("Needs to be lowercase")
return value
def gen_data_entry_schema(
*,
config: Config,
integration: Integration,
flow_title: int,
require_step_title: bool,
):
"""Generate a data entry schema."""
step_title_class = vol.Required if require_step_title else vol.Optional
schema = {
vol.Optional("flow_title"): cv.string_with_no_html,
vol.Required("step"): {
str: {
step_title_class("title"): cv.string_with_no_html,
vol.Optional("description"): cv.string_with_no_html,
vol.Optional("data"): {str: cv.string_with_no_html},
}
},
vol.Optional("error"): {str: cv.string_with_no_html},
vol.Optional("abort"): {str: cv.string_with_no_html},
vol.Optional("create_entry"): {str: cv.string_with_no_html},
}
if flow_title == REQUIRED:
schema[vol.Required("title")] = cv.string_with_no_html
elif flow_title == REMOVED:
schema[vol.Optional("title", msg=REMOVED_TITLE_MSG)] = partial(
removed_title_validator, config, integration
)
return schema
def gen_strings_schema(config: Config, integration: Integration):
"""Generate a strings schema."""
return vol.Schema(
{
vol.Optional("title"): cv.string_with_no_html,
vol.Optional("config"): gen_data_entry_schema(
config=config,
integration=integration,
flow_title=REMOVED,
require_step_title=False,
),
vol.Optional("options"): gen_data_entry_schema(
config=config,
integration=integration,
flow_title=UNDEFINED,
require_step_title=False,
),
vol.Optional("device_automation"): {
vol.Optional("action_type"): {str: cv.string_with_no_html},
vol.Optional("condition_type"): {str: cv.string_with_no_html},
vol.Optional("trigger_type"): {str: cv.string_with_no_html},
vol.Optional("trigger_subtype"): {str: cv.string_with_no_html},
},
vol.Optional("state"): cv.schema_with_slug_keys(
cv.schema_with_slug_keys(str, slug_validator=lowercase_validator),
slug_validator=vol.Any("_", cv.slug),
),
}
)
def gen_auth_schema(config: Config, integration: Integration):
"""Generate auth schema."""
return vol.Schema(
{
vol.Optional("mfa_setup"): {
str: gen_data_entry_schema(
config=config,
integration=integration,
flow_title=REQUIRED,
require_step_title=True,
)
}
}
)
def gen_platform_strings_schema(config: Config, integration: Integration):
"""Generate platform strings schema like strings.sensor.json.
Example of valid data:
{
"state": {
"moon__phase": {
"full": "Full"
}
}
}
"""
def device_class_validator(value):
"""Key validator for platorm states.
Platform states are only allowed to provide states for device classes they prefix.
"""
if not value.startswith(f"{integration.domain}__"):
raise vol.Invalid(
f"Device class need to start with '{integration.domain}__'. Key {value} is invalid. See https://developers.home-assistant.io/docs/internationalization/core#stringssensorjson"
)
slug_friendly = value.replace("__", "_", 1)
slugged = slugify(slug_friendly)
if slug_friendly != slugged:
raise vol.Invalid(
f"invalid device class {value}. After domain__, needs to be all lowercase, no spaces."
)
return value
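    # For example, with integration.domain == "moon" the key "moon__phase" passes,
    # while "sensor__battery" (wrong prefix) or "moon__Phase Name" (not slug-friendly
    # after the prefix) are rejected.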
return vol.Schema(
{
vol.Optional("state"): cv.schema_with_slug_keys(
cv.schema_with_slug_keys(str, slug_validator=lowercase_validator),
slug_validator=device_class_validator,
)
}
)
ONBOARDING_SCHEMA = vol.Schema({vol.Required("area"): {str: cv.string_with_no_html}})
def validate_translation_file(config: Config, integration: Integration, all_strings):
"""Validate translation files for integration."""
if config.specific_integrations:
check_translations_directory_name(integration)
strings_files = [integration.path / "strings.json"]
# Also validate translations for custom integrations
if config.specific_integrations:
# Only English needs to be always complete
strings_files.append(integration.path / "translations/en.json")
references = []
if integration.domain == "auth":
strings_schema = gen_auth_schema(config, integration)
elif integration.domain == "onboarding":
strings_schema = ONBOARDING_SCHEMA
else:
strings_schema = gen_strings_schema(config, integration)
for strings_file in strings_files:
if not strings_file.is_file():
continue
name = str(strings_file.relative_to(integration.path))
try:
strings = json.loads(strings_file.read_text())
except ValueError as err:
integration.add_error("translations", f"Invalid JSON in {name}: {err}")
continue
try:
strings_schema(strings)
except vol.Invalid as err:
integration.add_error(
"translations", f"Invalid {name}: {humanize_error(strings, err)}"
)
else:
if strings_file.name == "strings.json":
find_references(strings, name, references)
platform_string_schema = gen_platform_strings_schema(config, integration)
platform_strings = [integration.path.glob("strings.*.json")]
if config.specific_integrations:
platform_strings.append(integration.path.glob("translations/*.en.json"))
for path in chain(*platform_strings):
name = str(path.relative_to(integration.path))
try:
strings = json.loads(path.read_text())
except ValueError as err:
integration.add_error("translations", f"Invalid JSON in {name}: {err}")
continue
try:
platform_string_schema(strings)
except vol.Invalid as err:
msg = f"Invalid {path.name}: {humanize_error(strings, err)}"
if config.specific_integrations:
integration.add_warning("translations", msg)
else:
integration.add_error("translations", msg)
else:
find_references(strings, path.name, references)
if config.specific_integrations:
return
# Validate references
for reference in references:
parts = reference["ref"].split("::")
search = all_strings
key = parts.pop(0)
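        # Walk all_strings one segment at a time: a hypothetical ref "a::b::c" is
        # resolved as all_strings["a"]["b"]["c"]; any missing segment falls through
        # to the error below.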
while parts and key in search:
search = search[key]
key = parts.pop(0)
if parts or key not in search:
integration.add_error(
"translations",
f"{reference['source']} contains invalid reference {reference['ref']}: Could not find {key}",
)
def validate(integrations: Dict[str, Integration], config: Config):
"""Handle JSON files inside integrations."""
if config.specific_integrations:
all_strings = None
else:
all_strings = upload.generate_upload_data()
for integration in integrations.values():
validate_translation_file(config, integration, all_strings)
|
from django.test import TestCase
from zinnia.preview import HTMLPreview
class HTMLPreviewTestCase(TestCase):
def test_splitters(self):
text = '<p>Hello World</p><!-- more --><p>Hello dude</p>'
preview = HTMLPreview(text, splitters=['<!--more-->'],
max_words=1000, more_string=' ...')
self.assertEqual(str(preview), text)
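        # The marker inside the text is '<!-- more -->' (with spaces), so the
        # '<!--more-->' splitter above does not match and the full text is returned;
        # adding the exact marker below triggers the split.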
preview = HTMLPreview(text, splitters=['<!--more-->',
'<!-- more -->'],
max_words=1000, more_string=' ...')
self.assertEqual(str(preview), '<p>Hello World ...</p>')
def test_truncate(self):
text = '<p>Hello World</p><p>Hello dude</p>'
preview = HTMLPreview(text, splitters=[],
max_words=2, more_string=' ...')
self.assertEqual(str(preview), '<p>Hello World ...</p>')
def test_has_more(self):
text = '<p>Hello World</p><p>Hello dude</p>'
preview = HTMLPreview(text, splitters=[],
max_words=2, more_string=' ...')
self.assertEqual(preview.has_more, True)
preview = HTMLPreview(text, splitters=[],
max_words=4, more_string=' ...')
self.assertEqual(preview.has_more, False)
def test_has_more_with_long_more_text(self):
text = '<p>Hello the World</p>'
preview = HTMLPreview(text, splitters=[],
max_words=2, more_string=' .........')
self.assertEqual(str(preview), '<p>Hello the .........</p>')
self.assertEqual(preview.has_more, True)
def test_has_more_with_lead(self):
text = '<p>Hello the World</p>'
lead = '<p>Lead paragraph</p>'
preview = HTMLPreview(text, lead)
self.assertEqual(str(preview), '<p>Lead paragraph</p>')
self.assertEqual(preview.has_more, True)
preview = HTMLPreview('', lead)
self.assertEqual(str(preview), '<p>Lead paragraph</p>')
self.assertEqual(preview.has_more, False)
def test_str_non_ascii_issue_314(self):
text = '<p>тест non ascii</p>'
preview = HTMLPreview(text, splitters=[],
max_words=2, more_string=' ...')
self.assertEqual(str(preview), '<p>тест non ...</p>')
def test_metrics(self):
text = '<p>Hello World</p> <p>Hello dude</p>'
preview = HTMLPreview(text, splitters=[],
max_words=2, more_string=' ...')
self.assertEqual(preview.total_words, 4)
self.assertEqual(preview.displayed_words, 2)
self.assertEqual(preview.remaining_words, 2)
self.assertEqual(preview.displayed_percent, 50.0)
self.assertEqual(preview.remaining_percent, 50.0)
def test_metrics_with_lead(self):
text = '<p>Hello World</p> <p>Hello dude</p>'
lead = '<p>Lead paragraph</p>'
preview = HTMLPreview(text, lead, splitters=[],
max_words=2, more_string=' ...')
self.assertEqual(preview.total_words, 6)
self.assertEqual(preview.displayed_words, 2)
self.assertEqual(preview.remaining_words, 4)
self.assertEqual('%.2f' % preview.displayed_percent, '33.33')
self.assertEqual('%.2f' % preview.remaining_percent, '66.67')
def test_empty_text(self):
preview = HTMLPreview('')
self.assertEqual(str(preview), '')
self.assertEqual(preview.has_more, False)
preview = HTMLPreview('', '')
self.assertEqual(str(preview), '')
self.assertEqual(preview.has_more, False)
|
import unittest
import numpy as np
from chainer import testing
from chainercv.transforms import center_crop
class TestCenterCrop(unittest.TestCase):
def test_center_crop(self):
img = np.random.uniform(size=(3, 48, 32))
out, param = center_crop(img, (24, 16), return_param=True)
y_slice = param['y_slice']
x_slice = param['x_slice']
np.testing.assert_equal(out, img[:, y_slice, x_slice])
self.assertEqual(y_slice, slice(12, 36))
self.assertEqual(x_slice, slice(8, 24))
testing.run_module(__name__, __file__)
|
import types
import pytest
from qutebrowser.extensions import loader
from qutebrowser.misc import objects
pytestmark = pytest.mark.usefixtures('data_tmpdir', 'config_tmpdir',
'fake_args')
def test_on_walk_error():
with pytest.raises(ImportError, match='Failed to import foo'):
loader._on_walk_error('foo')
def test_walk_normal():
names = [info.name for info in loader._walk_normal()]
assert 'qutebrowser.components.scrollcommands' in names
def test_walk_pyinstaller():
# We can't test whether we get something back without being frozen by
# PyInstaller, but at least we can test that we don't crash.
list(loader._walk_pyinstaller())
def test_load_component(monkeypatch):
monkeypatch.setattr(objects, 'commands', {})
info = loader.ExtensionInfo(name='qutebrowser.components.scrollcommands')
mod = loader._load_component(info, skip_hooks=True)
assert hasattr(mod, 'scroll_to_perc')
assert 'scroll-to-perc' in objects.commands
@pytest.fixture
def module(monkeypatch, request):
mod = types.ModuleType('testmodule')
monkeypatch.setattr(loader, '_module_infos', [])
monkeypatch.setattr(loader.importlib, 'import_module',
lambda _name: mod)
mod.info = loader.add_module_info(mod)
return mod
def test_get_init_context(data_tmpdir, config_tmpdir, fake_args):
ctx = loader._get_init_context()
assert str(ctx.data_dir) == data_tmpdir
assert str(ctx.config_dir) == config_tmpdir
assert ctx.args == fake_args
def test_add_module_info():
# pylint: disable=no-member
mod = types.ModuleType('testmodule')
info1 = loader.add_module_info(mod)
assert mod.__qute_module_info is info1
info2 = loader.add_module_info(mod)
assert mod.__qute_module_info is info1
assert info2 is info1
class _Hook:
"""Hook to use in tests."""
__name__ = '_Hook'
def __init__(self):
self.called = False
self.raising = False
def __call__(self, *args):
if self.raising:
raise Exception("Should not be called!")
self.called = True
@pytest.fixture
def hook():
return _Hook()
def test_skip_hooks(hook, module):
hook.raising = True
module.info.init_hook = hook
module.info.config_changed_hooks = [(None, hook)]
info = loader.ExtensionInfo(name='testmodule')
loader._load_component(info, skip_hooks=True)
loader._on_config_changed('test')
assert not hook.called
@pytest.mark.parametrize('option_filter, option, called', [
(None, 'content.javascript.enabled', True),
('content.javascript', 'content.javascript.enabled', True),
('content.javascript.enabled', 'content.javascript.enabled', True),
('content.javascript.log', 'content.javascript.enabled', False),
])
def test_on_config_changed(configdata_init, hook, module,
option_filter, option, called):
module.info.config_changed_hooks = [(option_filter, hook)]
info = loader.ExtensionInfo(name='testmodule')
loader._load_component(info)
loader._on_config_changed(option)
assert hook.called == called
def test_init_hook(hook, module):
module.info.init_hook = hook
info = loader.ExtensionInfo(name='testmodule')
loader._load_component(info)
assert hook.called
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from nets import resnet_utils
from nets import resnet_v2
slim = tf.contrib.slim
def create_test_input(batch_size, height, width, channels):
"""Create test input tensor.
Args:
batch_size: The number of images per batch or `None` if unknown.
height: The height of each image or `None` if unknown.
width: The width of each image or `None` if unknown.
channels: The number of channels per image or `None` if unknown.
Returns:
Either a placeholder `Tensor` of dimension
[batch_size, height, width, channels] if any of the inputs are `None` or a
constant `Tensor` with the mesh grid values along the spatial dimensions.
"""
if None in [batch_size, height, width, channels]:
return tf.placeholder(tf.float32, (batch_size, height, width, channels))
else:
return tf.to_float(
np.tile(
np.reshape(
np.reshape(np.arange(height), [height, 1]) +
np.reshape(np.arange(width), [1, width]),
[1, height, width, 1]),
[batch_size, 1, 1, channels]))
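# For fully-known shapes the returned tensor holds row_index + col_index at each
# spatial position, e.g. height=2, width=3 gives [[0, 1, 2], [1, 2, 3]] tiled over
# the batch and channel dimensions, which makes subsampling results easy to check.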
class ResnetUtilsTest(tf.test.TestCase):
def testSubsampleThreeByThree(self):
x = tf.reshape(tf.to_float(tf.range(9)), [1, 3, 3, 1])
x = resnet_utils.subsample(x, 2)
expected = tf.reshape(tf.constant([0, 2, 6, 8]), [1, 2, 2, 1])
with self.test_session():
self.assertAllClose(x.eval(), expected.eval())
def testSubsampleFourByFour(self):
x = tf.reshape(tf.to_float(tf.range(16)), [1, 4, 4, 1])
x = resnet_utils.subsample(x, 2)
expected = tf.reshape(tf.constant([0, 2, 8, 10]), [1, 2, 2, 1])
with self.test_session():
self.assertAllClose(x.eval(), expected.eval())
def testConv2DSameEven(self):
n, n2 = 4, 2
# Input image.
x = create_test_input(1, n, n, 1)
# Convolution kernel.
w = create_test_input(1, 3, 3, 1)
w = tf.reshape(w, [3, 3, 1, 1])
tf.get_variable('Conv/weights', initializer=w)
tf.get_variable('Conv/biases', initializer=tf.zeros([1]))
tf.get_variable_scope().reuse_variables()
y1 = slim.conv2d(x, 1, [3, 3], stride=1, scope='Conv')
y1_expected = tf.to_float([[14, 28, 43, 26],
[28, 48, 66, 37],
[43, 66, 84, 46],
[26, 37, 46, 22]])
y1_expected = tf.reshape(y1_expected, [1, n, n, 1])
y2 = resnet_utils.subsample(y1, 2)
y2_expected = tf.to_float([[14, 43],
[43, 84]])
y2_expected = tf.reshape(y2_expected, [1, n2, n2, 1])
y3 = resnet_utils.conv2d_same(x, 1, 3, stride=2, scope='Conv')
y3_expected = y2_expected
y4 = slim.conv2d(x, 1, [3, 3], stride=2, scope='Conv')
y4_expected = tf.to_float([[48, 37],
[37, 22]])
y4_expected = tf.reshape(y4_expected, [1, n2, n2, 1])
with self.test_session() as sess:
sess.run(tf.global_variables_initializer())
self.assertAllClose(y1.eval(), y1_expected.eval())
self.assertAllClose(y2.eval(), y2_expected.eval())
self.assertAllClose(y3.eval(), y3_expected.eval())
self.assertAllClose(y4.eval(), y4_expected.eval())
def testConv2DSameOdd(self):
n, n2 = 5, 3
# Input image.
x = create_test_input(1, n, n, 1)
# Convolution kernel.
w = create_test_input(1, 3, 3, 1)
w = tf.reshape(w, [3, 3, 1, 1])
tf.get_variable('Conv/weights', initializer=w)
tf.get_variable('Conv/biases', initializer=tf.zeros([1]))
tf.get_variable_scope().reuse_variables()
y1 = slim.conv2d(x, 1, [3, 3], stride=1, scope='Conv')
y1_expected = tf.to_float([[14, 28, 43, 58, 34],
[28, 48, 66, 84, 46],
[43, 66, 84, 102, 55],
[58, 84, 102, 120, 64],
[34, 46, 55, 64, 30]])
y1_expected = tf.reshape(y1_expected, [1, n, n, 1])
y2 = resnet_utils.subsample(y1, 2)
y2_expected = tf.to_float([[14, 43, 34],
[43, 84, 55],
[34, 55, 30]])
y2_expected = tf.reshape(y2_expected, [1, n2, n2, 1])
y3 = resnet_utils.conv2d_same(x, 1, 3, stride=2, scope='Conv')
y3_expected = y2_expected
y4 = slim.conv2d(x, 1, [3, 3], stride=2, scope='Conv')
y4_expected = y2_expected
with self.test_session() as sess:
sess.run(tf.global_variables_initializer())
self.assertAllClose(y1.eval(), y1_expected.eval())
self.assertAllClose(y2.eval(), y2_expected.eval())
self.assertAllClose(y3.eval(), y3_expected.eval())
self.assertAllClose(y4.eval(), y4_expected.eval())
def _resnet_plain(self, inputs, blocks, output_stride=None, scope=None):
"""A plain ResNet without extra layers before or after the ResNet blocks."""
with tf.variable_scope(scope, values=[inputs]):
with slim.arg_scope([slim.conv2d], outputs_collections='end_points'):
net = resnet_utils.stack_blocks_dense(inputs, blocks, output_stride)
end_points = dict(tf.get_collection('end_points'))
return net, end_points
def testEndPointsV2(self):
"""Test the end points of a tiny v2 bottleneck network."""
bottleneck = resnet_v2.bottleneck
blocks = [resnet_utils.Block('block1', bottleneck, [(4, 1, 1), (4, 1, 2)]),
resnet_utils.Block('block2', bottleneck, [(8, 2, 1), (8, 2, 1)])]
inputs = create_test_input(2, 32, 16, 3)
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
_, end_points = self._resnet_plain(inputs, blocks, scope='tiny')
expected = [
'tiny/block1/unit_1/bottleneck_v2/shortcut',
'tiny/block1/unit_1/bottleneck_v2/conv1',
'tiny/block1/unit_1/bottleneck_v2/conv2',
'tiny/block1/unit_1/bottleneck_v2/conv3',
'tiny/block1/unit_2/bottleneck_v2/conv1',
'tiny/block1/unit_2/bottleneck_v2/conv2',
'tiny/block1/unit_2/bottleneck_v2/conv3',
'tiny/block2/unit_1/bottleneck_v2/shortcut',
'tiny/block2/unit_1/bottleneck_v2/conv1',
'tiny/block2/unit_1/bottleneck_v2/conv2',
'tiny/block2/unit_1/bottleneck_v2/conv3',
'tiny/block2/unit_2/bottleneck_v2/conv1',
'tiny/block2/unit_2/bottleneck_v2/conv2',
'tiny/block2/unit_2/bottleneck_v2/conv3']
self.assertItemsEqual(expected, end_points)
def _stack_blocks_nondense(self, net, blocks):
"""A simplified ResNet Block stacker without output stride control."""
for block in blocks:
with tf.variable_scope(block.scope, 'block', [net]):
for i, unit in enumerate(block.args):
depth, depth_bottleneck, stride = unit
with tf.variable_scope('unit_%d' % (i + 1), values=[net]):
net = block.unit_fn(net,
depth=depth,
depth_bottleneck=depth_bottleneck,
stride=stride,
rate=1)
return net
def _atrousValues(self, bottleneck):
"""Verify the values of dense feature extraction by atrous convolution.
Make sure that dense feature extraction by stack_blocks_dense() followed by
subsampling gives identical results to feature extraction at the nominal
network output stride using the simple self._stack_blocks_nondense() above.
Args:
bottleneck: The bottleneck function.
"""
blocks = [
resnet_utils.Block('block1', bottleneck, [(4, 1, 1), (4, 1, 2)]),
resnet_utils.Block('block2', bottleneck, [(8, 2, 1), (8, 2, 2)]),
resnet_utils.Block('block3', bottleneck, [(16, 4, 1), (16, 4, 2)]),
resnet_utils.Block('block4', bottleneck, [(32, 8, 1), (32, 8, 1)])
]
nominal_stride = 8
# Test both odd and even input dimensions.
height = 30
width = 31
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
with slim.arg_scope([slim.batch_norm], is_training=False):
for output_stride in [1, 2, 4, 8, None]:
with tf.Graph().as_default():
with self.test_session() as sess:
tf.set_random_seed(0)
inputs = create_test_input(1, height, width, 3)
# Dense feature extraction followed by subsampling.
output = resnet_utils.stack_blocks_dense(inputs,
blocks,
output_stride)
if output_stride is None:
factor = 1
else:
factor = nominal_stride // output_stride
output = resnet_utils.subsample(output, factor)
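              # e.g. with output_stride=2 the dense features have stride 2, so
              # subsampling by factor = 8 // 2 = 4 reproduces the nominal stride-8
              # sampling grid and the two paths should match numerically below.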
# Make the two networks use the same weights.
tf.get_variable_scope().reuse_variables()
# Feature extraction at the nominal network rate.
expected = self._stack_blocks_nondense(inputs, blocks)
sess.run(tf.global_variables_initializer())
output, expected = sess.run([output, expected])
self.assertAllClose(output, expected, atol=1e-4, rtol=1e-4)
def testAtrousValuesBottleneck(self):
self._atrousValues(resnet_v2.bottleneck)
class ResnetCompleteNetworkTest(tf.test.TestCase):
"""Tests with complete small ResNet v2 networks."""
def _resnet_small(self,
inputs,
num_classes=None,
is_training=True,
global_pool=True,
output_stride=None,
include_root_block=True,
reuse=None,
scope='resnet_v2_small'):
"""A shallow and thin ResNet v2 for faster tests."""
bottleneck = resnet_v2.bottleneck
blocks = [
resnet_utils.Block(
'block1', bottleneck, [(4, 1, 1)] * 2 + [(4, 1, 2)]),
resnet_utils.Block(
'block2', bottleneck, [(8, 2, 1)] * 2 + [(8, 2, 2)]),
resnet_utils.Block(
'block3', bottleneck, [(16, 4, 1)] * 2 + [(16, 4, 2)]),
resnet_utils.Block(
'block4', bottleneck, [(32, 8, 1)] * 2)]
return resnet_v2.resnet_v2(inputs, blocks, num_classes,
is_training=is_training,
global_pool=global_pool,
output_stride=output_stride,
include_root_block=include_root_block,
reuse=reuse,
scope=scope)
def testClassificationEndPoints(self):
global_pool = True
num_classes = 10
inputs = create_test_input(2, 224, 224, 3)
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
logits, end_points = self._resnet_small(inputs, num_classes,
global_pool=global_pool,
scope='resnet')
self.assertTrue(logits.op.name.startswith('resnet/logits'))
self.assertListEqual(logits.get_shape().as_list(), [2, 1, 1, num_classes])
self.assertTrue('predictions' in end_points)
self.assertListEqual(end_points['predictions'].get_shape().as_list(),
[2, 1, 1, num_classes])
def testClassificationShapes(self):
global_pool = True
num_classes = 10
inputs = create_test_input(2, 224, 224, 3)
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
_, end_points = self._resnet_small(inputs, num_classes,
global_pool=global_pool,
scope='resnet')
endpoint_to_shape = {
'resnet/block1': [2, 28, 28, 4],
'resnet/block2': [2, 14, 14, 8],
'resnet/block3': [2, 7, 7, 16],
'resnet/block4': [2, 7, 7, 32]}
for endpoint in endpoint_to_shape:
shape = endpoint_to_shape[endpoint]
self.assertListEqual(end_points[endpoint].get_shape().as_list(), shape)
def testFullyConvolutionalEndpointShapes(self):
global_pool = False
num_classes = 10
inputs = create_test_input(2, 321, 321, 3)
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
_, end_points = self._resnet_small(inputs, num_classes,
global_pool=global_pool,
scope='resnet')
endpoint_to_shape = {
'resnet/block1': [2, 41, 41, 4],
'resnet/block2': [2, 21, 21, 8],
'resnet/block3': [2, 11, 11, 16],
'resnet/block4': [2, 11, 11, 32]}
for endpoint in endpoint_to_shape:
shape = endpoint_to_shape[endpoint]
self.assertListEqual(end_points[endpoint].get_shape().as_list(), shape)
def testRootlessFullyConvolutionalEndpointShapes(self):
global_pool = False
num_classes = 10
inputs = create_test_input(2, 128, 128, 3)
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
_, end_points = self._resnet_small(inputs, num_classes,
global_pool=global_pool,
include_root_block=False,
scope='resnet')
endpoint_to_shape = {
'resnet/block1': [2, 64, 64, 4],
'resnet/block2': [2, 32, 32, 8],
'resnet/block3': [2, 16, 16, 16],
'resnet/block4': [2, 16, 16, 32]}
for endpoint in endpoint_to_shape:
shape = endpoint_to_shape[endpoint]
self.assertListEqual(end_points[endpoint].get_shape().as_list(), shape)
def testAtrousFullyConvolutionalEndpointShapes(self):
global_pool = False
num_classes = 10
output_stride = 8
inputs = create_test_input(2, 321, 321, 3)
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
_, end_points = self._resnet_small(inputs,
num_classes,
global_pool=global_pool,
output_stride=output_stride,
scope='resnet')
endpoint_to_shape = {
'resnet/block1': [2, 41, 41, 4],
'resnet/block2': [2, 41, 41, 8],
'resnet/block3': [2, 41, 41, 16],
'resnet/block4': [2, 41, 41, 32]}
for endpoint in endpoint_to_shape:
shape = endpoint_to_shape[endpoint]
self.assertListEqual(end_points[endpoint].get_shape().as_list(), shape)
def testAtrousFullyConvolutionalValues(self):
"""Verify dense feature extraction with atrous convolution."""
nominal_stride = 32
for output_stride in [4, 8, 16, 32, None]:
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
with tf.Graph().as_default():
with self.test_session() as sess:
tf.set_random_seed(0)
inputs = create_test_input(2, 81, 81, 3)
# Dense feature extraction followed by subsampling.
output, _ = self._resnet_small(inputs, None,
is_training=False,
global_pool=False,
output_stride=output_stride)
if output_stride is None:
factor = 1
else:
factor = nominal_stride // output_stride
output = resnet_utils.subsample(output, factor)
# Make the two networks use the same weights.
tf.get_variable_scope().reuse_variables()
# Feature extraction at the nominal network rate.
expected, _ = self._resnet_small(inputs, None,
is_training=False,
global_pool=False)
sess.run(tf.global_variables_initializer())
self.assertAllClose(output.eval(), expected.eval(),
atol=1e-4, rtol=1e-4)
def testUnknownBatchSize(self):
batch = 2
height, width = 65, 65
global_pool = True
num_classes = 10
inputs = create_test_input(None, height, width, 3)
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
logits, _ = self._resnet_small(inputs, num_classes,
global_pool=global_pool,
scope='resnet')
self.assertTrue(logits.op.name.startswith('resnet/logits'))
self.assertListEqual(logits.get_shape().as_list(),
[None, 1, 1, num_classes])
images = create_test_input(batch, height, width, 3)
with self.test_session() as sess:
sess.run(tf.global_variables_initializer())
output = sess.run(logits, {inputs: images.eval()})
self.assertEqual(output.shape, (batch, 1, 1, num_classes))
def testFullyConvolutionalUnknownHeightWidth(self):
batch = 2
height, width = 65, 65
global_pool = False
inputs = create_test_input(batch, None, None, 3)
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
output, _ = self._resnet_small(inputs, None,
global_pool=global_pool)
self.assertListEqual(output.get_shape().as_list(),
[batch, None, None, 32])
images = create_test_input(batch, height, width, 3)
with self.test_session() as sess:
sess.run(tf.global_variables_initializer())
output = sess.run(output, {inputs: images.eval()})
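      # A 65x65 input through a network with total stride 32 and 'SAME' padding
      # yields ceil(65 / 32) = 3 spatial positions per dimension.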
self.assertEqual(output.shape, (batch, 3, 3, 32))
def testAtrousFullyConvolutionalUnknownHeightWidth(self):
batch = 2
height, width = 65, 65
global_pool = False
output_stride = 8
inputs = create_test_input(batch, None, None, 3)
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
output, _ = self._resnet_small(inputs,
None,
global_pool=global_pool,
output_stride=output_stride)
self.assertListEqual(output.get_shape().as_list(),
[batch, None, None, 32])
images = create_test_input(batch, height, width, 3)
with self.test_session() as sess:
sess.run(tf.global_variables_initializer())
output = sess.run(output, {inputs: images.eval()})
self.assertEqual(output.shape, (batch, 9, 9, 32))
if __name__ == '__main__':
tf.test.main()
|
from datetime import timedelta
import importlib
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_HOST, CONF_PORT, PERCENTAGE
from homeassistant.exceptions import PlatformNotReady
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
ATTR_MARKER_TYPE = "marker_type"
ATTR_MARKER_LOW_LEVEL = "marker_low_level"
ATTR_MARKER_HIGH_LEVEL = "marker_high_level"
ATTR_PRINTER_NAME = "printer_name"
ATTR_DEVICE_URI = "device_uri"
ATTR_PRINTER_INFO = "printer_info"
ATTR_PRINTER_IS_SHARED = "printer_is_shared"
ATTR_PRINTER_LOCATION = "printer_location"
ATTR_PRINTER_MODEL = "printer_model"
ATTR_PRINTER_STATE_MESSAGE = "printer_state_message"
ATTR_PRINTER_STATE_REASON = "printer_state_reason"
ATTR_PRINTER_TYPE = "printer_type"
ATTR_PRINTER_URI_SUPPORTED = "printer_uri_supported"
CONF_PRINTERS = "printers"
CONF_IS_CUPS_SERVER = "is_cups_server"
DEFAULT_HOST = "127.0.0.1"
DEFAULT_PORT = 631
DEFAULT_IS_CUPS_SERVER = True
ICON_PRINTER = "mdi:printer"
ICON_MARKER = "mdi:water"
SCAN_INTERVAL = timedelta(minutes=1)
PRINTER_STATES = {3: "idle", 4: "printing", 5: "stopped"}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_PRINTERS): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_IS_CUPS_SERVER, default=DEFAULT_IS_CUPS_SERVER): cv.boolean,
vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the CUPS sensor."""
host = config[CONF_HOST]
port = config[CONF_PORT]
printers = config[CONF_PRINTERS]
is_cups = config[CONF_IS_CUPS_SERVER]
if is_cups:
data = CupsData(host, port, None)
data.update()
if data.available is False:
_LOGGER.error("Unable to connect to CUPS server: %s:%s", host, port)
raise PlatformNotReady()
dev = []
for printer in printers:
if printer not in data.printers:
_LOGGER.error("Printer is not present: %s", printer)
continue
dev.append(CupsSensor(data, printer))
if "marker-names" in data.attributes[printer]:
for marker in data.attributes[printer]["marker-names"]:
dev.append(MarkerSensor(data, printer, marker, True))
add_entities(dev, True)
return
data = CupsData(host, port, printers)
data.update()
if data.available is False:
_LOGGER.error("Unable to connect to IPP printer: %s:%s", host, port)
raise PlatformNotReady()
dev = []
for printer in printers:
dev.append(IPPSensor(data, printer))
if "marker-names" in data.attributes[printer]:
for marker in data.attributes[printer]["marker-names"]:
dev.append(MarkerSensor(data, printer, marker, False))
add_entities(dev, True)
class CupsSensor(Entity):
"""Representation of a CUPS sensor."""
def __init__(self, data, printer):
"""Initialize the CUPS sensor."""
self.data = data
self._name = printer
self._printer = None
self._available = False
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
if self._printer is None:
return None
key = self._printer["printer-state"]
return PRINTER_STATES.get(key, key)
@property
def available(self):
"""Return True if entity is available."""
return self._available
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return ICON_PRINTER
@property
def device_state_attributes(self):
"""Return the state attributes of the sensor."""
if self._printer is None:
return None
return {
ATTR_DEVICE_URI: self._printer["device-uri"],
ATTR_PRINTER_INFO: self._printer["printer-info"],
ATTR_PRINTER_IS_SHARED: self._printer["printer-is-shared"],
ATTR_PRINTER_LOCATION: self._printer["printer-location"],
ATTR_PRINTER_MODEL: self._printer["printer-make-and-model"],
ATTR_PRINTER_STATE_MESSAGE: self._printer["printer-state-message"],
ATTR_PRINTER_STATE_REASON: self._printer["printer-state-reasons"],
ATTR_PRINTER_TYPE: self._printer["printer-type"],
ATTR_PRINTER_URI_SUPPORTED: self._printer["printer-uri-supported"],
}
def update(self):
"""Get the latest data and updates the states."""
self.data.update()
self._printer = self.data.printers.get(self._name)
self._available = self.data.available
class IPPSensor(Entity):
"""Implementation of the IPPSensor.
This sensor represents the status of the printer.
"""
def __init__(self, data, name):
"""Initialize the sensor."""
self.data = data
self._name = name
self._attributes = None
self._available = False
@property
def name(self):
"""Return the name of the sensor."""
return self._attributes["printer-make-and-model"]
@property
def icon(self):
"""Return the icon to use in the frontend."""
return ICON_PRINTER
@property
def available(self):
"""Return True if entity is available."""
return self._available
@property
def state(self):
"""Return the state of the sensor."""
if self._attributes is None:
return None
key = self._attributes["printer-state"]
return PRINTER_STATES.get(key, key)
@property
def device_state_attributes(self):
"""Return the state attributes of the sensor."""
if self._attributes is None:
return None
state_attributes = {}
if "printer-info" in self._attributes:
state_attributes[ATTR_PRINTER_INFO] = self._attributes["printer-info"]
if "printer-location" in self._attributes:
state_attributes[ATTR_PRINTER_LOCATION] = self._attributes[
"printer-location"
]
if "printer-state-message" in self._attributes:
state_attributes[ATTR_PRINTER_STATE_MESSAGE] = self._attributes[
"printer-state-message"
]
if "printer-state-reasons" in self._attributes:
state_attributes[ATTR_PRINTER_STATE_REASON] = self._attributes[
"printer-state-reasons"
]
if "printer-uri-supported" in self._attributes:
state_attributes[ATTR_PRINTER_URI_SUPPORTED] = self._attributes[
"printer-uri-supported"
]
return state_attributes
def update(self):
"""Fetch new state data for the sensor."""
self.data.update()
self._attributes = self.data.attributes.get(self._name)
self._available = self.data.available
class MarkerSensor(Entity):
"""Implementation of the MarkerSensor.
This sensor represents the percentage of ink or toner.
"""
def __init__(self, data, printer, name, is_cups):
"""Initialize the sensor."""
self.data = data
self._name = name
self._printer = printer
self._index = data.attributes[printer]["marker-names"].index(name)
self._is_cups = is_cups
self._attributes = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def icon(self):
"""Return the icon to use in the frontend."""
return ICON_MARKER
@property
def state(self):
"""Return the state of the sensor."""
if self._attributes is None:
return None
return self._attributes[self._printer]["marker-levels"][self._index]
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return PERCENTAGE
@property
def device_state_attributes(self):
"""Return the state attributes of the sensor."""
if self._attributes is None:
return None
high_level = self._attributes[self._printer].get("marker-high-levels")
if isinstance(high_level, list):
high_level = high_level[self._index]
low_level = self._attributes[self._printer].get("marker-low-levels")
if isinstance(low_level, list):
low_level = low_level[self._index]
marker_types = self._attributes[self._printer]["marker-types"]
if isinstance(marker_types, list):
marker_types = marker_types[self._index]
if self._is_cups:
printer_name = self._printer
else:
printer_name = self._attributes[self._printer]["printer-make-and-model"]
return {
ATTR_MARKER_HIGH_LEVEL: high_level,
ATTR_MARKER_LOW_LEVEL: low_level,
ATTR_MARKER_TYPE: marker_types,
ATTR_PRINTER_NAME: printer_name,
}
def update(self):
"""Update the state of the sensor."""
# Data fetching is done by CupsSensor/IPPSensor
self._attributes = self.data.attributes
class CupsData:
"""Get the latest data from CUPS and update the state."""
def __init__(self, host, port, ipp_printers):
"""Initialize the data object."""
self._host = host
self._port = port
self._ipp_printers = ipp_printers
self.is_cups = ipp_printers is None
self.printers = None
self.attributes = {}
self.available = False
def update(self):
"""Get the latest data from CUPS."""
cups = importlib.import_module("cups")
try:
conn = cups.Connection(host=self._host, port=self._port)
if self.is_cups:
self.printers = conn.getPrinters()
for printer in self.printers:
self.attributes[printer] = conn.getPrinterAttributes(name=printer)
else:
for ipp_printer in self._ipp_printers:
self.attributes[ipp_printer] = conn.getPrinterAttributes(
uri=f"ipp://{self._host}:{self._port}/{ipp_printer}"
)
self.available = True
except RuntimeError:
self.available = False
|
from unittest.mock import patch
import pytest
from homeassistant.components.spaceapi import DOMAIN, SPACEAPI_VERSION, URL_API_SPACEAPI
from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, PERCENTAGE, TEMP_CELSIUS
from homeassistant.setup import async_setup_component
from tests.common import mock_coro
CONFIG = {
DOMAIN: {
"space": "Home",
"logo": "https://home-assistant.io/logo.png",
"url": "https://home-assistant.io",
"location": {"address": "In your Home"},
"contact": {"email": "[email protected]"},
"issue_report_channels": ["email"],
"state": {
"entity_id": "test.test_door",
"icon_open": "https://home-assistant.io/open.png",
"icon_closed": "https://home-assistant.io/close.png",
},
"sensors": {
"temperature": ["test.temp1", "test.temp2"],
"humidity": ["test.hum1"],
},
"spacefed": {"spacenet": True, "spacesaml": False, "spacephone": True},
"cam": ["https://home-assistant.io/cam1", "https://home-assistant.io/cam2"],
"stream": {
"m4": "https://home-assistant.io/m4",
"mjpeg": "https://home-assistant.io/mjpeg",
"ustream": "https://home-assistant.io/ustream",
},
"feeds": {
"blog": {"url": "https://home-assistant.io/blog"},
"wiki": {"type": "mediawiki", "url": "https://home-assistant.io/wiki"},
"calendar": {"type": "ical", "url": "https://home-assistant.io/calendar"},
"flicker": {"url": "https://www.flickr.com/photos/home-assistant"},
},
"cache": {"schedule": "m.02"},
"projects": [
"https://home-assistant.io/projects/1",
"https://home-assistant.io/projects/2",
"https://home-assistant.io/projects/3",
],
"radio_show": [
{
"name": "Radioshow",
"url": "https://home-assistant.io/radio",
"type": "ogg",
"start": "2019-09-02T10:00Z",
"end": "2019-09-02T12:00Z",
}
],
}
}
SENSOR_OUTPUT = {
"temperature": [
{"location": "Home", "name": "temp1", "unit": TEMP_CELSIUS, "value": "25"},
{"location": "Home", "name": "temp2", "unit": TEMP_CELSIUS, "value": "23"},
],
"humidity": [
{"location": "Home", "name": "hum1", "unit": PERCENTAGE, "value": "88"}
],
}
@pytest.fixture
def mock_client(hass, hass_client):
"""Start the Home Assistant HTTP component."""
with patch("homeassistant.components.spaceapi", return_value=mock_coro(True)):
hass.loop.run_until_complete(async_setup_component(hass, "spaceapi", CONFIG))
hass.states.async_set(
"test.temp1", 25, attributes={ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}
)
hass.states.async_set(
"test.temp2", 23, attributes={ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}
)
hass.states.async_set(
"test.hum1", 88, attributes={ATTR_UNIT_OF_MEASUREMENT: PERCENTAGE}
)
return hass.loop.run_until_complete(hass_client())
async def test_spaceapi_get(hass, mock_client):
"""Test response after start-up Home Assistant."""
resp = await mock_client.get(URL_API_SPACEAPI)
assert resp.status == 200
data = await resp.json()
assert data["api"] == SPACEAPI_VERSION
assert data["space"] == "Home"
assert data["contact"]["email"] == "[email protected]"
assert data["location"]["address"] == "In your Home"
assert data["location"]["lat"] == 32.87336
assert data["location"]["lon"] == -117.22743
assert data["state"]["open"] == "null"
assert data["state"]["icon"]["open"] == "https://home-assistant.io/open.png"
assert data["state"]["icon"]["close"] == "https://home-assistant.io/close.png"
assert data["spacefed"]["spacenet"] == bool(1)
assert data["spacefed"]["spacesaml"] == bool(0)
assert data["spacefed"]["spacephone"] == bool(1)
assert data["cam"][0] == "https://home-assistant.io/cam1"
assert data["cam"][1] == "https://home-assistant.io/cam2"
assert data["stream"]["m4"] == "https://home-assistant.io/m4"
assert data["stream"]["mjpeg"] == "https://home-assistant.io/mjpeg"
assert data["stream"]["ustream"] == "https://home-assistant.io/ustream"
assert data["feeds"]["blog"]["url"] == "https://home-assistant.io/blog"
assert data["feeds"]["wiki"]["type"] == "mediawiki"
assert data["feeds"]["wiki"]["url"] == "https://home-assistant.io/wiki"
assert data["feeds"]["calendar"]["type"] == "ical"
assert data["feeds"]["calendar"]["url"] == "https://home-assistant.io/calendar"
assert (
data["feeds"]["flicker"]["url"]
== "https://www.flickr.com/photos/home-assistant"
)
assert data["cache"]["schedule"] == "m.02"
assert data["projects"][0] == "https://home-assistant.io/projects/1"
assert data["projects"][1] == "https://home-assistant.io/projects/2"
assert data["projects"][2] == "https://home-assistant.io/projects/3"
assert data["radio_show"][0]["name"] == "Radioshow"
assert data["radio_show"][0]["url"] == "https://home-assistant.io/radio"
assert data["radio_show"][0]["type"] == "ogg"
assert data["radio_show"][0]["start"] == "2019-09-02T10:00Z"
assert data["radio_show"][0]["end"] == "2019-09-02T12:00Z"
async def test_spaceapi_state_get(hass, mock_client):
"""Test response if the state entity was set."""
hass.states.async_set("test.test_door", True)
resp = await mock_client.get(URL_API_SPACEAPI)
assert resp.status == 200
data = await resp.json()
assert data["state"]["open"] == bool(1)
async def test_spaceapi_sensors_get(hass, mock_client):
"""Test the response for the sensors."""
resp = await mock_client.get(URL_API_SPACEAPI)
assert resp.status == 200
data = await resp.json()
assert data["sensors"] == SENSOR_OUTPUT
|
import asyncio
from datetime import timedelta
import logging
from aiopvapi.helpers.aiorequest import AioRequest
from aiopvapi.helpers.constants import ATTR_ID
from aiopvapi.helpers.tools import base64_to_unicode
from aiopvapi.rooms import Rooms
from aiopvapi.scenes import Scenes
from aiopvapi.shades import Shades
from aiopvapi.userdata import UserData
import async_timeout
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import CONF_HOST
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import (
COORDINATOR,
DEVICE_FIRMWARE,
DEVICE_INFO,
DEVICE_MAC_ADDRESS,
DEVICE_MODEL,
DEVICE_NAME,
DEVICE_REVISION,
DEVICE_SERIAL_NUMBER,
DOMAIN,
FIRMWARE_BUILD,
FIRMWARE_IN_USERDATA,
FIRMWARE_SUB_REVISION,
HUB_EXCEPTIONS,
HUB_NAME,
LEGACY_DEVICE_BUILD,
LEGACY_DEVICE_MODEL,
LEGACY_DEVICE_REVISION,
LEGACY_DEVICE_SUB_REVISION,
MAC_ADDRESS_IN_USERDATA,
MAINPROCESSOR_IN_USERDATA_FIRMWARE,
MODEL_IN_MAINPROCESSOR,
PV_API,
PV_ROOM_DATA,
PV_SCENE_DATA,
PV_SHADE_DATA,
PV_SHADES,
REVISION_IN_MAINPROCESSOR,
ROOM_DATA,
SCENE_DATA,
SERIAL_NUMBER_IN_USERDATA,
SHADE_DATA,
USER_DATA,
)
PARALLEL_UPDATES = 1
DEVICE_SCHEMA = vol.Schema(
{DOMAIN: vol.Schema({vol.Required(CONF_HOST): cv.string})}, extra=vol.ALLOW_EXTRA
)
def _has_all_unique_hosts(value):
"""Validate that each hub configured has a unique host."""
hosts = [device[CONF_HOST] for device in value]
schema = vol.Schema(vol.Unique())
schema(hosts)
return value
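# vol.Unique() raises vol.Invalid when the hosts list contains duplicates, so two
# YAML hub entries pointing at the same host fail configuration validation early.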
CONFIG_SCHEMA = vol.Schema(
{DOMAIN: vol.All(cv.ensure_list, [DEVICE_SCHEMA], _has_all_unique_hosts)},
extra=vol.ALLOW_EXTRA,
)
PLATFORMS = ["cover", "scene", "sensor"]
_LOGGER = logging.getLogger(__name__)
async def async_setup(hass: HomeAssistant, hass_config: dict):
"""Set up the Hunter Douglas PowerView component."""
hass.data.setdefault(DOMAIN, {})
if DOMAIN not in hass_config:
return True
for conf in hass_config[DOMAIN]:
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=conf
)
)
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Set up Hunter Douglas PowerView from a config entry."""
config = entry.data
hub_address = config.get(CONF_HOST)
websession = async_get_clientsession(hass)
pv_request = AioRequest(hub_address, loop=hass.loop, websession=websession)
try:
async with async_timeout.timeout(10):
device_info = await async_get_device_info(pv_request)
async with async_timeout.timeout(10):
rooms = Rooms(pv_request)
room_data = _async_map_data_by_id((await rooms.get_resources())[ROOM_DATA])
async with async_timeout.timeout(10):
scenes = Scenes(pv_request)
scene_data = _async_map_data_by_id(
(await scenes.get_resources())[SCENE_DATA]
)
async with async_timeout.timeout(10):
shades = Shades(pv_request)
shade_data = _async_map_data_by_id(
(await shades.get_resources())[SHADE_DATA]
)
except HUB_EXCEPTIONS as err:
_LOGGER.error("Connection error to PowerView hub: %s", hub_address)
raise ConfigEntryNotReady from err
if not device_info:
_LOGGER.error("Unable to initialize PowerView hub: %s", hub_address)
raise ConfigEntryNotReady
async def async_update_data():
"""Fetch data from shade endpoint."""
async with async_timeout.timeout(10):
shade_entries = await shades.get_resources()
if not shade_entries:
raise UpdateFailed("Failed to fetch new shade data.")
return _async_map_data_by_id(shade_entries[SHADE_DATA])
coordinator = DataUpdateCoordinator(
hass,
_LOGGER,
name="powerview hub",
update_method=async_update_data,
update_interval=timedelta(seconds=60),
)
hass.data[DOMAIN][entry.entry_id] = {
PV_API: pv_request,
PV_ROOM_DATA: room_data,
PV_SCENE_DATA: scene_data,
PV_SHADES: shades,
PV_SHADE_DATA: shade_data,
COORDINATOR: coordinator,
DEVICE_INFO: device_info,
}
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
return True
async def async_get_device_info(pv_request):
"""Determine device info."""
userdata = UserData(pv_request)
resources = await userdata.get_resources()
userdata_data = resources[USER_DATA]
if FIRMWARE_IN_USERDATA in userdata_data:
main_processor_info = userdata_data[FIRMWARE_IN_USERDATA][
MAINPROCESSOR_IN_USERDATA_FIRMWARE
]
else:
# Legacy devices
main_processor_info = {
REVISION_IN_MAINPROCESSOR: LEGACY_DEVICE_REVISION,
FIRMWARE_SUB_REVISION: LEGACY_DEVICE_SUB_REVISION,
FIRMWARE_BUILD: LEGACY_DEVICE_BUILD,
MODEL_IN_MAINPROCESSOR: LEGACY_DEVICE_MODEL,
}
return {
DEVICE_NAME: base64_to_unicode(userdata_data[HUB_NAME]),
DEVICE_MAC_ADDRESS: userdata_data[MAC_ADDRESS_IN_USERDATA],
DEVICE_SERIAL_NUMBER: userdata_data[SERIAL_NUMBER_IN_USERDATA],
DEVICE_REVISION: main_processor_info[REVISION_IN_MAINPROCESSOR],
DEVICE_FIRMWARE: main_processor_info,
DEVICE_MODEL: main_processor_info[MODEL_IN_MAINPROCESSOR],
}
@callback
def _async_map_data_by_id(data):
"""Return a dict with the key being the id for a list of entries."""
return {entry[ATTR_ID]: entry for entry in data}
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
|
import os
import os.path as op
from distutils.version import LooseVersion
from ._brain import Brain
class _BrainScraper(object):
"""Scrape Brain objects."""
def __repr__(self):
return '<BrainScraper>'
def __call__(self, block, block_vars, gallery_conf):
rst = ''
for brain in list(block_vars['example_globals'].values()):
# Only need to process if it's a brain with a time_viewer
# with traces on and shown in the same window, otherwise
# PyVista and matplotlib scrapers can just do the work
if (not isinstance(brain, Brain)) or brain._closed:
continue
import matplotlib
from matplotlib import animation, pyplot as plt
from sphinx_gallery.scrapers import matplotlib_scraper
img = brain.screenshot(time_viewer=True)
dpi = 100.
figsize = (img.shape[1] / dpi, img.shape[0] / dpi)
fig = plt.figure(figsize=figsize, dpi=dpi)
ax = plt.Axes(fig, [0, 0, 1, 1])
fig.add_axes(ax)
img = ax.imshow(img)
movie_key = '# brain.save_movie'
if movie_key in block[1]:
kwargs = dict()
# Parse our parameters
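                # Hypothetical example of the commented-out call reconstructed
                # here (possibly continued over following '# ' lines):
                #   # brain.save_movie(..., time_dilation=20, tmin=0.05,
                #   #                  tmax=0.16, framerate=10)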
lines = block[1].splitlines()
for li, line in enumerate(block[1].splitlines()):
if line.startswith(movie_key):
line = line[len(movie_key):].replace('..., ', '')
for ni in range(1, 5): # should be enough
if len(lines) > li + ni and \
lines[li + ni].startswith('# '):
line = line + lines[li + ni][1:].strip()
else:
break
assert line.startswith('(') and line.endswith(')')
kwargs.update(eval(f'dict{line}'))
for key, default in [('time_dilation', 4),
('framerate', 24),
('tmin', None),
('tmax', None),
('interpolation', None),
('time_viewer', False)]:
if key not in kwargs:
kwargs[key] = default
kwargs.pop('filename', None) # always omit this one
if brain.time_viewer:
assert kwargs['time_viewer'], 'Must use time_viewer=True'
frames = brain._make_movie_frames(callback=None, **kwargs)
# Turn them into an animation
def func(frame):
img.set_data(frame)
return [img]
anim = animation.FuncAnimation(
fig, func=func, frames=frames, blit=True,
interval=1000. / kwargs['framerate'])
# Out to sphinx-gallery:
#
# 1. A static image but hide it (useful for carousel)
if LooseVersion(matplotlib.__version__) >= \
LooseVersion('3.3.1') and \
animation.FFMpegWriter.isAvailable():
writer = 'ffmpeg'
elif animation.ImageMagickWriter.isAvailable():
writer = 'imagemagick'
else:
writer = None
static_fname = next(block_vars['image_path_iterator'])
static_fname = static_fname[:-4] + '.gif'
anim.save(static_fname, writer=writer, dpi=dpi)
rel_fname = op.relpath(static_fname, gallery_conf['src_dir'])
rel_fname = rel_fname.replace(os.sep, '/').lstrip('/')
rst += f'\n.. image:: /{rel_fname}\n :class: hidden\n'
# 2. An animation that will be embedded and visible
block_vars['example_globals']['_brain_anim_'] = anim
brain.close()
rst += matplotlib_scraper(block, block_vars, gallery_conf)
return rst
|
from diamond.metric import Metric
import diamond.collector
import glob
import re
import os
class HBaseCollector(diamond.collector.Collector):
re_log = re.compile(r'^(?P<timestamp>\d+) (?P<name>\S+): (?P<metrics>.*)$')
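    # Hypothetical example of a log line this pattern matches (epoch timestamp,
    # context name, then comma-separated key=value pairs):
    #   1404772738 jvm.metrics: hostName=region1, processName=RegionServer, gcCount=12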
def get_default_config_help(self):
config_help = super(HBaseCollector, self).get_default_config_help()
config_help.update({
'metrics': "List of paths to process metrics from",
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(HBaseCollector, self).get_default_config()
config.update({
'path': 'hbase',
'metrics': ['/var/log/hbase/*.metrics'],
})
return config
def collect(self):
metrics = self.config['metrics']
if not isinstance(metrics, list):
metrics = [str(metrics)]
for pattern in metrics:
for filename in glob.glob(pattern):
self.collect_from(filename)
def collect_from(self, filename):
if not os.access(filename, os.R_OK):
self.log.error('HBaseCollector unable to read "%s"', filename)
return False
fd = open(filename, 'r+')
for line in fd:
match = self.re_log.match(line)
if not match:
continue
metrics = {}
data = match.groupdict()
for metric in data['metrics'].split(','):
metric = metric.strip()
if '=' in metric:
key, value = metric.split('=', 1)
metrics[key] = value
for metric in metrics.keys():
try:
if data['name'] == 'jvm.metrics':
path = self.get_metric_path('.'.join([
data['name'],
metrics['hostName'].replace('.', '_'),
metrics['processName'].replace(' ', '_'),
metric, ]))
elif data['name'] == 'mapred.job':
path = self.get_metric_path('.'.join([
data['name'],
metrics['hostName'].replace('.', '_'),
metrics['group'].replace(' ', '_'),
metrics['counter'].replace(' ', '_'),
metric, ]))
elif data['name'] == 'rpc.metrics':
if metric == 'port':
continue
path = self.get_metric_path('.'.join([
data['name'],
metrics['hostName'].replace('.', '_'),
metrics['port'],
metric, ]))
else:
path = self.get_metric_path('.'.join([
data['name'],
metric, ]))
value = float(metrics[metric])
self.publish_metric(
Metric(path,
value,
timestamp=int(data['timestamp']) / 1000))
except ValueError:
pass
fd.seek(0)
fd.truncate()
fd.close()
|
import os
import os.path
import re
from xml.etree import ElementTree
from unittest_mixins import change_dir
import coverage
from coverage.backward import import_local_file
from coverage.files import abs_file
from tests.coveragetest import CoverageTest
from tests.goldtest import compare, gold_path
class XmlTestHelpers(CoverageTest):
"""Methods to use from XML tests."""
def run_mycode(self):
"""Run mycode.py, so we can report on it."""
self.make_file("mycode.py", "print('hello')\n")
self.run_command("coverage run mycode.py")
def run_doit(self):
"""Construct a simple sub-package."""
self.make_file("sub/__init__.py")
self.make_file("sub/doit.py", "print('doit!')")
self.make_file("main.py", "import sub.doit")
cov = coverage.Coverage(source=["."])
self.start_import_stop(cov, "main")
return cov
def make_tree(self, width, depth, curdir="."):
"""Make a tree of packages.
Makes `width` directories, named d0 .. d{width-1}. Each directory has
__init__.py, and `width` files, named f0.py .. f{width-1}.py. Each
directory also has `width` sub-directories, in the same fashion, until
a depth of `depth` is reached.
"""
if depth == 0:
return
def here(p):
"""A path for `p` in our currently interesting directory."""
return os.path.join(curdir, p)
for i in range(width):
next_dir = here("d{}".format(i))
self.make_tree(width, depth-1, next_dir)
if curdir != ".":
self.make_file(here("__init__.py"), "")
for i in range(width):
filename = here("f{}.py".format(i))
self.make_file(filename, "# {}\n".format(filename))
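        # For example, make_tree(width=1, depth=3) creates d0/__init__.py,
        # d0/f0.py, d0/d0/__init__.py and d0/d0/f0.py; nothing is written in
        # the starting directory itself (see test_package_names below).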
def assert_source(self, xmldom, src):
"""Assert that the XML has a <source> element with `src`."""
src = abs_file(src)
elts = xmldom.findall(".//sources/source")
assert any(elt.text == src for elt in elts)
class XmlTestHelpersTest(XmlTestHelpers, CoverageTest):
"""Tests of methods in XmlTestHelpers."""
run_in_temp_dir = False
def test_assert_source(self):
dom = ElementTree.fromstring("""\
<doc>
<src>foo</src>
<sources>
<source>{cwd}something</source>
<source>{cwd}another</source>
</sources>
</doc>
""".format(cwd=abs_file(".")+os.sep))
self.assert_source(dom, "something")
self.assert_source(dom, "another")
with self.assertRaises(AssertionError):
self.assert_source(dom, "hello")
with self.assertRaises(AssertionError):
self.assert_source(dom, "foo")
with self.assertRaises(AssertionError):
self.assert_source(dom, "thing")
class XmlReportTest(XmlTestHelpers, CoverageTest):
"""Tests of the XML reports from coverage.py."""
def test_default_file_placement(self):
self.run_mycode()
self.run_command("coverage xml")
self.assert_exists("coverage.xml")
def test_argument_affects_xml_placement(self):
self.run_mycode()
self.run_command("coverage xml -o put_it_there.xml")
self.assert_doesnt_exist("coverage.xml")
self.assert_exists("put_it_there.xml")
def test_config_file_directory_does_not_exist(self):
self.run_mycode()
self.run_command("coverage xml -o nonexistent/put_it_there.xml")
self.assert_doesnt_exist("coverage.xml")
self.assert_doesnt_exist("put_it_there.xml")
self.assert_exists("nonexistent/put_it_there.xml")
def test_config_affects_xml_placement(self):
self.run_mycode()
self.make_file(".coveragerc", "[xml]\noutput = xml.out\n")
self.run_command("coverage xml")
self.assert_doesnt_exist("coverage.xml")
self.assert_exists("xml.out")
def test_no_data(self):
# https://github.com/nedbat/coveragepy/issues/210
self.run_command("coverage xml")
self.assert_doesnt_exist("coverage.xml")
def test_no_source(self):
# Written while investigating a bug, might as well keep it.
# https://github.com/nedbat/coveragepy/issues/208
self.make_file("innocuous.py", "a = 4")
cov = coverage.Coverage()
self.start_import_stop(cov, "innocuous")
os.remove("innocuous.py")
cov.xml_report(ignore_errors=True)
self.assert_exists("coverage.xml")
def test_filename_format_showing_everything(self):
cov = self.run_doit()
cov.xml_report()
dom = ElementTree.parse("coverage.xml")
elts = dom.findall(".//class[@name='doit.py']")
assert len(elts) == 1
assert elts[0].get('filename') == "sub/doit.py"
def test_filename_format_including_filename(self):
cov = self.run_doit()
cov.xml_report(["sub/doit.py"])
dom = ElementTree.parse("coverage.xml")
elts = dom.findall(".//class[@name='doit.py']")
assert len(elts) == 1
assert elts[0].get('filename') == "sub/doit.py"
def test_filename_format_including_module(self):
cov = self.run_doit()
import sub.doit # pylint: disable=import-error
cov.xml_report([sub.doit])
dom = ElementTree.parse("coverage.xml")
elts = dom.findall(".//class[@name='doit.py']")
assert len(elts) == 1
assert elts[0].get('filename') == "sub/doit.py"
def test_reporting_on_nothing(self):
# Used to raise a zero division error:
# https://github.com/nedbat/coveragepy/issues/250
self.make_file("empty.py", "")
cov = coverage.Coverage()
empty = self.start_import_stop(cov, "empty")
cov.xml_report([empty])
dom = ElementTree.parse("coverage.xml")
elts = dom.findall(".//class[@name='empty.py']")
assert len(elts) == 1
assert elts[0].get('filename') == "empty.py"
assert elts[0].get('line-rate') == '1'
def test_empty_file_is_100_not_0(self):
# https://github.com/nedbat/coveragepy/issues/345
cov = self.run_doit()
cov.xml_report()
dom = ElementTree.parse("coverage.xml")
elts = dom.findall(".//class[@name='__init__.py']")
assert len(elts) == 1
assert elts[0].get('line-rate') == '1'
def test_empty_file_is_skipped(self):
cov = self.run_doit()
cov.xml_report(skip_empty=True)
dom = ElementTree.parse("coverage.xml")
elts = dom.findall(".//class[@name='__init__.py']")
assert len(elts) == 0
def test_curdir_source(self):
# With no source= option, the XML report should explain that the source
# is in the current directory.
cov = self.run_doit()
cov.xml_report()
dom = ElementTree.parse("coverage.xml")
self.assert_source(dom, ".")
sources = dom.findall(".//source")
assert len(sources) == 1
def test_deep_source(self):
# When using source=, the XML report needs to mention those directories
# in the <source> elements.
# https://github.com/nedbat/coveragepy/issues/439
self.make_file("src/main/foo.py", "a = 1")
self.make_file("also/over/there/bar.py", "b = 2")
cov = coverage.Coverage(source=["src/main", "also/over/there", "not/really"])
cov.start()
mod_foo = import_local_file("foo", "src/main/foo.py") # pragma: nested
mod_bar = import_local_file("bar", "also/over/there/bar.py") # pragma: nested
cov.stop() # pragma: nested
cov.xml_report([mod_foo, mod_bar])
dom = ElementTree.parse("coverage.xml")
self.assert_source(dom, "src/main")
self.assert_source(dom, "also/over/there")
sources = dom.findall(".//source")
assert len(sources) == 2
foo_class = dom.findall(".//class[@name='foo.py']")
assert len(foo_class) == 1
assert foo_class[0].attrib == {
'branch-rate': '0',
'complexity': '0',
'filename': 'foo.py',
'line-rate': '1',
'name': 'foo.py',
}
bar_class = dom.findall(".//class[@name='bar.py']")
assert len(bar_class) == 1
assert bar_class[0].attrib == {
'branch-rate': '0',
'complexity': '0',
'filename': 'bar.py',
'line-rate': '1',
'name': 'bar.py',
}
def test_nonascii_directory(self):
# https://github.com/nedbat/coveragepy/issues/573
self.make_file("테스트/program.py", "a = 1")
with change_dir("테스트"):
cov = coverage.Coverage()
self.start_import_stop(cov, "program")
cov.xml_report()
def unbackslash(v):
"""Find strings in `v`, and replace backslashes with slashes throughout."""
if isinstance(v, (tuple, list)):
return [unbackslash(vv) for vv in v]
elif isinstance(v, dict):
return {k: unbackslash(vv) for k, vv in v.items()}
else:
assert isinstance(v, str)
return v.replace("\\", "/")
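# For example, unbackslash({"p": r"sub\doit.py"}) == {"p": "sub/doit.py"};
# tuples, lists and dicts are converted recursively.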
class XmlPackageStructureTest(XmlTestHelpers, CoverageTest):
"""Tests about the package structure reported in the coverage.xml file."""
def package_and_class_tags(self, cov):
"""Run an XML report on `cov`, and get the package and class tags."""
cov.xml_report()
dom = ElementTree.parse("coverage.xml")
for node in dom.iter():
if node.tag in ('package', 'class'):
yield (node.tag, {a:v for a,v in node.items() if a in ('name', 'filename')})
def assert_package_and_class_tags(self, cov, result):
"""Check the XML package and class tags from `cov` match `result`."""
self.assertEqual(
unbackslash(list(self.package_and_class_tags(cov))),
unbackslash(result),
)
def test_package_names(self):
self.make_tree(width=1, depth=3)
self.make_file("main.py", """\
from d0.d0 import f0
""")
cov = coverage.Coverage(source=".")
self.start_import_stop(cov, "main")
self.assert_package_and_class_tags(cov, [
('package', {'name': "."}),
('class', {'filename': "main.py", 'name': "main.py"}),
('package', {'name': "d0"}),
('class', {'filename': "d0/__init__.py", 'name': "__init__.py"}),
('class', {'filename': "d0/f0.py", 'name': "f0.py"}),
('package', {'name': "d0.d0"}),
('class', {'filename': "d0/d0/__init__.py", 'name': "__init__.py"}),
('class', {'filename': "d0/d0/f0.py", 'name': "f0.py"}),
])
def test_package_depth_1(self):
self.make_tree(width=1, depth=4)
self.make_file("main.py", """\
from d0.d0 import f0
""")
cov = coverage.Coverage(source=".")
self.start_import_stop(cov, "main")
cov.set_option("xml:package_depth", 1)
self.assert_package_and_class_tags(cov, [
('package', {'name': "."}),
('class', {'filename': "main.py", 'name': "main.py"}),
('package', {'name': "d0"}),
('class', {'filename': "d0/__init__.py", 'name': "__init__.py"}),
('class', {'filename': "d0/d0/__init__.py", 'name': "d0/__init__.py"}),
('class', {'filename': "d0/d0/d0/__init__.py", 'name': "d0/d0/__init__.py"}),
('class', {'filename': "d0/d0/d0/f0.py", 'name': "d0/d0/f0.py"}),
('class', {'filename': "d0/d0/f0.py", 'name': "d0/f0.py"}),
('class', {'filename': "d0/f0.py", 'name': "f0.py"}),
])
def test_package_depth_2(self):
self.make_tree(width=1, depth=4)
self.make_file("main.py", """\
from d0.d0 import f0
""")
cov = coverage.Coverage(source=".")
self.start_import_stop(cov, "main")
cov.set_option("xml:package_depth", 2)
self.assert_package_and_class_tags(cov, [
('package', {'name': "."}),
('class', {'filename': "main.py", 'name': "main.py"}),
('package', {'name': "d0"}),
('class', {'filename': "d0/__init__.py", 'name': "__init__.py"}),
('class', {'filename': "d0/f0.py", 'name': "f0.py"}),
('package', {'name': "d0.d0"}),
('class', {'filename': "d0/d0/__init__.py", 'name': "__init__.py"}),
('class', {'filename': "d0/d0/d0/__init__.py", 'name': "d0/__init__.py"}),
('class', {'filename': "d0/d0/d0/f0.py", 'name': "d0/f0.py"}),
('class', {'filename': "d0/d0/f0.py", 'name': "f0.py"}),
])
def test_package_depth_3(self):
self.make_tree(width=1, depth=4)
self.make_file("main.py", """\
from d0.d0 import f0
""")
cov = coverage.Coverage(source=".")
self.start_import_stop(cov, "main")
cov.set_option("xml:package_depth", 3)
self.assert_package_and_class_tags(cov, [
('package', {'name': "."}),
('class', {'filename': "main.py", 'name': "main.py"}),
('package', {'name': "d0"}),
('class', {'filename': "d0/__init__.py", 'name': "__init__.py"}),
('class', {'filename': "d0/f0.py", 'name': "f0.py"}),
('package', {'name': "d0.d0"}),
('class', {'filename': "d0/d0/__init__.py", 'name': "__init__.py"}),
('class', {'filename': "d0/d0/f0.py", 'name': "f0.py"}),
('package', {'name': "d0.d0.d0"}),
('class', {'filename': "d0/d0/d0/__init__.py", 'name': "__init__.py"}),
('class', {'filename': "d0/d0/d0/f0.py", 'name': "f0.py"}),
])
def test_source_prefix(self):
# https://github.com/nedbat/coveragepy/issues/465
# https://github.com/nedbat/coveragepy/issues/526
self.make_file("src/mod.py", "print(17)")
cov = coverage.Coverage(source=["src"])
self.start_import_stop(cov, "mod", modfile="src/mod.py")
self.assert_package_and_class_tags(cov, [
('package', {'name': "."}),
('class', {'filename': "mod.py", 'name': "mod.py"}),
])
dom = ElementTree.parse("coverage.xml")
self.assert_source(dom, "src")
def test_relative_source(self):
self.make_file("src/mod.py", "print(17)")
cov = coverage.Coverage(source=["src"])
cov.set_option("run:relative_files", True)
self.start_import_stop(cov, "mod", modfile="src/mod.py")
cov.xml_report()
with open("coverage.xml") as x:
print(x.read())
dom = ElementTree.parse("coverage.xml")
elts = dom.findall(".//sources/source")
assert [elt.text for elt in elts] == ["src"]
def compare_xml(expected, actual, **kwargs):
"""Specialized compare function for our XML files."""
source_path = coverage.files.relative_directory().rstrip(r"\/")
scrubs=[
(r' timestamp="\d+"', ' timestamp="TIMESTAMP"'),
(r' version="[-.\w]+"', ' version="VERSION"'),
(r'<source>\s*.*?\s*</source>', '<source>%s</source>' % re.escape(source_path)),
(r'/coverage.readthedocs.io/?[-.\w/]*', '/coverage.readthedocs.io/VER'),
]
compare(expected, actual, scrubs=scrubs, **kwargs)
class XmlGoldTest(CoverageTest):
"""Tests of XML reporting that use gold files."""
def test_a_xml_1(self):
self.make_file("a.py", """\
if 1 < 2:
# Needed a < to look at HTML entities.
a = 3
else:
a = 4
""")
cov = coverage.Coverage()
a = self.start_import_stop(cov, "a")
cov.xml_report(a, outfile="coverage.xml")
compare_xml(gold_path("xml/x_xml"), ".", actual_extra=True)
def test_a_xml_2(self):
self.make_file("a.py", """\
if 1 < 2:
# Needed a < to look at HTML entities.
a = 3
else:
a = 4
""")
self.make_file("run_a_xml_2.ini", """\
# Put all the XML output in xml_2
[xml]
output = xml_2/coverage.xml
""")
cov = coverage.Coverage(config_file="run_a_xml_2.ini")
a = self.start_import_stop(cov, "a")
cov.xml_report(a)
compare_xml(gold_path("xml/x_xml"), "xml_2")
def test_y_xml_branch(self):
self.make_file("y.py", """\
def choice(x):
if x < 2:
return 3
else:
return 4
assert choice(1) == 3
""")
cov = coverage.Coverage(branch=True)
y = self.start_import_stop(cov, "y")
cov.xml_report(y, outfile="y_xml_branch/coverage.xml")
compare_xml(gold_path("xml/y_xml_branch"), "y_xml_branch")
|
import os
import json
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from fluentd import FluentdCollector
dirname = os.path.dirname(__file__)
fixtures_path = os.path.join(dirname, 'fixtures/')
class TestFluentdCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('FluentdCollector', {
'interval': 10,
'collect': {
'kinesis': [
'buffer_queue_length',
'buffer_total_queued_size',
'retry_count'
]
}
})
self.collector = FluentdCollector(config, None)
def test_import(self):
self.assertTrue(FluentdCollector)
def test_api_output_parse(self):
        with open(os.path.join(fixtures_path, "example.stat")) as f:
            stat = json.loads(f.read())
        self.assertEqual(len(self.collector.parse_api_output(stat)), 3)
def test_api_output_parse_empty(self):
        with open(os.path.join(fixtures_path, "example_empty.stat")) as f:
            stat = json.loads(f.read())
        self.assertEqual(len(self.collector.parse_api_output(stat)), 0)
if __name__ == "__main__":
unittest.main()
|
import fnmatch
import re
from typing import Callable, Dict, List, Pattern
import voluptuous as vol
from homeassistant.const import CONF_DOMAINS, CONF_ENTITIES, CONF_EXCLUDE, CONF_INCLUDE
from homeassistant.core import split_entity_id
from homeassistant.helpers import config_validation as cv
CONF_INCLUDE_DOMAINS = "include_domains"
CONF_INCLUDE_ENTITY_GLOBS = "include_entity_globs"
CONF_INCLUDE_ENTITIES = "include_entities"
CONF_EXCLUDE_DOMAINS = "exclude_domains"
CONF_EXCLUDE_ENTITY_GLOBS = "exclude_entity_globs"
CONF_EXCLUDE_ENTITIES = "exclude_entities"
CONF_ENTITY_GLOBS = "entity_globs"
def convert_filter(config: Dict[str, List[str]]) -> Callable[[str], bool]:
"""Convert the filter schema into a filter."""
filt = generate_filter(
config[CONF_INCLUDE_DOMAINS],
config[CONF_INCLUDE_ENTITIES],
config[CONF_EXCLUDE_DOMAINS],
config[CONF_EXCLUDE_ENTITIES],
config[CONF_INCLUDE_ENTITY_GLOBS],
config[CONF_EXCLUDE_ENTITY_GLOBS],
)
setattr(filt, "config", config)
setattr(filt, "empty_filter", sum(len(val) for val in config.values()) == 0)
return filt
BASE_FILTER_SCHEMA = vol.Schema(
{
vol.Optional(CONF_EXCLUDE_DOMAINS, default=[]): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_EXCLUDE_ENTITY_GLOBS, default=[]): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_EXCLUDE_ENTITIES, default=[]): cv.entity_ids,
vol.Optional(CONF_INCLUDE_DOMAINS, default=[]): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_INCLUDE_ENTITY_GLOBS, default=[]): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_INCLUDE_ENTITIES, default=[]): cv.entity_ids,
}
)
FILTER_SCHEMA = vol.All(BASE_FILTER_SCHEMA, convert_filter)
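# Illustrative sketch (not part of the original module): validating a config
# mapping through FILTER_SCHEMA yields a callable filter; omitted keys fall
# back to the defaults above.
#
#   filt = FILTER_SCHEMA({"include_domains": ["light"], "exclude_entities": ["light.porch"]})
#   filt("light.kitchen")  # True - domain included and entity not excluded
#   filt("light.porch")    # False - explicitly excluded
#   filt("switch.tv")      # False - not included by domain, glob or entity id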
def convert_include_exclude_filter(
config: Dict[str, Dict[str, List[str]]]
) -> Callable[[str], bool]:
"""Convert the include exclude filter schema into a filter."""
include = config[CONF_INCLUDE]
exclude = config[CONF_EXCLUDE]
filt = convert_filter(
{
CONF_INCLUDE_DOMAINS: include[CONF_DOMAINS],
CONF_INCLUDE_ENTITY_GLOBS: include[CONF_ENTITY_GLOBS],
CONF_INCLUDE_ENTITIES: include[CONF_ENTITIES],
CONF_EXCLUDE_DOMAINS: exclude[CONF_DOMAINS],
CONF_EXCLUDE_ENTITY_GLOBS: exclude[CONF_ENTITY_GLOBS],
CONF_EXCLUDE_ENTITIES: exclude[CONF_ENTITIES],
}
)
setattr(filt, "config", config)
return filt
INCLUDE_EXCLUDE_FILTER_SCHEMA_INNER = vol.Schema(
{
vol.Optional(CONF_DOMAINS, default=[]): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_ENTITY_GLOBS, default=[]): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_ENTITIES, default=[]): cv.entity_ids,
}
)
INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA = vol.Schema(
{
vol.Optional(
CONF_INCLUDE, default=INCLUDE_EXCLUDE_FILTER_SCHEMA_INNER({})
): INCLUDE_EXCLUDE_FILTER_SCHEMA_INNER,
vol.Optional(
CONF_EXCLUDE, default=INCLUDE_EXCLUDE_FILTER_SCHEMA_INNER({})
): INCLUDE_EXCLUDE_FILTER_SCHEMA_INNER,
}
)
INCLUDE_EXCLUDE_FILTER_SCHEMA = vol.All(
INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA, convert_include_exclude_filter
)
def _glob_to_re(glob: str) -> Pattern[str]:
"""Translate and compile glob string into pattern."""
return re.compile(fnmatch.translate(glob))
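# For example, _glob_to_re("sensor.weather_*") compiles a pattern that matches
# "sensor.weather_temperature" but not "sensor.indoor_temperature".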
def _test_against_patterns(patterns: List[Pattern[str]], entity_id: str) -> bool:
"""Test entity against list of patterns, true if any match."""
for pattern in patterns:
if pattern.match(entity_id):
return True
return False
# The mutable defaults are safe since we never modify them, and using None
# instead causes typing warnings.
# pylint: disable=dangerous-default-value
def generate_filter(
include_domains: List[str],
include_entities: List[str],
exclude_domains: List[str],
exclude_entities: List[str],
include_entity_globs: List[str] = [],
exclude_entity_globs: List[str] = [],
) -> Callable[[str], bool]:
"""Return a function that will filter entities based on the args."""
include_d = set(include_domains)
include_e = set(include_entities)
exclude_d = set(exclude_domains)
exclude_e = set(exclude_entities)
include_eg_set = set(include_entity_globs)
exclude_eg_set = set(exclude_entity_globs)
include_eg = list(map(_glob_to_re, include_eg_set))
exclude_eg = list(map(_glob_to_re, exclude_eg_set))
have_exclude = bool(exclude_e or exclude_d or exclude_eg)
have_include = bool(include_e or include_d or include_eg)
def entity_included(domain: str, entity_id: str) -> bool:
"""Return true if entity matches inclusion filters."""
return (
entity_id in include_e
or domain in include_d
or bool(include_eg and _test_against_patterns(include_eg, entity_id))
)
def entity_excluded(domain: str, entity_id: str) -> bool:
"""Return true if entity matches exclusion filters."""
return (
entity_id in exclude_e
or domain in exclude_d
or bool(exclude_eg and _test_against_patterns(exclude_eg, entity_id))
)
# Case 1 - no includes or excludes - pass all entities
if not have_include and not have_exclude:
return lambda entity_id: True
# Case 2 - includes, no excludes - only include specified entities
if have_include and not have_exclude:
def entity_filter_2(entity_id: str) -> bool:
"""Return filter function for case 2."""
domain = split_entity_id(entity_id)[0]
return entity_included(domain, entity_id)
return entity_filter_2
# Case 3 - excludes, no includes - only exclude specified entities
if not have_include and have_exclude:
def entity_filter_3(entity_id: str) -> bool:
"""Return filter function for case 3."""
domain = split_entity_id(entity_id)[0]
return not entity_excluded(domain, entity_id)
return entity_filter_3
# Case 4 - both includes and excludes specified
# Case 4a - include domain or glob specified
# - if domain is included, pass if entity not excluded
# - if glob is included, pass if entity and domain not excluded
# - if domain and glob are not included, pass if entity is included
    # note: if the include domain matches, exclude domains are ignored (exclude
    #   entities and globs still apply); if only an include glob matches, exclude
    #   domains and globs are checked as well
if include_d or include_eg:
def entity_filter_4a(entity_id: str) -> bool:
"""Return filter function for case 4a."""
domain = split_entity_id(entity_id)[0]
if domain in include_d:
return not (
entity_id in exclude_e
or bool(
exclude_eg and _test_against_patterns(exclude_eg, entity_id)
)
)
if _test_against_patterns(include_eg, entity_id):
return not entity_excluded(domain, entity_id)
return entity_id in include_e
return entity_filter_4a
# Case 4b - exclude domain or glob specified, include has no domain or glob
    # In this one case the traditional include logic is inverted. Even though an
    # include is specified, since it is only a list of entity IDs it is used only
    # to expose specific entities that are otherwise excluded by domain or glob.
    # Any entities not excluded are then presumed included (see the illustrative
    # sketch after this function). Logic is as follows:
# - if domain or glob is excluded, pass if entity is included
# - if domain is not excluded, pass if entity not excluded by ID
if exclude_d or exclude_eg:
def entity_filter_4b(entity_id: str) -> bool:
"""Return filter function for case 4b."""
domain = split_entity_id(entity_id)[0]
if domain in exclude_d or (
exclude_eg and _test_against_patterns(exclude_eg, entity_id)
):
return entity_id in include_e
return entity_id not in exclude_e
return entity_filter_4b
# Case 4c - neither include or exclude domain specified
# - Only pass if entity is included. Ignore entity excludes.
return lambda entity_id: entity_id in include_e
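# Illustrative sketch of the inverted "case 4b" logic described above, with a
# hypothetical exclude domain and include entity list:
#
#   filt = generate_filter([], ["light.porch"], ["light"], [])
#   filt("light.porch")    # True - excluded by domain, but explicitly included
#   filt("light.kitchen")  # False - excluded by domain
#   filt("switch.tv")      # True - not excluded anywhere, so presumed included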
|
from shop.admin.order import BaseOrderAdmin, OrderPaymentInline
class OrderAdmin(BaseOrderAdmin):
"""
Admin class to be used for Order model :class:`shop.models.defaults.order`
"""
def get_fields(self, request, obj=None):
fields = list(super().get_fields(request, obj))
fields.extend(['shipping_address_text', 'billing_address_text'])
return fields
def get_readonly_fields(self, request, obj=None):
readonly_fields = list(super().get_readonly_fields(request, obj))
readonly_fields.extend(['shipping_address_text', 'billing_address_text'])
return readonly_fields
def get_search_fields(self, request):
search_fields = list(super().get_search_fields(request))
search_fields.extend(['number', 'shipping_address_text', 'billing_address_text'])
return search_fields
def get_inline_instances(self, request, obj=None):
inline_instances = list(super().get_inline_instances(request, obj))
inline_instances.append(OrderPaymentInline(self.model, self.admin_site))
return inline_instances
|
import argparse
import copy
import multiprocessing
import numpy as np
import chainer
import chainer.functions as F
import chainer.links as L
from chainer import training
from chainer.training import extensions
from chainer.training.extensions import PolynomialShift
from chainercv.datasets import ade20k_semantic_segmentation_label_names
from chainercv.datasets import ADE20KSemanticSegmentationDataset
from chainercv.datasets import cityscapes_semantic_segmentation_label_names
from chainercv.datasets import CityscapesSemanticSegmentationDataset
from chainercv.experimental.links import PSPNetResNet101
from chainercv.experimental.links import PSPNetResNet50
from chainercv.chainer_experimental.datasets.sliceable import TransformDataset
from chainercv.extensions import SemanticSegmentationEvaluator
from chainercv.links import Conv2DBNActiv
from chainercv import transforms
from chainercv.links.model.ssd import GradientScaling
import PIL
import chainermn
# https://docs.chainer.org/en/stable/tips.html#my-training-process-gets-stuck-when-using-multiprocessiterator
try:
import cv2
cv2.setNumThreads(0)
except ImportError:
pass
def create_mnbn_model(link, comm):
"""Returns a copy of a model with BN replaced by Multi-node BN."""
if isinstance(link, chainer.links.BatchNormalization):
mnbn = chainermn.links.MultiNodeBatchNormalization(
size=link.avg_mean.shape,
comm=comm,
decay=link.decay,
eps=link.eps,
dtype=link.avg_mean.dtype,
use_gamma=hasattr(link, 'gamma'),
use_beta=hasattr(link, 'beta'),
)
mnbn.copyparams(link)
for name in link._persistent:
mnbn.__dict__[name] = copy.deepcopy(link.__dict__[name])
return mnbn
elif isinstance(link, chainer.Chain):
new_children = [
(child_name, create_mnbn_model(
link.__dict__[child_name], comm))
for child_name in link._children
]
new_link = copy.deepcopy(link)
for name, new_child in new_children:
new_link.__dict__[name] = new_child
return new_link
elif isinstance(link, chainer.ChainList):
new_children = [
create_mnbn_model(l, comm) for l in link]
new_link = copy.deepcopy(link)
for i, new_child in enumerate(new_children):
new_link._children[i] = new_child
return new_link
else:
return copy.deepcopy(link)
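# Illustrative usage, mirroring main() below: wrapping the whole training chain
# once replaces every BatchNormalization link with MultiNodeBatchNormalization.
#   train_chain = create_mnbn_model(TrainChain(model), comm)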
class Transform(object):
def __init__(
self, mean,
crop_size, scale_range=[0.5, 2.0]):
self.mean = mean
self.scale_range = scale_range
self.crop_size = crop_size
def __call__(self, in_data):
img, label = in_data
_, H, W = img.shape
scale = np.random.uniform(self.scale_range[0], self.scale_range[1])
# Scale
scaled_H = int(scale * H)
scaled_W = int(scale * W)
img = transforms.resize(img, (scaled_H, scaled_W), PIL.Image.BICUBIC)
label = transforms.resize(
label[None], (scaled_H, scaled_W), PIL.Image.NEAREST)[0]
# Crop
if (scaled_H < self.crop_size[0]) or (scaled_W < self.crop_size[1]):
shorter_side = min(img.shape[1:])
img, param = transforms.random_crop(
img, (shorter_side, shorter_side), True)
else:
img, param = transforms.random_crop(img, self.crop_size, True)
label = label[param['y_slice'], param['x_slice']]
# Rotate
angle = np.random.uniform(-10, 10)
img = transforms.rotate(img, angle, expand=False)
label = transforms.rotate(
label[None], angle, expand=False,
interpolation=PIL.Image.NEAREST,
fill=-1)[0]
# Resize
if ((img.shape[1] < self.crop_size[0])
or (img.shape[2] < self.crop_size[1])):
img = transforms.resize(img, self.crop_size, PIL.Image.BICUBIC)
if ((label.shape[0] < self.crop_size[0])
or (label.shape[1] < self.crop_size[1])):
label = transforms.resize(
label[None].astype(np.float32),
self.crop_size, PIL.Image.NEAREST)
label = label.astype(np.int32)[0]
# Horizontal flip
if np.random.rand() > 0.5:
img = transforms.flip(img, x_flip=True)
label = transforms.flip(label[None], x_flip=True)[0]
# Mean subtraction
img = img - self.mean
return img, label
class TrainChain(chainer.Chain):
def __init__(self, model):
initialW = chainer.initializers.HeNormal()
super(TrainChain, self).__init__()
with self.init_scope():
self.model = model
self.aux_conv1 = Conv2DBNActiv(
None, 512, 3, 1, 1, initialW=initialW)
self.aux_conv2 = L.Convolution2D(
None, model.n_class, 3, 1, 1, False, initialW=initialW)
def forward(self, imgs, labels):
h_aux, h_main = self.model.extractor(imgs)
h_aux = F.dropout(self.aux_conv1(h_aux), ratio=0.1)
h_aux = self.aux_conv2(h_aux)
h_aux = F.resize_images(h_aux, imgs.shape[2:])
h_main = self.model.ppm(h_main)
h_main = F.dropout(self.model.head_conv1(h_main), ratio=0.1)
h_main = self.model.head_conv2(h_main)
h_main = F.resize_images(h_main, imgs.shape[2:])
aux_loss = F.softmax_cross_entropy(h_aux, labels)
main_loss = F.softmax_cross_entropy(h_main, labels)
loss = 0.4 * aux_loss + main_loss
chainer.reporter.report({'loss': loss}, self)
return loss
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--data-dir', default='auto')
parser.add_argument('--dataset',
choices=('ade20k', 'cityscapes'))
parser.add_argument('--model',
choices=('pspnet_resnet101', 'pspnet_resnet50'))
parser.add_argument('--lr', default=1e-2, type=float)
parser.add_argument('--batchsize', default=2, type=int)
parser.add_argument('--out', default='result')
parser.add_argument('--iteration', default=None, type=int)
parser.add_argument('--communicator', default='pure_nccl')
args = parser.parse_args()
dataset_cfgs = {
'ade20k': {
'input_size': (473, 473),
'label_names': ade20k_semantic_segmentation_label_names,
'iteration': 150000},
'cityscapes': {
'input_size': (713, 713),
'label_names': cityscapes_semantic_segmentation_label_names,
'iteration': 90000}
}
dataset_cfg = dataset_cfgs[args.dataset]
# https://docs.chainer.org/en/stable/chainermn/tutorial/tips_faqs.html#using-multiprocessiterator
if hasattr(multiprocessing, 'set_start_method'):
multiprocessing.set_start_method('forkserver')
p = multiprocessing.Process()
p.start()
p.join()
comm = chainermn.create_communicator(args.communicator)
device = comm.intra_rank
n_class = len(dataset_cfg['label_names'])
if args.model == 'pspnet_resnet101':
model = PSPNetResNet101(
n_class, pretrained_model='imagenet',
input_size=dataset_cfg['input_size'])
elif args.model == 'pspnet_resnet50':
model = PSPNetResNet50(
n_class, pretrained_model='imagenet',
input_size=dataset_cfg['input_size'])
train_chain = create_mnbn_model(TrainChain(model), comm)
model = train_chain.model
if device >= 0:
chainer.cuda.get_device_from_id(device).use()
train_chain.to_gpu()
if args.iteration is None:
n_iter = dataset_cfg['iteration']
else:
n_iter = args.iteration
if args.dataset == 'ade20k':
train = ADE20KSemanticSegmentationDataset(
data_dir=args.data_dir, split='train')
if comm.rank == 0:
val = ADE20KSemanticSegmentationDataset(
data_dir=args.data_dir, split='val')
label_names = ade20k_semantic_segmentation_label_names
elif args.dataset == 'cityscapes':
train = CityscapesSemanticSegmentationDataset(
args.data_dir,
label_resolution='fine', split='train')
if comm.rank == 0:
val = CityscapesSemanticSegmentationDataset(
args.data_dir,
label_resolution='fine', split='val')
label_names = cityscapes_semantic_segmentation_label_names
train = TransformDataset(
train,
('img', 'label'),
Transform(model.mean, dataset_cfg['input_size']))
if comm.rank == 0:
indices = np.arange(len(train))
else:
indices = None
indices = chainermn.scatter_dataset(indices, comm, shuffle=True)
train = train.slice[indices]
train_iter = chainer.iterators.MultiprocessIterator(
train, batch_size=args.batchsize, n_processes=2)
optimizer = chainermn.create_multi_node_optimizer(
chainer.optimizers.MomentumSGD(args.lr, 0.9), comm)
optimizer.setup(train_chain)
for param in train_chain.params():
if param.name not in ('beta', 'gamma'):
param.update_rule.add_hook(chainer.optimizer.WeightDecay(1e-4))
for l in [
model.ppm, model.head_conv1, model.head_conv2,
train_chain.aux_conv1, train_chain.aux_conv2]:
for param in l.params():
param.update_rule.add_hook(GradientScaling(10))
updater = training.updaters.StandardUpdater(
train_iter, optimizer, device=device)
trainer = training.Trainer(updater, (n_iter, 'iteration'), args.out)
trainer.extend(
PolynomialShift('lr', 0.9, n_iter, optimizer=optimizer),
trigger=(1, 'iteration'))
log_interval = 10, 'iteration'
if comm.rank == 0:
trainer.extend(extensions.LogReport(trigger=log_interval))
trainer.extend(extensions.observe_lr(), trigger=log_interval)
trainer.extend(extensions.PrintReport(
['epoch', 'iteration', 'elapsed_time', 'lr', 'main/loss',
'validation/main/miou', 'validation/main/mean_class_accuracy',
'validation/main/pixel_accuracy']),
trigger=log_interval)
trainer.extend(extensions.ProgressBar(update_interval=10))
trainer.extend(
extensions.snapshot_object(
train_chain.model, 'snapshot_model_{.updater.iteration}.npz'),
trigger=(n_iter, 'iteration'))
val_iter = chainer.iterators.SerialIterator(
val, batch_size=1, repeat=False, shuffle=False)
trainer.extend(
SemanticSegmentationEvaluator(
val_iter, model,
label_names),
trigger=(n_iter, 'iteration'))
trainer.run()
if __name__ == '__main__':
main()
|
from unittest import TestCase
from scattertext.AsianNLP import chinese_nlp, japanese_nlp
class TestAsianNLP(TestCase):
def setUp(self):
self.chinese_text = u'''总理主持召开的这场座谈会,旨在听取专家学者和企业界人士对《政府工作报告(征求意见稿)》的意见建议。胡玮炜和另外6名不同专业、领域的专家、企业家受邀参加。\n李克强对胡玮炜的细致提问始终围绕虚拟经济和实体经济,以及新旧动能转换。他关心自行车制造材料,也询问所采用的互联网技术。 “摩拜单车听起来是经营方式的革命,但基础还是自行车,还是要靠实体经济支撑。反过来,实体经济也要靠服务变革来带动。”总理指出。\n当听到这家共享智能单车企业在不到一年的时间发展为拥有80万辆自行车的规模后,李克强充分肯定此类互联网企业对实体经济的带动作用,“某个自行车企业可能就被你带活了,新兴服务业的发展给制造业创造了巨大的市场空间”。\n相关资料显示,受益于共享单车,国内传统的自行车产业正在迎来春天,至少带动了160万辆以上自行车的制造生产。甚至有生产自行车零部件的上市公司因此股票涨停。美国彭博新闻社网站注意到这一现象,评价说“中国正重新成为自行车大国”。'''
self.japanese_text = u'''(淸實《きよざね》)私共《わたくしども》は、唯《たゞ》君《きみ》の仰《おほ》せのままに、此處《こゝ》までお供《とも》致《いた》して參《まゐ》つたのでござります。丁度《ちやうど》今日《けふ》の午頃《ひるごろ》のこと、わが君《きみ》には急《きふ》に靑褪《あをざ》めた顏《かほ》をなすつて、「都《みやこ》に居《ゐ》ては命《いのち》が危《あやう》い故《ゆゑ》、一刻《いつこく》も早《はや》くわしを何處《どこ》かの山奧《やまおく》へ伴《つ》れて行《い》つて、隱《かく》してくれい。」と仰《おつ》しやりました。それで私共《わたくしども》は取《と》る物《もの》も取《と》り敢《あ》へず、深《ふか》い仔細《しさい》も承《うけたまは》らずに、君《きみ》をお伴《つ》れ申《まを》して、一《ひ》と先《ま》づ田原《たはら》の奧《おく》の大道寺《だいだうじ》の所領《しよりやう》まで逃《に》げのびたのでござりました。すると君《きみ》には、「いや、まだ此處《こゝ》では安心《あんしん》が出來《でき》ない。もつと人里《ひとざと》を離《はな》れた、もつと寂《さび》しい處《ところ》へ行《ゆ》かねばならぬ。」と仰《おつ》しやつて、たうとうこんな山奧《やまおく》へ參《まゐ》つたのでござります。'''
def test_chinese(self):
try:
doc = chinese_nlp(self.chinese_text)
except:
return
sent1 = doc.sents[0]
self.assertEqual(str(sent1), u'总理 主持 召开 的 这场 座谈会 , 旨在 听取 专家学者 和 企业界 人士 对 《 政府 工作 报告 ( 征求意见 稿 ) 》 的 意见建议 。')
self.assertEqual(len(doc.sents), 11)
def test_japanese(self):
try:
__import__('tinysegmenter')
except ImportError:
return
doc = japanese_nlp(self.japanese_text)
sent1 = doc.sents[0]
self.assertGreater(len(str(sent1)), 10)
self.assertEqual(len(doc.sents), 7)
|
from datetime import timedelta
import logging
from typing import Any, Callable, Dict, List, Optional
from sonarr import Sonarr, SonarrConnectionError, SonarrError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import DATA_GIGABYTES
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.typing import HomeAssistantType
import homeassistant.util.dt as dt_util
from . import SonarrEntity
from .const import CONF_UPCOMING_DAYS, CONF_WANTED_MAX_ITEMS, DATA_SONARR, DOMAIN
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
hass: HomeAssistantType,
entry: ConfigEntry,
async_add_entities: Callable[[List[Entity], bool], None],
) -> None:
"""Set up Sonarr sensors based on a config entry."""
options = entry.options
sonarr = hass.data[DOMAIN][entry.entry_id][DATA_SONARR]
entities = [
SonarrCommandsSensor(sonarr, entry.entry_id),
SonarrDiskspaceSensor(sonarr, entry.entry_id),
SonarrQueueSensor(sonarr, entry.entry_id),
SonarrSeriesSensor(sonarr, entry.entry_id),
SonarrUpcomingSensor(sonarr, entry.entry_id, days=options[CONF_UPCOMING_DAYS]),
SonarrWantedSensor(
sonarr, entry.entry_id, max_items=options[CONF_WANTED_MAX_ITEMS]
),
]
async_add_entities(entities, True)
def sonarr_exception_handler(func):
"""Decorate Sonarr calls to handle Sonarr exceptions.
    A decorator that wraps the passed-in function, catches Sonarr errors,
and handles the availability of the entity.
"""
async def handler(self, *args, **kwargs):
try:
await func(self, *args, **kwargs)
self.last_update_success = True
except SonarrConnectionError as error:
if self.available:
_LOGGER.error("Error communicating with API: %s", error)
self.last_update_success = False
except SonarrError as error:
if self.available:
_LOGGER.error("Invalid response from API: %s", error)
self.last_update_success = False
return handler
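# Applied below as @sonarr_exception_handler on each sensor's async_update, so a
# failed poll marks the entity unavailable instead of raising.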
class SonarrSensor(SonarrEntity):
"""Implementation of the Sonarr sensor."""
def __init__(
self,
*,
sonarr: Sonarr,
entry_id: str,
enabled_default: bool = True,
icon: str,
key: str,
name: str,
unit_of_measurement: Optional[str] = None,
) -> None:
"""Initialize Sonarr sensor."""
self._unit_of_measurement = unit_of_measurement
self._key = key
self._unique_id = f"{entry_id}_{key}"
self.last_update_success = False
super().__init__(
sonarr=sonarr,
entry_id=entry_id,
device_id=entry_id,
name=name,
icon=icon,
enabled_default=enabled_default,
)
@property
def unique_id(self) -> str:
"""Return the unique ID for this sensor."""
return self._unique_id
@property
def available(self) -> bool:
"""Return sensor availability."""
return self.last_update_success
@property
def unit_of_measurement(self) -> str:
"""Return the unit this state is expressed in."""
return self._unit_of_measurement
class SonarrCommandsSensor(SonarrSensor):
"""Defines a Sonarr Commands sensor."""
def __init__(self, sonarr: Sonarr, entry_id: str) -> None:
"""Initialize Sonarr Commands sensor."""
self._commands = []
super().__init__(
sonarr=sonarr,
entry_id=entry_id,
icon="mdi:code-braces",
key="commands",
name=f"{sonarr.app.info.app_name} Commands",
unit_of_measurement="Commands",
enabled_default=False,
)
@sonarr_exception_handler
async def async_update(self) -> None:
"""Update entity."""
self._commands = await self.sonarr.commands()
@property
def device_state_attributes(self) -> Optional[Dict[str, Any]]:
"""Return the state attributes of the entity."""
attrs = {}
for command in self._commands:
attrs[command.name] = command.state
return attrs
@property
def state(self) -> int:
"""Return the state of the sensor."""
return len(self._commands)
class SonarrDiskspaceSensor(SonarrSensor):
"""Defines a Sonarr Disk Space sensor."""
def __init__(self, sonarr: Sonarr, entry_id: str) -> None:
"""Initialize Sonarr Disk Space sensor."""
self._disks = []
self._total_free = 0
super().__init__(
sonarr=sonarr,
entry_id=entry_id,
icon="mdi:harddisk",
key="diskspace",
name=f"{sonarr.app.info.app_name} Disk Space",
unit_of_measurement=DATA_GIGABYTES,
enabled_default=False,
)
@sonarr_exception_handler
async def async_update(self) -> None:
"""Update entity."""
app = await self.sonarr.update()
self._disks = app.disks
self._total_free = sum([disk.free for disk in self._disks])
@property
def device_state_attributes(self) -> Optional[Dict[str, Any]]:
"""Return the state attributes of the entity."""
attrs = {}
for disk in self._disks:
free = disk.free / 1024 ** 3
total = disk.total / 1024 ** 3
usage = free / total * 100
attrs[
disk.path
] = f"{free:.2f}/{total:.2f}{self._unit_of_measurement} ({usage:.2f}%)"
return attrs
@property
def state(self) -> str:
"""Return the state of the sensor."""
free = self._total_free / 1024 ** 3
return f"{free:.2f}"
class SonarrQueueSensor(SonarrSensor):
"""Defines a Sonarr Queue sensor."""
def __init__(self, sonarr: Sonarr, entry_id: str) -> None:
"""Initialize Sonarr Queue sensor."""
self._queue = []
super().__init__(
sonarr=sonarr,
entry_id=entry_id,
icon="mdi:download",
key="queue",
name=f"{sonarr.app.info.app_name} Queue",
unit_of_measurement="Episodes",
enabled_default=False,
)
@sonarr_exception_handler
async def async_update(self) -> None:
"""Update entity."""
self._queue = await self.sonarr.queue()
@property
def device_state_attributes(self) -> Optional[Dict[str, Any]]:
"""Return the state attributes of the entity."""
attrs = {}
for item in self._queue:
remaining = 1 if item.size == 0 else item.size_remaining / item.size
remaining_pct = 100 * (1 - remaining)
name = f"{item.episode.series.title} {item.episode.identifier}"
attrs[name] = f"{remaining_pct:.2f}%"
return attrs
@property
def state(self) -> int:
"""Return the state of the sensor."""
return len(self._queue)
class SonarrSeriesSensor(SonarrSensor):
"""Defines a Sonarr Series sensor."""
def __init__(self, sonarr: Sonarr, entry_id: str) -> None:
"""Initialize Sonarr Series sensor."""
self._items = []
super().__init__(
sonarr=sonarr,
entry_id=entry_id,
icon="mdi:television",
key="series",
name=f"{sonarr.app.info.app_name} Shows",
unit_of_measurement="Series",
enabled_default=False,
)
@sonarr_exception_handler
async def async_update(self) -> None:
"""Update entity."""
self._items = await self.sonarr.series()
@property
def device_state_attributes(self) -> Optional[Dict[str, Any]]:
"""Return the state attributes of the entity."""
attrs = {}
for item in self._items:
attrs[item.series.title] = f"{item.downloaded}/{item.episodes} Episodes"
return attrs
@property
def state(self) -> int:
"""Return the state of the sensor."""
return len(self._items)
class SonarrUpcomingSensor(SonarrSensor):
"""Defines a Sonarr Upcoming sensor."""
def __init__(self, sonarr: Sonarr, entry_id: str, days: int = 1) -> None:
"""Initialize Sonarr Upcoming sensor."""
self._days = days
self._upcoming = []
super().__init__(
sonarr=sonarr,
entry_id=entry_id,
icon="mdi:television",
key="upcoming",
name=f"{sonarr.app.info.app_name} Upcoming",
unit_of_measurement="Episodes",
)
async def async_added_to_hass(self):
"""Listen for signals."""
await super().async_added_to_hass()
self.async_on_remove(
async_dispatcher_connect(
self.hass,
f"sonarr.{self._entry_id}.entry_options_update",
self.async_update_entry_options,
)
)
@sonarr_exception_handler
async def async_update(self) -> None:
"""Update entity."""
local = dt_util.start_of_local_day().replace(microsecond=0)
start = dt_util.as_utc(local)
end = start + timedelta(days=self._days)
self._upcoming = await self.sonarr.calendar(
start=start.isoformat(), end=end.isoformat()
)
async def async_update_entry_options(self, options: dict) -> None:
"""Update sensor settings when config entry options are update."""
self._days = options[CONF_UPCOMING_DAYS]
@property
def device_state_attributes(self) -> Optional[Dict[str, Any]]:
"""Return the state attributes of the entity."""
attrs = {}
for episode in self._upcoming:
attrs[episode.series.title] = episode.identifier
return attrs
@property
def state(self) -> int:
"""Return the state of the sensor."""
return len(self._upcoming)
class SonarrWantedSensor(SonarrSensor):
"""Defines a Sonarr Wanted sensor."""
def __init__(self, sonarr: Sonarr, entry_id: str, max_items: int = 10) -> None:
"""Initialize Sonarr Wanted sensor."""
self._max_items = max_items
self._results = None
self._total: Optional[int] = None
super().__init__(
sonarr=sonarr,
entry_id=entry_id,
icon="mdi:television",
key="wanted",
name=f"{sonarr.app.info.app_name} Wanted",
unit_of_measurement="Episodes",
enabled_default=False,
)
async def async_added_to_hass(self):
"""Listen for signals."""
await super().async_added_to_hass()
self.async_on_remove(
async_dispatcher_connect(
self.hass,
f"sonarr.{self._entry_id}.entry_options_update",
self.async_update_entry_options,
)
)
@sonarr_exception_handler
async def async_update(self) -> None:
"""Update entity."""
self._results = await self.sonarr.wanted(page_size=self._max_items)
self._total = self._results.total
async def async_update_entry_options(self, options: dict) -> None:
"""Update sensor settings when config entry options are update."""
self._max_items = options[CONF_WANTED_MAX_ITEMS]
@property
def device_state_attributes(self) -> Optional[Dict[str, Any]]:
"""Return the state attributes of the entity."""
attrs = {}
if self._results is not None:
for episode in self._results.episodes:
name = f"{episode.series.title} {episode.identifier}"
attrs[name] = episode.airdate
return attrs
@property
def state(self) -> Optional[int]:
"""Return the state of the sensor."""
return self._total
|
from django.core.exceptions import ImproperlyConfigured
class PrefetchRelatedMixin(object):
"""
    Mixin that allows you to provide a list of relation names
    to be prefetched when the queryset is built.
"""
relation_names = None
def get_queryset(self):
"""
        Check that relation_names is correctly set and
        apply prefetch_related to the queryset with it.
"""
if self.relation_names is None:
raise ImproperlyConfigured(
"'%s' must define 'relation_names'" %
self.__class__.__name__)
if not isinstance(self.relation_names, (tuple, list)):
raise ImproperlyConfigured(
"%s's relation_names property must be a tuple or list." %
self.__class__.__name__)
return super(PrefetchRelatedMixin, self
).get_queryset().prefetch_related(*self.relation_names)
class PrefetchCategoriesAuthorsMixin(PrefetchRelatedMixin):
"""
Mixin for prefetching categories and authors related
to the entries in the queryset.
"""
relation_names = ('categories', 'authors')
|
from django.urls import reverse
from weblate.trans.tests.test_views import ViewTestCase
class ChecksViewTest(ViewTestCase):
"""Testing of check views."""
def test_browse(self):
response = self.client.get(reverse("checks"))
self.assertContains(response, "/same/")
response = self.client.get(reverse("checks"), {"lang": "de"})
self.assertContains(response, "/same/")
response = self.client.get(reverse("checks"), {"project": self.project.slug})
self.assertContains(response, "/same/")
response = self.client.get(
reverse("checks"),
{"project": self.project.slug, "component": self.component.slug},
)
self.assertContains(response, "/same/")
def test_check(self):
response = self.client.get(reverse("show_check", kwargs={"name": "same"}))
self.assertContains(response, "/same/")
response = self.client.get(reverse("show_check", kwargs={"name": "ellipsis"}))
self.assertContains(response, "…")
response = self.client.get(
reverse("show_check", kwargs={"name": "not-existing"})
)
self.assertEqual(response.status_code, 404)
response = self.client.get(
reverse("show_check", kwargs={"name": "same"}),
{"project": self.project.slug},
)
self.assertRedirects(
response,
reverse(
"show_check_project",
kwargs={"name": "same", "project": self.project.slug},
),
)
response = self.client.get(
reverse("show_check", kwargs={"name": "same"}), {"lang": "de"}
)
self.assertContains(response, "/checks/same/test/?lang=de")
def test_project(self):
response = self.client.get(
reverse(
"show_check_project",
kwargs={"name": "same", "project": self.project.slug},
)
)
self.assertContains(response, "/same/")
response = self.client.get(
reverse(
"show_check_project",
kwargs={"name": "same", "project": self.project.slug},
),
{"lang": "cs"},
)
self.assertContains(response, "/same/")
response = self.client.get(
reverse(
"show_check_project",
kwargs={"name": "ellipsis", "project": self.project.slug},
)
)
self.assertContains(response, "…")
response = self.client.get(
reverse(
"show_check_project",
kwargs={"name": "non-existing", "project": self.project.slug},
)
)
self.assertEqual(response.status_code, 404)
def test_component(self):
response = self.client.get(
reverse(
"show_check_component",
kwargs={
"name": "same",
"project": self.project.slug,
"component": self.component.slug,
},
)
)
self.assertContains(response, "/same/")
response = self.client.get(
reverse(
"show_check_component",
kwargs={
"name": "multiple_failures",
"project": self.project.slug,
"component": self.component.slug,
},
)
)
self.assertContains(response, "/multiple_failures/")
response = self.client.get(
reverse(
"show_check_component",
kwargs={
"name": "non-existing",
"project": self.project.slug,
"component": self.component.slug,
},
)
)
self.assertEqual(response.status_code, 404)
|
from __future__ import absolute_import
import unittest
from xml.dom import pulldom
from xml.sax.handler import ContentHandler
from .common_imports import HelperTestCase, make_doctest, BytesIO, _bytes
from lxml import sax
class ETreeSaxTestCase(HelperTestCase):
def test_etree_sax_simple(self):
tree = self.parse('<a>ab<b/>ba</a>')
xml_out = self._saxify_serialize(tree)
self.assertEqual(_bytes('<a>ab<b/>ba</a>'),
xml_out)
def test_etree_sax_double(self):
tree = self.parse('<a>ab<b>bb</b>ba</a>')
xml_out = self._saxify_serialize(tree)
self.assertEqual(_bytes('<a>ab<b>bb</b>ba</a>'),
xml_out)
def test_etree_sax_comment(self):
tree = self.parse('<a>ab<!-- TEST -->ba</a>')
xml_out = self._saxify_serialize(tree)
self.assertEqual(_bytes('<a>abba</a>'),
xml_out)
def test_etree_sax_pi(self):
tree = self.parse('<a>ab<?this and that?>ba</a>')
xml_out = self._saxify_serialize(tree)
self.assertEqual(_bytes('<a>ab<?this and that?>ba</a>'),
xml_out)
def test_etree_sax_comment_root(self):
tree = self.parse('<!-- TEST --><a>ab</a>')
xml_out = self._saxify_serialize(tree)
self.assertEqual(_bytes('<a>ab</a>'),
xml_out)
def test_etree_sax_pi_root(self):
tree = self.parse('<?this and that?><a>ab</a>')
xml_out = self._saxify_serialize(tree)
self.assertEqual(_bytes('<?this and that?><a>ab</a>'),
xml_out)
def test_etree_sax_attributes(self):
tree = self.parse('<a aa="5">ab<b b="5"/>ba</a>')
xml_out = self._saxify_serialize(tree)
self.assertEqual(_bytes('<a aa="5">ab<b b="5"/>ba</a>'),
xml_out)
def test_etree_sax_ns1(self):
tree = self.parse('<a xmlns="bla">ab<b>bb</b>ba</a>')
new_tree = self._saxify_unsaxify(tree)
root = new_tree.getroot()
self.assertEqual('{bla}a',
root.tag)
self.assertEqual('{bla}b',
root[0].tag)
def test_etree_sax_ns2(self):
tree = self.parse('<a xmlns="blaA">ab<b:b xmlns:b="blaB">bb</b:b>ba</a>')
new_tree = self._saxify_unsaxify(tree)
root = new_tree.getroot()
self.assertEqual('{blaA}a',
root.tag)
self.assertEqual('{blaB}b',
root[0].tag)
def test_sax_to_pulldom(self):
tree = self.parse('<a xmlns="blaA">ab<b:b xmlns:b="blaB">bb</b:b>ba</a>')
handler = pulldom.SAX2DOM()
sax.saxify(tree, handler)
dom = handler.document
self.assertEqual('a',
dom.firstChild.localName)
self.assertEqual('blaA',
dom.firstChild.namespaceURI)
self.assertEqual(None,
dom.firstChild.prefix)
children = dom.firstChild.childNodes
self.assertEqual('ab',
children[0].nodeValue)
self.assertEqual('blaB',
children[1].namespaceURI)
self.assertEqual('ba',
children[2].nodeValue)
def test_sax_to_pulldom_multiple_namespaces(self):
tree = self.parse('<a xmlns="blaA" xmlns:a="blaA"></a>')
handler = pulldom.SAX2DOM()
sax.saxify(tree, handler)
dom = handler.document
# With multiple prefix definitions, the node should keep the one
        # that was actually used, even if the others are also valid.
self.assertEqual('a',
dom.firstChild.localName)
self.assertEqual('blaA',
dom.firstChild.namespaceURI)
self.assertEqual(None,
dom.firstChild.prefix)
tree = self.parse('<a:a xmlns="blaA" xmlns:a="blaA"></a:a>')
handler = pulldom.SAX2DOM()
sax.saxify(tree, handler)
dom = handler.document
self.assertEqual('a',
dom.firstChild.localName)
self.assertEqual('blaA',
dom.firstChild.namespaceURI)
self.assertEqual('a',
dom.firstChild.prefix)
def test_element_sax(self):
tree = self.parse('<a><b/></a>')
a = tree.getroot()
b = a[0]
xml_out = self._saxify_serialize(a)
self.assertEqual(_bytes('<a><b/></a>'),
xml_out)
xml_out = self._saxify_serialize(b)
self.assertEqual(_bytes('<b/>'),
xml_out)
def test_element_sax_ns(self):
tree = self.parse('<a:a xmlns:a="blaA"><b/></a:a>')
a = tree.getroot()
b = a[0]
new_tree = self._saxify_unsaxify(a)
root = new_tree.getroot()
self.assertEqual('{blaA}a',
root.tag)
self.assertEqual('b',
root[0].tag)
new_tree = self._saxify_unsaxify(b)
root = new_tree.getroot()
self.assertEqual('b',
root.tag)
self.assertEqual(0,
len(root))
def test_etree_sax_handler_default_ns(self):
handler = sax.ElementTreeContentHandler()
handler.startDocument()
handler.startPrefixMapping(None, 'blaA')
handler.startElementNS(('blaA', 'a'), 'a', {})
handler.startPrefixMapping(None, 'blaB')
handler.startElementNS(('blaB', 'b'), 'b', {})
handler.endElementNS( ('blaB', 'b'), 'b')
handler.endPrefixMapping(None)
handler.startElementNS(('blaA', 'c'), 'c', {})
handler.endElementNS( ('blaA', 'c'), 'c')
handler.endElementNS( ('blaA', 'a'), 'a')
handler.endPrefixMapping(None)
handler.endDocument()
new_tree = handler.etree
root = new_tree.getroot()
self.assertEqual('{blaA}a',
root.tag)
self.assertEqual('{blaB}b',
root[0].tag)
self.assertEqual('{blaA}c',
root[1].tag)
def test_etree_sax_handler_default_ns_None(self):
handler = sax.ElementTreeContentHandler()
handler.startDocument()
handler.startPrefixMapping(None, 'blaA')
handler.startElementNS((None, 'a'), 'a', {})
handler.startPrefixMapping(None, 'blaB')
handler.startElementNS((None, 'b'), 'b', {})
handler.endElementNS( (None, 'b'), 'b')
handler.endPrefixMapping(None)
handler.startElementNS((None, 'c'), 'c', {})
handler.endElementNS( (None, 'c'), 'c')
handler.endElementNS( (None, 'a'), 'a')
handler.endPrefixMapping(None)
handler.endDocument()
new_tree = handler.etree
root = new_tree.getroot()
self.assertEqual('{blaA}a',
root.tag)
self.assertEqual('{blaB}b',
root[0].tag)
self.assertEqual('{blaA}c',
root[1].tag)
def test_etree_sax_redefine_ns(self):
handler = sax.ElementTreeContentHandler()
handler.startDocument()
handler.startPrefixMapping('ns', 'blaA')
handler.startElementNS(('blaA', 'a'), 'ns:a', {})
handler.startPrefixMapping('ns', 'blaB')
handler.startElementNS(('blaB', 'b'), 'ns:b', {})
handler.endElementNS( ('blaB', 'b'), 'ns:b')
handler.endPrefixMapping('ns')
handler.startElementNS(('blaA', 'c'), 'ns:c', {})
handler.endElementNS( ('blaA', 'c'), 'ns:c')
handler.endElementNS( ('blaA', 'a'), 'ns:a')
handler.endPrefixMapping('ns')
handler.endDocument()
new_tree = handler.etree
root = new_tree.getroot()
self.assertEqual('{blaA}a',
root.tag)
self.assertEqual('{blaB}b',
root[0].tag)
self.assertEqual('{blaA}c',
root[1].tag)
def test_etree_sax_no_ns(self):
handler = sax.ElementTreeContentHandler()
handler.startDocument()
handler.startElement('a', {})
handler.startElement('b', {})
handler.endElement('b')
handler.startElement('c') # with empty attributes
handler.endElement('c')
handler.endElement('a')
handler.endDocument()
new_tree = handler.etree
root = new_tree.getroot()
self.assertEqual('a', root.tag)
self.assertEqual('b', root[0].tag)
self.assertEqual('c', root[1].tag)
def test_etree_sax_no_ns_attributes(self):
handler = sax.ElementTreeContentHandler()
handler.startDocument()
handler.startElement('a', {"attr_a1": "a1"})
handler.startElement('b', {"attr_b1": "b1"})
handler.endElement('b')
handler.endElement('a')
handler.endDocument()
new_tree = handler.etree
root = new_tree.getroot()
self.assertEqual('a', root.tag)
self.assertEqual('b', root[0].tag)
self.assertEqual('a1', root.attrib["attr_a1"])
self.assertEqual('b1', root[0].attrib["attr_b1"])
def test_etree_sax_ns_attributes(self):
handler = sax.ElementTreeContentHandler()
handler.startDocument()
self.assertRaises(ValueError,
handler.startElement,
'a', {"blaA:attr_a1": "a1"}
)
def test_etree_sax_error(self):
handler = sax.ElementTreeContentHandler()
handler.startDocument()
handler.startElement('a')
self.assertRaises(sax.SaxError, handler.endElement, 'b')
def test_etree_sax_error2(self):
handler = sax.ElementTreeContentHandler()
handler.startDocument()
handler.startElement('a')
handler.startElement('b')
self.assertRaises(sax.SaxError, handler.endElement, 'a')
def _saxify_unsaxify(self, saxifiable):
handler = sax.ElementTreeContentHandler()
sax.ElementTreeProducer(saxifiable, handler).saxify()
return handler.etree
def _saxify_serialize(self, tree):
new_tree = self._saxify_unsaxify(tree)
f = BytesIO()
new_tree.write(f)
return f.getvalue().replace(_bytes('\n'), _bytes(''))
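# A minimal round-trip sketch of what the helpers above exercise (not part of
# the test suite): lxml.sax.saxify() replays a tree as SAX events, and
# sax.ElementTreeContentHandler rebuilds an equivalent tree from those events.
#
#     from lxml import etree
#     handler = sax.ElementTreeContentHandler()
#     sax.saxify(etree.fromstring('<a>ab<b/>ba</a>'), handler)
#     etree.tostring(handler.etree.getroot())  # -> b'<a>ab<b/>ba</a>'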
class SimpleContentHandler(ContentHandler, object):
"""A SAX content handler that just stores the events"""
def __init__(self):
self.sax_events = []
super(SimpleContentHandler, self).__init__()
def startDocument(self):
self.sax_events.append(('startDocument',))
def endDocument(self):
self.sax_events.append(('endDocument',))
def startPrefixMapping(self, prefix, uri):
self.sax_events.append(('startPrefixMapping', prefix, uri))
def endPrefixMapping(self, prefix):
self.sax_events.append(('endPrefixMapping', prefix))
def startElement(self, name, attrs):
self.sax_events.append(('startElement', name, dict(attrs)))
def endElement(self, name):
self.sax_events.append(('endElement', name))
def startElementNS(self, name, qname, attrs):
self.sax_events.append(('startElementNS', name, qname, attrs._qnames))
def endElementNS(self, name, qname):
self.sax_events.append(('endElementNS', name, qname))
def characters(self, content):
self.sax_events.append(('characters', content))
def ignorableWhitespace(self, whitespace):
self.sax_events.append(('ignorableWhitespace', whitespace))
def processingInstruction(self, target, data):
self.sax_events.append(('processingInstruction', target, data))
def skippedEntity(self, name):
self.sax_events.append(('skippedEntity', name))
class NSPrefixSaxTestCase(HelperTestCase):
"""Testing that namespaces generate the right SAX events"""
def _saxify(self, tree):
handler = SimpleContentHandler()
sax.ElementTreeProducer(tree, handler).saxify()
return handler.sax_events
def test_element_sax_ns_prefix(self):
# The name of the prefix should be preserved, if the uri is unique
tree = self.parse('<a:a xmlns:a="blaA" xmlns:c="blaC">'
'<d a:attr="value" c:attr="value" /></a:a>')
a = tree.getroot()
self.assertEqual(
[('startElementNS', ('blaA', 'a'), 'a:a', {}),
('startElementNS', (None, 'd'), 'd',
{('blaA', 'attr'): 'a:attr', ('blaC', 'attr'): 'c:attr'}),
('endElementNS', (None, 'd'), 'd'),
('endElementNS', ('blaA', 'a'), 'a:a'),
],
self._saxify(a)[3:7])
def test_element_sax_default_ns_prefix(self):
# Default prefixes should also not get a generated prefix
tree = self.parse('<a xmlns="blaA"><b attr="value" /></a>')
a = tree.getroot()
self.assertEqual(
[('startDocument',),
# NS prefix should be None:
('startPrefixMapping', None, 'blaA'),
('startElementNS', ('blaA', 'a'), 'a', {}),
# Attribute prefix should be None:
('startElementNS', ('blaA', 'b'), 'b', {(None, 'attr'): 'attr'}),
('endElementNS', ('blaA', 'b'), 'b'),
('endElementNS', ('blaA', 'a'), 'a'),
# Prefix should be None again:
('endPrefixMapping', None),
('endDocument',)],
self._saxify(a))
# Except for attributes, if there is both a default namespace
# and a named namespace with the same uri
tree = self.parse('<a xmlns="bla" xmlns:a="bla">'
'<b a:attr="value" /></a>')
a = tree.getroot()
self.assertEqual(
('startElementNS', ('bla', 'b'), 'b', {('bla', 'attr'): 'a:attr'}),
self._saxify(a)[4])
def test_element_sax_twin_ns_prefix(self):
        # Make an element with a doubly registered uri
tree = self.parse('<a xmlns:b="bla" xmlns:c="bla">'
'<d c:attr="attr" /></a>')
a = tree.getroot()
self.assertEqual(
# It should get the b prefix in this case
('startElementNS', (None, 'd'), 'd', {('bla', 'attr'): 'b:attr'}),
self._saxify(a)[4])
def test_suite():
suite = unittest.TestSuite()
suite.addTests([unittest.makeSuite(ETreeSaxTestCase)])
suite.addTests([unittest.makeSuite(NSPrefixSaxTestCase)])
suite.addTests(
[make_doctest('../../../doc/sax.txt')])
return suite
if __name__ == '__main__':
print('to test use test.py %s' % __file__)
|
import logging
from kalliope.core import NeuronModule
import alsaaudio
from kalliope.core.NeuronModule import InvalidParameterException
logging.basicConfig()
logger = logging.getLogger("kalliope")
class SoundManager(object):
try:
m = alsaaudio.Mixer()
except alsaaudio.ALSAAudioError:
        # no Master mixer, we are probably on a Raspberry Pi
try:
m = alsaaudio.Mixer("PCM")
except alsaaudio.ALSAAudioError:
# no audio config at all
m = None
@classmethod
def set_volume(cls, volume_level):
if cls.m is not None:
cls.m.setvolume(int(volume_level))
@classmethod
def get_volume(cls):
if cls.m is not None:
vol = cls.m.getvolume()
return int(vol[0])
return None
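# Minimal usage sketch for the mixer wrapper above (assumes a working ALSA
# mixer; both calls are no-ops / return None when no mixer could be opened):
#
#     SoundManager.set_volume(50)            # set Master (or PCM) volume to 50%
#     current = SoundManager.get_volume()    # -> 50, or None without a mixer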
class Volume(NeuronModule):
def __init__(self, **kwargs):
super(Volume, self).__init__(**kwargs)
self.level = kwargs.get('level', None)
self.action = kwargs.get('action', "set") # can be set, raise or lower
# check parameters
if self._is_parameters_ok():
if self.action == "set":
logger.debug("[Volume] set volume to: {}".format(self.level))
SoundManager.set_volume(self.level)
if self.action == "raise":
current_level = SoundManager.get_volume()
level_to_set = self.level + current_level
if level_to_set > 100:
level_to_set = 100
logger.debug("[Volume] set volume to: {}".format(level_to_set))
SoundManager.set_volume(level_to_set)
if self.action == "lower":
current_level = SoundManager.get_volume()
level_to_set = current_level - self.level
if level_to_set < 0:
level_to_set = 0
logger.debug("[Volume] set volume to: {}".format(level_to_set))
SoundManager.set_volume(level_to_set)
message = {
"asked_level": self.level,
"asked_action": self.action,
"current_level": SoundManager.get_volume()
}
self.say(message)
def _is_parameters_ok(self):
if self.level is None:
raise InvalidParameterException("[Volume] level need to be set")
try:
self.level = int(self.level)
except ValueError:
raise InvalidParameterException("[Volume] level '{}' is not a valid integer".format(self.level))
if self.level < 0 or self.level > 100:
raise InvalidParameterException("[Volume] level need to be placed between 0 and 100")
if self.action not in ["set", "raise", "lower"]:
raise InvalidParameterException("[Volume] action can be 'set', 'raise' or 'lower'")
return True
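# Hedged example of how this neuron might be declared in a Kalliope synapse
# (YAML sketch; only "level" and "action" are parameters read by this class):
#
#     - name: "set-volume"
#       signals:
#         - order: "set the volume to eighty"
#       neurons:
#         - volume:
#             level: 80
#             action: "set"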
|
import logging
from homeassistant.components.switch import SwitchEntity
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .common import VeSyncDevice
from .const import DOMAIN, VS_DISCOVERY, VS_DISPATCHERS, VS_SWITCHES
_LOGGER = logging.getLogger(__name__)
DEV_TYPE_TO_HA = {
"wifi-switch-1.3": "outlet",
"ESW03-USA": "outlet",
"ESW01-EU": "outlet",
"ESW15-USA": "outlet",
"ESWL01": "switch",
"ESWL03": "switch",
"ESO15-TB": "outlet",
}
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up switches."""
async def async_discover(devices):
"""Add new devices to platform."""
_async_setup_entities(devices, async_add_entities)
disp = async_dispatcher_connect(
hass, VS_DISCOVERY.format(VS_SWITCHES), async_discover
)
hass.data[DOMAIN][VS_DISPATCHERS].append(disp)
_async_setup_entities(hass.data[DOMAIN][VS_SWITCHES], async_add_entities)
return True
@callback
def _async_setup_entities(devices, async_add_entities):
"""Check if device is online and add entity."""
dev_list = []
for dev in devices:
if DEV_TYPE_TO_HA.get(dev.device_type) == "outlet":
dev_list.append(VeSyncSwitchHA(dev))
elif DEV_TYPE_TO_HA.get(dev.device_type) == "switch":
dev_list.append(VeSyncLightSwitch(dev))
else:
_LOGGER.warning(
"%s - Unknown device type - %s", dev.device_name, dev.device_type
)
continue
async_add_entities(dev_list, update_before_add=True)
class VeSyncBaseSwitch(VeSyncDevice, SwitchEntity):
"""Base class for VeSync switch Device Representations."""
def turn_on(self, **kwargs):
"""Turn the device on."""
self.device.turn_on()
class VeSyncSwitchHA(VeSyncBaseSwitch, SwitchEntity):
"""Representation of a VeSync switch."""
def __init__(self, plug):
"""Initialize the VeSync switch device."""
super().__init__(plug)
self.smartplug = plug
@property
def device_state_attributes(self):
"""Return the state attributes of the device."""
if not hasattr(self.smartplug, "weekly_energy_total"):
return {}
return {
"voltage": self.smartplug.voltage,
"weekly_energy_total": self.smartplug.weekly_energy_total,
"monthly_energy_total": self.smartplug.monthly_energy_total,
"yearly_energy_total": self.smartplug.yearly_energy_total,
}
@property
def current_power_w(self):
"""Return the current power usage in W."""
return self.smartplug.power
@property
def today_energy_kwh(self):
"""Return the today total energy usage in kWh."""
return self.smartplug.energy_today
def update(self):
"""Update outlet details and energy usage."""
self.smartplug.update()
self.smartplug.update_energy()
class VeSyncLightSwitch(VeSyncBaseSwitch, SwitchEntity):
"""Handle representation of VeSync Light Switch."""
def __init__(self, switch):
"""Initialize Light Switch device class."""
super().__init__(switch)
self.switch = switch
|
import numpy as np
from sklearn.decomposition import NMF
from sklearn.feature_extraction.text import TfidfTransformer
from sklearn.pipeline import Pipeline
from scattertext import CSRMatrixFactory
from scattertext.ParsedCorpus import ParsedCorpus
from scattertext.termscoring.RankDifference import RankDifference
class SentencesForTopicModeling(object):
'''
Creates a topic model from a set of key terms based on sentence level co-occurrence.
'''
def __init__(self, corpus):
'''
Parameters
----------
corpus
'''
assert isinstance(corpus, ParsedCorpus)
self.corpus = corpus
self.termidxstore = corpus._term_idx_store
matfact = CSRMatrixFactory()
self.doclabs = []
self.sentlabs = []
self.sentdocs = []
senti = 0
for doci, doc in enumerate(corpus.get_parsed_docs()):
for sent in doc.sents:
validsent = False
for t in sent:
try:
termi = self.termidxstore.getidxstrict(t.lower_)
except:
continue
if validsent is False:
senti += 1
self.sentlabs.append(corpus._y[doci])
self.sentdocs.append(doci)
validsent = True
matfact[senti, termi] = 1
self.sentX = matfact.get_csr_matrix().astype(bool)
def get_sentence_word_mat(self):
return self.sentX.astype(np.double).tocoo()
def get_topics_from_model(
self,
pipe=Pipeline([
('tfidf', TfidfTransformer(sublinear_tf=True)),
('nmf', (NMF(n_components=30, alpha=.1, l1_ratio=.5, random_state=0)))]),
num_terms_per_topic=10):
'''
Parameters
----------
pipe : Pipeline
For example, `Pipeline([
('tfidf', TfidfTransformer(sublinear_tf=True)),
('nmf', (NMF(n_components=30, alpha=.1, l1_ratio=.5, random_state=0)))])`
The last transformer must populate a `components_` attribute when finished.
num_terms_per_topic : int
Returns
-------
dict: {term: [term1, ...], ...}
'''
pipe.fit_transform(self.sentX)
topic_model = {}
for topic_idx, topic in enumerate(pipe._final_estimator.components_):
term_list = [self.termidxstore.getval(i)
for i
in topic.argsort()[:-num_terms_per_topic - 1:-1]
if topic[i] > 0]
if len(term_list) > 0:
topic_model['%s. %s' % (topic_idx, term_list[0])] = term_list
else:
Warning("Topic %s has no terms with scores > 0. Omitting." % (topic_idx))
return topic_model
def get_topics_from_terms(self,
terms=None,
num_terms_per_topic=10,
scorer=RankDifference()):
'''
Parameters
----------
terms : list or None
            If terms is a list, make these the seed terms for the topics
            If None, use the first 30 terms in get_scaled_f_scores_vs_background
num_terms_per_topic : int, default 10
Use this many terms per topic
scorer : TermScorer
            Implements get_scores; default is RankDifference, which tends to work best
Returns
-------
dict: {term: [term1, ...], ...}
'''
topic_model = {}
if terms is None:
terms = self.corpus.get_scaled_f_scores_vs_background().index[:30]
for term in terms:
termidx = self.termidxstore.getidxstrict(term)
labels = self.sentX[:, termidx].astype(bool).todense().A1
poscnts = self.sentX[labels, :].astype(bool).sum(axis=0).A1
negcnts = self.sentX[~labels, :].astype(bool).sum(axis=0).A1
scores = scorer.get_scores(poscnts, negcnts)
topic_model[term] = [self.termidxstore.getval(i) for i in
np.argsort(-scores)[:num_terms_per_topic]]
return topic_model
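# Rough usage sketch (the corpus construction itself happens outside this
# module):
#
#     sft = SentencesForTopicModeling(parsed_corpus)
#     topics = sft.get_topics_from_model(num_terms_per_topic=5)
#     # or seed the topics with specific terms:
#     topics = sft.get_topics_from_terms(terms=['economy', 'jobs'])
#     # both return a dict of {topic_name: [term, term, ...]}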
|
from __future__ import print_function
__docformat__ = "restructuredtext en"
from six.moves import range
from logilab.common.textutils import linesep
from logilab.common.ureports import BaseWriter
TITLE_UNDERLINES = [u'', u'=', u'-', u'`', u'.', u'~', u'^']
BULLETS = [u'*', u'-']
class TextWriter(BaseWriter):
"""format layouts as text
    (ReStructuredText-inspired, but not fully handled yet)
"""
def begin_format(self, layout):
super(TextWriter, self).begin_format(layout)
self.list_level = 0
self.pending_urls = []
def visit_section(self, layout):
"""display a section as text
"""
self.section += 1
self.writeln()
self.format_children(layout)
if self.pending_urls:
self.writeln()
for label, url in self.pending_urls:
self.writeln(u'.. _`%s`: %s' % (label, url))
self.pending_urls = []
self.section -= 1
self.writeln()
def visit_title(self, layout):
title = u''.join(list(self.compute_content(layout)))
self.writeln(title)
try:
self.writeln(TITLE_UNDERLINES[self.section] * len(title))
except IndexError:
print("FIXME TITLE TOO DEEP. TURNING TITLE INTO TEXT")
def visit_paragraph(self, layout):
"""enter a paragraph"""
self.format_children(layout)
self.writeln()
def visit_span(self, layout):
"""enter a span"""
self.format_children(layout)
def visit_table(self, layout):
"""display a table as text"""
table_content = self.get_table_content(layout)
# get columns width
cols_width = [0]*len(table_content[0])
for row in table_content:
for index in range(len(row)):
col = row[index]
cols_width[index] = max(cols_width[index], len(col))
if layout.klass == 'field':
self.field_table(layout, table_content, cols_width)
else:
self.default_table(layout, table_content, cols_width)
self.writeln()
def default_table(self, layout, table_content, cols_width):
"""format a table"""
cols_width = [size+1 for size in cols_width]
format_strings = u' '.join([u'%%-%ss'] * len(cols_width))
format_strings = format_strings % tuple(cols_width)
format_strings = format_strings.split(' ')
table_linesep = u'\n+' + u'+'.join([u'-'*w for w in cols_width]) + u'+\n'
headsep = u'\n+' + u'+'.join([u'='*w for w in cols_width]) + u'+\n'
# FIXME: layout.cheaders
self.write(table_linesep)
for i in range(len(table_content)):
self.write(u'|')
line = table_content[i]
for j in range(len(line)):
self.write(format_strings[j] % line[j])
self.write(u'|')
if i == 0 and layout.rheaders:
self.write(headsep)
else:
self.write(table_linesep)
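    # Illustration (not exhaustive): default_table() writes a '+----+----+'
    # style border line, then each row as '|' followed by the left-aligned,
    # width-padded cells and a closing '|', repeating the border after every
    # row (a '=' border follows the first row when layout.rheaders is set).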
def field_table(self, layout, table_content, cols_width):
"""special case for field table"""
assert layout.cols == 2
format_string = u'%s%%-%ss: %%s' % (linesep, cols_width[0])
for field, value in table_content:
self.write(format_string % (field, value))
def visit_list(self, layout):
"""display a list layout as text"""
bullet = BULLETS[self.list_level % len(BULLETS)]
indent = ' ' * self.list_level
self.list_level += 1
for child in layout.children:
self.write(u'%s%s%s ' % (linesep, indent, bullet))
child.accept(self)
self.list_level -= 1
def visit_link(self, layout):
"""add a hyperlink"""
if layout.label != layout.url:
self.write(u'`%s`_' % layout.label)
self.pending_urls.append( (layout.label, layout.url) )
else:
self.write(layout.url)
def visit_verbatimtext(self, layout):
"""display a verbatim layout as text (so difficult ;)
"""
self.writeln(u'::\n')
for line in layout.data.splitlines():
self.writeln(u' ' + line)
self.writeln()
def visit_text(self, layout):
"""add some text"""
self.write(u'%s' % layout.data)
|
import audioop
from time import sleep
import re
import collections
import json
import os
import pyaudio
import speech_recognition
from speech_recognition import (
Microphone,
AudioSource,
AudioData
)
class MutableStream:
def __init__(self, wrapped_stream, format):
assert wrapped_stream is not None
self.wrapped_stream = wrapped_stream
self.SAMPLE_WIDTH = pyaudio.get_sample_size(format)
self.muted_buffer = b''.join([b'\x00' * self.SAMPLE_WIDTH])
def read(self, size, of_exc=False):
"""
Read data from stream.
Arguments:
size (int): Number of bytes to read
of_exc (bool): flag determining if the audio producer thread
should throw IOError at overflows.
Returns:
Data read from device
"""
frames = collections.deque()
remaining = size
while remaining > 0:
to_read = min(self.wrapped_stream.get_read_available(), remaining)
if to_read == 0:
sleep(.01)
continue
result = self.wrapped_stream.read(to_read,
exception_on_overflow=of_exc)
frames.append(result)
remaining -= to_read
input_latency = self.wrapped_stream.get_input_latency()
audio = b"".join(list(frames))
return audio
def close(self):
self.wrapped_stream.close()
self.wrapped_stream = None
def is_stopped(self):
return self.wrapped_stream.is_stopped()
def stop_stream(self):
return self.wrapped_stream.stop_stream()
class MutableMicrophone(Microphone):
def __init__(self, device_index=None, sample_rate=16000,
chunk_size=1024):
Microphone.__init__(self, device_index=device_index,
sample_rate=sample_rate, chunk_size=chunk_size)
def __enter__(self):
assert self.stream is None, \
"This audio source is already inside a context manager"
self.audio = pyaudio.PyAudio()
self.stream = MutableStream(self.audio.open(
input_device_index=self.device_index,
channels=1,
format=self.format,
rate=self.SAMPLE_RATE,
frames_per_buffer=self.CHUNK,
input=True, # stream is an input stream
), self.format)
return self
def __exit__(self, exc_type, exc_value, traceback):
if not self.stream.is_stopped():
self.stream.stop_stream()
self.stream.close()
self.stream = None
self.audio.terminate()
def get_silence(num_bytes):
return b'\0' * num_bytes
class NoiseTracker:
"""Noise tracker, used to deterimine if an audio utterance is complete.
The current implementation expects a number of loud chunks (not necessary
in one continous sequence) followed by a short period of continous quiet
audio data to be considered complete.
Arguments:
minimum (int): lower noise level will be threshold for "quiet" level
maximum (int): ceiling of noise level
sec_per_buffer (float): the length of each buffer used when updating
the tracker
loud_time_limit (float): time in seconds of low noise to be considered
a complete sentence
silence_time_limit (float): time limit for silence to abort sentence
silence_after_loud (float): time of silence to finalize the sentence.
default 0.25 seconds.
"""
def __init__(self, minimum, maximum, sec_per_buffer, loud_time_limit,
silence_time_limit, silence_after_loud_time=0.25):
self.min_level = minimum
self.max_level = maximum
self.sec_per_buffer = sec_per_buffer
self.num_loud_chunks = 0
self.level = 0
# Smallest number of loud chunks required to return loud enough
self.min_loud_chunks = int(loud_time_limit / sec_per_buffer)
self.max_silence_duration = silence_time_limit
self.silence_duration = 0
        # time of quiet period after long enough loud data to consider the
        # sentence complete
self.silence_after_loud = silence_after_loud_time
# Constants
self.increase_multiplier = 200
self.decrease_multiplier = 100
def _increase_noise(self):
"""Bumps the current level.
        Modifies the noise level by a factor that depends on the buffer length.
"""
if self.level < self.max_level:
self.level += self.increase_multiplier * self.sec_per_buffer
def _decrease_noise(self):
"""Decrease the current level.
        Modifies the noise level by a factor that depends on the buffer length.
"""
if self.level > self.min_level:
self.level -= self.decrease_multiplier * self.sec_per_buffer
def update(self, is_loud):
"""Update the tracking. with either a loud chunk or a quiet chunk.
Arguments:
is_loud: True if a loud chunk should be registered
False if a quiet chunk should be registered
"""
if is_loud:
self._increase_noise()
self.num_loud_chunks += 1
else:
self._decrease_noise()
# Update duration of energy under the threshold level
if self._quiet_enough():
self.silence_duration += self.sec_per_buffer
else: # Reset silence duration
self.silence_duration = 0
def _loud_enough(self):
"""Check if the noise loudness criteria is fulfilled.
The noise is considered loud enough if it's been over the threshold
for a certain number of chunks (accumulated, not in a row).
"""
return self.num_loud_chunks > self.min_loud_chunks
def _quiet_enough(self):
"""Check if the noise quietness criteria is fulfilled.
The quiet level is instant and will return True if the level is lower
or equal to the minimum noise level.
"""
return self.level <= self.min_level
def recording_complete(self):
"""Has the end creteria for the recording been met.
If the noise level has decresed from a loud level to a low level
the user has stopped speaking.
Alternatively if a lot of silence was recorded without detecting
a loud enough phrase.
"""
too_much_silence = (self.silence_duration > self.max_silence_duration)
return ((self._quiet_enough() and
self.silence_duration > self.silence_after_loud) and
(self._loud_enough() or too_much_silence))
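# Minimal sketch of how the tracker is typically driven (values and the chunk
# iterable are illustrative, not taken from this file):
#
#     tracker = NoiseTracker(minimum=0, maximum=25, sec_per_buffer=0.05,
#                            loud_time_limit=0.5, silence_time_limit=3.0)
#     for chunk in audio_chunks:                    # hypothetical audio source
#         tracker.update(is_loud=chunk_is_loud(chunk))
#         if tracker.recording_complete():
#             break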
class ResponsiveRecognizer(speech_recognition.Recognizer):
# The minimum seconds of noise before a
# phrase can be considered complete
MIN_LOUD_SEC_PER_PHRASE = 0.5
# The minimum seconds of silence required at the end
# before a phrase will be considered complete
MIN_SILENCE_AT_END = 0.25
# The maximum seconds a phrase can be recorded,
# provided there is noise the entire time
#RECORDING_TIMEOUT = 15.0
# The maximum time it will continue to record silence
# when not enough noise has been detected
#RECORDING_TIMEOUT_WITH_SILENCE = 2.5 # default 3.0
def __init__(self,
multiplier,
energy_ratio,
recording_timeout,
recording_timeout_with_silence):
self.overflow_exc = False
speech_recognition.Recognizer.__init__(self)
self.audio = pyaudio.PyAudio()
self.multiplier = multiplier
self.energy_ratio = energy_ratio
# The maximum seconds a phrase can be recorded,
# provided there is noise the entire time
self.recording_timeout = recording_timeout
# The maximum time it will continue to record silence
# when not enough noise has been detected
self.recording_timeout_with_silence = recording_timeout_with_silence
def record_sound_chunk(self, source):
return source.stream.read(source.CHUNK, self.overflow_exc)
@staticmethod
def calc_energy(sound_chunk, sample_width):
return audioop.rms(sound_chunk, sample_width)
def _record_phrase(self, source, sec_per_buffer):
"""Record an entire spoken phrase.
Essentially, this code waits for a period of silence and then returns
the audio. If silence isn't detected, it will terminate and return
a buffer of self.recording_timeout duration.
Args:
source (AudioSource): Source producing the audio chunks
sec_per_buffer (float): Fractional number of seconds in each chunk
Returns:
bytearray: complete audio buffer recorded, including any
silence at the end of the user's utterance
"""
noise_tracker = NoiseTracker(0, 25, sec_per_buffer,
self.MIN_LOUD_SEC_PER_PHRASE,
self.recording_timeout_with_silence)
# Maximum number of chunks to record before timing out
max_chunks = int(self.recording_timeout / sec_per_buffer)
num_chunks = 0
# bytearray to store audio in, initialized with a single sample of
# silence.
byte_data = get_silence(source.SAMPLE_WIDTH)
phrase_complete = False
while num_chunks < max_chunks and not phrase_complete:
chunk = self.record_sound_chunk(source)
byte_data += chunk
num_chunks += 1
energy = self.calc_energy(chunk, source.SAMPLE_WIDTH)
test_threshold = self.energy_threshold * self.multiplier
is_loud = energy > test_threshold
noise_tracker.update(is_loud)
if not is_loud:
self._adjust_threshold(energy, sec_per_buffer)
# The phrase is complete if the noise_tracker end of sentence
# criteria is met or if the top-button is pressed
phrase_complete = (noise_tracker.recording_complete())
return byte_data
@staticmethod
def _create_audio_data(raw_data, source):
"""
Constructs an AudioData instance with the same parameters
as the source and the specified frame_data
"""
return AudioData(raw_data, source.SAMPLE_RATE, source.SAMPLE_WIDTH)
def listen(self, source):
"""Listens for chunks of audio that Mycroft should perform STT on.
This will listen continuously for a wake-up-word, then return the
audio chunk containing the spoken phrase that comes immediately
afterwards.
Args:
source (AudioSource): Source producing the audio chunks
emitter (EventEmitter): Emitter for notifications of when recording
begins and ends.
Returns:
AudioData: audio with the user's utterance, minus the wake-up-word
"""
assert isinstance(source, AudioSource), "Source must be an AudioSource"
# bytes_per_sec = source.SAMPLE_RATE * source.SAMPLE_WIDTH
sec_per_buffer = float(source.CHUNK) / source.SAMPLE_RATE
# Every time a new 'listen()' request begins, reset the threshold
# used for silence detection. This is as good of a reset point as
# any, as we expect the user and Mycroft to not be talking.
# NOTE: adjust_for_ambient_noise() doc claims it will stop early if
# speech is detected, but there is no code to actually do that.
self.adjust_for_ambient_noise(source, 0.1)
frame_data = self._record_phrase(source, sec_per_buffer)
audio_data = self._create_audio_data(frame_data, source)
return audio_data
def _adjust_threshold(self, energy, seconds_per_buffer):
if self.dynamic_energy_threshold and energy > 0:
# account for different chunk sizes and rates
damping = (
self.dynamic_energy_adjustment_damping ** seconds_per_buffer)
target_energy = energy * self.energy_ratio
self.energy_threshold = (
self.energy_threshold * damping +
target_energy * (1 - damping))
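# Rough end-to-end sketch (device setup, error handling and STT omitted):
#
#     recognizer = ResponsiveRecognizer(multiplier=1.0, energy_ratio=1.5,
#                                       recording_timeout=10.0,
#                                       recording_timeout_with_silence=3.0)
#     with MutableMicrophone() as source:
#         audio = recognizer.listen(source)   # speech_recognition.AudioData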
|
from IPython.core.magic import (
Magics,
magics_class, # type: ignore
cell_magic,
needs_local_scope) # type: ignore
import IPython.display # type: ignore
try:
from io import StringIO
except ImportError:
try:
from cStringIO import StringIO # type: ignore
except ImportError:
from StringIO import StringIO # type: ignore
import sys
valid_choices = [x[8:] for x in dir(IPython.display) if 'display_' == x[:8]]
@magics_class
class OutputMagics(Magics): # pragma: no cover
@needs_local_scope
@cell_magic
    def to(self, line, cell, local_ns=None):
        """Capture the cell's stdout and render it with the chosen IPython.display function."""
        choice = line.strip()
        assert choice in valid_choices, "Valid choices for '%%to' are: " + str(
            valid_choices)
        display_fn = getattr(IPython.display, "display_" + choice)
with StringIO() as out:
old_out = sys.stdout
try:
sys.stdout = out
                exec(cell, self.shell.user_ns, local_ns)
out.seek(0)
display_fn(out.getvalue(), raw=True)
finally:
sys.stdout = old_out
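# Hedged usage sketch: once these magics are registered (for example via
# get_ipython().register_magics(OutputMagics)), a notebook cell such as
#
#     %%to markdown
#     print("# rendered as Markdown")
#
# runs the cell body, captures its stdout and passes it to
# IPython.display.display_markdown(..., raw=True).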
|
import asyncio
from datetime import timedelta
import hashlib
from typing import Any, List, Sequence
import voluptuous as vol
from homeassistant import util
from homeassistant.components import zone
from homeassistant.config import async_log_exception, load_yaml_config_file
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_GPS_ACCURACY,
ATTR_ICON,
ATTR_LATITUDE,
ATTR_LONGITUDE,
ATTR_NAME,
CONF_ICON,
CONF_MAC,
CONF_NAME,
DEVICE_DEFAULT_NAME,
STATE_HOME,
STATE_NOT_HOME,
)
from homeassistant.core import callback
from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_registry import async_get_registry
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.typing import GPSType, HomeAssistantType
import homeassistant.util.dt as dt_util
from homeassistant.util.yaml import dump
from .const import (
ATTR_BATTERY,
ATTR_HOST_NAME,
ATTR_MAC,
ATTR_SOURCE_TYPE,
CONF_CONSIDER_HOME,
CONF_NEW_DEVICE_DEFAULTS,
CONF_TRACK_NEW,
DEFAULT_CONSIDER_HOME,
DEFAULT_TRACK_NEW,
DOMAIN,
LOGGER,
SOURCE_TYPE_GPS,
)
YAML_DEVICES = "known_devices.yaml"
EVENT_NEW_DEVICE = "device_tracker_new_device"
async def get_tracker(hass, config):
"""Create a tracker."""
yaml_path = hass.config.path(YAML_DEVICES)
conf = config.get(DOMAIN, [])
conf = conf[0] if conf else {}
consider_home = conf.get(CONF_CONSIDER_HOME, DEFAULT_CONSIDER_HOME)
defaults = conf.get(CONF_NEW_DEVICE_DEFAULTS, {})
track_new = conf.get(CONF_TRACK_NEW)
if track_new is None:
track_new = defaults.get(CONF_TRACK_NEW, DEFAULT_TRACK_NEW)
devices = await async_load_config(yaml_path, hass, consider_home)
tracker = DeviceTracker(hass, consider_home, track_new, defaults, devices)
return tracker
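# Sketch of the typical flow for a legacy scanner platform (simplified,
# illustrative values):
#
#     tracker = await get_tracker(hass, config)
#     await tracker.async_see(mac="AA:BB:CC:DD:EE:FF", host_name="laptop")
#     # -> looks up or creates a Device, updates its state, and schedules an
#     #    update of known_devices.yaml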
class DeviceTracker:
"""Representation of a device tracker."""
def __init__(
self,
hass: HomeAssistantType,
consider_home: timedelta,
track_new: bool,
defaults: dict,
devices: Sequence,
) -> None:
"""Initialize a device tracker."""
self.hass = hass
self.devices = {dev.dev_id: dev for dev in devices}
self.mac_to_dev = {dev.mac: dev for dev in devices if dev.mac}
self.consider_home = consider_home
self.track_new = (
track_new
if track_new is not None
else defaults.get(CONF_TRACK_NEW, DEFAULT_TRACK_NEW)
)
self.defaults = defaults
self._is_updating = asyncio.Lock()
for dev in devices:
if self.devices[dev.dev_id] is not dev:
LOGGER.warning("Duplicate device IDs detected %s", dev.dev_id)
if dev.mac and self.mac_to_dev[dev.mac] is not dev:
LOGGER.warning("Duplicate device MAC addresses detected %s", dev.mac)
def see(
self,
mac: str = None,
dev_id: str = None,
host_name: str = None,
location_name: str = None,
gps: GPSType = None,
gps_accuracy: int = None,
battery: int = None,
attributes: dict = None,
source_type: str = SOURCE_TYPE_GPS,
picture: str = None,
icon: str = None,
consider_home: timedelta = None,
):
"""Notify the device tracker that you see a device."""
self.hass.add_job(
self.async_see(
mac,
dev_id,
host_name,
location_name,
gps,
gps_accuracy,
battery,
attributes,
source_type,
picture,
icon,
consider_home,
)
)
async def async_see(
self,
mac: str = None,
dev_id: str = None,
host_name: str = None,
location_name: str = None,
gps: GPSType = None,
gps_accuracy: int = None,
battery: int = None,
attributes: dict = None,
source_type: str = SOURCE_TYPE_GPS,
picture: str = None,
icon: str = None,
consider_home: timedelta = None,
):
"""Notify the device tracker that you see a device.
This method is a coroutine.
"""
registry = await async_get_registry(self.hass)
if mac is None and dev_id is None:
raise HomeAssistantError("Neither mac or device id passed in")
if mac is not None:
mac = str(mac).upper()
device = self.mac_to_dev.get(mac)
if not device:
dev_id = util.slugify(host_name or "") or util.slugify(mac)
else:
dev_id = cv.slug(str(dev_id).lower())
device = self.devices.get(dev_id)
if device:
await device.async_seen(
host_name,
location_name,
gps,
gps_accuracy,
battery,
attributes,
source_type,
consider_home,
)
if device.track:
device.async_write_ha_state()
return
        # Guard against calling see on entity registry entities.
entity_id = f"{DOMAIN}.{dev_id}"
if registry.async_is_registered(entity_id):
LOGGER.error(
"The see service is not supported for this entity %s", entity_id
)
return
# If no device can be found, create it
dev_id = util.ensure_unique_string(dev_id, self.devices.keys())
device = Device(
self.hass,
consider_home or self.consider_home,
self.track_new,
dev_id,
mac,
picture=picture,
icon=icon,
)
self.devices[dev_id] = device
if mac is not None:
self.mac_to_dev[mac] = device
await device.async_seen(
host_name,
location_name,
gps,
gps_accuracy,
battery,
attributes,
source_type,
)
if device.track:
device.async_write_ha_state()
self.hass.bus.async_fire(
EVENT_NEW_DEVICE,
{
ATTR_ENTITY_ID: device.entity_id,
ATTR_HOST_NAME: device.host_name,
ATTR_MAC: device.mac,
},
)
# update known_devices.yaml
self.hass.async_create_task(
self.async_update_config(
self.hass.config.path(YAML_DEVICES), dev_id, device
)
)
async def async_update_config(self, path, dev_id, device):
"""Add device to YAML configuration file.
This method is a coroutine.
"""
async with self._is_updating:
await self.hass.async_add_executor_job(
update_config, self.hass.config.path(YAML_DEVICES), dev_id, device
)
@callback
def async_update_stale(self, now: dt_util.dt.datetime):
"""Update stale devices.
This method must be run in the event loop.
"""
for device in self.devices.values():
if (device.track and device.last_update_home) and device.stale(now):
self.hass.async_create_task(device.async_update_ha_state(True))
async def async_setup_tracked_device(self):
"""Set up all not exists tracked devices.
This method is a coroutine.
"""
async def async_init_single_device(dev):
"""Init a single device_tracker entity."""
await dev.async_added_to_hass()
dev.async_write_ha_state()
tasks = []
for device in self.devices.values():
if device.track and not device.last_seen:
tasks.append(
self.hass.async_create_task(async_init_single_device(device))
)
if tasks:
await asyncio.wait(tasks)
class Device(RestoreEntity):
"""Represent a tracked device."""
host_name: str = None
location_name: str = None
gps: GPSType = None
gps_accuracy: int = 0
last_seen: dt_util.dt.datetime = None
consider_home: dt_util.dt.timedelta = None
battery: int = None
attributes: dict = None
icon: str = None
# Track if the last update of this device was HOME.
last_update_home = False
_state = STATE_NOT_HOME
def __init__(
self,
hass: HomeAssistantType,
consider_home: timedelta,
track: bool,
dev_id: str,
mac: str,
name: str = None,
picture: str = None,
gravatar: str = None,
icon: str = None,
) -> None:
"""Initialize a device."""
self.hass = hass
self.entity_id = f"{DOMAIN}.{dev_id}"
        # Timedelta for how long we consider a device home after it is no
        # longer detected.
self.consider_home = consider_home
# Device ID
self.dev_id = dev_id
self.mac = mac
# If we should track this device
self.track = track
# Configured name
self.config_name = name
# Configured picture
if gravatar is not None:
self.config_picture = get_gravatar_for_email(gravatar)
else:
self.config_picture = picture
self.icon = icon
self.source_type = None
self._attributes = {}
@property
def name(self):
"""Return the name of the entity."""
return self.config_name or self.host_name or self.dev_id or DEVICE_DEFAULT_NAME
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def entity_picture(self):
"""Return the picture of the device."""
return self.config_picture
@property
def state_attributes(self):
"""Return the device state attributes."""
attr = {ATTR_SOURCE_TYPE: self.source_type}
if self.gps:
attr[ATTR_LATITUDE] = self.gps[0]
attr[ATTR_LONGITUDE] = self.gps[1]
attr[ATTR_GPS_ACCURACY] = self.gps_accuracy
if self.battery:
attr[ATTR_BATTERY] = self.battery
return attr
@property
def device_state_attributes(self):
"""Return device state attributes."""
return self._attributes
async def async_seen(
self,
host_name: str = None,
location_name: str = None,
gps: GPSType = None,
gps_accuracy=0,
battery: int = None,
attributes: dict = None,
source_type: str = SOURCE_TYPE_GPS,
consider_home: timedelta = None,
):
"""Mark the device as seen."""
self.source_type = source_type
self.last_seen = dt_util.utcnow()
self.host_name = host_name or self.host_name
self.location_name = location_name
self.consider_home = consider_home or self.consider_home
if battery:
self.battery = battery
if attributes:
self._attributes.update(attributes)
self.gps = None
if gps is not None:
try:
self.gps = float(gps[0]), float(gps[1])
self.gps_accuracy = gps_accuracy or 0
except (ValueError, TypeError, IndexError):
self.gps = None
self.gps_accuracy = 0
LOGGER.warning("Could not parse gps value for %s: %s", self.dev_id, gps)
await self.async_update()
def stale(self, now: dt_util.dt.datetime = None):
"""Return if device state is stale.
Async friendly.
"""
return (
self.last_seen is None
or (now or dt_util.utcnow()) - self.last_seen > self.consider_home
)
def mark_stale(self):
"""Mark the device state as stale."""
self._state = STATE_NOT_HOME
self.gps = None
self.last_update_home = False
async def async_update(self):
"""Update state of entity.
This method is a coroutine.
"""
if not self.last_seen:
return
if self.location_name:
self._state = self.location_name
elif self.gps is not None and self.source_type == SOURCE_TYPE_GPS:
zone_state = zone.async_active_zone(
self.hass, self.gps[0], self.gps[1], self.gps_accuracy
)
if zone_state is None:
self._state = STATE_NOT_HOME
elif zone_state.entity_id == zone.ENTITY_ID_HOME:
self._state = STATE_HOME
else:
self._state = zone_state.name
elif self.stale():
self.mark_stale()
else:
self._state = STATE_HOME
self.last_update_home = True
async def async_added_to_hass(self):
"""Add an entity."""
await super().async_added_to_hass()
state = await self.async_get_last_state()
if not state:
return
self._state = state.state
self.last_update_home = state.state == STATE_HOME
self.last_seen = dt_util.utcnow()
for attr, var in (
(ATTR_SOURCE_TYPE, "source_type"),
(ATTR_GPS_ACCURACY, "gps_accuracy"),
(ATTR_BATTERY, "battery"),
):
if attr in state.attributes:
setattr(self, var, state.attributes[attr])
if ATTR_LONGITUDE in state.attributes:
self.gps = (
state.attributes[ATTR_LATITUDE],
state.attributes[ATTR_LONGITUDE],
)
class DeviceScanner:
"""Device scanner object."""
hass: HomeAssistantType = None
def scan_devices(self) -> List[str]:
"""Scan for devices."""
raise NotImplementedError()
async def async_scan_devices(self) -> Any:
"""Scan for devices."""
return await self.hass.async_add_executor_job(self.scan_devices)
def get_device_name(self, device: str) -> str:
"""Get the name of a device."""
raise NotImplementedError()
async def async_get_device_name(self, device: str) -> Any:
"""Get the name of a device."""
return await self.hass.async_add_executor_job(self.get_device_name, device)
def get_extra_attributes(self, device: str) -> dict:
"""Get the extra attributes of a device."""
raise NotImplementedError()
async def async_get_extra_attributes(self, device: str) -> Any:
"""Get the extra attributes of a device."""
return await self.hass.async_add_executor_job(self.get_extra_attributes, device)
async def async_load_config(
path: str, hass: HomeAssistantType, consider_home: timedelta
):
"""Load devices from YAML configuration file.
This method is a coroutine.
"""
dev_schema = vol.Schema(
{
vol.Required(CONF_NAME): cv.string,
vol.Optional(CONF_ICON, default=None): vol.Any(None, cv.icon),
vol.Optional("track", default=False): cv.boolean,
vol.Optional(CONF_MAC, default=None): vol.Any(
None, vol.All(cv.string, vol.Upper)
),
vol.Optional("gravatar", default=None): vol.Any(None, cv.string),
vol.Optional("picture", default=None): vol.Any(None, cv.string),
vol.Optional(CONF_CONSIDER_HOME, default=consider_home): vol.All(
cv.time_period, cv.positive_timedelta
),
}
)
result = []
try:
devices = await hass.async_add_executor_job(load_yaml_config_file, path)
except HomeAssistantError as err:
LOGGER.error("Unable to load %s: %s", path, str(err))
return []
except FileNotFoundError:
return []
for dev_id, device in devices.items():
        # Deprecated option. We just ignore it to avoid a breaking change
device.pop("vendor", None)
device.pop("hide_if_away", None)
try:
device = dev_schema(device)
device["dev_id"] = cv.slugify(dev_id)
except vol.Invalid as exp:
async_log_exception(exp, dev_id, devices, hass)
else:
result.append(Device(hass, **device))
return result
def update_config(path: str, dev_id: str, device: Device):
"""Add device to YAML configuration file."""
with open(path, "a") as out:
device = {
device.dev_id: {
ATTR_NAME: device.name,
ATTR_MAC: device.mac,
ATTR_ICON: device.icon,
"picture": device.config_picture,
"track": device.track,
}
}
out.write("\n")
out.write(dump(device))
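# Illustration of the YAML block that update_config() appends for one device
# (key order and exact formatting depend on the YAML dumper; values invented):
#
#     my_phone:
#       name: My Phone
#       mac: AA:BB:CC:DD:EE:FF
#       icon: mdi:cellphone
#       picture:
#       track: true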
def get_gravatar_for_email(email: str):
"""Return an 80px Gravatar for the given email address.
Async friendly.
"""
return (
f"https://www.gravatar.com/avatar/"
f"{hashlib.md5(email.encode('utf-8').lower()).hexdigest()}.jpg?s=80&d=wavatar"
)
|
import argparse
import subprocess
import sys
import tempfile
import textwrap
from google.protobuf import descriptor_pb2
INFINITY = 10000
# pylint: disable=no-member
LABEL_TO_STR = {value: name.lower()[6:] for name, value in
descriptor_pb2.FieldDescriptorProto.Label.items()}
TYPE_TO_STR = {value: name.lower()[5:] for name, value in
descriptor_pb2.FieldDescriptorProto.Type.items()}
# pylint: enable=no-member
def print_table(col_tuple, row_tuples):
"""Print column headers and rows as a reStructuredText table.
Args:
col_tuple: Tuple of column name strings.
row_tuples: List of tuples containing row data.
"""
col_widths = [max(len(str(row[col])) for row in [col_tuple] + row_tuples)
for col in range(len(col_tuple))]
format_str = ' '.join('{{:<{}}}'.format(col_width)
for col_width in col_widths)
header_border = ' '.join('=' * col_width for col_width in col_widths)
print(header_border)
print(format_str.format(*col_tuple))
print(header_border)
for row_tuple in row_tuples:
print(format_str.format(*row_tuple))
print(header_border)
print()
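# Illustration (not taken from a real proto file):
# print_table(('Name', 'Value'), [('foo', '1')]) prints the reStructuredText
# simple table
#
#     ==== =====
#     Name Value
#     ==== =====
#     foo  1
#     ==== =====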
def make_subsection(text):
"""Format text as reStructuredText subsection.
Args:
text: Text string to format.
Returns:
Formatted text string.
"""
return '{}\n{}\n'.format(text, '-' * len(text))
def make_link(text):
"""Format text as reStructuredText link.
Args:
text: Text string to format.
Returns:
Formatted text string.
"""
return '`{}`_'.format(text)
def make_code(text):
"""Format text as reStructuredText code.
Args:
text: Text string to format.
Returns:
Formatted text string.
"""
return ':code:`{}`'.format(text)
def make_comment(text):
"""Format text as reStructuredText comment.
Args:
text: Text string to format.
Returns:
Formatted text string.
"""
return '.. {}\n'.format(text)
def get_comment_from_location(location):
"""Return comment text from location.
Args:
location: descriptor_pb2.SourceCodeInfo.Location instance to get
comment from.
Returns:
Comment as string.
"""
return textwrap.dedent(location.leading_comments or
location.trailing_comments)
def generate_enum_doc(enum_descriptor, locations, path, name_prefix=''):
"""Generate doc for an enum.
Args:
enum_descriptor: descriptor_pb2.EnumDescriptorProto instance for enum
to generate docs for.
locations: Dictionary of location paths tuples to
descriptor_pb2.SourceCodeInfo.Location instances.
path: Path tuple to the enum definition.
name_prefix: Optional prefix for this enum's name.
"""
print(make_subsection(name_prefix + enum_descriptor.name))
location = locations[path]
if location.HasField('leading_comments'):
print(textwrap.dedent(location.leading_comments))
row_tuples = []
for value_index, value in enumerate(enum_descriptor.value):
field_location = locations[path + (2, value_index)]
row_tuples.append((
make_code(value.name),
value.number,
textwrap.fill(get_comment_from_location(field_location), INFINITY),
))
print_table(('Name', 'Number', 'Description'), row_tuples)
def generate_message_doc(message_descriptor, locations, path, name_prefix=''):
"""Generate docs for message and nested messages and enums.
Args:
message_descriptor: descriptor_pb2.DescriptorProto instance for message
to generate docs for.
locations: Dictionary of location paths tuples to
descriptor_pb2.SourceCodeInfo.Location instances.
path: Path tuple to the message definition.
name_prefix: Optional prefix for this message's name.
"""
# message_type is 4
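    # (Source-code-info paths are built from protobuf field numbers:
    # FileDescriptorProto.message_type = 4, DescriptorProto.field = 2,
    # DescriptorProto.nested_type = 3, DescriptorProto.enum_type = 4.)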
prefixed_name = name_prefix + message_descriptor.name
print(make_subsection(prefixed_name))
location = locations[path]
if location.HasField('leading_comments'):
print(textwrap.dedent(location.leading_comments))
row_tuples = []
for field_index, field in enumerate(message_descriptor.field):
field_location = locations[path + (2, field_index)]
if field.type not in [11, 14]:
type_str = TYPE_TO_STR[field.type]
else:
type_str = make_link(field.type_name.lstrip('.'))
row_tuples.append((
make_code(field.name),
field.number,
type_str,
LABEL_TO_STR[field.label],
textwrap.fill(get_comment_from_location(field_location), INFINITY),
))
print_table(('Field', 'Number', 'Type', 'Label', 'Description'),
row_tuples)
# Generate nested messages
nested_types = enumerate(message_descriptor.nested_type)
for index, nested_message_desc in nested_types:
generate_message_doc(nested_message_desc, locations,
path + (3, index),
name_prefix=prefixed_name + '.')
# Generate nested enums
for index, nested_enum_desc in enumerate(message_descriptor.enum_type):
generate_enum_doc(nested_enum_desc, locations, path + (4, index),
name_prefix=prefixed_name + '.')
def compile_protofile(proto_file_path):
"""Compile proto file to descriptor set.
Args:
proto_file_path: Path to proto file to compile.
Returns:
Path to file containing compiled descriptor set.
Raises:
SystemExit if the compilation fails.
"""
out_file = tempfile.mkstemp()[1]
try:
subprocess.check_output(['protoc', '--include_source_info',
'--descriptor_set_out', out_file,
proto_file_path])
except subprocess.CalledProcessError as e:
sys.exit('protoc returned status {}'.format(e.returncode))
return out_file
def main():
"""Parse arguments and print generated documentation to stdout."""
parser = argparse.ArgumentParser()
parser.add_argument('protofilepath')
args = parser.parse_args()
out_file = compile_protofile(args.protofilepath)
with open(out_file, 'rb') as proto_file:
# pylint: disable=no-member
file_descriptor_set = descriptor_pb2.FileDescriptorSet.FromString(
proto_file.read()
)
# pylint: enable=no-member
for file_descriptor in file_descriptor_set.file:
# Build dict of location tuples
locations = {}
for location in file_descriptor.source_code_info.location:
locations[tuple(location.path)] = location
# Add comment to top
print(make_comment('This file was automatically generated from {} and '
'should not be edited directly.'
.format(args.protofilepath)))
# Generate documentation
for index, message_desc in enumerate(file_descriptor.message_type):
generate_message_doc(message_desc, locations, (4, index))
for index, enum_desc in enumerate(file_descriptor.enum_type):
generate_enum_doc(enum_desc, locations, (5, index))
if __name__ == '__main__':
main()
|
from collections import defaultdict
from ..utils import classify, classify_bool, bfs, fzset, Enumerator, logger
from ..exceptions import GrammarError
from .grammar_analysis import GrammarAnalyzer, Terminal, LR0ItemSet
from ..grammar import Rule
###{standalone
class Action:
def __init__(self, name):
self.name = name
def __str__(self):
return self.name
def __repr__(self):
return str(self)
Shift = Action('Shift')
Reduce = Action('Reduce')
class ParseTable:
def __init__(self, states, start_states, end_states):
self.states = states
self.start_states = start_states
self.end_states = end_states
def serialize(self, memo):
tokens = Enumerator()
rules = Enumerator()
states = {
state: {tokens.get(token): ((1, arg.serialize(memo)) if action is Reduce else (0, arg))
for token, (action, arg) in actions.items()}
for state, actions in self.states.items()
}
return {
'tokens': tokens.reversed(),
'states': states,
'start_states': self.start_states,
'end_states': self.end_states,
}
@classmethod
def deserialize(cls, data, memo):
tokens = data['tokens']
states = {
state: {tokens[token]: ((Reduce, Rule.deserialize(arg, memo)) if action==1 else (Shift, arg))
for token, (action, arg) in actions.items()}
for state, actions in data['states'].items()
}
return cls(states, data['start_states'], data['end_states'])
class IntParseTable(ParseTable):
@classmethod
def from_ParseTable(cls, parse_table):
enum = list(parse_table.states)
state_to_idx = {s:i for i,s in enumerate(enum)}
int_states = {}
for s, la in parse_table.states.items():
la = {k:(v[0], state_to_idx[v[1]]) if v[0] is Shift else v
for k,v in la.items()}
int_states[ state_to_idx[s] ] = la
start_states = {start:state_to_idx[s] for start, s in parse_table.start_states.items()}
end_states = {start:state_to_idx[s] for start, s in parse_table.end_states.items()}
return cls(int_states, start_states, end_states)
###}
# digraph and traverse, see The Theory and Practice of Compiler Writing
# computes F(x) = G(x) union (union { G(y) | x R y })
# X: nodes
# R: relation (function mapping node -> list of nodes that satisfy the relation)
# G: set valued function
def digraph(X, R, G):
F = {}
S = []
N = {}
for x in X:
N[x] = 0
for x in X:
# this is always true for the first iteration, but N[x] may be updated in traverse below
if N[x] == 0:
traverse(x, S, N, X, R, G, F)
return F
# x: single node
# S: stack
# N: weights
# X: nodes
# R: relation (see above)
# G: set valued function
# F: set valued function we are computing (map of input -> output)
def traverse(x, S, N, X, R, G, F):
S.append(x)
d = len(S)
N[x] = d
F[x] = G[x]
for y in R[x]:
if N[y] == 0:
traverse(y, S, N, X, R, G, F)
n_x = N[x]
assert(n_x > 0)
n_y = N[y]
assert(n_y != 0)
if (n_y > 0) and (n_y < n_x):
N[x] = n_y
F[x].update(F[y])
if N[x] == d:
f_x = F[x]
while True:
z = S.pop()
N[z] = -1
F[z] = f_x
if z == x:
break
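# Worked example (illustrative, not used by the analyzer below): with
#     X = ['a', 'b', 'c']
#     R = {'a': ['b'], 'b': ['c'], 'c': []}
#     G = {'a': {1}, 'b': {2}, 'c': {3}}
# digraph(X, R, G) yields {'a': {1, 2, 3}, 'b': {2, 3}, 'c': {3}}: each node
# gets its own G set unioned with everything reachable through R.  Note that
# the algorithm updates the sets stored in G in place.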
class LALR_Analyzer(GrammarAnalyzer):
def __init__(self, parser_conf, debug=False):
GrammarAnalyzer.__init__(self, parser_conf, debug)
self.nonterminal_transitions = []
self.directly_reads = defaultdict(set)
self.reads = defaultdict(set)
self.includes = defaultdict(set)
self.lookback = defaultdict(set)
def compute_lr0_states(self):
self.lr0_states = set()
# map of kernels to LR0ItemSets
cache = {}
def step(state):
_, unsat = classify_bool(state.closure, lambda rp: rp.is_satisfied)
d = classify(unsat, lambda rp: rp.next)
for sym, rps in d.items():
kernel = fzset({rp.advance(sym) for rp in rps})
new_state = cache.get(kernel, None)
if new_state is None:
closure = set(kernel)
for rp in kernel:
if not rp.is_satisfied and not rp.next.is_term:
closure |= self.expand_rule(rp.next, self.lr0_rules_by_origin)
new_state = LR0ItemSet(kernel, closure)
cache[kernel] = new_state
state.transitions[sym] = new_state
yield new_state
self.lr0_states.add(state)
for _ in bfs(self.lr0_start_states.values(), step):
pass
def compute_reads_relations(self):
# handle start state
for root in self.lr0_start_states.values():
assert(len(root.kernel) == 1)
for rp in root.kernel:
assert(rp.index == 0)
self.directly_reads[(root, rp.next)] = set([ Terminal('$END') ])
for state in self.lr0_states:
seen = set()
for rp in state.closure:
if rp.is_satisfied:
continue
s = rp.next
                # if s is not a nonterminal
if s not in self.lr0_rules_by_origin:
continue
if s in seen:
continue
seen.add(s)
nt = (state, s)
self.nonterminal_transitions.append(nt)
dr = self.directly_reads[nt]
r = self.reads[nt]
next_state = state.transitions[s]
for rp2 in next_state.closure:
if rp2.is_satisfied:
continue
s2 = rp2.next
# if s2 is a terminal
if s2 not in self.lr0_rules_by_origin:
dr.add(s2)
if s2 in self.NULLABLE:
r.add((next_state, s2))
def compute_includes_lookback(self):
for nt in self.nonterminal_transitions:
state, nonterminal = nt
includes = []
lookback = self.lookback[nt]
for rp in state.closure:
if rp.rule.origin != nonterminal:
continue
# traverse the states for rp(.rule)
state2 = state
for i in range(rp.index, len(rp.rule.expansion)):
s = rp.rule.expansion[i]
nt2 = (state2, s)
state2 = state2.transitions[s]
if nt2 not in self.reads:
continue
for j in range(i + 1, len(rp.rule.expansion)):
if not rp.rule.expansion[j] in self.NULLABLE:
break
else:
includes.append(nt2)
# state2 is at the final state for rp.rule
if rp.index == 0:
for rp2 in state2.closure:
if (rp2.rule == rp.rule) and rp2.is_satisfied:
lookback.add((state2, rp2.rule))
for nt2 in includes:
self.includes[nt2].add(nt)
def compute_lookaheads(self):
read_sets = digraph(self.nonterminal_transitions, self.reads, self.directly_reads)
follow_sets = digraph(self.nonterminal_transitions, self.includes, read_sets)
for nt, lookbacks in self.lookback.items():
for state, rule in lookbacks:
for s in follow_sets[nt]:
state.lookaheads[s].add(rule)
def compute_lalr1_states(self):
m = {}
reduce_reduce = []
for state in self.lr0_states:
actions = {}
for la, next_state in state.transitions.items():
actions[la] = (Shift, next_state.closure)
for la, rules in state.lookaheads.items():
if len(rules) > 1:
# Try to resolve conflict based on priority
p = [(r.options.priority or 0, r) for r in rules]
p.sort(key=lambda r: r[0], reverse=True)
best, second_best = p[:2]
if best[0] > second_best[0]:
rules = [best[1]]
else:
reduce_reduce.append((state, la, rules))
if la in actions:
if self.debug:
logger.warning('Shift/Reduce conflict for terminal %s: (resolving as shift)', la.name)
logger.warning(' * %s', list(rules)[0])
else:
actions[la] = (Reduce, list(rules)[0])
m[state] = { k.name: v for k, v in actions.items() }
if reduce_reduce:
msgs = []
for state, la, rules in reduce_reduce:
msg = 'Reduce/Reduce collision in %s between the following rules: %s' % (la, ''.join([ '\n\t- ' + str(r) for r in rules ]))
if self.debug:
msg += '\n collision occurred in state: {%s\n }' % ''.join(['\n\t' + str(x) for x in state.closure])
msgs.append(msg)
raise GrammarError('\n\n'.join(msgs))
states = { k.closure: v for k, v in m.items() }
# compute end states
end_states = {}
for state in states:
for rp in state:
for start in self.lr0_start_states:
if rp.rule.origin.name == ('$root_' + start) and rp.is_satisfied:
assert(not start in end_states)
end_states[start] = state
_parse_table = ParseTable(states, { start: state.closure for start, state in self.lr0_start_states.items() }, end_states)
if self.debug:
self.parse_table = _parse_table
else:
self.parse_table = IntParseTable.from_ParseTable(_parse_table)
def compute_lalr(self):
self.compute_lr0_states()
self.compute_reads_relations()
self.compute_includes_lookback()
self.compute_lookaheads()
self.compute_lalr1_states()
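# Hedged note on the pipeline above (my reading of the code, not an authoritative
# description): compute_lalr appears to follow the DeRemer/Pennello-style LALR(1)
# analysis, where digraph(X, R, F) solves the set equations
#     F'(x) = F(x) ∪ { F'(y) for every y with x R y }
# over the nonterminal transitions X. So read_sets solves Read(nt) from
# directly_reads over the `reads` relation, follow_sets solves Follow(nt) from
# read_sets over the `includes` relation, and compute_lookaheads then copies
# Follow(nt) onto each lookback (state, rule) pair as the LALR(1) lookaheads.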
|
import logging
import pysnmp.hlapi.asyncio as hlapi
from pysnmp.hlapi.asyncio import (
CommunityData,
ContextData,
ObjectIdentity,
ObjectType,
SnmpEngine,
UdpTransportTarget,
UsmUserData,
getCmd,
setCmd,
)
from pysnmp.proto.rfc1902 import (
Counter32,
Counter64,
Gauge32,
Integer,
Integer32,
IpAddress,
Null,
ObjectIdentifier,
OctetString,
Opaque,
TimeTicks,
Unsigned32,
)
import voluptuous as vol
from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchEntity
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PAYLOAD_OFF,
CONF_PAYLOAD_ON,
CONF_PORT,
CONF_USERNAME,
)
import homeassistant.helpers.config_validation as cv
from .const import (
CONF_AUTH_KEY,
CONF_AUTH_PROTOCOL,
CONF_BASEOID,
CONF_COMMUNITY,
CONF_PRIV_KEY,
CONF_PRIV_PROTOCOL,
CONF_VARTYPE,
CONF_VERSION,
DEFAULT_AUTH_PROTOCOL,
DEFAULT_HOST,
DEFAULT_NAME,
DEFAULT_PORT,
DEFAULT_PRIV_PROTOCOL,
DEFAULT_VARTYPE,
DEFAULT_VERSION,
MAP_AUTH_PROTOCOLS,
MAP_PRIV_PROTOCOLS,
SNMP_VERSIONS,
)
_LOGGER = logging.getLogger(__name__)
CONF_COMMAND_OID = "command_oid"
CONF_COMMAND_PAYLOAD_OFF = "command_payload_off"
CONF_COMMAND_PAYLOAD_ON = "command_payload_on"
DEFAULT_COMMUNITY = "private"
DEFAULT_PAYLOAD_OFF = 0
DEFAULT_PAYLOAD_ON = 1
MAP_SNMP_VARTYPES = {
"Counter32": Counter32,
"Counter64": Counter64,
"Gauge32": Gauge32,
"Integer32": Integer32,
"Integer": Integer,
"IpAddress": IpAddress,
"Null": Null,
# TODO: support tuple ObjectIdentifier; currently only str is supported
"ObjectIdentifier": ObjectIdentifier,
"OctetString": OctetString,
"Opaque": Opaque,
"TimeTicks": TimeTicks,
"Unsigned32": Unsigned32,
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_BASEOID): cv.string,
vol.Optional(CONF_COMMAND_OID): cv.string,
vol.Optional(CONF_COMMAND_PAYLOAD_ON): cv.string,
vol.Optional(CONF_COMMAND_PAYLOAD_OFF): cv.string,
vol.Optional(CONF_COMMUNITY, default=DEFAULT_COMMUNITY): cv.string,
vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PAYLOAD_OFF, default=DEFAULT_PAYLOAD_OFF): cv.string,
vol.Optional(CONF_PAYLOAD_ON, default=DEFAULT_PAYLOAD_ON): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_VERSION, default=DEFAULT_VERSION): vol.In(SNMP_VERSIONS),
vol.Optional(CONF_USERNAME): cv.string,
vol.Optional(CONF_AUTH_KEY): cv.string,
vol.Optional(CONF_AUTH_PROTOCOL, default=DEFAULT_AUTH_PROTOCOL): vol.In(
MAP_AUTH_PROTOCOLS
),
vol.Optional(CONF_PRIV_KEY): cv.string,
vol.Optional(CONF_PRIV_PROTOCOL, default=DEFAULT_PRIV_PROTOCOL): vol.In(
MAP_PRIV_PROTOCOLS
),
vol.Optional(CONF_VARTYPE, default=DEFAULT_VARTYPE): cv.string,
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the SNMP switch."""
name = config.get(CONF_NAME)
host = config.get(CONF_HOST)
port = config.get(CONF_PORT)
community = config.get(CONF_COMMUNITY)
baseoid = config.get(CONF_BASEOID)
command_oid = config.get(CONF_COMMAND_OID)
command_payload_on = config.get(CONF_COMMAND_PAYLOAD_ON)
command_payload_off = config.get(CONF_COMMAND_PAYLOAD_OFF)
version = config.get(CONF_VERSION)
username = config.get(CONF_USERNAME)
authkey = config.get(CONF_AUTH_KEY)
authproto = config.get(CONF_AUTH_PROTOCOL)
privkey = config.get(CONF_PRIV_KEY)
privproto = config.get(CONF_PRIV_PROTOCOL)
payload_on = config.get(CONF_PAYLOAD_ON)
payload_off = config.get(CONF_PAYLOAD_OFF)
vartype = config.get(CONF_VARTYPE)
async_add_entities(
[
SnmpSwitch(
name,
host,
port,
community,
baseoid,
command_oid,
version,
username,
authkey,
authproto,
privkey,
privproto,
payload_on,
payload_off,
command_payload_on,
command_payload_off,
vartype,
)
],
True,
)
class SnmpSwitch(SwitchEntity):
"""Representation of a SNMP switch."""
def __init__(
self,
name,
host,
port,
community,
baseoid,
commandoid,
version,
username,
authkey,
authproto,
privkey,
privproto,
payload_on,
payload_off,
command_payload_on,
command_payload_off,
vartype,
):
"""Initialize the switch."""
self._name = name
self._baseoid = baseoid
self._vartype = vartype
# Set the command OID to the base OID if command OID is unset
self._commandoid = commandoid or baseoid
self._command_payload_on = command_payload_on or payload_on
self._command_payload_off = command_payload_off or payload_off
self._state = None
self._payload_on = payload_on
self._payload_off = payload_off
if version == "3":
if not authkey:
authproto = "none"
if not privkey:
privproto = "none"
self._request_args = [
SnmpEngine(),
UsmUserData(
username,
authKey=authkey or None,
privKey=privkey or None,
authProtocol=getattr(hlapi, MAP_AUTH_PROTOCOLS[authproto]),
privProtocol=getattr(hlapi, MAP_PRIV_PROTOCOLS[privproto]),
),
UdpTransportTarget((host, port)),
ContextData(),
]
else:
self._request_args = [
SnmpEngine(),
CommunityData(community, mpModel=SNMP_VERSIONS[version]),
UdpTransportTarget((host, port)),
ContextData(),
]
async def async_turn_on(self, **kwargs):
"""Turn on the switch."""
# If vartype set, use it - http://snmplabs.com/pysnmp/docs/api-reference.html#pysnmp.smi.rfc1902.ObjectType
await self._execute_command(self._command_payload_on)
async def async_turn_off(self, **kwargs):
"""Turn off the switch."""
await self._execute_command(self._command_payload_off)
async def _execute_command(self, command):
# User did not set vartype and command is not a digit
if self._vartype == "none" and not self._command_payload_on.isdigit():
await self._set(command)
# User set vartype Null, command must be an empty string
elif self._vartype == "Null":
await self._set(Null(""))
# user did not set vartype but command is digit: defaulting to Integer
# or user did set vartype
else:
await self._set(MAP_SNMP_VARTYPES.get(self._vartype, Integer)(command))
async def async_update(self):
"""Update the state."""
errindication, errstatus, errindex, restable = await getCmd(
*self._request_args, ObjectType(ObjectIdentity(self._baseoid))
)
if errindication:
_LOGGER.error("SNMP error: %s", errindication)
elif errstatus:
_LOGGER.error(
"SNMP error: %s at %s",
errstatus.prettyPrint(),
errindex and restable[-1][int(errindex) - 1] or "?",
)
else:
for resrow in restable:
if resrow[-1] == self._payload_on:
self._state = True
elif resrow[-1] == Integer(self._payload_on):
self._state = True
elif resrow[-1] == self._payload_off:
self._state = False
elif resrow[-1] == Integer(self._payload_off):
self._state = False
else:
self._state = None
@property
def name(self):
"""Return the switch's name."""
return self._name
@property
def is_on(self):
"""Return true if switch is on; False if off. None if unknown."""
return self._state
async def _set(self, value):
await setCmd(
*self._request_args, ObjectType(ObjectIdentity(self._commandoid), value)
)
|
import os
import re
import weakref
from markups import MarkdownMarkup, ReStructuredTextMarkup, TextileMarkup
from ReText import globalSettings, tablemode, readFromSettings
from PyQt5.QtCore import pyqtSignal, QFileInfo, QPoint, QRect, QSize, Qt
from PyQt5.QtGui import QColor, QImage, QKeyEvent, QMouseEvent, QPainter, \
QPalette, QTextCursor, QTextFormat, QWheelEvent, QGuiApplication
from PyQt5.QtWidgets import QAction, QApplication, QFileDialog, QLabel, QTextEdit, QWidget
try:
from ReText.fakevimeditor import ReTextFakeVimHandler
except ImportError:
ReTextFakeVimHandler = None
colors = {
'marginLine': QColor(0xdc, 0xd2, 0xdc),
'currentLineHighlight': QColor(0xff, 0xff, 0xc8),
'infoArea': QColor(0xaa, 0xff, 0x55, 0xaa),
'statsArea': QColor(0xff, 0xaa, 0x55, 0xaa),
'lineNumberArea': Qt.cyan,
'lineNumberAreaText': Qt.darkCyan
}
colorValues = {
colorName: readFromSettings(
'ColorScheme/' + colorName, QColor, default=colors[colorName])
for colorName in colors
}
def documentIndentMore(document, cursor, globalSettings=globalSettings):
if cursor.hasSelection():
block = document.findBlock(cursor.selectionStart())
end = document.findBlock(cursor.selectionEnd()).next()
cursor.beginEditBlock()
while block != end:
cursor.setPosition(block.position())
if globalSettings.tabInsertsSpaces:
cursor.insertText(' ' * globalSettings.tabWidth)
else:
cursor.insertText('\t')
block = block.next()
cursor.endEditBlock()
else:
indent = globalSettings.tabWidth - (cursor.positionInBlock()
% globalSettings.tabWidth)
if globalSettings.tabInsertsSpaces:
cursor.insertText(' ' * indent)
else:
cursor.insertText('\t')
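# Worked example of the else-branch above (tabWidth assumed to be 4): with the
# cursor at column 6 the indent is 4 - (6 % 4) = 2, so two spaces are inserted
# to reach the next tab stop; with tabInsertsSpaces disabled a literal tab is
# inserted instead.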
def documentIndentLess(document, cursor, globalSettings=globalSettings):
if cursor.hasSelection():
block = document.findBlock(cursor.selectionStart())
end = document.findBlock(cursor.selectionEnd()).next()
else:
block = document.findBlock(cursor.position())
end = block.next()
cursor.beginEditBlock()
while block != end:
cursor.setPosition(block.position())
if document.characterAt(cursor.position()) == '\t':
cursor.deleteChar()
else:
pos = 0
while document.characterAt(cursor.position()) == ' ' \
and pos < globalSettings.tabWidth:
pos += 1
cursor.deleteChar()
block = block.next()
cursor.endEditBlock()
class ReTextEdit(QTextEdit):
resized = pyqtSignal(QRect)
scrollLimitReached = pyqtSignal(QWheelEvent)
returnBlockPattern = re.compile("^[\\s]*([*>-]|\\d+\\.) ")
orderedListPattern = re.compile("^([\\s]*)(\\d+)\\. $")
wordPattern = re.compile(r"\w+")
nonAlphaNumPattern = re.compile(r"\W")
surroundKeysSelfClose = [
Qt.Key_Underscore,
Qt.Key_Asterisk,
Qt.Key_QuoteDbl,
Qt.Key_Apostrophe
]
surroundKeysOtherClose = {
Qt.Key_ParenLeft: ')',
Qt.Key_BracketLeft: ']'
}
def __init__(self, parent, settings=globalSettings):
QTextEdit.__init__(self)
self.tab = weakref.proxy(parent)
self.parent = parent.p
self.undoRedoActive = False
self.tableModeEnabled = False
self.setAcceptRichText(False)
self.lineNumberArea = LineNumberArea(self)
self.infoArea = LineInfoArea(self)
self.statistics = (0, 0, 0)
self.statsArea = TextInfoArea(self)
self.updateFont()
self.setWrapModeAndWidth()
self.document().blockCountChanged.connect(self.updateLineNumberAreaWidth)
self.cursorPositionChanged.connect(self.highlightCurrentLine)
self.document().contentsChange.connect(self.contentsChange)
self.settings = settings
if globalSettings.useFakeVim:
self.installFakeVimHandler()
def setWrapModeAndWidth(self):
if globalSettings.rightMarginWrap and (self.rect().topRight().x() > self.marginx):
self.setLineWrapMode(QTextEdit.FixedPixelWidth)
self.setLineWrapColumnOrWidth(self.marginx)
else:
self.setLineWrapMode(QTextEdit.WidgetWidth)
def updateFont(self):
self.setFont(globalSettings.editorFont)
metrics = self.fontMetrics()
self.marginx = (int(self.document().documentMargin())
+ metrics.width(' ' * globalSettings.rightMargin))
self.setTabStopWidth(globalSettings.tabWidth * self.fontMetrics().width(' '))
self.updateLineNumberAreaWidth()
self.infoArea.updateTextAndGeometry()
self.updateTextStatistics()
self.statsArea.updateTextAndGeometry()
if globalSettings.wideCursor:
self.setCursorWidth(metrics.averageCharWidth())
def paintEvent(self, event):
if not globalSettings.rightMargin:
return QTextEdit.paintEvent(self, event)
painter = QPainter(self.viewport())
painter.setPen(colorValues['marginLine'])
y1 = self.rect().topLeft().y()
y2 = self.rect().bottomLeft().y()
painter.drawLine(self.marginx, y1, self.marginx, y2)
QTextEdit.paintEvent(self, event)
def wheelEvent(self, event):
modifiers = QGuiApplication.keyboardModifiers()
if modifiers == Qt.ControlModifier:
font = globalSettings.editorFont
size = font.pointSize()
scroll = event.angleDelta().y()
if scroll > 0:
size += 1
elif scroll < 0:
size -= 1
else:
return
font.setPointSize(size)
self.parent.setEditorFont(font)
else:
QTextEdit.wheelEvent(self, event)
if event.angleDelta().y() < 0:
scrollBarLimit = self.verticalScrollBar().maximum()
else:
scrollBarLimit = self.verticalScrollBar().minimum()
if self.verticalScrollBar().value() == scrollBarLimit:
self.scrollLimitReached.emit(event)
def scrollContentsBy(self, dx, dy):
QTextEdit.scrollContentsBy(self, dx, dy)
self.lineNumberArea.update()
def contextMenuEvent(self, event):
# Create base menu
menu = self.createStandardContextMenu()
if self.parent.actionPasteImage.isEnabled():
actions = menu.actions()
actionPaste = menu.findChild(QAction, "edit-paste")
actionNextAfterPaste = actions[actions.index(actionPaste) + 1]
menu.insertAction(actionNextAfterPaste, self.parent.actionPasteImage)
text = self.toPlainText()
if not text:
menu.exec(event.globalPos())
return
# Check word under the cursor
oldcursor = self.textCursor()
cursor = self.cursorForPosition(event.pos())
curchar = self.document().characterAt(cursor.position())
isalpha = curchar.isalpha()
dictionary = self.tab.highlighter.dictionary
word = None
if isalpha and not (oldcursor.hasSelection() and oldcursor.selectedText() != cursor.selectedText()):
cursor.select(QTextCursor.WordUnderCursor)
word = cursor.selectedText()
if word is not None and dictionary and not dictionary.check(word):
self.setTextCursor(cursor)
suggestions = dictionary.suggest(word)
actions = [self.parent.act(sug, trig=self.fixWord(sug)) for sug in suggestions]
menu.insertSeparator(menu.actions()[0])
for action in actions[::-1]:
menu.insertAction(menu.actions()[0], action)
menu.insertSeparator(menu.actions()[0])
menu.insertAction(menu.actions()[0], self.parent.act(self.tr('Add to dictionary'), trig=self.learnWord(word)))
menu.addSeparator()
menu.addAction(self.parent.actionMoveUp)
menu.addAction(self.parent.actionMoveDown)
menu.exec(event.globalPos())
def fixWord(self, correctword):
return lambda: self.insertPlainText(correctword)
def learnWord(self, newword):
return lambda: self.addNewWord(newword)
def addNewWord(self, newword):
cursor = self.textCursor()
block = cursor.block()
cursor.clearSelection()
self.setTextCursor(cursor)
dictionary = self.tab.highlighter.dictionary
if (dictionary is None) or not newword:
return
dictionary.add(newword)
self.tab.highlighter.rehighlightBlock(block)
def isSurroundKey(self, key):
return key in self.surroundKeysSelfClose or key in self.surroundKeysOtherClose
def getCloseKey(self, event, key):
if key in self.surroundKeysSelfClose:
return event.text()
if key in self.surroundKeysOtherClose:
return self.surroundKeysOtherClose[key]
def surroundText(self, cursor, event, key):
text = cursor.selectedText()
keyStr = event.text()
keyClose = self.getCloseKey(event, key)
cursor.insertText(keyStr + text + keyClose)
def keyPressEvent(self, event):
key = event.key()
cursor = self.textCursor()
if key == Qt.Key_Backspace and event.modifiers() & Qt.GroupSwitchModifier:
# Workaround for https://bugreports.qt.io/browse/QTBUG-49771
event = QKeyEvent(event.type(), event.key(),
event.modifiers() ^ Qt.GroupSwitchModifier)
if key == Qt.Key_Tab:
documentIndentMore(self.document(), cursor)
elif key == Qt.Key_Backtab:
documentIndentLess(self.document(), cursor)
elif key == Qt.Key_Return:
markupClass = self.tab.getActiveMarkupClass()
if event.modifiers() & Qt.ControlModifier:
cursor.insertText('\n')
self.ensureCursorVisible()
elif self.tableModeEnabled and tablemode.handleReturn(cursor, markupClass,
newRow=(event.modifiers() & Qt.ShiftModifier)):
self.setTextCursor(cursor)
self.ensureCursorVisible()
else:
if event.modifiers() & Qt.ShiftModifier and markupClass == MarkdownMarkup:
# Insert Markdown-style line break
cursor.insertText('  ')
self.handleReturn(cursor)
elif cursor.selectedText() and self.isSurroundKey(key):
self.surroundText(cursor, event, key)
else:
if event.text() and self.tableModeEnabled:
cursor.beginEditBlock()
QTextEdit.keyPressEvent(self, event)
if event.text() and self.tableModeEnabled:
cursor.endEditBlock()
def handleReturn(self, cursor):
# Select text between the cursor and the line start
cursor.movePosition(QTextCursor.StartOfBlock, QTextCursor.KeepAnchor)
text = cursor.selectedText()
length = len(text)
match = self.returnBlockPattern.search(text)
if match is not None:
matchedText = match.group(0)
if len(matchedText) == length:
cursor.removeSelectedText()
matchedText = ''
else:
matchOL = self.orderedListPattern.match(matchedText)
if matchOL is not None:
matchedPrefix = matchOL.group(1)
matchedNumber = int(matchOL.group(2))
nextNumber = matchedNumber if self.settings.orderedListMode == 'repeat' else matchedNumber + 1
matchedText = matchedPrefix + str(nextNumber) + ". "
else:
matchedText = ''
# Reset the cursor
cursor = self.textCursor()
cursor.insertText('\n' + matchedText)
self.ensureCursorVisible()
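# Illustrative behaviour of handleReturn (values invented, based on the pattern
# logic above): pressing Return after "2. item" continues the list with "\n3. "
# (or "\n2. " when orderedListMode == 'repeat'), after "- item" the matched
# marker "- " is repeated on the new line, and on a line containing only a bare
# marker such as "- " the marker is removed instead of being repeated.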
def moveLineUp(self):
self.moveLine(QTextCursor.PreviousBlock)
def moveLineDown(self):
self.moveLine(QTextCursor.NextBlock)
def moveLine(self, direction):
cursor = self.textCursor()
# Select the current block
cursor.movePosition(QTextCursor.StartOfBlock, QTextCursor.MoveAnchor)
cursor.movePosition(QTextCursor.NextBlock, QTextCursor.KeepAnchor)
text = cursor.selectedText()
# Remove it
cursor.removeSelectedText()
# Move to the wanted block
cursor.movePosition(direction, QTextCursor.MoveAnchor)
# Paste the line
cursor.insertText(text)
# Move to the pasted block
cursor.movePosition(QTextCursor.PreviousBlock, QTextCursor.MoveAnchor)
# Update cursor
self.setTextCursor(cursor)
def lineNumberAreaWidth(self):
if not globalSettings.lineNumbersEnabled:
return 0
cursor = QTextCursor(self.document())
cursor.movePosition(QTextCursor.End)
if globalSettings.relativeLineNumbers:
digits = len(str(cursor.blockNumber())) + 1
else:
digits = len(str(cursor.blockNumber() + 1))
return 5 + self.fontMetrics().width('9') * digits
def updateLineNumberAreaWidth(self, blockcount=0):
self.setViewportMargins(self.lineNumberAreaWidth(), 0, 0, 0)
def resizeEvent(self, event):
QTextEdit.resizeEvent(self, event)
rect = self.contentsRect()
self.resized.emit(rect)
self.lineNumberArea.setGeometry(rect.left(), rect.top(),
self.lineNumberAreaWidth(), rect.height())
self.infoArea.updateTextAndGeometry()
self.statsArea.updateTextAndGeometry()
self.setWrapModeAndWidth()
self.ensureCursorVisible()
def highlightCurrentLine(self):
if globalSettings.relativeLineNumbers:
self.lineNumberArea.update()
if globalSettings.highlightCurrentLine == 'disabled':
return self.setExtraSelections([])
selection = QTextEdit.ExtraSelection()
selection.format.setBackground(colorValues['currentLineHighlight'])
selection.format.setProperty(QTextFormat.FullWidthSelection, True)
selection.cursor = self.textCursor()
selection.cursor.clearSelection()
selections = [selection]
if globalSettings.highlightCurrentLine == 'wrapped-line':
selections.append(QTextEdit.ExtraSelection())
selections[0].cursor.movePosition(QTextCursor.StartOfBlock)
selections[0].cursor.movePosition(QTextCursor.EndOfBlock, QTextCursor.KeepAnchor)
selections[1].format.setBackground(colorValues['currentLineHighlight'])
selections[1].format.setProperty(QTextFormat.FullWidthSelection, True)
selections[1].cursor = self.textCursor()
selections[1].cursor.movePosition(QTextCursor.EndOfBlock)
self.setExtraSelections(selections)
def enableTableMode(self, enable):
self.tableModeEnabled = enable
def backupCursorPositionOnLine(self):
return self.textCursor().positionInBlock()
def restoreCursorPositionOnLine(self, positionOnLine):
cursor = self.textCursor()
cursor.setPosition(cursor.block().position() + positionOnLine)
self.setTextCursor(cursor)
def contentsChange(self, pos, removed, added):
if self.tableModeEnabled:
markupClass = self.tab.getActiveMarkupClass()
cursorPosition = self.backupCursorPositionOnLine()
tablemode.adjustTableToChanges(self.document(), pos, added - removed, markupClass)
self.restoreCursorPositionOnLine(cursorPosition)
self.lineNumberArea.update()
self.updateTextStatistics()
def findNextImageName(self, filenames):
highestNumber = 0
for filename in filenames:
m = re.match(r'image(\d+).png', filename, re.IGNORECASE)
if m:
number = int(m.group(1))
highestNumber = max(number, highestNumber)
return 'image%04d.png' % (highestNumber + 1)
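# Example with invented file names: findNextImageName(['image0003.png', 'notes.txt'])
# returns 'image0004.png'; with no matching files it returns 'image0001.png'.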
def getImageFilename(self):
if self.tab.fileName:
saveDir = os.path.dirname(self.tab.fileName)
else:
saveDir = os.getcwd()
imageFileName = self.findNextImageName(os.listdir(saveDir))
return QFileDialog.getSaveFileName(self,
self.tr('Save image'),
os.path.join(saveDir, imageFileName),
self.tr('Images (*.png *.jpg)'))[0]
def makeFileNameRelative(self, fileName):
"""Tries to make the given fileName relative. If the document is
not saved, or the fileName is on a different root, returns the
original fileName.
"""
if self.tab.fileName:
currentDir = os.path.dirname(self.tab.fileName)
try:
return os.path.relpath(fileName, currentDir)
except ValueError: # different roots
return fileName
return fileName
def getImageMarkup(self, fileName):
"""Returns markup for image in the current markup language.
This method is also accessed in ReTextWindow.insertImage.
"""
link = self.makeFileNameRelative(fileName)
markupClass = self.tab.getActiveMarkupClass()
if markupClass == MarkdownMarkup:
return '![%s](%s)' % (QFileInfo(link).baseName(), link)
elif markupClass == ReStructuredTextMarkup:
return '.. image:: %s' % link
elif markupClass == TextileMarkup:
return '!%s!' % link
def pasteImage(self):
mimeData = QApplication.instance().clipboard().mimeData()
fileName = self.getImageFilename()
if not fileName or not mimeData.hasImage():
return
image = QImage(mimeData.imageData())
image.save(fileName)
imageText = self.getImageMarkup(fileName)
self.textCursor().insertText(imageText)
def installFakeVimHandler(self):
if ReTextFakeVimHandler:
fakeVimEditor = ReTextFakeVimHandler(self, self.parent)
fakeVimEditor.setSaveAction(self.parent.actionSave)
fakeVimEditor.setQuitAction(self.parent.actionQuit)
self.parent.actionFakeVimMode.triggered.connect(fakeVimEditor.remove)
def updateTextStatistics(self):
if not globalSettings.documentStatsEnabled:
return
text = self.toPlainText()
wordCount = len(self.wordPattern.findall(text))
alphaNums = self.nonAlphaNumPattern.sub('', text)
alphaNumCount = len(alphaNums)
characterCount = len(text)
self.statistics = (wordCount, alphaNumCount, characterCount)
class LineNumberArea(QWidget):
def __init__(self, editor):
QWidget.__init__(self, editor)
self.editor = editor
def sizeHint(self):
return QSize(self.editor.lineNumberAreaWidth(), 0)
def paintEvent(self, event):
if not globalSettings.lineNumbersEnabled:
return QWidget.paintEvent(self, event)
painter = QPainter(self)
painter.fillRect(event.rect(), colorValues['lineNumberArea'])
painter.setPen(colorValues['lineNumberAreaText'])
cursor = self.editor.cursorForPosition(QPoint(0, 0))
atEnd = False
fontHeight = self.fontMetrics().height()
height = self.editor.height()
if globalSettings.relativeLineNumbers:
relativeTo = self.editor.textCursor().blockNumber()
else:
relativeTo = -1
while not atEnd:
rect = self.editor.cursorRect(cursor)
if rect.top() >= height:
break
number = str(cursor.blockNumber() - relativeTo).replace('-', '−')
painter.drawText(0, rect.top(), self.width() - 2,
fontHeight, Qt.AlignRight, number)
cursor.movePosition(QTextCursor.EndOfBlock)
atEnd = cursor.atEnd()
if not atEnd:
cursor.movePosition(QTextCursor.NextBlock)
class InfoArea(QLabel):
def __init__(self, editor, baseColor):
QWidget.__init__(self, editor)
self.editor = editor
self.editor.cursorPositionChanged.connect(self.updateTextAndGeometry)
self.updateTextAndGeometry()
self.setAutoFillBackground(True)
self.baseColor = baseColor
palette = self.palette()
palette.setColor(QPalette.Window, self.baseColor)
self.setPalette(palette)
self.setCursor(Qt.IBeamCursor)
def updateTextAndGeometry(self):
text = self.getText()
(w, h) = self.getAreaSize(text)
(x, y) = self.getAreaPosition(w, h)
self.setText(text)
self.resize(w, h)
self.move(x, y)
self.setVisible(not globalSettings.useFakeVim)
def getAreaSize(self, text):
metrics = self.fontMetrics()
width = metrics.width(text)
height = metrics.height()
return width, height
def getAreaPosition(self, width, height):
return 0, 0
def getText(self):
return ""
def enterEvent(self, event):
palette = self.palette()
windowColor = QColor(self.baseColor)
windowColor.setAlpha(0x20)
palette.setColor(QPalette.Window, windowColor)
textColor = palette.color(QPalette.WindowText)
textColor.setAlpha(0x20)
palette.setColor(QPalette.WindowText, textColor)
self.setPalette(palette)
def leaveEvent(self, event):
palette = self.palette()
palette.setColor(QPalette.Window, self.baseColor)
palette.setColor(QPalette.WindowText,
self.editor.palette().color(QPalette.WindowText))
self.setPalette(palette)
def mousePressEvent(self, event):
pos = self.mapToParent(event.pos())
pos.setX(pos.x() - self.editor.lineNumberAreaWidth())
newEvent = QMouseEvent(event.type(), pos,
event.button(), event.buttons(),
event.modifiers())
self.editor.mousePressEvent(newEvent)
mouseReleaseEvent = mousePressEvent
mouseDoubleClickEvent = mousePressEvent
mouseMoveEvent = mousePressEvent
class LineInfoArea(InfoArea):
def __init__(self, editor):
InfoArea.__init__(self, editor, colorValues['infoArea'])
def getAreaPosition(self, width, height):
viewport = self.editor.viewport()
rightSide = viewport.width() + self.editor.lineNumberAreaWidth()
if globalSettings.documentStatsEnabled:
return rightSide - width, viewport.height() - (2 * height)
else:
return rightSide - width, viewport.height() - height
def getText(self):
template = '%d : %d'
cursor = self.editor.textCursor()
block = cursor.blockNumber() + 1
position = cursor.positionInBlock()
return template % (block, position)
class TextInfoArea(InfoArea):
def __init__(self, editor):
InfoArea.__init__(self, editor, colorValues['statsArea'])
def getAreaPosition(self, width, height):
viewport = self.editor.viewport()
rightSide = viewport.width() + self.editor.lineNumberAreaWidth()
return rightSide - width, viewport.height() - height
def getText(self):
if not globalSettings.documentStatsEnabled:
return
template = self.tr('%d w | %d a | %d c',
'count of words, alphanumeric characters, all characters')
words, alphaNums, characters = self.editor.statistics
return template % (words, alphaNums, characters)
|
import warnings
from io import BytesIO
from urllib.parse import urlparse, parse_qsl
from .util import CaseInsensitiveDict
import logging
log = logging.getLogger(__name__)
class Request:
"""
VCR's representation of a request.
"""
def __init__(self, method, uri, body, headers):
self.method = method
self.uri = uri
self._was_file = hasattr(body, "read")
if self._was_file:
self.body = body.read()
else:
self.body = body
self.headers = headers
log.debug("Invoking Request %s", self.uri)
@property
def headers(self):
return self._headers
@headers.setter
def headers(self, value):
if not isinstance(value, HeadersDict):
value = HeadersDict(value)
self._headers = value
@property
def body(self):
return BytesIO(self._body) if self._was_file else self._body
@body.setter
def body(self, value):
if isinstance(value, str):
value = value.encode("utf-8")
self._body = value
def add_header(self, key, value):
warnings.warn(
"Request.add_header is deprecated. " "Please assign to request.headers instead.",
DeprecationWarning,
)
self.headers[key] = value
@property
def scheme(self):
return urlparse(self.uri).scheme
@property
def host(self):
return urlparse(self.uri).hostname
@property
def port(self):
parse_uri = urlparse(self.uri)
port = parse_uri.port
if port is None:
try:
port = {"https": 443, "http": 80}[parse_uri.scheme]
except KeyError:
pass
return port
@property
def path(self):
return urlparse(self.uri).path
@property
def query(self):
q = urlparse(self.uri).query
return sorted(parse_qsl(q))
# alias for backwards compatibility
@property
def url(self):
return self.uri
# alias for backwards compatibility
@property
def protocol(self):
return self.scheme
def __str__(self):
return "<Request ({}) {}>".format(self.method, self.uri)
def __repr__(self):
return self.__str__()
def _to_dict(self):
return {
"method": self.method,
"uri": self.uri,
"body": self.body,
"headers": {k: [v] for k, v in self.headers.items()},
}
@classmethod
def _from_dict(cls, dct):
return Request(**dct)
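# Hedged example of the derived properties above (URI invented):
#   r = Request("GET", "https://example.com/path?b=2&a=1", "", {})
#   r.port   -> 443                       (falls back to the scheme default)
#   r.path   -> "/path"
#   r.query  -> [("a", "1"), ("b", "2")]  (query pairs are sorted)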
class HeadersDict(CaseInsensitiveDict):
"""
There is a weird quirk in HTTP. You can send the same header twice. For
this reason, headers are represented by a dict, with lists as the values.
However, it appears that HTTPlib is completely incapable of sending the
same header twice. This puts me in a weird position: I want to be able to
accurately represent HTTP headers in cassettes, but I don't want the extra
step of always having to do [0] in the general case, i.e.
request.headers['key'][0]
In addition, some servers sometimes send the same header more than once,
and httplib *can* deal with this situation.
Furthermore, I wanted to keep the request and response cassette format as
similar as possible.
For this reason, in cassettes I keep a dict with lists as values, but once
deserialized into VCR, I keep them as plain, naked dicts.
"""
def __setitem__(self, key, value):
if isinstance(value, (tuple, list)):
value = value[0]
# Preserve the case from the first time this key was set.
old = self._store.get(key.lower())
if old:
key = old[0]
super().__setitem__(key, value)
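# Hedged usage sketch (not part of vcr's API; values invented): list values from
# a cassette are collapsed to their first element and the key keeps the casing
# it was first assigned with, while lookups stay case-insensitive.
def _headers_dict_example():
    h = HeadersDict()
    h["Content-Type"] = ["application/json", "text/plain"]
    assert h["content-type"] == "application/json"  # first value kept
    h["CONTENT-TYPE"] = "text/html"  # original "Content-Type" casing is preserved
    return h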
|
from django.db import migrations
from weblate.utils.hash import calculate_hash
def migrate_json_units(apps, schema_editor):
Unit = apps.get_model("trans", "Unit")
db_alias = schema_editor.connection.alias
units = (
Unit.objects.using(db_alias)
.filter(
translation__component__file_format__in=(
"json",
"arb",
"go-i18n-json",
"i18next",
"webextension",
"json-nested",
)
)
.prefetch_related("translation__component")
)
for unit in units.iterator():
if unit.translation.component.template:
newid = calculate_hash(unit.context)
else:
newid = calculate_hash(unit.source, unit.context)
if newid != unit.id_hash:
unit.id_hash = newid
unit.save(update_fields=["id_hash"])
class Migration(migrations.Migration):
dependencies = [
("trans", "0094_project_language_aliases"),
]
operations = [
migrations.RunPython(
migrate_json_units, migrations.RunPython.noop, elidable=True
),
]
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from s3 import S3BucketCollector
##########################################################################
class TestS3BucketCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('S3BucketCollector', {
'interval': 10
})
self.collector = S3BucketCollector(config, None)
def test_import(self):
self.assertTrue(S3BucketCollector)
##########################################################################
if __name__ == "__main__":
unittest.main()
|
import logging
from absl import flags
from perfkitbenchmarker import errors
from perfkitbenchmarker import vm_util
FLAGS = flags.FLAGS
MEMCACHED_PORT = 11211
flags.DEFINE_integer('memcached_size_mb', 64,
'Size of memcached cache in megabytes.')
flags.DEFINE_integer('memcached_num_threads', 4,
'Number of worker threads.')
def _Install(vm):
"""Installs the memcached server on the VM."""
vm.InstallPackages('memcached')
vm.InstallPackages('libmemcached-tools')
def YumInstall(vm):
"""Installs the memcache package on the VM."""
_Install(vm)
def AptInstall(vm):
"""Installs the memcache package on the VM."""
_Install(vm)
@vm_util.Retry(poll_interval=5, timeout=300,
retryable_exceptions=(errors.Resource.RetryableCreationError,))
def _WaitForServerUp(vm):
"""Block until the memcached server is up and responsive.
Will timeout after 5 minutes, and raise an exception. Before the timeout
expires any exceptions are caught and the status check is retried.
We check the status of the server by issuing a 'stats' command. This should
return many lines of form 'STAT <name> <value>' if the server is up and
running.
Args:
vm: VirtualMachine memcached has been installed on.
Raises:
errors.Resource.RetryableCreationError when response is not as expected or
if there is an error connecting to the port or otherwise running the
remote check command.
"""
address = vm.internal_ip
port = MEMCACHED_PORT
logging.info('Trying to connect to memcached at %s:%s', address, port)
try:
out, _ = vm.RemoteCommand(
'(echo -e "stats\n")| netcat -q 1 %s %s' % (address, port))
if out.startswith('STAT '):
logging.info('memcached server stats received. Server up and running.')
return
except errors.VirtualMachine.RemoteCommandError as e:
raise errors.Resource.RetryableCreationError(
'memcached server not up yet: %s.' % str(e))
else:
raise errors.Resource.RetryableCreationError(
'memcached server not up yet. Expected "STAT" but got "%s".' % out)
def ConfigureAndStart(vm, smp_affinity=False):
"""Prepare the memcached server on a VM.
Args:
vm: VirtualMachine to install and start memcached on.
smp_affinity: Boolean. Whether or not to set smp_affinity.
"""
vm.Install('memcached_server')
if smp_affinity:
vm.SetSmpAffinity()
for scratch_disk in vm.scratch_disks:
vm.RemoteCommand('sudo umount %s' % scratch_disk.mount_point)
# update security config to allow incoming network
vm.RemoteCommand(
'sudo sed -i "s/-l .*/-l 0.0.0.0/g" /etc/memcached.conf')
# update memory size
vm.RemoteCommand(
'sudo sed -i "s/-m .*/-m {size}/g" /etc/memcached.conf'.format(
size=FLAGS.memcached_size_mb))
# update default port
vm.RemoteCommand(
'sudo sed -i "s/-p .*/-p {port}/g" /etc/memcached.conf'.format(
port=MEMCACHED_PORT))
vm.RemoteCommand(
'echo "-t {threads}" | sudo tee -a /etc/memcached.conf'.format(
threads=FLAGS.memcached_num_threads))
# restart the default running memcached to run it with custom configurations.
vm.RemoteCommand('sudo service memcached restart')
_WaitForServerUp(vm)
logging.info('memcached server configured and started.')
def GetVersion(vm):
"""Returns the version of the memcached server installed."""
results, _ = vm.RemoteCommand('memcached -help |grep -m 1 "memcached"'
'| tr -d "\n"')
return results
def StopMemcached(vm):
vm.RemoteCommand('sudo service memcached stop')
def FlushMemcachedServer(ip, port):
vm_util.IssueCommand(
'(echo -e "flush_all\n" ; sleep 1)| netcat %s %s' % (ip, port))
def AptUninstall(vm):
"""Removes the memcache package on the VM."""
vm.RemoteCommand('sudo apt-get --purge autoremove -y memcached')
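# Illustrative helper (not part of this module's API) for the 'STAT <name>
# <value>' lines that _WaitForServerUp expects from the stats probe; the sample
# output in the trailing comment is invented.
def _parse_stats_example(output):
  """Return a dict of stat name -> value from raw `stats` output."""
  stats = {}
  for line in output.splitlines():
    parts = line.split()
    if len(parts) == 3 and parts[0] == 'STAT':
      stats[parts[1]] = parts[2]
  return stats
# _parse_stats_example('STAT uptime 12\nSTAT threads 4\nEND')
# -> {'uptime': '12', 'threads': '4'}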
|
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.core import callback
# pylint: disable=unused-import
from .const import CALC_METHODS, CONF_CALC_METHOD, DEFAULT_CALC_METHOD, DOMAIN, NAME
class IslamicPrayerFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle the Islamic Prayer config flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL
@staticmethod
@callback
def async_get_options_flow(config_entry):
"""Get the options flow for this handler."""
return IslamicPrayerOptionsFlowHandler(config_entry)
async def async_step_user(self, user_input=None):
"""Handle a flow initialized by the user."""
if self._async_current_entries():
return self.async_abort(reason="single_instance_allowed")
if user_input is None:
return self.async_show_form(step_id="user")
return self.async_create_entry(title=NAME, data=user_input)
async def async_step_import(self, import_config):
"""Import from config."""
return await self.async_step_user(user_input=import_config)
class IslamicPrayerOptionsFlowHandler(config_entries.OptionsFlow):
"""Handle Islamic Prayer client options."""
def __init__(self, config_entry):
"""Initialize options flow."""
self.config_entry = config_entry
async def async_step_init(self, user_input=None):
"""Manage options."""
if user_input is not None:
return self.async_create_entry(title="", data=user_input)
options = {
vol.Optional(
CONF_CALC_METHOD,
default=self.config_entry.options.get(
CONF_CALC_METHOD, DEFAULT_CALC_METHOD
),
): vol.In(CALC_METHODS)
}
return self.async_show_form(step_id="init", data_schema=vol.Schema(options))
|
from .spark_model import SparkModel
try:
from elephas.java import java_classes, adapter
except ImportError:
raise Exception("Warning: java classes couldn't be loaded.")
class ParameterAveragingModel(SparkModel):
def __init__(self, java_spark_context, model, num_workers, batch_size, averaging_frequency=5,
num_batches_prefetch=0, collect_stats=False, save_file='temp.h5', *args, **kwargs):
"""ParameterAveragingModel
:param java_spark_context: JavaSparkContext, initialized through pyjnius
:param model: compiled Keras model
:param num_workers: number of Spark workers/executors.
:param batch_size: batch size used for model training
:param averaging_frequency: int, after how many batches of training averaging takes place
:param num_batches_prefetch: int, how many batches to pre-fetch, deactivated if 0.
:param collect_stats: boolean, whether statistics are collected during training
:param save_file: where to store elephas model temporarily.
"""
SparkModel.__init__(self, model=model, batch_size=batch_size, mode='synchronous',
averaging_frequency=averaging_frequency, num_batches_prefetch=num_batches_prefetch,
num_workers=num_workers, collect_stats=collect_stats, *args, **kwargs)
self.save(save_file)
model_file = java_classes.File(save_file)
keras_model_type = model.__class__.__name__
self.java_spark_model = dl4j_import(
java_spark_context, model_file, keras_model_type)
def fit_rdd(self, data_set_rdd, epochs):
for _ in range(epochs):
self.java_spark_model.fit(data_set_rdd)
def get_keras_model(self):
model = self.master_network
java_model = self.java_spark_model.getNetwork()
weights = adapter.retrieve_keras_weights(java_model)
model.set_weights(weights)
return model
class ParameterSharingModel(SparkModel):
def __init__(self, java_spark_context, model, num_workers, batch_size,
shake_frequency=0, min_threshold=1e-5, update_threshold=1e-3, workers_per_node=-1,
num_batches_prefetch=0, step_delay=50, step_trigger=0.05, threshold_step=1e-5,
collect_stats=False, save_file='temp.h5', *args, **kwargs):
"""ParameterSharingModel
:param java_spark_context: JavaSparkContext, initialized through pyjnius
:param model: compiled Keras model
:param num_workers: number of Spark workers/executors.
:param batch_size: batch size used for model training
:param shake_frequency:
:param min_threshold:
:param update_threshold:
:param workers_per_node:
:param num_batches_prefetch:
:param step_delay:
:param step_trigger:
:param threshold_step:
:param collect_stats:
:param save_file:
:param args:
:param kwargs:
"""
SparkModel.__init__(self, model=model, num_workers=num_workers, batch_size=batch_size, mode='asynchronous',
shake_frequency=shake_frequency, min_threshold=min_threshold,
update_threshold=update_threshold, workers_per_node=workers_per_node,
num_batches_prefetch=num_batches_prefetch, step_delay=step_delay, step_trigger=step_trigger,
threshold_step=threshold_step, collect_stats=collect_stats, *args, **kwargs)
self.save(save_file)
model_file = java_classes.File(save_file)
keras_model_type = model.__class__.__name__
self.java_spark_model = dl4j_import(
java_spark_context, model_file, keras_model_type)
def fit_rdd(self, data_set_rdd, epochs):
for _ in range(epochs):
self.java_spark_model.fit(data_set_rdd)
def get_keras_model(self):
model = self.master_network
java_model = self.java_spark_model.getNetwork()
weights = adapter.retrieve_keras_weights(java_model)
model.set_weights(weights)
return model
def dl4j_import(jsc, model_file, keras_model_type):
emi = java_classes.ElephasModelImport
if keras_model_type == "Sequential":
try:
return emi.importElephasSequentialModelAndWeights(
jsc, model_file.absolutePath)
except Exception:
print("Couldn't load Keras model into DL4J")
elif keras_model_type == "Model":
try:
return emi.importElephasModelAndWeights(jsc, model_file.absolutePath)
except Exception:
print("Couldn't load Keras model into DL4J")
else:
raise Exception(
"Keras model not understood, got: {}".format(keras_model_type))
|