from datetime import timedelta
import logging
from typing import Callable, NamedTuple
from pyHS100 import SmartDeviceException
import pytest
from homeassistant.components import tplink
from homeassistant.components.homeassistant import (
DOMAIN as HA_DOMAIN,
SERVICE_UPDATE_ENTITY,
)
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_COLOR_TEMP,
ATTR_HS_COLOR,
DOMAIN as LIGHT_DOMAIN,
)
from homeassistant.components.tplink.common import (
CONF_DIMMER,
CONF_DISCOVERY,
CONF_LIGHT,
)
from homeassistant.components.tplink.light import SLEEP_TIME
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_HOST,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
)
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
from homeassistant.util.dt import utcnow
from tests.async_mock import Mock, PropertyMock, patch
from tests.common import async_fire_time_changed
class LightMockData(NamedTuple):
"""Mock light data."""
sys_info: dict
light_state: dict
set_light_state: Callable[[dict], None]
set_light_state_mock: Mock
get_light_state_mock: Mock
current_consumption_mock: Mock
get_sysinfo_mock: Mock
get_emeter_daily_mock: Mock
get_emeter_monthly_mock: Mock
class SmartSwitchMockData(NamedTuple):
"""Mock smart switch data."""
sys_info: dict
state_mock: Mock
brightness_mock: Mock
get_sysinfo_mock: Mock
@pytest.fixture(name="light_mock_data")
def light_mock_data_fixture() -> None:
"""Create light mock data."""
sys_info = {
"sw_ver": "1.2.3",
"hw_ver": "2.3.4",
"mac": "aa:bb:cc:dd:ee:ff",
"mic_mac": "00:11:22:33:44",
"type": "light",
"hwId": "1234",
"fwId": "4567",
"oemId": "891011",
"dev_name": "light1",
"rssi": 11,
"latitude": "0",
"longitude": "0",
"is_color": True,
"is_dimmable": True,
"is_variable_color_temp": True,
"model": "LB120",
"alias": "light1",
}
light_state = {
"on_off": True,
"dft_on_state": {
"brightness": 12,
"color_temp": 3200,
"hue": 110,
"saturation": 90,
},
"brightness": 13,
"color_temp": 3300,
"hue": 110,
"saturation": 90,
}
def set_light_state(state) -> None:
nonlocal light_state
drt_on_state = light_state["dft_on_state"]
drt_on_state.update(state.get("dft_on_state", {}))
light_state.update(state)
light_state["dft_on_state"] = drt_on_state
return light_state
set_light_state_patch = patch(
"homeassistant.components.tplink.common.SmartBulb.set_light_state",
side_effect=set_light_state,
)
get_light_state_patch = patch(
"homeassistant.components.tplink.common.SmartBulb.get_light_state",
return_value=light_state,
)
current_consumption_patch = patch(
"homeassistant.components.tplink.common.SmartDevice.current_consumption",
return_value=3.23,
)
get_sysinfo_patch = patch(
"homeassistant.components.tplink.common.SmartDevice.get_sysinfo",
return_value=sys_info,
)
get_emeter_daily_patch = patch(
"homeassistant.components.tplink.common.SmartDevice.get_emeter_daily",
return_value={
1: 1.01,
2: 1.02,
3: 1.03,
4: 1.04,
5: 1.05,
6: 1.06,
7: 1.07,
8: 1.08,
9: 1.09,
10: 1.10,
11: 1.11,
12: 1.12,
},
)
get_emeter_monthly_patch = patch(
"homeassistant.components.tplink.common.SmartDevice.get_emeter_monthly",
return_value={
1: 2.01,
2: 2.02,
3: 2.03,
4: 2.04,
5: 2.05,
6: 2.06,
7: 2.07,
8: 2.08,
9: 2.09,
10: 2.10,
11: 2.11,
12: 2.12,
},
)
with set_light_state_patch as set_light_state_mock, get_light_state_patch as get_light_state_mock, current_consumption_patch as current_consumption_mock, get_sysinfo_patch as get_sysinfo_mock, get_emeter_daily_patch as get_emeter_daily_mock, get_emeter_monthly_patch as get_emeter_monthly_mock:
yield LightMockData(
sys_info=sys_info,
light_state=light_state,
set_light_state=set_light_state,
set_light_state_mock=set_light_state_mock,
get_light_state_mock=get_light_state_mock,
current_consumption_mock=current_consumption_mock,
get_sysinfo_mock=get_sysinfo_mock,
get_emeter_daily_mock=get_emeter_daily_mock,
get_emeter_monthly_mock=get_emeter_monthly_mock,
)
@pytest.fixture(name="dimmer_switch_mock_data")
def dimmer_switch_mock_data_fixture() -> None:
"""Create dimmer switch mock data."""
sys_info = {
"sw_ver": "1.2.3",
"hw_ver": "2.3.4",
"mac": "aa:bb:cc:dd:ee:ff",
"mic_mac": "00:11:22:33:44",
"type": "switch",
"hwId": "1234",
"fwId": "4567",
"oemId": "891011",
"dev_name": "dimmer1",
"rssi": 11,
"latitude": "0",
"longitude": "0",
"is_color": False,
"is_dimmable": True,
"is_variable_color_temp": False,
"model": "HS220",
"alias": "dimmer1",
"feature": ":",
"relay_state": 1,
"brightness": 13,
}
def state(*args, **kwargs):
nonlocal sys_info
if len(args) == 0:
return sys_info["relay_state"]
if args[0] == "ON":
sys_info["relay_state"] = 1
else:
sys_info["relay_state"] = 0
def brightness(*args, **kwargs):
nonlocal sys_info
if len(args) == 0:
return sys_info["brightness"]
if sys_info["brightness"] == 0:
sys_info["relay_state"] = 0
else:
sys_info["relay_state"] = 1
sys_info["brightness"] = args[0]
get_sysinfo_patch = patch(
"homeassistant.components.tplink.common.SmartDevice.get_sysinfo",
return_value=sys_info,
)
state_patch = patch(
"homeassistant.components.tplink.common.SmartPlug.state",
new_callable=PropertyMock,
side_effect=state,
)
brightness_patch = patch(
"homeassistant.components.tplink.common.SmartPlug.brightness",
new_callable=PropertyMock,
side_effect=brightness,
)
with brightness_patch as brightness_mock, state_patch as state_mock, get_sysinfo_patch as get_sysinfo_mock:
yield SmartSwitchMockData(
sys_info=sys_info,
brightness_mock=brightness_mock,
state_mock=state_mock,
get_sysinfo_mock=get_sysinfo_mock,
)
async def update_entity(hass: HomeAssistant, entity_id: str) -> None:
"""Run an update action for an entity."""
await hass.services.async_call(
HA_DOMAIN,
SERVICE_UPDATE_ENTITY,
{ATTR_ENTITY_ID: entity_id},
blocking=True,
)
await hass.async_block_till_done()
async def test_smartswitch(
hass: HomeAssistant, dimmer_switch_mock_data: SmartSwitchMockData
) -> None:
"""Test function."""
sys_info = dimmer_switch_mock_data.sys_info
await async_setup_component(hass, HA_DOMAIN, {})
await hass.async_block_till_done()
await async_setup_component(
hass,
tplink.DOMAIN,
{
tplink.DOMAIN: {
CONF_DISCOVERY: False,
CONF_DIMMER: [{CONF_HOST: "123.123.123.123"}],
}
},
)
await hass.async_block_till_done()
assert hass.states.get("light.dimmer1")
await hass.services.async_call(
LIGHT_DOMAIN,
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: "light.dimmer1"},
blocking=True,
)
await hass.async_block_till_done()
await update_entity(hass, "light.dimmer1")
assert hass.states.get("light.dimmer1").state == "off"
assert sys_info["relay_state"] == 0
await hass.services.async_call(
LIGHT_DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "light.dimmer1", ATTR_BRIGHTNESS: 50},
blocking=True,
)
await hass.async_block_till_done()
await update_entity(hass, "light.dimmer1")
state = hass.states.get("light.dimmer1")
assert state.state == "on"
assert state.attributes["brightness"] == 51
assert sys_info["relay_state"] == 1
await hass.services.async_call(
LIGHT_DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "light.dimmer1", ATTR_BRIGHTNESS: 55},
blocking=True,
)
await hass.async_block_till_done()
await update_entity(hass, "light.dimmer1")
state = hass.states.get("light.dimmer1")
assert state.state == "on"
assert state.attributes["brightness"] == 56
assert sys_info["brightness"] == 22
sys_info["relay_state"] = 0
sys_info["brightness"] = 66
await hass.services.async_call(
LIGHT_DOMAIN,
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: "light.dimmer1"},
blocking=True,
)
await hass.async_block_till_done()
await update_entity(hass, "light.dimmer1")
state = hass.states.get("light.dimmer1")
assert state.state == "off"
await hass.services.async_call(
LIGHT_DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "light.dimmer1"},
blocking=True,
)
await hass.async_block_till_done()
await update_entity(hass, "light.dimmer1")
state = hass.states.get("light.dimmer1")
assert state.state == "on"
assert state.attributes["brightness"] == 168
assert sys_info["brightness"] == 66
async def test_light(hass: HomeAssistant, light_mock_data: LightMockData) -> None:
"""Test function."""
light_state = light_mock_data.light_state
set_light_state = light_mock_data.set_light_state
await async_setup_component(hass, HA_DOMAIN, {})
await hass.async_block_till_done()
await async_setup_component(
hass,
tplink.DOMAIN,
{
tplink.DOMAIN: {
CONF_DISCOVERY: False,
CONF_LIGHT: [{CONF_HOST: "123.123.123.123"}],
}
},
)
await hass.async_block_till_done()
assert hass.states.get("light.light1")
await hass.services.async_call(
LIGHT_DOMAIN,
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: "light.light1"},
blocking=True,
)
await hass.async_block_till_done()
await update_entity(hass, "light.light1")
assert hass.states.get("light.light1").state == "off"
assert light_state["on_off"] == 0
await hass.services.async_call(
LIGHT_DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "light.light1", ATTR_COLOR_TEMP: 222, ATTR_BRIGHTNESS: 50},
blocking=True,
)
await hass.async_block_till_done()
await update_entity(hass, "light.light1")
state = hass.states.get("light.light1")
assert state.state == "on"
assert state.attributes["brightness"] == 51
assert state.attributes["hs_color"] == (110, 90)
assert state.attributes["color_temp"] == 222
assert light_state["on_off"] == 1
await hass.services.async_call(
LIGHT_DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "light.light1", ATTR_BRIGHTNESS: 55, ATTR_HS_COLOR: (23, 27)},
blocking=True,
)
await hass.async_block_till_done()
await update_entity(hass, "light.light1")
state = hass.states.get("light.light1")
assert state.state == "on"
assert state.attributes["brightness"] == 56
assert state.attributes["hs_color"] == (23, 27)
assert light_state["brightness"] == 22
assert light_state["hue"] == 23
assert light_state["saturation"] == 27
light_state["on_off"] = 0
light_state["dft_on_state"]["on_off"] = 0
light_state["brightness"] = 66
light_state["dft_on_state"]["brightness"] = 66
light_state["color_temp"] = 6400
light_state["dft_on_state"]["color_temp"] = 123
light_state["hue"] = 77
light_state["dft_on_state"]["hue"] = 77
light_state["saturation"] = 78
light_state["dft_on_state"]["saturation"] = 78
await hass.services.async_call(
LIGHT_DOMAIN,
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: "light.light1"},
blocking=True,
)
await hass.async_block_till_done()
await update_entity(hass, "light.light1")
state = hass.states.get("light.light1")
assert state.state == "off"
await hass.services.async_call(
LIGHT_DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "light.light1"},
blocking=True,
)
await hass.async_block_till_done()
await update_entity(hass, "light.light1")
state = hass.states.get("light.light1")
assert state.state == "on"
assert state.attributes["brightness"] == 168
assert state.attributes["hs_color"] == (77, 78)
assert state.attributes["color_temp"] == 156
assert light_state["brightness"] == 66
assert light_state["hue"] == 77
assert light_state["saturation"] == 78
set_light_state({"brightness": 91, "dft_on_state": {"brightness": 91}})
await update_entity(hass, "light.light1")
state = hass.states.get("light.light1")
assert state.attributes["brightness"] == 232
async def test_get_light_state_retry(
hass: HomeAssistant, light_mock_data: LightMockData
) -> None:
"""Test function."""
# Setup test for retries for sysinfo.
get_sysinfo_call_count = 0
def get_sysinfo_side_effect():
nonlocal get_sysinfo_call_count
get_sysinfo_call_count += 1
# Need to fail on the 2nd call because the first call is used to
# determine if the device is online during the light platform's
# setup hook.
if get_sysinfo_call_count == 2:
raise SmartDeviceException()
return light_mock_data.sys_info
light_mock_data.get_sysinfo_mock.side_effect = get_sysinfo_side_effect
# Setup test for retries of setting state information.
set_state_call_count = 0
def set_light_state_side_effect(state_data: dict):
nonlocal set_state_call_count, light_mock_data
set_state_call_count += 1
if set_state_call_count == 1:
raise SmartDeviceException()
return light_mock_data.set_light_state(state_data)
light_mock_data.set_light_state_mock.side_effect = set_light_state_side_effect
# Setup component.
await async_setup_component(hass, HA_DOMAIN, {})
await hass.async_block_till_done()
await async_setup_component(
hass,
tplink.DOMAIN,
{
tplink.DOMAIN: {
CONF_DISCOVERY: False,
CONF_LIGHT: [{CONF_HOST: "123.123.123.123"}],
}
},
)
await hass.async_block_till_done()
await hass.services.async_call(
LIGHT_DOMAIN,
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: "light.light1"},
blocking=True,
)
await hass.async_block_till_done()
await update_entity(hass, "light.light1")
assert light_mock_data.get_sysinfo_mock.call_count > 1
assert light_mock_data.get_light_state_mock.call_count > 1
assert light_mock_data.set_light_state_mock.call_count > 1
assert light_mock_data.get_sysinfo_mock.call_count < 40
assert light_mock_data.get_light_state_mock.call_count < 40
assert light_mock_data.set_light_state_mock.call_count < 10
async def test_update_failure(
hass: HomeAssistant, light_mock_data: LightMockData, caplog
):
"""Test that update failures are logged."""
await async_setup_component(hass, HA_DOMAIN, {})
await hass.async_block_till_done()
await async_setup_component(
hass,
tplink.DOMAIN,
{
tplink.DOMAIN: {
CONF_DISCOVERY: False,
CONF_LIGHT: [{CONF_HOST: "123.123.123.123"}],
}
},
)
await hass.async_block_till_done()
caplog.clear()
caplog.set_level(logging.WARNING)
await hass.helpers.entity_component.async_update_entity("light.light1")
assert caplog.text == ""
with patch("homeassistant.components.tplink.light.MAX_ATTEMPTS", 0):
caplog.clear()
caplog.set_level(logging.WARNING)
await hass.helpers.entity_component.async_update_entity("light.light1")
assert "Could not read state for 123.123.123.123|light1" in caplog.text
get_state_call_count = 0
def get_light_state_side_effect():
nonlocal get_state_call_count
get_state_call_count += 1
if get_state_call_count == 1:
raise SmartDeviceException()
return light_mock_data.light_state
light_mock_data.get_light_state_mock.side_effect = get_light_state_side_effect
with patch("homeassistant.components.tplink.light", MAX_ATTEMPTS=2, SLEEP_TIME=0):
caplog.clear()
caplog.set_level(logging.DEBUG)
await update_entity(hass, "light.light1")
assert (
f"Retrying in {SLEEP_TIME} seconds for 123.123.123.123|light1"
in caplog.text
)
assert "Device 123.123.123.123|light1 responded after " in caplog.text
async def test_async_setup_entry_unavailable(
hass: HomeAssistant, light_mock_data: LightMockData, caplog
):
"""Test unavailable devices trigger a later retry."""
caplog.clear()
caplog.set_level(logging.WARNING)
with patch(
"homeassistant.components.tplink.common.SmartDevice.get_sysinfo",
side_effect=SmartDeviceException,
):
await async_setup_component(hass, HA_DOMAIN, {})
await hass.async_block_till_done()
await async_setup_component(
hass,
tplink.DOMAIN,
{
tplink.DOMAIN: {
CONF_DISCOVERY: False,
CONF_LIGHT: [{CONF_HOST: "123.123.123.123"}],
}
},
)
await hass.async_block_till_done()
assert not hass.states.get("light.light1")
future = utcnow() + timedelta(seconds=30)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
assert hass.states.get("light.light1")
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl import flags
try:
from absl.testing import _bazel_selected_py3
except ImportError:
_bazel_selected_py3 = None
FLAGS = flags.FLAGS
def get_executable_path(py_binary_name):
"""Returns the executable path of a py_binary.
This returns the executable path of a py_binary that is in another Bazel
target's data dependencies.
  On Linux/macOS, the path and __file__ have the same root directory.
  On Windows, bazel builds an .exe file and we need to use the MANIFEST file
  to locate the actual binary.
Args:
py_binary_name: string, the name of a py_binary that is in another Bazel
target's data dependencies.
Raises:
RuntimeError: Raised when it cannot locate the executable path.
"""
root, ext = os.path.splitext(py_binary_name)
suffix = 'py3' if _bazel_selected_py3 else 'py2'
py_binary_name = '{}_{}{}'.format(root, suffix, ext)
if os.name == 'nt':
py_binary_name += '.exe'
manifest_file = os.path.join(FLAGS.test_srcdir, 'MANIFEST')
workspace_name = os.environ['TEST_WORKSPACE']
manifest_entry = '{}/{}'.format(workspace_name, py_binary_name)
with open(manifest_file, 'r') as manifest_fd:
for line in manifest_fd:
tokens = line.strip().split(' ')
if len(tokens) != 2:
continue
if manifest_entry == tokens[0]:
return tokens[1]
raise RuntimeError(
'Cannot locate executable path for {}, MANIFEST file: {}.'.format(
py_binary_name, manifest_file))
else:
# NOTE: __file__ may be .py or .pyc, depending on how the module was
# loaded and executed.
path = __file__
# Use the package name to find the root directory: every dot is
# a directory, plus one for ourselves.
for _ in range(__name__.count('.') + 1):
path = os.path.dirname(path)
root_directory = path
return os.path.join(root_directory, py_binary_name)
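# Hedged usage sketch (not part of this module): a test that lists a
# py_binary in its data dependencies might resolve and run it roughly as in
# the commented code below. The binary name 'some_helper_binary' and the
# flag are illustrative placeholders, not real targets.
#
#   import subprocess
#   helper_path = get_executable_path('some_helper_binary')
#   subprocess.check_call([helper_path, '--some_flag=1'])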
|
from lemur.plugins.base import Plugin
class MetricPlugin(Plugin):
type = "metric"
def submit(
self, metric_name, metric_type, metric_value, metric_tags=None, options=None
):
raise NotImplementedError
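# Hedged sketch (not part of Lemur): a concrete metric plugin subclasses
# MetricPlugin and implements submit(). The class name, slug, and the
# print-based backend below are illustrative assumptions; real plugins also
# carry the usual plugin metadata expected by Lemur's plugin registry.
#
#   class StdoutMetricPlugin(MetricPlugin):
#       title = "Stdout Metric"
#       slug = "stdout-metric"
#
#       def submit(
#           self, metric_name, metric_type, metric_value, metric_tags=None, options=None
#       ):
#           print(f"{metric_name} ({metric_type}) = {metric_value} tags={metric_tags}")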
|
import asyncio
from pyruckus import Ruckus
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import device_registry
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC
from .const import (
API_AP,
API_DEVICE_NAME,
API_ID,
API_MAC,
API_MODEL,
API_SYSTEM_OVERVIEW,
API_VERSION,
COORDINATOR,
DOMAIN,
MANUFACTURER,
PLATFORMS,
UNDO_UPDATE_LISTENERS,
)
from .coordinator import RuckusUnleashedDataUpdateCoordinator
async def async_setup(hass: HomeAssistant, config: dict) -> bool:
"""Set up the Ruckus Unleashed component."""
hass.data[DOMAIN] = {}
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Ruckus Unleashed from a config entry."""
try:
ruckus = await hass.async_add_executor_job(
Ruckus,
entry.data[CONF_HOST],
entry.data[CONF_USERNAME],
entry.data[CONF_PASSWORD],
)
except ConnectionError as error:
raise ConfigEntryNotReady from error
coordinator = RuckusUnleashedDataUpdateCoordinator(hass, ruckus=ruckus)
await coordinator.async_refresh()
if not coordinator.last_update_success:
raise ConfigEntryNotReady
system_info = await hass.async_add_executor_job(ruckus.system_info)
registry = await device_registry.async_get_registry(hass)
ap_info = await hass.async_add_executor_job(ruckus.ap_info)
for device in ap_info[API_AP][API_ID].values():
registry.async_get_or_create(
config_entry_id=entry.entry_id,
connections={(CONNECTION_NETWORK_MAC, device[API_MAC])},
identifiers={(CONNECTION_NETWORK_MAC, device[API_MAC])},
manufacturer=MANUFACTURER,
name=device[API_DEVICE_NAME],
model=device[API_MODEL],
sw_version=system_info[API_SYSTEM_OVERVIEW][API_VERSION],
)
hass.data[DOMAIN][entry.entry_id] = {
COORDINATOR: coordinator,
UNDO_UPDATE_LISTENERS: [],
}
for platform in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, platform)
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
if unload_ok:
for listener in hass.data[DOMAIN][entry.entry_id][UNDO_UPDATE_LISTENERS]:
listener()
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
|
import sys
import os
import mne
def run():
"""Run command."""
from mne.commands.utils import get_optparser, _add_verbose_flag
parser = get_optparser(__file__)
parser.add_option('--bem', dest='bem_fname',
help='The name of the file containing the '
'triangulations of the BEM surfaces and the '
'conductivities of the compartments. The standard '
'ending for this file is -bem.fif.',
metavar="FILE")
parser.add_option('--sol', dest='bem_sol_fname',
help='The name of the resulting file containing BEM '
'solution (geometry matrix). It uses the linear '
'collocation approach. The file should end with '
                           '-bem-sol.fif.',
metavar='FILE', default=None)
_add_verbose_flag(parser)
options, args = parser.parse_args()
bem_fname = options.bem_fname
bem_sol_fname = options.bem_sol_fname
    verbose = options.verbose is not None
if bem_fname is None:
parser.print_help()
sys.exit(1)
if bem_sol_fname is None:
base, _ = os.path.splitext(bem_fname)
bem_sol_fname = base + '-sol.fif'
bem_model = mne.read_bem_surfaces(bem_fname, patch_stats=False,
verbose=verbose)
bem_solution = mne.make_bem_solution(bem_model, verbose=verbose)
mne.write_bem_solution(bem_sol_fname, bem_solution)
mne.utils.run_command_if_main()
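# Hedged usage sketch (not part of this module): assuming this script is
# registered as an MNE command-line entry point, an invocation could look
# like the line below. The command name and input file name are assumptions,
# not taken from this file.
#
#   $ mne prepare_bem_model --bem sample-5120-bem.fif
#
# With --sol omitted, the solution is written next to the input as
# <base>-sol.fif, per the fallback logic above.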
|
from homeassistant import data_entry_flow
from homeassistant.components.shopping_list.const import DOMAIN
from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER
async def test_import(hass):
"""Test entry will be imported."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data={}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
async def test_user(hass):
"""Test we can start a config flow."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
async def test_user_confirm(hass):
"""Test we can finish a config flow."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data={}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["result"].data == {}
|
import docker_registry.testing as testing
class TestQueryDumb(testing.Query):
def __init__(self):
self.scheme = 'dumb'
class TestDriverDumb(testing.Driver):
def __init__(self):
self.scheme = 'dumb'
self.path = ''
self.config = testing.Config({})
class TestQueryFile(testing.Query):
def __init__(self):
self.scheme = 'file'
class TestDriverFile(testing.Driver):
def __init__(self):
self.scheme = 'file'
self.path = ''
self.config = testing.Config({})
|
from datetime import timedelta
import logging
from dwdwfsapi import DwdWeatherWarningsAPI
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ATTR_ATTRIBUTION, CONF_MONITORED_CONDITIONS, CONF_NAME
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
ATTRIBUTION = "Data provided by DWD"
ATTR_REGION_NAME = "region_name"
ATTR_REGION_ID = "region_id"
ATTR_LAST_UPDATE = "last_update"
ATTR_WARNING_COUNT = "warning_count"
API_ATTR_WARNING_NAME = "event"
API_ATTR_WARNING_TYPE = "event_code"
API_ATTR_WARNING_LEVEL = "level"
API_ATTR_WARNING_HEADLINE = "headline"
API_ATTR_WARNING_DESCRIPTION = "description"
API_ATTR_WARNING_INSTRUCTION = "instruction"
API_ATTR_WARNING_START = "start_time"
API_ATTR_WARNING_END = "end_time"
API_ATTR_WARNING_PARAMETERS = "parameters"
API_ATTR_WARNING_COLOR = "color"
DEFAULT_NAME = "DWD-Weather-Warnings"
CONF_REGION_NAME = "region_name"
CURRENT_WARNING_SENSOR = "current_warning_level"
ADVANCE_WARNING_SENSOR = "advance_warning_level"
SCAN_INTERVAL = timedelta(minutes=15)
MONITORED_CONDITIONS = {
CURRENT_WARNING_SENSOR: [
"Current Warning Level",
None,
"mdi:close-octagon-outline",
],
ADVANCE_WARNING_SENSOR: [
"Advance Warning Level",
None,
"mdi:close-octagon-outline",
],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_REGION_NAME): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(
CONF_MONITORED_CONDITIONS, default=list(MONITORED_CONDITIONS)
): vol.All(cv.ensure_list, [vol.In(MONITORED_CONDITIONS)]),
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the DWD-Weather-Warnings sensor."""
name = config.get(CONF_NAME)
region_name = config.get(CONF_REGION_NAME)
api = WrappedDwDWWAPI(DwdWeatherWarningsAPI(region_name))
sensors = []
for sensor_type in config[CONF_MONITORED_CONDITIONS]:
sensors.append(DwdWeatherWarningsSensor(api, name, sensor_type))
add_entities(sensors, True)
class DwdWeatherWarningsSensor(Entity):
"""Representation of a DWD-Weather-Warnings sensor."""
def __init__(self, api, name, sensor_type):
"""Initialize a DWD-Weather-Warnings sensor."""
self._api = api
self._name = name
self._sensor_type = sensor_type
@property
def name(self):
"""Return the name of the sensor."""
return f"{self._name} {MONITORED_CONDITIONS[self._sensor_type][0]}"
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return MONITORED_CONDITIONS[self._sensor_type][2]
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return MONITORED_CONDITIONS[self._sensor_type][1]
@property
def state(self):
"""Return the state of the device."""
if self._sensor_type == CURRENT_WARNING_SENSOR:
return self._api.api.current_warning_level
return self._api.api.expected_warning_level
@property
def device_state_attributes(self):
"""Return the state attributes of the DWD-Weather-Warnings."""
data = {
ATTR_ATTRIBUTION: ATTRIBUTION,
ATTR_REGION_NAME: self._api.api.warncell_name,
ATTR_REGION_ID: self._api.api.warncell_id,
ATTR_LAST_UPDATE: self._api.api.last_update,
}
if self._sensor_type == CURRENT_WARNING_SENSOR:
searched_warnings = self._api.api.current_warnings
else:
searched_warnings = self._api.api.expected_warnings
data[ATTR_WARNING_COUNT] = len(searched_warnings)
for i, warning in enumerate(searched_warnings, 1):
data[f"warning_{i}_name"] = warning[API_ATTR_WARNING_NAME]
data[f"warning_{i}_type"] = warning[API_ATTR_WARNING_TYPE]
data[f"warning_{i}_level"] = warning[API_ATTR_WARNING_LEVEL]
data[f"warning_{i}_headline"] = warning[API_ATTR_WARNING_HEADLINE]
data[f"warning_{i}_description"] = warning[API_ATTR_WARNING_DESCRIPTION]
data[f"warning_{i}_instruction"] = warning[API_ATTR_WARNING_INSTRUCTION]
data[f"warning_{i}_start"] = warning[API_ATTR_WARNING_START]
data[f"warning_{i}_end"] = warning[API_ATTR_WARNING_END]
data[f"warning_{i}_parameters"] = warning[API_ATTR_WARNING_PARAMETERS]
data[f"warning_{i}_color"] = warning[API_ATTR_WARNING_COLOR]
# Dictionary for the attribute containing the complete warning
warning_copy = warning.copy()
warning_copy[API_ATTR_WARNING_START] = data[f"warning_{i}_start"]
warning_copy[API_ATTR_WARNING_END] = data[f"warning_{i}_end"]
data[f"warning_{i}"] = warning_copy
return data
@property
def available(self):
"""Could the device be accessed during the last update call."""
return self._api.api.data_valid
def update(self):
"""Get the latest data from the DWD-Weather-Warnings API."""
_LOGGER.debug(
"Update requested for %s (%s) by %s",
self._api.api.warncell_name,
self._api.api.warncell_id,
self._sensor_type,
)
self._api.update()
class WrappedDwDWWAPI:
"""Wrapper for the DWD-Weather-Warnings api."""
def __init__(self, api):
"""Initialize a DWD-Weather-Warnings wrapper."""
self.api = api
@Throttle(SCAN_INTERVAL)
def update(self):
"""Get the latest data from the DWD-Weather-Warnings API."""
self.api.update()
_LOGGER.debug("Update performed")
|
from collections import OrderedDict
from typing import Callable, Optional
import voluptuous as vol
from .const import POLICY_CONTROL, POLICY_EDIT, POLICY_READ, SUBCAT_ALL
from .models import PermissionLookup
from .types import CategoryType, SubCategoryDict, ValueType
from .util import SubCatLookupType, compile_policy, lookup_all
SINGLE_ENTITY_SCHEMA = vol.Any(
True,
vol.Schema(
{
vol.Optional(POLICY_READ): True,
vol.Optional(POLICY_CONTROL): True,
vol.Optional(POLICY_EDIT): True,
}
),
)
ENTITY_DOMAINS = "domains"
ENTITY_AREAS = "area_ids"
ENTITY_DEVICE_IDS = "device_ids"
ENTITY_ENTITY_IDS = "entity_ids"
ENTITY_VALUES_SCHEMA = vol.Any(True, vol.Schema({str: SINGLE_ENTITY_SCHEMA}))
ENTITY_POLICY_SCHEMA = vol.Any(
True,
vol.Schema(
{
vol.Optional(SUBCAT_ALL): SINGLE_ENTITY_SCHEMA,
vol.Optional(ENTITY_AREAS): ENTITY_VALUES_SCHEMA,
vol.Optional(ENTITY_DEVICE_IDS): ENTITY_VALUES_SCHEMA,
vol.Optional(ENTITY_DOMAINS): ENTITY_VALUES_SCHEMA,
vol.Optional(ENTITY_ENTITY_IDS): ENTITY_VALUES_SCHEMA,
}
),
)
def _lookup_domain(
perm_lookup: PermissionLookup, domains_dict: SubCategoryDict, entity_id: str
) -> Optional[ValueType]:
"""Look up entity permissions by domain."""
return domains_dict.get(entity_id.split(".", 1)[0])
def _lookup_area(
perm_lookup: PermissionLookup, area_dict: SubCategoryDict, entity_id: str
) -> Optional[ValueType]:
"""Look up entity permissions by area."""
entity_entry = perm_lookup.entity_registry.async_get(entity_id)
if entity_entry is None or entity_entry.device_id is None:
return None
device_entry = perm_lookup.device_registry.async_get(entity_entry.device_id)
if device_entry is None or device_entry.area_id is None:
return None
return area_dict.get(device_entry.area_id)
def _lookup_device(
perm_lookup: PermissionLookup, devices_dict: SubCategoryDict, entity_id: str
) -> Optional[ValueType]:
"""Look up entity permissions by device."""
entity_entry = perm_lookup.entity_registry.async_get(entity_id)
if entity_entry is None or entity_entry.device_id is None:
return None
return devices_dict.get(entity_entry.device_id)
def _lookup_entity_id(
perm_lookup: PermissionLookup, entities_dict: SubCategoryDict, entity_id: str
) -> Optional[ValueType]:
"""Look up entity permission by entity id."""
return entities_dict.get(entity_id)
def compile_entities(
policy: CategoryType, perm_lookup: PermissionLookup
) -> Callable[[str, str], bool]:
"""Compile policy into a function that tests policy."""
subcategories: SubCatLookupType = OrderedDict()
subcategories[ENTITY_ENTITY_IDS] = _lookup_entity_id
subcategories[ENTITY_DEVICE_IDS] = _lookup_device
subcategories[ENTITY_AREAS] = _lookup_area
subcategories[ENTITY_DOMAINS] = _lookup_domain
subcategories[SUBCAT_ALL] = lookup_all
return compile_policy(policy, subcategories, perm_lookup)
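# Hedged usage sketch (not part of this module): compile_entities turns a
# policy shaped like ENTITY_POLICY_SCHEMA into a checker that takes an
# entity_id and a permission key. The entity ids, the policy values, and the
# pre-built PermissionLookup (perm_lookup) are illustrative assumptions.
#
#   policy = {
#       "domains": {"light": {POLICY_CONTROL: True}},
#       "entity_ids": {"switch.decorative": True},
#   }
#   allowed = compile_entities(policy, perm_lookup)
#   allowed("light.kitchen", POLICY_CONTROL)  # True
#   allowed("switch.ac", POLICY_CONTROL)      # False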
|
from collections import namedtuple
from datetime import timedelta
import logging
from typing import List
import pybbox
import voluptuous as vol
from homeassistant.components.device_tracker import (
DOMAIN,
PLATFORM_SCHEMA,
DeviceScanner,
)
from homeassistant.const import CONF_HOST
import homeassistant.helpers.config_validation as cv
from homeassistant.util import Throttle
import homeassistant.util.dt as dt_util
_LOGGER = logging.getLogger(__name__)
DEFAULT_HOST = "192.168.1.254"
MIN_TIME_BETWEEN_SCANS = timedelta(seconds=60)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string}
)
def get_scanner(hass, config):
"""Validate the configuration and return a Bbox scanner."""
scanner = BboxDeviceScanner(config[DOMAIN])
return scanner if scanner.success_init else None
Device = namedtuple("Device", ["mac", "name", "ip", "last_update"])
class BboxDeviceScanner(DeviceScanner):
"""This class scans for devices connected to the bbox."""
    def __init__(self, config):
        """Initialize the scanner and get the host from config."""
        self.host = config[CONF_HOST]
        self.last_results: List[Device] = []
self.success_init = self._update_info()
_LOGGER.info("Scanner initialized")
def scan_devices(self):
"""Scan for new devices and return a list with found device IDs."""
self._update_info()
return [device.mac for device in self.last_results]
def get_device_name(self, device):
"""Return the name of the given device or None if we don't know."""
filter_named = [
result.name for result in self.last_results if result.mac == device
]
if filter_named:
return filter_named[0]
return None
@Throttle(MIN_TIME_BETWEEN_SCANS)
def _update_info(self):
"""Check the Bbox for devices.
Returns boolean if scanning successful.
"""
_LOGGER.info("Scanning...")
box = pybbox.Bbox(ip=self.host)
result = box.get_all_connected_devices()
now = dt_util.now()
last_results = []
for device in result:
if device["active"] != 1:
continue
last_results.append(
Device(
device["macaddress"], device["hostname"], device["ipaddress"], now
)
)
self.last_results = last_results
_LOGGER.info("Scan successful")
return True
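# Hedged configuration sketch (not part of this module): based on the
# PLATFORM_SCHEMA above, a configuration.yaml entry could look like the YAML
# below. The platform key "bbox" is assumed from the integration name, and
# the host falls back to DEFAULT_HOST when omitted.
#
#   device_tracker:
#     - platform: bbox
#       host: 192.168.1.254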
|
import os
import io
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from diamond.collector import Collector
from mdstat import MdStatCollector
class TestMdStatCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('MdStatCollector', {
'interval': 10
})
self.collector = MdStatCollector(config, None)
def test_import(self):
self.assertTrue(MdStatCollector)
@patch('__builtin__.open')
@patch('os.access', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_should_open_proc_loadavg(self, publish_mock, open_mock):
MdStatCollector.MDSTAT_PATH = '/proc/mdstat'
if not os.path.exists('/proc/mdstat'):
# on platforms that don't provide /proc/mdstat: don't bother
# testing this.
return
open_mock.return_value = io.BytesIO('')
self.collector.collect()
open_mock.assert_called_once_with('/proc/mdstat', 'r')
@patch.object(Collector, 'publish')
def test_mdstat_empty(self, publish_mock):
MdStatCollector.MDSTAT_PATH = self.getFixturePath('mdstat_empty')
self.collector.collect()
metrics = {}
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_mdstat_multiple(self, publish_mock):
MdStatCollector.MDSTAT_PATH = self.getFixturePath('mdstat_multiple')
self.collector.collect()
metrics = {
'md2.status.superblock_version': 1.2,
'md2.status.actual_members': 2,
'md2.status.total_members': 2,
'md2.status.blocks': 102320,
'md2.member_count.active': 2,
'md2.member_count.faulty': 0,
'md2.member_count.spare': 0,
'md0.status.total_members': 3,
'md0.status.blocks': 39058432,
'md0.status.algorithm': 2,
'md0.status.superblock_version': 1.2,
'md0.status.raid_level': 5,
'md0.status.chunk_size': 524288,
'md0.status.actual_members': 3,
'md0.member_count.active': 3,
'md0.member_count.faulty': 0,
'md0.member_count.spare': 0,
'md1.status.superblock_version': 1.2,
'md1.status.blocks': 199800,
'md1.status.rounding_factor': 1022976,
'md1.member_count.active': 2,
'md1.member_count.faulty': 0,
'md1.member_count.spare': 0
}
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_mdstat_linear(self, publish_mock):
MdStatCollector.MDSTAT_PATH = self.getFixturePath('mdstat_linear')
self.collector.collect()
metrics = {
'md0.status.superblock_version': 1.2,
'md0.status.blocks': 199800,
'md0.status.rounding_factor': 1022976,
'md0.member_count.active': 2,
'md0.member_count.faulty': 0,
'md0.member_count.spare': 0
}
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_mdstat_multipath(self, publish_mock):
MdStatCollector.MDSTAT_PATH = self.getFixturePath('mdstat_multipath')
self.collector.collect()
metrics = {
'md0.status.superblock_version': 1.2,
'md0.status.actual_members': 2,
'md0.status.total_members': 2,
'md0.status.blocks': 102320,
'md0.member_count.active': 2,
'md0.member_count.faulty': 0,
'md0.member_count.spare': 0
}
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_mdstat_raid1(self, publish_mock):
MdStatCollector.MDSTAT_PATH = self.getFixturePath('mdstat_raid1')
self.collector.collect()
metrics = {
'md0.status.superblock_version': 1.2,
'md0.status.actual_members': 2,
'md0.status.total_members': 2,
'md0.status.blocks': 100171776,
'md0.member_count.active': 2,
'md0.member_count.faulty': 0,
'md0.member_count.spare': 0,
'md0.bitmap.total_pages': 1,
'md0.bitmap.allocated_pages': 1,
'md0.bitmap.page_size': 4,
'md0.bitmap.chunk_size': 65536
}
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_mdstat_raid1_failed(self, publish_mock):
MdStatCollector.MDSTAT_PATH = \
self.getFixturePath('mdstat_raid1-failed')
self.collector.collect()
metrics = {
'md0.status.superblock_version': 1.2,
'md0.status.actual_members': 1,
'md0.status.total_members': 2,
'md0.status.blocks': 102272,
'md0.member_count.active': 1,
'md0.member_count.faulty': 1,
'md0.member_count.spare': 0
}
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_mdstat_raid1_recover(self, publish_mock):
MdStatCollector.MDSTAT_PATH = \
self.getFixturePath('mdstat_raid1-recover')
self.collector.collect()
metrics = {
'md0.status.superblock_version': 1.2,
'md0.status.actual_members': 1,
'md0.status.total_members': 2,
'md0.status.blocks': 102272,
'md0.recovery.percent': 99.5,
'md0.recovery.speed': 104726528,
'md0.recovery.remaining_time': 802199,
'md0.member_count.active': 2,
'md0.member_count.faulty': 0,
'md0.member_count.spare': 0,
'md0.bitmap.total_pages': 1,
'md0.bitmap.allocated_pages': 1,
'md0.bitmap.page_size': 4,
'md0.bitmap.chunk_size': 65536
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_mdstat_raid1_spare(self, publish_mock):
MdStatCollector.MDSTAT_PATH = \
self.getFixturePath('mdstat_raid1-spare')
self.collector.collect()
metrics = {
'md0.status.superblock_version': 1.2,
'md0.status.actual_members': 2,
'md0.status.total_members': 2,
'md0.status.blocks': 100171776,
'md0.member_count.active': 2,
'md0.member_count.faulty': 0,
'md0.member_count.spare': 1,
'md0.bitmap.total_pages': 1,
'md0.bitmap.allocated_pages': 1,
'md0.bitmap.page_size': 4,
'md0.bitmap.chunk_size': 65536
}
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_mdstat_raid5(self, publish_mock):
MdStatCollector.MDSTAT_PATH = \
self.getFixturePath('mdstat_raid5')
self.collector.collect()
metrics = {
'md0.status.total_members': 3,
'md0.status.blocks': 39058432,
'md0.status.algorithm': 2,
'md0.status.superblock_version': 1.2,
'md0.status.raid_level': 5,
'md0.status.chunk_size': 524288,
'md0.status.actual_members': 3,
'md0.member_count.active': 3,
'md0.member_count.faulty': 0,
'md0.member_count.spare': 0
}
self.assertPublishedMany(publish_mock, metrics)
if __name__ == "__main__":
unittest.main()
|
import json
import logging
from absl import flags
from perfkitbenchmarker import errors
from perfkitbenchmarker import nfs_service
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.providers import aws
from perfkitbenchmarker.providers.aws import aws_network
from perfkitbenchmarker.providers.aws import util
FLAGS = flags.FLAGS
class AwsNfsService(nfs_service.BaseNfsService):
"""An AWS NFS resource.
Creates the AWS EFS file system and mount point for use with NFS clients.
See https://aws.amazon.com/efs/
"""
CLOUD = aws.CLOUD
NFS_TIERS = ('generalPurpose', 'maxIO')
DEFAULT_NFS_VERSION = '4.1'
DEFAULT_TIER = 'generalPurpose'
def __init__(self, disk_spec, zone):
super(AwsNfsService, self).__init__(disk_spec, zone)
self.region = util.GetRegionFromZone(self.zone)
self.aws_commands = AwsEfsCommands(self.region)
self.disk_spec.disk_size = 0
self.filer_id = None
self.mount_id = None
self.throughput_mode = FLAGS.efs_throughput_mode
self.provisioned_throughput = FLAGS.efs_provisioned_throughput
@property
def network(self):
network_spec = aws_network.AwsNetworkSpec(self.zone)
return aws_network.AwsNetwork.GetNetworkFromNetworkSpec(network_spec)
@property
def subnet_id(self):
if hasattr(self.network, 'subnet'):
return self.network.subnet.id
else:
raise errors.Config.InvalidValue('No subnet in network %s' % self.network)
@property
def security_group(self):
if hasattr(self.network, 'vpc'):
return self.network.vpc.default_security_group_id
# not required when making the mount target
return None
def _Create(self):
logging.info('Creating NFS resource, subnet: %s, security group: %s',
self.subnet_id, self.security_group)
self._CreateFiler()
logging.info('Waiting for filer to start up')
self.aws_commands.WaitUntilFilerAvailable(self.filer_id)
# create the mount point but do not wait for it, superclass will call the
# _IsReady() method.
self._CreateMount()
def _Delete(self):
# deletes on the file-system and mount-target are immediate
self._DeleteMount()
if not FLAGS.aws_delete_file_system:
return
self._DeleteFiler()
def GetRemoteAddress(self):
if self.filer_id is None:
raise errors.Resource.RetryableGetError('Filer not created')
return '{name}.efs.{region}.amazonaws.com'.format(
name=self.filer_id, region=self.region)
def _IsReady(self):
return self.aws_commands.IsMountAvailable(self.mount_id)
def _CreateFiler(self):
"""Creates the AWS EFS service."""
if self.filer_id:
logging.warn('_CreateFiler() already called for %s', self.filer_id)
return
if FLAGS.aws_efs_token:
filer = self.aws_commands.GetFiler(FLAGS.aws_efs_token)
if filer:
self.nfs_tier = filer['PerformanceMode']
self.filer_id = filer['FileSystemId']
self.disk_spec.disk_size = int(
round(filer['SizeInBytes']['Value'] / 10.0 ** 9))
return
token = FLAGS.aws_efs_token or 'nfs-token-%s' % FLAGS.run_uri
self.filer_id = self.aws_commands.CreateFiler(
token, self.nfs_tier, self.throughput_mode, self.provisioned_throughput)
self.aws_commands.AddTagsToFiler(self.filer_id)
logging.info('Created filer %s with address %s', self.filer_id,
self.GetRemoteAddress())
def _CreateMount(self):
"""Creates an NFS mount point on an EFS service."""
if self.mount_id:
logging.warn('_CreateMount() already called for %s', self.mount_id)
return
if not self.filer_id:
raise errors.Resource.CreationError('Did not create a filer first')
logging.info('Creating NFS mount point')
self.mount_id = self.aws_commands.CreateMount(
self.filer_id, self.subnet_id, self.security_group)
logging.info('Mount target %s starting up', self.mount_id)
def _DeleteMount(self):
"""Deletes the EFS mount point.
"""
if not self.mount_id:
return
    logging.info('Deleting NFS mount point %s', self.mount_id)
self.aws_commands.DeleteMount(self.mount_id)
self.mount_id = None
def _DeleteFiler(self):
"""Deletes the EFS service.
Raises:
RetryableDeletionError: If the mount point exists.
"""
if not self.filer_id:
return
if self.mount_id:
# this isn't retryable as the mount point wasn't deleted
raise errors.Resource.RetryableDeletionError(
'Did not delete mount point first')
logging.info('Deleting NFS filer %s', self.filer_id)
self.aws_commands.DeleteFiler(self.filer_id)
self.filer_id = None
class AwsEfsCommands(object):
"""Commands for interacting with AWS EFS.
Args:
region: AWS region for the NFS service.
"""
def __init__(self, region):
self.efs_prefix = util.AWS_PREFIX + ['--region', region, 'efs']
def GetFiler(self, token):
"""Returns the filer using the creation token or None."""
args = ['describe-file-systems', '--creation-token', token]
response = self._IssueAwsCommand(args)
file_systems = response['FileSystems']
if not file_systems:
return None
assert len(file_systems) < 2, 'Too many file systems.'
return file_systems[0]
def CreateFiler(self, token, nfs_tier, throughput_mode,
provisioned_throughput):
args = ['create-file-system', '--creation-token', token]
if nfs_tier is not None:
args += ['--performance-mode', nfs_tier]
args += ['--throughput-mode', throughput_mode]
if throughput_mode == 'provisioned':
args += ['--provisioned-throughput-in-mibps', provisioned_throughput]
return self._IssueAwsCommand(args)['FileSystemId']
def AddTagsToFiler(self, filer_id):
tags = util.MakeFormattedDefaultTags()
args = ['create-tags', '--file-system-id', filer_id, '--tags'] + tags
self._IssueAwsCommand(args, False)
@vm_util.Retry()
def WaitUntilFilerAvailable(self, filer_id):
if not self._IsAvailable('describe-file-systems', '--file-system-id',
'FileSystems', filer_id):
raise errors.Resource.RetryableCreationError(
'{} not ready'.format(filer_id))
@vm_util.Retry()
def DeleteFiler(self, file_system_id):
args = self.efs_prefix + [
'delete-file-system', '--file-system-id', file_system_id]
_, stderr, retcode = vm_util.IssueCommand(args, raise_on_failure=False)
if retcode and 'FileSystemInUse' in stderr:
raise Exception('Mount Point hasn\'t finished deleting.')
def CreateMount(self, file_system_id, subnet_id, security_group=None):
args = [
'create-mount-target', '--file-system-id', file_system_id,
'--subnet-id', subnet_id
]
if security_group:
args += ['--security-groups', security_group]
return self._IssueAwsCommand(args)['MountTargetId']
def IsMountAvailable(self, mount_target_id):
if mount_target_id is None:
# caller called _IsReady() before the mount point was created
return False
return self._IsAvailable('describe-mount-targets', '--mount-target-id',
'MountTargets', mount_target_id)
def DeleteMount(self, mount_target_id):
self._IssueAwsCommand(
['delete-mount-target', '--mount-target-id', mount_target_id], False)
def _IsAvailable(self, describe_cmd, id_attr, response_attribute, id_value):
describe = self._IssueAwsCommand([describe_cmd, id_attr, id_value])
status = describe[response_attribute][0].get('LifeCycleState')
return status == 'available'
def _IssueAwsCommand(self, args, return_json=True):
args = self.efs_prefix + [str(arg) for arg in args]
stdout, _, retcode = vm_util.IssueCommand(args, raise_on_failure=False)
if retcode:
return None
return json.loads(stdout) if return_json else stdout
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from nets import resnet_utils
slim = tf.contrib.slim
resnet_arg_scope = resnet_utils.resnet_arg_scope
@slim.add_arg_scope
def bottleneck(inputs, depth, depth_bottleneck, stride, rate=1,
outputs_collections=None, scope=None):
"""Bottleneck residual unit variant with BN before convolutions.
This is the full preactivation residual unit variant proposed in [2]. See
Fig. 1(b) of [2] for its definition. Note that we use here the bottleneck
variant which has an extra bottleneck layer.
When putting together two consecutive ResNet blocks that use this unit, one
should use stride = 2 in the last unit of the first block.
Args:
inputs: A tensor of size [batch, height, width, channels].
depth: The depth of the ResNet unit output.
depth_bottleneck: The depth of the bottleneck layers.
stride: The ResNet unit's stride. Determines the amount of downsampling of
the units output compared to its input.
rate: An integer, rate for atrous convolution.
outputs_collections: Collection to add the ResNet unit output.
scope: Optional variable_scope.
Returns:
The ResNet unit's output.
"""
with tf.variable_scope(scope, 'bottleneck_v2', [inputs]) as sc:
depth_in = slim.utils.last_dimension(inputs.get_shape(), min_rank=4)
preact = slim.batch_norm(inputs, activation_fn=tf.nn.relu, scope='preact')
if depth == depth_in:
shortcut = resnet_utils.subsample(inputs, stride, 'shortcut')
else:
shortcut = slim.conv2d(preact, depth, [1, 1], stride=stride,
normalizer_fn=None, activation_fn=None,
scope='shortcut')
residual = slim.conv2d(preact, depth_bottleneck, [1, 1], stride=1,
scope='conv1')
residual = resnet_utils.conv2d_same(residual, depth_bottleneck, 3, stride,
rate=rate, scope='conv2')
residual = slim.conv2d(residual, depth, [1, 1], stride=1,
normalizer_fn=None, activation_fn=None,
scope='conv3')
output = shortcut + residual
return slim.utils.collect_named_outputs(outputs_collections,
sc.original_name_scope,
output)
def resnet_v2(inputs,
blocks,
num_classes=None,
is_training=True,
global_pool=True,
output_stride=None,
include_root_block=True,
spatial_squeeze=True,
reuse=None,
scope=None):
"""Generator for v2 (preactivation) ResNet models.
This function generates a family of ResNet v2 models. See the resnet_v2_*()
methods for specific model instantiations, obtained by selecting different
block instantiations that produce ResNets of various depths.
Training for image classification on Imagenet is usually done with [224, 224]
inputs, resulting in [7, 7] feature maps at the output of the last ResNet
block for the ResNets defined in [1] that have nominal stride equal to 32.
However, for dense prediction tasks we advise that one uses inputs with
spatial dimensions that are multiples of 32 plus 1, e.g., [321, 321]. In
this case the feature maps at the ResNet output will have spatial shape
[(height - 1) / output_stride + 1, (width - 1) / output_stride + 1]
and corners exactly aligned with the input image corners, which greatly
facilitates alignment of the features to the image. Using as input [225, 225]
images results in [8, 8] feature maps at the output of the last ResNet block.
For dense prediction tasks, the ResNet needs to run in fully-convolutional
(FCN) mode and global_pool needs to be set to False. The ResNets in [1, 2] all
have nominal stride equal to 32 and a good choice in FCN mode is to use
output_stride=16 in order to increase the density of the computed features at
small computational and memory overhead, cf. http://arxiv.org/abs/1606.00915.
Args:
inputs: A tensor of size [batch, height_in, width_in, channels].
blocks: A list of length equal to the number of ResNet blocks. Each element
is a resnet_utils.Block object describing the units in the block.
num_classes: Number of predicted classes for classification tasks. If None
we return the features before the logit layer.
is_training: whether is training or not.
global_pool: If True, we perform global average pooling before computing the
logits. Set to True for image classification, False for dense prediction.
output_stride: If None, then the output will be computed at the nominal
network stride. If output_stride is not None, it specifies the requested
ratio of input to output spatial resolution.
include_root_block: If True, include the initial convolution followed by
max-pooling, if False excludes it. If excluded, `inputs` should be the
results of an activation-less convolution.
spatial_squeeze: if True, logits is of shape [B, C], if false logits is
of shape [B, 1, 1, C], where B is batch_size and C is number of classes.
reuse: whether or not the network and its variables should be reused. To be
able to reuse 'scope' must be given.
scope: Optional variable_scope.
Returns:
net: A rank-4 tensor of size [batch, height_out, width_out, channels_out].
If global_pool is False, then height_out and width_out are reduced by a
factor of output_stride compared to the respective height_in and width_in,
else both height_out and width_out equal one. If num_classes is None, then
net is the output of the last ResNet block, potentially after global
average pooling. If num_classes is not None, net contains the pre-softmax
activations.
end_points: A dictionary from components of the network to the corresponding
activation.
Raises:
ValueError: If the target output_stride is not valid.
"""
with tf.variable_scope(scope, 'resnet_v2', [inputs], reuse=reuse) as sc:
end_points_collection = sc.name + '_end_points'
with slim.arg_scope([slim.conv2d, bottleneck,
resnet_utils.stack_blocks_dense],
outputs_collections=end_points_collection):
with slim.arg_scope([slim.batch_norm], is_training=is_training):
net = inputs
if include_root_block:
if output_stride is not None:
if output_stride % 4 != 0:
raise ValueError('The output_stride needs to be a multiple of 4.')
output_stride /= 4
# We do not include batch normalization or activation functions in
# conv1 because the first ResNet unit will perform these. Cf.
# Appendix of [2].
with slim.arg_scope([slim.conv2d],
activation_fn=None, normalizer_fn=None):
net = resnet_utils.conv2d_same(net, 64, 7, stride=2, scope='conv1')
net = slim.max_pool2d(net, [3, 3], stride=2, scope='pool1')
net = resnet_utils.stack_blocks_dense(net, blocks, output_stride)
# This is needed because the pre-activation variant does not have batch
# normalization or activation functions in the residual unit output. See
# Appendix of [2].
net = slim.batch_norm(net, activation_fn=tf.nn.relu, scope='postnorm')
if global_pool:
# Global average pooling.
net = tf.reduce_mean(net, [1, 2], name='pool5', keep_dims=True)
        if num_classes is not None:
          net = slim.conv2d(net, num_classes, [1, 1], activation_fn=None,
                            normalizer_fn=None, scope='logits')
          if spatial_squeeze:
            net = tf.squeeze(net, [1, 2], name='SpatialSqueeze')
        # Convert end_points_collection into a dictionary of end_points.
        end_points = slim.utils.convert_collection_to_dict(
            end_points_collection)
        if num_classes is not None:
          end_points['predictions'] = slim.softmax(net, scope='predictions')
        return net, end_points
resnet_v2.default_image_size = 224
def resnet_v2_50(inputs,
num_classes=None,
is_training=True,
global_pool=True,
output_stride=None,
reuse=None,
scope='resnet_v2_50'):
"""ResNet-50 model of [1]. See resnet_v2() for arg and return description."""
blocks = [
resnet_utils.Block(
'block1', bottleneck, [(256, 64, 1)] * 2 + [(256, 64, 2)]),
resnet_utils.Block(
'block2', bottleneck, [(512, 128, 1)] * 3 + [(512, 128, 2)]),
resnet_utils.Block(
'block3', bottleneck, [(1024, 256, 1)] * 5 + [(1024, 256, 2)]),
resnet_utils.Block(
'block4', bottleneck, [(2048, 512, 1)] * 3)]
return resnet_v2(inputs, blocks, num_classes, is_training=is_training,
global_pool=global_pool, output_stride=output_stride,
include_root_block=True, reuse=reuse, scope=scope)
resnet_v2_50.default_image_size = resnet_v2.default_image_size
def resnet_v2_101(inputs,
num_classes=None,
is_training=True,
global_pool=True,
output_stride=None,
reuse=None,
scope='resnet_v2_101'):
"""ResNet-101 model of [1]. See resnet_v2() for arg and return description."""
blocks = [
resnet_utils.Block(
'block1', bottleneck, [(256, 64, 1)] * 2 + [(256, 64, 2)]),
resnet_utils.Block(
'block2', bottleneck, [(512, 128, 1)] * 3 + [(512, 128, 2)]),
resnet_utils.Block(
'block3', bottleneck, [(1024, 256, 1)] * 22 + [(1024, 256, 2)]),
resnet_utils.Block(
'block4', bottleneck, [(2048, 512, 1)] * 3)]
return resnet_v2(inputs, blocks, num_classes, is_training=is_training,
global_pool=global_pool, output_stride=output_stride,
include_root_block=True, reuse=reuse, scope=scope)
resnet_v2_101.default_image_size = resnet_v2.default_image_size
def resnet_v2_152(inputs,
num_classes=None,
is_training=True,
global_pool=True,
output_stride=None,
reuse=None,
scope='resnet_v2_152'):
"""ResNet-152 model of [1]. See resnet_v2() for arg and return description."""
blocks = [
resnet_utils.Block(
'block1', bottleneck, [(256, 64, 1)] * 2 + [(256, 64, 2)]),
resnet_utils.Block(
'block2', bottleneck, [(512, 128, 1)] * 7 + [(512, 128, 2)]),
resnet_utils.Block(
'block3', bottleneck, [(1024, 256, 1)] * 35 + [(1024, 256, 2)]),
resnet_utils.Block(
'block4', bottleneck, [(2048, 512, 1)] * 3)]
return resnet_v2(inputs, blocks, num_classes, is_training=is_training,
global_pool=global_pool, output_stride=output_stride,
include_root_block=True, reuse=reuse, scope=scope)
resnet_v2_152.default_image_size = resnet_v2.default_image_size
def resnet_v2_200(inputs,
num_classes=None,
is_training=True,
global_pool=True,
output_stride=None,
reuse=None,
scope='resnet_v2_200'):
"""ResNet-200 model of [2]. See resnet_v2() for arg and return description."""
blocks = [
resnet_utils.Block(
'block1', bottleneck, [(256, 64, 1)] * 2 + [(256, 64, 2)]),
resnet_utils.Block(
'block2', bottleneck, [(512, 128, 1)] * 23 + [(512, 128, 2)]),
resnet_utils.Block(
'block3', bottleneck, [(1024, 256, 1)] * 35 + [(1024, 256, 2)]),
resnet_utils.Block(
'block4', bottleneck, [(2048, 512, 1)] * 3)]
return resnet_v2(inputs, blocks, num_classes, is_training=is_training,
global_pool=global_pool, output_stride=output_stride,
include_root_block=True, reuse=reuse, scope=scope)
resnet_v2_200.default_image_size = resnet_v2.default_image_size
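# Hedged usage sketch (not part of this module), following the resnet_v2()
# docstring: build ResNet-101 in fully-convolutional mode for dense
# prediction. The [1, 321, 321, 3] input shape follows the docstring's
# "multiple of 32 plus 1" advice and output_stride=16 its FCN recommendation;
# both values are illustrative.
#
#   inputs = tf.placeholder(tf.float32, [1, 321, 321, 3])
#   with slim.arg_scope(resnet_arg_scope()):
#       net, end_points = resnet_v2_101(
#           inputs, num_classes=None, is_training=False,
#           global_pool=False, output_stride=16)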
|
from datetime import datetime, timedelta
import logging
from pytz import timezone
from homeassistant.components.pvpc_hourly_pricing import ATTR_TARIFF, DOMAIN
from homeassistant.const import CONF_NAME
from homeassistant.core import ATTR_NOW, EVENT_TIME_CHANGED
from homeassistant.setup import async_setup_component
from .conftest import check_valid_state
from tests.async_mock import patch
from tests.common import date_util
from tests.test_util.aiohttp import AiohttpClientMocker
async def _process_time_step(
hass, mock_data, key_state=None, value=None, tariff="discrimination", delta_min=60
):
state = hass.states.get("sensor.test_dst")
check_valid_state(state, tariff=tariff, value=value, key_attr=key_state)
mock_data["return_time"] += timedelta(minutes=delta_min)
hass.bus.async_fire(EVENT_TIME_CHANGED, {ATTR_NOW: mock_data["return_time"]})
await hass.async_block_till_done()
return state
async def test_sensor_availability(
hass, caplog, legacy_patchable_time, pvpc_aioclient_mock: AiohttpClientMocker
):
"""Test sensor availability and handling of cloud access."""
hass.config.time_zone = timezone("Europe/Madrid")
config = {DOMAIN: [{CONF_NAME: "test_dst", ATTR_TARIFF: "discrimination"}]}
mock_data = {"return_time": datetime(2019, 10, 27, 20, 0, 0, tzinfo=date_util.UTC)}
def mock_now():
return mock_data["return_time"]
with patch("homeassistant.util.dt.utcnow", new=mock_now):
assert await async_setup_component(hass, DOMAIN, config)
await hass.async_block_till_done()
caplog.clear()
assert pvpc_aioclient_mock.call_count == 2
await _process_time_step(hass, mock_data, "price_21h", 0.13896)
await _process_time_step(hass, mock_data, "price_22h", 0.06893)
assert pvpc_aioclient_mock.call_count == 4
await _process_time_step(hass, mock_data, "price_23h", 0.06935)
assert pvpc_aioclient_mock.call_count == 5
# sensor has no more prices, state is "unavailable" from now on
await _process_time_step(hass, mock_data, value="unavailable")
await _process_time_step(hass, mock_data, value="unavailable")
num_errors = sum(
1
for x in caplog.records
if x.levelno == logging.ERROR and "unknown job listener" not in x.msg
)
num_warnings = sum(1 for x in caplog.records if x.levelno == logging.WARNING)
assert num_warnings == 1
assert num_errors == 0
assert pvpc_aioclient_mock.call_count == 9
# check that it is silent until it becomes available again
caplog.clear()
with caplog.at_level(logging.WARNING):
# silent mode
for _ in range(21):
await _process_time_step(hass, mock_data, value="unavailable")
assert pvpc_aioclient_mock.call_count == 30
assert len(caplog.messages) == 0
# warning about data access recovered
await _process_time_step(hass, mock_data, value="unavailable")
assert pvpc_aioclient_mock.call_count == 31
assert len(caplog.messages) == 1
assert caplog.records[0].levelno == logging.WARNING
# working ok again
await _process_time_step(hass, mock_data, "price_00h", value=0.06821)
assert pvpc_aioclient_mock.call_count == 32
await _process_time_step(hass, mock_data, "price_01h", value=0.06627)
assert pvpc_aioclient_mock.call_count == 33
assert len(caplog.messages) == 1
assert caplog.records[0].levelno == logging.WARNING
|
import filelock
import glob
import os
import shutil
import numpy as np
from chainer.dataset import download
from chainercv.chainer_experimental.datasets.sliceable import GetterDataset
from chainercv import utils
from chainercv.utils import read_image
from chainercv.utils import read_label
root = 'pfnet/chainercv/camvid'
url = 'https://github.com/alexgkendall/SegNet-Tutorial/archive/master.zip'
# https://github.com/alexgkendall/SegNet-Tutorial/blob/master/
# Scripts/test_segmentation_camvid.py#L62
camvid_label_names = (
'Sky',
'Building',
'Pole',
'Road',
'Pavement',
'Tree',
'SignSymbol',
'Fence',
'Car',
'Pedestrian',
'Bicyclist',
)
camvid_label_colors = (
(128, 128, 128),
(128, 0, 0),
(192, 192, 128),
(128, 64, 128),
(60, 40, 222),
(128, 128, 0),
(192, 128, 128),
(64, 64, 128),
(64, 0, 128),
(64, 64, 0),
(0, 128, 192),
)
camvid_ignore_label_color = (0, 0, 0)
def get_camvid():
# To support ChainerMN, the target directory should be locked.
with filelock.FileLock(os.path.join(download.get_dataset_directory(
'pfnet/chainercv/.lock'), 'camvid.lock')):
data_root = download.get_dataset_directory(root)
download_file_path = utils.cached_download(url)
if len(glob.glob(os.path.join(data_root, '*'))) != 9:
utils.extractall(
download_file_path, data_root, os.path.splitext(url)[1])
data_dir = os.path.join(data_root, 'SegNet-Tutorial-master/CamVid')
if os.path.exists(data_dir):
for fn in glob.glob(os.path.join(data_dir, '*')):
shutil.move(fn, os.path.join(data_root, os.path.basename(fn)))
shutil.rmtree(os.path.dirname(data_dir))
return data_root
class CamVidDataset(GetterDataset):
"""Semantic segmentation dataset for `CamVid`_.
.. _`CamVid`:
https://github.com/alexgkendall/SegNet-Tutorial/tree/master/CamVid
Args:
data_dir (string): Path to the root of the training data. If this is
:obj:`auto`, this class will automatically download data for you
under :obj:`$CHAINER_DATASET_ROOT/pfnet/chainercv/camvid`.
split ({'train', 'val', 'test'}): Select from dataset splits used
in CamVid Dataset.
This dataset returns the following data.
.. csv-table::
:header: name, shape, dtype, format
:obj:`img`, ":math:`(3, H, W)`", :obj:`float32`, \
"RGB, :math:`[0, 255]`"
:obj:`label`, ":math:`(H, W)`", :obj:`int32`, \
":math:`[-1, \#class - 1]`"
"""
def __init__(self, data_dir='auto', split='train'):
super(CamVidDataset, self).__init__()
if split not in ['train', 'val', 'test']:
raise ValueError(
'Please pick split from \'train\', \'val\', \'test\'')
if data_dir == 'auto':
data_dir = get_camvid()
img_list_path = os.path.join(data_dir, '{}.txt'.format(split))
self.paths = [
[os.path.join(data_dir, fn.replace('/SegNet/CamVid/', ''))
for fn in line.split()] for line in open(img_list_path)]
self.add_getter('img', self._get_image)
        self.add_getter('label', self._get_label)
def __len__(self):
return len(self.paths)
def _get_image(self, i):
img_path, _ = self.paths[i]
return read_image(img_path, color=True)
def _get_label(self, i):
_, label_path = self.paths[i]
label = read_label(label_path, dtype=np.int32)
# Label id 11 is for unlabeled pixels.
label[label == 11] = -1
return label
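if __name__ == '__main__':
    # Illustrative usage sketch (not part of the original module). With
    # data_dir='auto' the first run downloads and extracts the SegNet tutorial
    # archive, which may take a while.
    dataset = CamVidDataset(split='train')
    img, label = dataset[0]
    print(len(dataset), img.shape, img.dtype, label.shape, label.dtype)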
|
import datetime
from django.utils import timezone
from django.views.generic.dates import BaseArchiveIndexView
from django.views.generic.dates import BaseDayArchiveView
from django.views.generic.dates import BaseMonthArchiveView
from django.views.generic.dates import BaseTodayArchiveView
from django.views.generic.dates import BaseWeekArchiveView
from django.views.generic.dates import BaseYearArchiveView
from zinnia.models.entry import Entry
from zinnia.views.mixins.archives import ArchiveMixin
from zinnia.views.mixins.archives import PreviousNextPublishedMixin
from zinnia.views.mixins.callable_queryset import CallableQuerysetMixin
from zinnia.views.mixins.prefetch_related import PrefetchCategoriesAuthorsMixin
from zinnia.views.mixins.templates import \
EntryQuerysetArchiveTemplateResponseMixin
from zinnia.views.mixins.templates import \
EntryQuerysetArchiveTodayTemplateResponseMixin
class EntryArchiveMixin(ArchiveMixin,
PreviousNextPublishedMixin,
PrefetchCategoriesAuthorsMixin,
CallableQuerysetMixin,
EntryQuerysetArchiveTemplateResponseMixin):
"""
    Mixin combining:
    - ArchiveMixin configuration centralizing conf for archive views.
    - PrefetchCategoriesAuthorsMixin to prefetch related objects.
    - PreviousNextPublishedMixin for returning published archives.
    - CallableQuerysetMixin to force the update of the queryset.
    - EntryQuerysetArchiveTemplateResponseMixin to provide
      custom templates for archives.
"""
queryset = Entry.published.all
class EntryIndex(EntryArchiveMixin,
EntryQuerysetArchiveTodayTemplateResponseMixin,
BaseArchiveIndexView):
"""
View returning the archive index.
"""
context_object_name = 'entry_list'
class EntryYear(EntryArchiveMixin, BaseYearArchiveView):
"""
View returning the archives for a year.
"""
make_object_list = True
template_name_suffix = '_archive_year'
class EntryMonth(EntryArchiveMixin, BaseMonthArchiveView):
"""
View returning the archives for a month.
"""
template_name_suffix = '_archive_month'
class EntryWeek(EntryArchiveMixin, BaseWeekArchiveView):
"""
View returning the archive for a week.
"""
template_name_suffix = '_archive_week'
def get_dated_items(self):
"""
Override get_dated_items to add a useful 'week_end_day'
variable in the extra context of the view.
"""
self.date_list, self.object_list, extra_context = super(
EntryWeek, self).get_dated_items()
self.date_list = self.get_date_list(self.object_list, 'day')
extra_context['week_end_day'] = extra_context[
'week'] + datetime.timedelta(days=6)
return self.date_list, self.object_list, extra_context
class EntryDay(EntryArchiveMixin, BaseDayArchiveView):
"""
View returning the archive for a day.
"""
template_name_suffix = '_archive_day'
class EntryToday(EntryArchiveMixin, BaseTodayArchiveView):
"""
View returning the archive for the current day.
"""
template_name_suffix = '_archive_today'
def get_dated_items(self):
"""
Return (date_list, items, extra_context) for this request.
        Also define self.year/month/day for
EntryQuerysetArchiveTemplateResponseMixin.
"""
now = timezone.now()
if timezone.is_aware(now):
now = timezone.localtime(now)
today = now.date()
self.year, self.month, self.day = today.isoformat().split('-')
return self._get_dated_items(today)
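# Illustrative URLconf sketch for wiring a couple of these views (hypothetical
# patterns and names; Zinnia ships its own URL modules):
#
#   from django.urls import path
#   urlpatterns = [
#       path('', EntryIndex.as_view(), name='entry_archive_index'),
#       path('<int:year>/', EntryYear.as_view(), name='entry_archive_year'),
#   ]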
|
import re
import inspect
import logging
import functools
import datetime
import types
from typing import (
Any, Callable, List, Mapping, MutableSequence, Optional, Sequence, Type, Union)
from PyQt5.QtCore import Qt, QEvent, QMetaMethod, QObject, pyqtBoundSignal
from PyQt5.QtWidgets import QApplication
from qutebrowser.utils import log, utils, qtutils, objreg
from qutebrowser.qt import sip
def log_events(klass: Type) -> Type:
"""Class decorator to log Qt events."""
old_event = klass.event
@functools.wraps(old_event)
def new_event(self: Any, e: QEvent) -> bool:
"""Wrapper for event() which logs events."""
log.misc.debug("Event in {}: {}".format(utils.qualname(klass),
qenum_key(QEvent, e.type())))
return old_event(self, e)
klass.event = new_event
return klass
def log_signals(obj: QObject) -> QObject:
"""Log all signals of an object or class.
Can be used as class decorator.
"""
def log_slot(obj: QObject, signal: pyqtBoundSignal, *args: Any) -> None:
"""Slot connected to a signal to log it."""
dbg = dbg_signal(signal, args)
try:
r = repr(obj)
except RuntimeError: # pragma: no cover
r = '<deleted>'
log.signals.debug("Signal in {}: {}".format(r, dbg))
def connect_log_slot(obj: QObject) -> None:
"""Helper function to connect all signals to a logging slot."""
metaobj = obj.metaObject()
for i in range(metaobj.methodCount()):
meta_method = metaobj.method(i)
qtutils.ensure_valid(meta_method)
if meta_method.methodType() == QMetaMethod.Signal:
name = meta_method.name().data().decode('ascii')
if name != 'destroyed':
signal = getattr(obj, name)
try:
signal.connect(functools.partial(
log_slot, obj, signal))
except TypeError: # pragma: no cover
pass
if inspect.isclass(obj):
old_init = obj.__init__ # type: ignore[misc]
@functools.wraps(old_init)
def new_init(self: Any, *args: Any, **kwargs: Any) -> None:
"""Wrapper for __init__() which logs signals."""
old_init(self, *args, **kwargs)
connect_log_slot(self)
obj.__init__ = new_init # type: ignore[misc]
else:
connect_log_slot(obj)
return obj
def qenum_key(base: Type,
value: Union[int, sip.simplewrapper],
add_base: bool = False,
klass: Type = None) -> str:
"""Convert a Qt Enum value to its key as a string.
Args:
base: The object the enum is in, e.g. QFrame.
value: The value to get.
add_base: Whether the base should be added to the printed name.
klass: The enum class the value belongs to.
If None, the class will be auto-guessed.
Return:
The key associated with the value as a string if it could be found.
The original value as a string if not.
"""
if klass is None:
klass = value.__class__
if klass == int:
raise TypeError("Can't guess enum class of an int!")
try:
idx = base.staticMetaObject.indexOfEnumerator(klass.__name__)
meta_enum = base.staticMetaObject.enumerator(idx)
ret = meta_enum.valueToKey(int(value)) # type: ignore[arg-type]
except AttributeError:
ret = None
if ret is None:
for name, obj in vars(base).items():
if isinstance(obj, klass) and obj == value:
ret = name
break
else:
ret = '0x{:04x}'.format(int(value)) # type: ignore[arg-type]
if add_base and hasattr(base, '__name__'):
return '.'.join([base.__name__, ret])
else:
return ret
def qflags_key(base: Type,
value: Union[int, sip.simplewrapper],
add_base: bool = False,
klass: Type = None) -> str:
"""Convert a Qt QFlags value to its keys as string.
Note: Passing a combined value (such as Qt.AlignCenter) will get the names
for the individual bits (e.g. Qt.AlignVCenter | Qt.AlignHCenter). FIXME
https://github.com/qutebrowser/qutebrowser/issues/42
Args:
base: The object the flags are in, e.g. QtCore.Qt
value: The value to get.
add_base: Whether the base should be added to the printed names.
klass: The flags class the value belongs to.
If None, the class will be auto-guessed.
Return:
The keys associated with the flags as a '|' separated string if they
could be found. Hex values as a string if not.
"""
if klass is None:
# We have to store klass here because it will be lost when iterating
# over the bits.
klass = value.__class__
if klass == int:
raise TypeError("Can't guess enum class of an int!")
if not value:
return qenum_key(base, value, add_base, klass)
bits = []
names = []
mask = 0x01
value = int(value) # type: ignore[arg-type]
while mask <= value:
if value & mask:
bits.append(mask)
mask <<= 1
for bit in bits:
# We have to re-convert to an enum type here or we'll sometimes get an
# empty string back.
enum_value = klass(bit) # type: ignore[call-arg]
names.append(qenum_key(base, enum_value, add_base))
return '|'.join(names)
def signal_name(sig: pyqtBoundSignal) -> str:
"""Get a cleaned up name of a signal.
Unfortunately, the way to get the name of a signal differs based on
bound vs. unbound signals.
Here, we try to get the name from .signal or .signatures, or if all else
fails, extract it from the repr().
Args:
sig: A bound signal.
Return:
The cleaned up signal name.
"""
if hasattr(sig, 'signal'):
# Bound signal
# Examples:
# sig.signal == '2signal1'
# sig.signal == '2signal2(QString,QString)'
m = re.fullmatch(r'[0-9]+(?P<name>.*)\(.*\)', sig.signal)
else:
# Unbound signal, PyQt >= 5.11
# Examples:
# sig.signatures == ('signal1()',)
# sig.signatures == ('signal2(QString,QString)',)
m = re.fullmatch(r'(?P<name>.*)\(.*\)',
sig.signatures[0]) # type: ignore[attr-defined]
assert m is not None, sig
return m.group('name')
def format_args(args: Sequence = None, kwargs: Mapping = None) -> str:
"""Format a list of arguments/kwargs to a function-call like string."""
if args is not None:
arglist = [utils.compact_text(repr(arg), 200) for arg in args]
else:
arglist = []
if kwargs is not None:
for k, v in kwargs.items():
arglist.append('{}={}'.format(k, utils.compact_text(repr(v), 200)))
return ', '.join(arglist)
def dbg_signal(sig: pyqtBoundSignal, args: Any) -> str:
"""Get a string representation of a signal for debugging.
Args:
sig: A bound signal.
args: The arguments as list of strings.
Return:
A human-readable string representation of signal/args.
"""
return '{}({})'.format(signal_name(sig), format_args(args))
def format_call(func: Callable,
args: Sequence = None,
kwargs: Mapping = None,
full: bool = True) -> str:
"""Get a string representation of a function calls with the given args.
Args:
func: The callable to print.
args: A list of positional arguments.
kwargs: A dict of named arguments.
full: Whether to print the full name
Return:
A string with the function call.
"""
if full:
name = utils.qualname(func)
else:
name = func.__name__
return '{}({})'.format(name, format_args(args, kwargs))
class log_time: # noqa: N801,N806 pylint: disable=invalid-name
"""Log the time an operation takes.
Usable as context manager or as decorator.
"""
def __init__(self, logger: Union[logging.Logger, str],
action: str = 'operation') -> None:
"""Constructor.
Args:
logger: The logging.Logger to use for logging, or a logger name.
action: A description of what's being done.
"""
if isinstance(logger, str):
self._logger = logging.getLogger(logger)
else:
self._logger = logger
self._started: Optional[datetime.datetime] = None
self._action = action
def __enter__(self) -> None:
self._started = datetime.datetime.now()
def __exit__(self,
_exc_type: Optional[Type[BaseException]],
_exc_val: Optional[BaseException],
_exc_tb: Optional[types.TracebackType]) -> None:
assert self._started is not None
finished = datetime.datetime.now()
delta = (finished - self._started).total_seconds()
self._logger.debug("{} took {} seconds.".format(
self._action.capitalize(), delta))
def __call__(self, func: Callable) -> Callable:
@functools.wraps(func)
def wrapped(*args: Any, **kwargs: Any) -> Any:
"""Call the original function."""
with self:
return func(*args, **kwargs)
return wrapped
def _get_widgets() -> Sequence[str]:
"""Get a string list of all widgets."""
widgets = QApplication.instance().allWidgets()
widgets.sort(key=repr)
return [repr(w) for w in widgets]
def _get_pyqt_objects(lines: MutableSequence[str],
obj: QObject,
depth: int = 0) -> None:
"""Recursive method for get_all_objects to get Qt objects."""
for kid in obj.findChildren(QObject, '', Qt.FindDirectChildrenOnly):
lines.append(' ' * depth + repr(kid))
_get_pyqt_objects(lines, kid, depth + 1)
def get_all_objects(start_obj: QObject = None) -> str:
"""Get all children of an object recursively as a string."""
output = ['']
widget_lines = _get_widgets()
widget_lines = [' ' + e for e in widget_lines]
widget_lines.insert(0, "Qt widgets - {} objects:".format(
len(widget_lines)))
output += widget_lines
if start_obj is None:
start_obj = QApplication.instance()
pyqt_lines: List[str] = []
_get_pyqt_objects(pyqt_lines, start_obj)
pyqt_lines = [' ' + e for e in pyqt_lines]
pyqt_lines.insert(0, 'Qt objects - {} objects:'.format(len(pyqt_lines)))
output += ['']
output += pyqt_lines
output += objreg.dump_objects()
return '\n'.join(output)
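if __name__ == '__main__':
    # Illustrative sketch only (not part of the original module): log_time used
    # as a decorator and as a context manager with a throw-away demo logger.
    logging.basicConfig(level=logging.DEBUG)
    _demo_log = logging.getLogger('debug-demo')
    @log_time(_demo_log, action='decorated call')
    def _demo() -> None:
        pass
    _demo()
    with log_time(_demo_log, action='timed block'):
        pass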
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from nets import alexnet
slim = tf.contrib.slim
class AlexnetV2Test(tf.test.TestCase):
def testBuild(self):
batch_size = 5
height, width = 224, 224
num_classes = 1000
with self.test_session():
inputs = tf.random_uniform((batch_size, height, width, 3))
logits, _ = alexnet.alexnet_v2(inputs, num_classes)
self.assertEquals(logits.op.name, 'alexnet_v2/fc8/squeezed')
self.assertListEqual(logits.get_shape().as_list(),
[batch_size, num_classes])
def testFullyConvolutional(self):
batch_size = 1
height, width = 300, 400
num_classes = 1000
with self.test_session():
inputs = tf.random_uniform((batch_size, height, width, 3))
logits, _ = alexnet.alexnet_v2(inputs, num_classes, spatial_squeeze=False)
self.assertEquals(logits.op.name, 'alexnet_v2/fc8/BiasAdd')
self.assertListEqual(logits.get_shape().as_list(),
[batch_size, 4, 7, num_classes])
def testEndPoints(self):
batch_size = 5
height, width = 224, 224
num_classes = 1000
with self.test_session():
inputs = tf.random_uniform((batch_size, height, width, 3))
_, end_points = alexnet.alexnet_v2(inputs, num_classes)
expected_names = ['alexnet_v2/conv1',
'alexnet_v2/pool1',
'alexnet_v2/conv2',
'alexnet_v2/pool2',
'alexnet_v2/conv3',
'alexnet_v2/conv4',
'alexnet_v2/conv5',
'alexnet_v2/pool5',
'alexnet_v2/fc6',
'alexnet_v2/fc7',
'alexnet_v2/fc8'
]
self.assertSetEqual(set(end_points.keys()), set(expected_names))
def testModelVariables(self):
batch_size = 5
height, width = 224, 224
num_classes = 1000
with self.test_session():
inputs = tf.random_uniform((batch_size, height, width, 3))
alexnet.alexnet_v2(inputs, num_classes)
expected_names = ['alexnet_v2/conv1/weights',
'alexnet_v2/conv1/biases',
'alexnet_v2/conv2/weights',
'alexnet_v2/conv2/biases',
'alexnet_v2/conv3/weights',
'alexnet_v2/conv3/biases',
'alexnet_v2/conv4/weights',
'alexnet_v2/conv4/biases',
'alexnet_v2/conv5/weights',
'alexnet_v2/conv5/biases',
'alexnet_v2/fc6/weights',
'alexnet_v2/fc6/biases',
'alexnet_v2/fc7/weights',
'alexnet_v2/fc7/biases',
'alexnet_v2/fc8/weights',
'alexnet_v2/fc8/biases',
]
model_variables = [v.op.name for v in slim.get_model_variables()]
self.assertSetEqual(set(model_variables), set(expected_names))
def testEvaluation(self):
batch_size = 2
height, width = 224, 224
num_classes = 1000
with self.test_session():
eval_inputs = tf.random_uniform((batch_size, height, width, 3))
logits, _ = alexnet.alexnet_v2(eval_inputs, is_training=False)
self.assertListEqual(logits.get_shape().as_list(),
[batch_size, num_classes])
predictions = tf.argmax(logits, 1)
self.assertListEqual(predictions.get_shape().as_list(), [batch_size])
def testTrainEvalWithReuse(self):
train_batch_size = 2
eval_batch_size = 1
train_height, train_width = 224, 224
eval_height, eval_width = 300, 400
num_classes = 1000
with self.test_session():
train_inputs = tf.random_uniform(
(train_batch_size, train_height, train_width, 3))
logits, _ = alexnet.alexnet_v2(train_inputs)
self.assertListEqual(logits.get_shape().as_list(),
[train_batch_size, num_classes])
tf.get_variable_scope().reuse_variables()
eval_inputs = tf.random_uniform(
(eval_batch_size, eval_height, eval_width, 3))
logits, _ = alexnet.alexnet_v2(eval_inputs, is_training=False,
spatial_squeeze=False)
self.assertListEqual(logits.get_shape().as_list(),
[eval_batch_size, 4, 7, num_classes])
logits = tf.reduce_mean(logits, [1, 2])
predictions = tf.argmax(logits, 1)
self.assertEquals(predictions.get_shape().as_list(), [eval_batch_size])
def testForward(self):
batch_size = 1
height, width = 224, 224
with self.test_session() as sess:
inputs = tf.random_uniform((batch_size, height, width, 3))
logits, _ = alexnet.alexnet_v2(inputs)
sess.run(tf.global_variables_initializer())
output = sess.run(logits)
self.assertTrue(output.any())
if __name__ == '__main__':
tf.test.main()
|
from rest_framework import serializers
from shop.conf import app_settings
from shop.serializers.bases import BaseOrderItemSerializer
class OrderItemSerializer(BaseOrderItemSerializer):
summary = serializers.SerializerMethodField(
help_text="Sub-serializer for fields to be shown in the product's summary.")
class Meta(BaseOrderItemSerializer.Meta):
fields = ['line_total', 'unit_price', 'product_code', 'quantity', 'summary', 'extra']
def get_summary(self, order_item):
label = self.context.get('render_label', 'order')
serializer_class = app_settings.PRODUCT_SUMMARY_SERIALIZER
serializer = serializer_class(order_item.product, context=self.context,
read_only=True, label=label)
return serializer.data
|
import queue
import hashlib
import logging
import os
import subprocess
import tempfile
import threading
import time
from shutil import copyfile, rmtree
from ...common.util import SecuredShell
from ..Telegraf.config import AgentConfig, create_agent_py
from ..Telegraf.reader import MonitoringReader
logger = logging.getLogger(__name__)
logging.getLogger("paramiko.transport").setLevel(logging.WARNING)
def generate_file_md5(filename, blocksize=2 ** 20):
m = hashlib.md5()
with open(filename, "rb") as f:
while True:
buf = f.read(blocksize)
if not buf:
break
m.update(buf)
return m.hexdigest()
class LocalhostClient(object):
""" localhost client setup """
AGENT_FILENAME = 'agent.py'
def __init__(self, config, old_style_configs, kill_old):
# config
self.kill_old = '--kill-old' if kill_old else ''
self.python = config['python']
self.host = "localhost"
self.telegraf = config['telegraf']
self.config = AgentConfig(config, old_style_configs)
# connection
self.incoming_queue = queue.Queue()
self.buffer = ""
self.workdir = None
self.reader = MonitoringReader(self.incoming_queue)
self.path = {
'AGENT_LOCAL_PATH': create_agent_py(self.AGENT_FILENAME),
'TELEGRAF_LOCAL_PATH': self.telegraf,
}
def install(self):
self.workdir = tempfile.mkdtemp()
logger.info("Created temp dir %s", self.workdir)
agent_config = self.config.create_collector_config(self.workdir)
startup_config = self.config.create_startup_config()
customs_script = self.config.create_custom_exec_script()
try:
copyfile(
self.path['AGENT_LOCAL_PATH'],
os.path.join(
self.workdir,
self.AGENT_FILENAME))
copyfile(agent_config, os.path.join(self.workdir, 'agent.cfg'))
copyfile(startup_config, os.path.join(
self.workdir,
'agent_startup.cfg'))
copyfile(
customs_script,
os.path.join(
self.workdir,
'agent_customs.sh'))
if not os.path.isfile(self.path['TELEGRAF_LOCAL_PATH']):
logger.error(
'Telegraf binary not found at specified path: %s\n'
'You can find telegraf binaries here: https://github.com/influxdata/telegraf\n'
'or install debian package: `telegraf`',
self.path['TELEGRAF_LOCAL_PATH'])
return None, None, None
except Exception:
logger.error("Failed to copy agent to %s on localhost", self.workdir)
logger.debug("Failed to copy agent to %s on localhost", self.workdir, exc_info=True)
return None, None, None
return agent_config, startup_config, customs_script
@staticmethod
def popen(args):
return subprocess.Popen(
args,
bufsize=0,
preexec_fn=os.setsid,
close_fds=True,
shell=False,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
stdin=subprocess.PIPE, )
def start(self):
"""Start local agent"""
args = self.python.split() + [
os.path.join(
self.workdir,
self.AGENT_FILENAME),
'--telegraf',
self.path['TELEGRAF_LOCAL_PATH'],
'--host',
self.host]
if self.kill_old:
args.append(self.kill_old)
logger.info('Starting agent on localhost: {}'.format(args))
self.session = self.popen(args)
self.reader_thread = threading.Thread(target=self.read_buffer)
self.reader_thread.setDaemon(True)
return self.session
def read_buffer(self):
while self.session:
try:
chunk = self.session.stdout.read(4096).decode('utf8')
if chunk:
parts = chunk.rsplit('\n', 1)
if len(parts) > 1:
ready_chunk = self.buffer + parts[0] + '\n'
self.buffer = parts[1]
self.incoming_queue.put(ready_chunk)
else:
self.buffer += parts[0]
except ValueError:
logger.debug(
'this exc most likely raised during interpreter shutdown\n'
                    'otherwise something really nasty happened',
exc_info=True)
def _stop_agent(self):
if self.session:
self.session.terminate()
            logger.info('Waiting for localhost agent to terminate...')
def uninstall(self):
"""
Remove agent's files from remote host
"""
if self.session:
self.session.wait()
self.session = None
log_filename = "agent_{host}.log".format(host="localhost")
data_filename = "agent_{host}.rawdata".format(host="localhost")
try:
logger.info('Saving monitoring artifacts from localhost')
copyfile(self.workdir + "/_agent.log", log_filename)
copyfile(self.workdir + "/monitoring.rawdata", data_filename)
logger.info('Deleting temp directory: %s', self.workdir)
rmtree(self.workdir)
except Exception:
logger.error("Exception while uninstalling agent", exc_info=True)
logger.info("Removing agent from: localhost")
return log_filename, data_filename
class SSHClient(object):
"""remote agent client setup """
AGENT_FILENAME = 'agent.py'
def __init__(self, config, old_style_configs, timeout, kill_old):
# config
self.kill_old = '--kill-old' if kill_old else ''
self.host = config['host']
self.username = config['username']
self.python = config['python']
self.port = config['port']
self.telegraf = config['telegraf']
self.config = AgentConfig(config, old_style_configs)
# connection
self.session = None
self.ssh = SecuredShell(self.host, self.port, self.username, timeout)
self.incoming_queue = queue.Queue()
self.buffer = ""
self.stop_sent = None
self.successfull_stop = None
self.reader = MonitoringReader(self.incoming_queue)
handle, cfg_path = tempfile.mkstemp('.cfg', 'agent_')
os.close(handle)
self.path = {
# Destination path on remote host
'AGENT_REMOTE_FOLDER': '/tmp/',
# Source path on tank
'AGENT_LOCAL_PATH': create_agent_py(self.AGENT_FILENAME),
'TELEGRAF_REMOTE_PATH': '/tmp/telegraf',
'TELEGRAF_LOCAL_PATH': self.telegraf,
}
self.agent_remote_folder = None
def install(self):
"""Create folder and copy agent and metrics scripts to remote host"""
logger.info(
"Installing monitoring agent at %s@%s...",
self.username,
self.host)
# create remote temp dir
cmd = self.python + ' -c "import tempfile; print(tempfile.mkdtemp());"'
logger.info("Creating temp dir on %s", self.host)
try:
out, errors, err_code = self.ssh.execute(cmd)
except Exception:
logger.error(
"Failed to install monitoring agent to %s",
self.host,
exc_info=True)
return None, None, None
if errors:
logger.error("[%s] error: '%s'", self.host, errors)
logger.error("Cancelling agent installation on %s", self.host)
return None, None, None
if err_code:
logger.error(
"Failed to create remote dir via SSH at %s@%s, code %s: %s" %
(self.username, self.host, err_code, out.strip()))
return None, None, None
remote_dir = out.strip()
if remote_dir:
self.path['AGENT_REMOTE_FOLDER'] = remote_dir
self.agent_remote_folder = remote_dir
logger.debug(
"Remote dir at %s:%s", self.host, self.path['AGENT_REMOTE_FOLDER'])
# create collector config
agent_config = self.config.create_collector_config(
self.path['AGENT_REMOTE_FOLDER'])
startup_config = self.config.create_startup_config()
customs_script = self.config.create_custom_exec_script()
# trying to detect os version/architecture and get information about telegraf client
# DO NOT DELETE indices in string format below. Python 2.6 does not
# support string formatting without indices
remote_cmd = 'import os; print os.path.isfile("' + self.path[
'TELEGRAF_REMOTE_PATH'] + '")'
cmd = self.python + ' -c \'{cmd}\''.format(cmd=remote_cmd)
remote_telegraf_exists = "False"
try:
out, err, err_code = self.ssh.execute(cmd)
except Exception:
logger.error(
"SSH execute error trying to check telegraf availability on host %s",
self.host,
exc_info=True)
else:
if err:
logger.error("[%s] error: '%s'", self.host, errors)
if out.strip():
remote_telegraf_exists = out.strip()
try:
            if remote_telegraf_exists == "True":
logger.debug('Found telegraf client on %s..', self.host)
else:
logger.debug(
                    'Telegraf client not found on %s, trying to install from tank. Copying..',
self.host)
if os.path.isfile(self.path['TELEGRAF_LOCAL_PATH']):
self.ssh.send_file(
self.path['TELEGRAF_LOCAL_PATH'],
self.path['TELEGRAF_REMOTE_PATH'])
elif os.path.isfile("/usr/bin/telegraf"):
self.ssh.send_file(
'/usr/bin/telegraf', self.path['TELEGRAF_REMOTE_PATH'])
else:
logger.error(
                    'Telegraf binary was found neither on %s nor on localhost at specified path: %s\n'
'You can find telegraf binaries here: https://github.com/influxdata/telegraf\n'
'or install debian package: `telegraf`', self.host, self.path['TELEGRAF_LOCAL_PATH'])
return None, None, None
self.ssh.send_file(
self.path['AGENT_LOCAL_PATH'],
os.path.join(
self.path['AGENT_REMOTE_FOLDER'],
self.AGENT_FILENAME))
self.ssh.send_file(
agent_config,
os.path.join(
self.path['AGENT_REMOTE_FOLDER'],
'agent.cfg'))
self.ssh.send_file(
startup_config,
os.path.join(
self.path['AGENT_REMOTE_FOLDER'],
'agent_startup.cfg'))
self.ssh.send_file(
customs_script,
os.path.join(
self.path['AGENT_REMOTE_FOLDER'],
'agent_customs.sh'))
except Exception:
logger.error(
"Failed to install agent on %s", self.host, exc_info=True)
return None, None, None
return agent_config, startup_config, customs_script
def start(self):
"""Start remote agent"""
logger.info('Starting agent: %s', self.host)
command = "{python} {agent_path} --telegraf {telegraf_path} --host {host} {kill_old}".format(
python=self.python,
agent_path=os.path.join(
self.path['AGENT_REMOTE_FOLDER'],
self.AGENT_FILENAME),
telegraf_path=self.path['TELEGRAF_REMOTE_PATH'],
host=self.host,
kill_old=self.kill_old)
logger.debug('Command to start agent: %s', command)
self.session = self.ssh.async_session(command)
self.reader_thread = threading.Thread(target=self.read_buffer)
self.reader_thread.setDaemon(True)
return self.session
def read_buffer(self):
while self.session and not self.stop_sent:
chunk = self.session.read_maybe()
if chunk:
parts = chunk.rsplit('\n', 1)
if len(parts) > 1:
ready_chunk = self.buffer + parts[0] + '\n'
self.buffer = parts[1]
self.incoming_queue.put(ready_chunk)
else:
self.buffer += parts[0]
else:
time.sleep(1)
logger.info('Daemon reader stopped')
def _stop_agent(self):
"""
Stop data collection. Separated from uninstall to speed up multihost processing
"""
try:
if self.session:
self.session.send("stop\n")
self.stop_sent = time.time()
except BaseException:
logger.warning(
'Unable to correctly stop monitoring agent - session is broken on %s.',
self.host,
exc_info=True)
def uninstall(self):
"""
Remove agent's files from remote host
"""
log_filename = "agent_{host}.log".format(host=self.host)
data_filename = "agent_{host}.rawdata".format(host=self.host)
try:
if self.session:
self._wait_for_stop()
self.session.close()
self.session = None
except BaseException:
logger.warning(
'Unable to correctly stop monitoring agent - session is broken. Pay attention to agent log (%s).',
log_filename,
exc_info=True)
try:
self.ssh.get_file(
os.path.join(
self.path['AGENT_REMOTE_FOLDER'],
"_agent.log"),
log_filename)
self.ssh.get_file(
os.path.join(
self.path['AGENT_REMOTE_FOLDER'],
"monitoring.rawdata"),
data_filename)
self.ssh.rm_r(self.path['AGENT_REMOTE_FOLDER'])
except Exception:
logger.error("Unable to get agent artefacts", exc_info=True)
if not self.successfull_stop:
self._kill_agent()
return log_filename, data_filename
def _wait_for_stop(self):
done = False
agent_stop_timeout = 5
while not done:
if not self.stop_sent:
logger.info('Session was broken on %s, switch to kill', self.host)
break
if (self.stop_sent + agent_stop_timeout) < time.time():
logger.info("Agent hasn't finished in %s sec, force quit on %s",
agent_stop_timeout,
self.host)
break
if self.session.finished():
logger.debug('Session ended with status %s with %s',
self.session.exit_status(),
self.host)
self.successfull_stop = True
done = True
agent_stderr = self.session.read_err_maybe()
if agent_stderr and 'stopped' in agent_stderr:
logger.debug('Got stopped message from %s', self.host)
done = True
def _kill_agent(self):
if self.agent_remote_folder:
tpl = ('main_p=$(pgrep -f "[p]ython.*{folder}");'
'tlgrf_p=$(pgrep -f "[t]elegraf.*{folder}");'
'descendent_pids(){{ if [ "x$1" != "x" ]; then '
'pids=$(pgrep -P $1); echo $pids;'
'for p in $pids; do descendent_pids $p; done; fi }};'
'all_p=$(descendent_pids ${{main_p}});'
'if [ "x${{main_p}}${{tlgrf_p}}${{all_p}}" != "x" ] ; then '
' kill -9 ${{main_p}} ${{tlgrf_p}} ${{all_p}}; fi')
cmd = tpl.format(folder=self.agent_remote_folder)
out, errors, err_code = self.ssh.execute(cmd)
if errors:
logger.error(
"[%s] error while killing agent: '%s'",
self.host,
errors)
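if __name__ == '__main__':
    # Illustrative sketch only (not part of the original module): check
    # generate_file_md5() on a throw-away temporary file.
    demo_handle, demo_path = tempfile.mkstemp()
    os.close(demo_handle)
    with open(demo_path, 'wb') as demo_file:
        demo_file.write(b'telegraf agent demo payload')
    try:
        print(generate_file_md5(demo_path))
    finally:
        os.remove(demo_path)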
|
from __future__ import absolute_import, unicode_literals
import logging
import time
from colorama import Fore, Style
LEVEL_COLORS = {
'DEBUG': Fore.BLUE, # Blue
'INFO': Fore.GREEN, # Green
'WARNING': Fore.YELLOW,
'ERROR': Fore.RED,
'CRITICAL': Fore.RED,
}
def default_log_template(self, record):
"""
Return the prefix for the log message. Template for Formatter.
Parameters
----------
    record : :py:class:`logging.LogRecord`
        This is passed in from inside the
        :py:meth:`logging.Formatter.format` record.
    Returns
    -------
    str
        Template for the logger message.
"""
reset = Style.RESET_ALL
levelname = (
LEVEL_COLORS.get(record.levelname)
+ Style.BRIGHT
+ '(%(levelname)s)'
+ Style.RESET_ALL
+ ' '
)
asctime = (
'['
+ Fore.BLACK
+ Style.DIM
+ Style.BRIGHT
+ '%(asctime)s'
+ Fore.RESET
+ Style.RESET_ALL
+ ']'
)
name = (
' '
+ Fore.WHITE
+ Style.DIM
+ Style.BRIGHT
+ '%(name)s'
+ Fore.RESET
+ Style.RESET_ALL
+ ' '
)
tpl = reset + levelname + asctime + name + reset
return tpl
class LogFormatter(logging.Formatter):
template = default_log_template
def __init__(self, color=True, *args, **kwargs):
logging.Formatter.__init__(self, *args, **kwargs)
def format(self, record):
try:
record.message = record.getMessage()
except Exception as e:
record.message = "Bad message (%r): %r" % (e, record.__dict__)
        date_format = '%H:%M:%S'
record.asctime = time.strftime(date_format, self.converter(record.created))
prefix = self.template(record) % record.__dict__
formatted = prefix + " " + record.message
return formatted.replace("\n", "\n ")
def debug_log_template(self, record):
"""
Return the prefix for the log message. Template for Formatter.
Parameters
----------
record : :py:class:`logging.LogRecord`
This is passed in from inside the :py:meth:`logging.Formatter.format`
record.
Returns
-------
str
Log template.
"""
reset = Style.RESET_ALL
levelname = (
LEVEL_COLORS.get(record.levelname)
+ Style.BRIGHT
+ '(%(levelname)1.1s)'
+ Style.RESET_ALL
+ ' '
)
asctime = (
'['
+ Fore.BLACK
+ Style.DIM
+ Style.BRIGHT
+ '%(asctime)s'
+ Fore.RESET
+ Style.RESET_ALL
+ ']'
)
name = (
' '
+ Fore.WHITE
+ Style.DIM
+ Style.BRIGHT
+ '%(name)s'
+ Fore.RESET
+ Style.RESET_ALL
+ ' '
)
module_funcName = Fore.GREEN + Style.BRIGHT + '%(module)s.%(funcName)s()'
lineno = (
Fore.BLACK
+ Style.DIM
+ Style.BRIGHT
+ ':'
+ Style.RESET_ALL
+ Fore.CYAN
+ '%(lineno)d'
)
tpl = reset + levelname + asctime + name + module_funcName + lineno + reset
return tpl
class DebugLogFormatter(LogFormatter):
"""Provides greater technical details than standard log Formatter."""
template = debug_log_template
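if __name__ == '__main__':
    # Illustrative sketch only (not part of the original module): attach both
    # formatters to a stream handler and emit a couple of demo records.
    demo_logger = logging.getLogger('log-format-demo')
    demo_handler = logging.StreamHandler()
    demo_handler.setFormatter(DebugLogFormatter())
    demo_logger.addHandler(demo_handler)
    demo_logger.setLevel(logging.DEBUG)
    demo_logger.debug('rendered with DebugLogFormatter')
    demo_handler.setFormatter(LogFormatter())
    demo_logger.info('rendered with LogFormatter')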
|
import logging
import voluptuous as vol
from homeassistant.components.fan import (
ATTR_DIRECTION,
ATTR_OSCILLATING,
ATTR_SPEED,
DIRECTION_FORWARD,
DIRECTION_REVERSE,
ENTITY_ID_FORMAT,
SPEED_HIGH,
SPEED_LOW,
SPEED_MEDIUM,
SUPPORT_DIRECTION,
SUPPORT_OSCILLATE,
SUPPORT_SET_SPEED,
FanEntity,
)
from homeassistant.const import (
CONF_ENTITY_ID,
CONF_FRIENDLY_NAME,
CONF_UNIQUE_ID,
CONF_VALUE_TEMPLATE,
STATE_OFF,
STATE_ON,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
)
from homeassistant.core import callback
from homeassistant.exceptions import TemplateError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import async_generate_entity_id
from homeassistant.helpers.reload import async_setup_reload_service
from homeassistant.helpers.script import Script
from .const import CONF_AVAILABILITY_TEMPLATE, DOMAIN, PLATFORMS
from .template_entity import TemplateEntity
_LOGGER = logging.getLogger(__name__)
CONF_FANS = "fans"
CONF_SPEED_LIST = "speeds"
CONF_SPEED_TEMPLATE = "speed_template"
CONF_OSCILLATING_TEMPLATE = "oscillating_template"
CONF_DIRECTION_TEMPLATE = "direction_template"
CONF_ON_ACTION = "turn_on"
CONF_OFF_ACTION = "turn_off"
CONF_SET_SPEED_ACTION = "set_speed"
CONF_SET_OSCILLATING_ACTION = "set_oscillating"
CONF_SET_DIRECTION_ACTION = "set_direction"
_VALID_STATES = [STATE_ON, STATE_OFF]
_VALID_OSC = [True, False]
_VALID_DIRECTIONS = [DIRECTION_FORWARD, DIRECTION_REVERSE]
FAN_SCHEMA = vol.All(
cv.deprecated(CONF_ENTITY_ID),
vol.Schema(
{
vol.Optional(CONF_FRIENDLY_NAME): cv.string,
vol.Required(CONF_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_SPEED_TEMPLATE): cv.template,
vol.Optional(CONF_OSCILLATING_TEMPLATE): cv.template,
vol.Optional(CONF_DIRECTION_TEMPLATE): cv.template,
vol.Optional(CONF_AVAILABILITY_TEMPLATE): cv.template,
vol.Required(CONF_ON_ACTION): cv.SCRIPT_SCHEMA,
vol.Required(CONF_OFF_ACTION): cv.SCRIPT_SCHEMA,
vol.Optional(CONF_SET_SPEED_ACTION): cv.SCRIPT_SCHEMA,
vol.Optional(CONF_SET_OSCILLATING_ACTION): cv.SCRIPT_SCHEMA,
vol.Optional(CONF_SET_DIRECTION_ACTION): cv.SCRIPT_SCHEMA,
vol.Optional(
CONF_SPEED_LIST, default=[SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH]
): cv.ensure_list,
vol.Optional(CONF_ENTITY_ID): cv.entity_ids,
vol.Optional(CONF_UNIQUE_ID): cv.string,
}
),
)
PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA.extend(
{vol.Required(CONF_FANS): cv.schema_with_slug_keys(FAN_SCHEMA)}
)
async def _async_create_entities(hass, config):
"""Create the Template Fans."""
fans = []
for device, device_config in config[CONF_FANS].items():
friendly_name = device_config.get(CONF_FRIENDLY_NAME, device)
state_template = device_config[CONF_VALUE_TEMPLATE]
speed_template = device_config.get(CONF_SPEED_TEMPLATE)
oscillating_template = device_config.get(CONF_OSCILLATING_TEMPLATE)
direction_template = device_config.get(CONF_DIRECTION_TEMPLATE)
availability_template = device_config.get(CONF_AVAILABILITY_TEMPLATE)
on_action = device_config[CONF_ON_ACTION]
off_action = device_config[CONF_OFF_ACTION]
set_speed_action = device_config.get(CONF_SET_SPEED_ACTION)
set_oscillating_action = device_config.get(CONF_SET_OSCILLATING_ACTION)
set_direction_action = device_config.get(CONF_SET_DIRECTION_ACTION)
speed_list = device_config[CONF_SPEED_LIST]
unique_id = device_config.get(CONF_UNIQUE_ID)
fans.append(
TemplateFan(
hass,
device,
friendly_name,
state_template,
speed_template,
oscillating_template,
direction_template,
availability_template,
on_action,
off_action,
set_speed_action,
set_oscillating_action,
set_direction_action,
speed_list,
unique_id,
)
)
return fans
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the template fans."""
await async_setup_reload_service(hass, DOMAIN, PLATFORMS)
async_add_entities(await _async_create_entities(hass, config))
class TemplateFan(TemplateEntity, FanEntity):
"""A template fan component."""
def __init__(
self,
hass,
device_id,
friendly_name,
state_template,
speed_template,
oscillating_template,
direction_template,
availability_template,
on_action,
off_action,
set_speed_action,
set_oscillating_action,
set_direction_action,
speed_list,
unique_id,
):
"""Initialize the fan."""
super().__init__(availability_template=availability_template)
self.hass = hass
self.entity_id = async_generate_entity_id(
ENTITY_ID_FORMAT, device_id, hass=hass
)
self._name = friendly_name
self._template = state_template
self._speed_template = speed_template
self._oscillating_template = oscillating_template
self._direction_template = direction_template
self._supported_features = 0
domain = __name__.split(".")[-2]
self._on_script = Script(hass, on_action, friendly_name, domain)
self._off_script = Script(hass, off_action, friendly_name, domain)
self._set_speed_script = None
if set_speed_action:
self._set_speed_script = Script(
hass, set_speed_action, friendly_name, domain
)
self._set_oscillating_script = None
if set_oscillating_action:
self._set_oscillating_script = Script(
hass, set_oscillating_action, friendly_name, domain
)
self._set_direction_script = None
if set_direction_action:
self._set_direction_script = Script(
hass, set_direction_action, friendly_name, domain
)
self._state = STATE_OFF
self._speed = None
self._oscillating = None
self._direction = None
if self._speed_template:
self._supported_features |= SUPPORT_SET_SPEED
if self._oscillating_template:
self._supported_features |= SUPPORT_OSCILLATE
if self._direction_template:
self._supported_features |= SUPPORT_DIRECTION
self._unique_id = unique_id
# List of valid speeds
self._speed_list = speed_list
@property
def name(self):
"""Return the display name of this fan."""
return self._name
@property
def unique_id(self):
"""Return the unique id of this fan."""
return self._unique_id
@property
def supported_features(self) -> int:
"""Flag supported features."""
return self._supported_features
@property
def speed_list(self) -> list:
"""Get the list of available speeds."""
return self._speed_list
@property
def is_on(self):
"""Return true if device is on."""
return self._state == STATE_ON
@property
def speed(self):
"""Return the current speed."""
return self._speed
@property
def oscillating(self):
"""Return the oscillation state."""
return self._oscillating
@property
def current_direction(self):
"""Return the oscillation state."""
return self._direction
# pylint: disable=arguments-differ
async def async_turn_on(self, speed: str = None) -> None:
"""Turn on the fan."""
await self._on_script.async_run({ATTR_SPEED: speed}, context=self._context)
self._state = STATE_ON
if speed is not None:
await self.async_set_speed(speed)
# pylint: disable=arguments-differ
async def async_turn_off(self) -> None:
"""Turn off the fan."""
await self._off_script.async_run(context=self._context)
self._state = STATE_OFF
async def async_set_speed(self, speed: str) -> None:
"""Set the speed of the fan."""
if self._set_speed_script is None:
return
if speed in self._speed_list:
self._speed = speed
await self._set_speed_script.async_run(
{ATTR_SPEED: speed}, context=self._context
)
else:
_LOGGER.error(
"Received invalid speed: %s. Expected: %s", speed, self._speed_list
)
async def async_oscillate(self, oscillating: bool) -> None:
"""Set oscillation of the fan."""
if self._set_oscillating_script is None:
return
if oscillating in _VALID_OSC:
self._oscillating = oscillating
await self._set_oscillating_script.async_run(
{ATTR_OSCILLATING: oscillating}, context=self._context
)
else:
_LOGGER.error(
"Received invalid oscillating value: %s. Expected: %s",
oscillating,
", ".join(_VALID_OSC),
)
async def async_set_direction(self, direction: str) -> None:
"""Set the direction of the fan."""
if self._set_direction_script is None:
return
if direction in _VALID_DIRECTIONS:
self._direction = direction
await self._set_direction_script.async_run(
{ATTR_DIRECTION: direction}, context=self._context
)
else:
_LOGGER.error(
"Received invalid direction: %s. Expected: %s",
direction,
", ".join(_VALID_DIRECTIONS),
)
@callback
def _update_state(self, result):
super()._update_state(result)
if isinstance(result, TemplateError):
self._state = None
return
# Validate state
if result in _VALID_STATES:
self._state = result
elif result in [STATE_UNAVAILABLE, STATE_UNKNOWN]:
self._state = None
else:
_LOGGER.error(
"Received invalid fan is_on state: %s. Expected: %s",
result,
", ".join(_VALID_STATES),
)
self._state = None
async def async_added_to_hass(self):
"""Register callbacks."""
self.add_template_attribute("_state", self._template, None, self._update_state)
if self._speed_template is not None:
self.add_template_attribute(
"_speed",
self._speed_template,
None,
self._update_speed,
none_on_template_error=True,
)
if self._oscillating_template is not None:
self.add_template_attribute(
"_oscillating",
self._oscillating_template,
None,
self._update_oscillating,
none_on_template_error=True,
)
if self._direction_template is not None:
self.add_template_attribute(
"_direction",
self._direction_template,
None,
self._update_direction,
none_on_template_error=True,
)
await super().async_added_to_hass()
@callback
def _update_speed(self, speed):
# Validate speed
speed = str(speed)
if speed in self._speed_list:
self._speed = speed
elif speed in [STATE_UNAVAILABLE, STATE_UNKNOWN]:
self._speed = None
else:
_LOGGER.error(
"Received invalid speed: %s. Expected: %s", speed, self._speed_list
)
self._speed = None
@callback
def _update_oscillating(self, oscillating):
# Validate osc
if oscillating == "True" or oscillating is True:
self._oscillating = True
elif oscillating == "False" or oscillating is False:
self._oscillating = False
elif oscillating in [STATE_UNAVAILABLE, STATE_UNKNOWN]:
self._oscillating = None
else:
_LOGGER.error(
"Received invalid oscillating: %s. Expected: True/False",
oscillating,
)
self._oscillating = None
@callback
def _update_direction(self, direction):
# Validate direction
if direction in _VALID_DIRECTIONS:
self._direction = direction
elif direction in [STATE_UNAVAILABLE, STATE_UNKNOWN]:
self._direction = None
else:
_LOGGER.error(
"Received invalid direction: %s. Expected: %s",
direction,
", ".join(_VALID_DIRECTIONS),
)
self._direction = None
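# Illustrative YAML sketch for this platform (hypothetical entity and script
# names; only value_template, turn_on and turn_off are required by FAN_SCHEMA):
#
#   fan:
#     - platform: template
#       fans:
#         bedroom_fan:
#           friendly_name: "Bedroom fan"
#           value_template: "{{ states('switch.bedroom_fan') }}"
#           turn_on:
#             service: switch.turn_on
#             entity_id: switch.bedroom_fan
#           turn_off:
#             service: switch.turn_off
#             entity_id: switch.bedroom_fan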
|
import asyncio
import json
import logging
from openzwavemqtt import OZWManager, OZWOptions
from openzwavemqtt.const import (
EVENT_INSTANCE_EVENT,
EVENT_NODE_ADDED,
EVENT_NODE_CHANGED,
EVENT_NODE_REMOVED,
EVENT_VALUE_ADDED,
EVENT_VALUE_CHANGED,
EVENT_VALUE_REMOVED,
CommandClass,
ValueType,
)
from openzwavemqtt.models.node import OZWNode
from openzwavemqtt.models.value import OZWValue
import voluptuous as vol
from homeassistant.components import mqtt
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.device_registry import async_get_registry as get_dev_reg
from homeassistant.helpers.dispatcher import async_dispatcher_send
from . import const
from .const import (
DATA_UNSUBSCRIBE,
DOMAIN,
MANAGER,
OPTIONS,
PLATFORMS,
TOPIC_OPENZWAVE,
)
from .discovery import DISCOVERY_SCHEMAS, check_node_schema, check_value_schema
from .entity import (
ZWaveDeviceEntityValues,
create_device_id,
create_device_name,
create_value_id,
)
from .services import ZWaveServices
from .websocket_api import async_register_api
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = vol.Schema({DOMAIN: vol.Schema({})}, extra=vol.ALLOW_EXTRA)
DATA_DEVICES = "zwave-mqtt-devices"
async def async_setup(hass: HomeAssistant, config: dict):
"""Initialize basic config of ozw component."""
if "mqtt" not in hass.config.components:
_LOGGER.error("MQTT integration is not set up")
return False
hass.data[DOMAIN] = {}
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Set up ozw from a config entry."""
ozw_data = hass.data[DOMAIN][entry.entry_id] = {}
ozw_data[DATA_UNSUBSCRIBE] = []
data_nodes = {}
data_values = {}
removed_nodes = []
@callback
def send_message(topic, payload):
mqtt.async_publish(hass, topic, json.dumps(payload))
options = OZWOptions(send_message=send_message, topic_prefix=f"{TOPIC_OPENZWAVE}/")
manager = OZWManager(options)
hass.data[DOMAIN][MANAGER] = manager
hass.data[DOMAIN][OPTIONS] = options
@callback
def async_node_added(node):
# Caution: This is also called on (re)start.
_LOGGER.debug("[NODE ADDED] node_id: %s", node.id)
data_nodes[node.id] = node
if node.id not in data_values:
data_values[node.id] = []
@callback
def async_node_changed(node):
_LOGGER.debug("[NODE CHANGED] node_id: %s", node.id)
data_nodes[node.id] = node
# notify devices about the node change
if node.id not in removed_nodes:
hass.async_create_task(async_handle_node_update(hass, node))
@callback
def async_node_removed(node):
_LOGGER.debug("[NODE REMOVED] node_id: %s", node.id)
data_nodes.pop(node.id)
# node added/removed events also happen on (re)starts of hass/mqtt/ozw
# cleanup device/entity registry if we know this node is permanently deleted
        # entities themselves are removed by the values logic
if node.id in removed_nodes:
hass.async_create_task(async_handle_remove_node(hass, node))
removed_nodes.remove(node.id)
@callback
def async_instance_event(message):
event = message["event"]
event_data = message["data"]
_LOGGER.debug("[INSTANCE EVENT]: %s - data: %s", event, event_data)
# The actual removal action of a Z-Wave node is reported as instance event
        # Only when this event is detected do we clean up the device and entities from hass
# Note: Find a more elegant way of doing this, e.g. a notification of this event from OZW
if event in ["removenode", "removefailednode"] and "Node" in event_data:
removed_nodes.append(event_data["Node"])
@callback
def async_value_added(value):
node = value.node
# Clean up node.node_id and node.id use. They are the same.
node_id = value.node.node_id
# Filter out CommandClasses we're definitely not interested in.
if value.command_class in [
CommandClass.MANUFACTURER_SPECIFIC,
]:
return
_LOGGER.debug(
"[VALUE ADDED] node_id: %s - label: %s - value: %s - value_id: %s - CC: %s",
value.node.id,
value.label,
value.value,
value.value_id_key,
value.command_class,
)
node_data_values = data_values[node_id]
# Check if this value should be tracked by an existing entity
value_unique_id = create_value_id(value)
for values in node_data_values:
values.async_check_value(value)
if values.values_id == value_unique_id:
return # this value already has an entity
        # Run discovery on it and see if any entities need to be created
for schema in DISCOVERY_SCHEMAS:
if not check_node_schema(node, schema):
continue
if not check_value_schema(
value, schema[const.DISC_VALUES][const.DISC_PRIMARY]
):
continue
values = ZWaveDeviceEntityValues(hass, options, schema, value)
values.async_setup()
# This is legacy and can be cleaned up since we are in the main thread:
# We create a new list and update the reference here so that
# the list can be safely iterated over in the main thread
data_values[node_id] = node_data_values + [values]
@callback
def async_value_changed(value):
# if an entity belonging to this value needs updating,
# it's handled within the entity logic
_LOGGER.debug(
"[VALUE CHANGED] node_id: %s - label: %s - value: %s - value_id: %s - CC: %s",
value.node.id,
value.label,
value.value,
value.value_id_key,
value.command_class,
)
# Handle a scene activation message
if value.command_class in [
CommandClass.SCENE_ACTIVATION,
CommandClass.CENTRAL_SCENE,
]:
async_handle_scene_activated(hass, value)
return
@callback
def async_value_removed(value):
_LOGGER.debug(
"[VALUE REMOVED] node_id: %s - label: %s - value: %s - value_id: %s - CC: %s",
value.node.id,
value.label,
value.value,
value.value_id_key,
value.command_class,
)
# signal all entities using this value for removal
value_unique_id = create_value_id(value)
async_dispatcher_send(hass, const.SIGNAL_DELETE_ENTITY, value_unique_id)
# remove value from our local list
node_data_values = data_values[value.node.id]
node_data_values[:] = [
item for item in node_data_values if item.values_id != value_unique_id
]
# Listen to events for node and value changes
for event, event_callback in (
(EVENT_NODE_ADDED, async_node_added),
(EVENT_NODE_CHANGED, async_node_changed),
(EVENT_NODE_REMOVED, async_node_removed),
(EVENT_VALUE_ADDED, async_value_added),
(EVENT_VALUE_CHANGED, async_value_changed),
(EVENT_VALUE_REMOVED, async_value_removed),
(EVENT_INSTANCE_EVENT, async_instance_event),
):
ozw_data[DATA_UNSUBSCRIBE].append(options.listen(event, event_callback))
# Register Services
services = ZWaveServices(hass, manager)
services.async_register()
# Register WebSocket API
async_register_api(hass)
@callback
def async_receive_message(msg):
manager.receive_message(msg.topic, msg.payload)
async def start_platforms():
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_setup(entry, component)
for component in PLATFORMS
]
)
ozw_data[DATA_UNSUBSCRIBE].append(
await mqtt.async_subscribe(
hass, f"{TOPIC_OPENZWAVE}/#", async_receive_message
)
)
hass.async_create_task(start_platforms())
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
# cleanup platforms
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
if not unload_ok:
return False
# unsubscribe all listeners
for unsubscribe_listener in hass.data[DOMAIN][entry.entry_id][DATA_UNSUBSCRIBE]:
unsubscribe_listener()
hass.data[DOMAIN].pop(entry.entry_id)
return True
async def async_handle_remove_node(hass: HomeAssistant, node: OZWNode):
"""Handle the removal of a Z-Wave node, removing all traces in device/entity registry."""
dev_registry = await get_dev_reg(hass)
# grab device in device registry attached to this node
dev_id = create_device_id(node)
device = dev_registry.async_get_device({(DOMAIN, dev_id)}, set())
if not device:
return
devices_to_remove = [device.id]
# also grab slave devices (node instances)
for item in dev_registry.devices.values():
if item.via_device_id == device.id:
devices_to_remove.append(item.id)
# remove all devices in registry related to this node
# note: removal of entity registry is handled by core
for dev_id in devices_to_remove:
dev_registry.async_remove_device(dev_id)
async def async_handle_node_update(hass: HomeAssistant, node: OZWNode):
"""
Handle a node updated event from OZW.
Meaning some of the basic info like name/model is updated.
We want these changes to be pushed to the device registry.
"""
dev_registry = await get_dev_reg(hass)
# grab device in device registry attached to this node
dev_id = create_device_id(node)
device = dev_registry.async_get_device({(DOMAIN, dev_id)}, set())
if not device:
return
# update device in device registry with (updated) info
for item in dev_registry.devices.values():
if item.id != device.id and item.via_device_id != device.id:
continue
dev_name = create_device_name(node)
dev_registry.async_update_device(
item.id,
manufacturer=node.node_manufacturer_name,
model=node.node_product_name,
name=dev_name,
)
@callback
def async_handle_scene_activated(hass: HomeAssistant, scene_value: OZWValue):
"""Handle a (central) scene activation message."""
node_id = scene_value.node.id
scene_id = scene_value.index
scene_label = scene_value.label
if scene_value.command_class == CommandClass.SCENE_ACTIVATION:
# legacy/network scene
scene_value_id = scene_value.value
scene_value_label = scene_value.label
else:
# central scene command
if scene_value.type != ValueType.LIST:
return
scene_value_label = scene_value.value["Selected"]
scene_value_id = scene_value.value["Selected_id"]
_LOGGER.debug(
"[SCENE_ACTIVATED] node_id: %s - scene_id: %s - scene_value_id: %s",
node_id,
scene_id,
scene_value_id,
)
# Simply forward it to the hass event bus
hass.bus.async_fire(
const.EVENT_SCENE_ACTIVATED,
{
const.ATTR_NODE_ID: node_id,
const.ATTR_SCENE_ID: scene_id,
const.ATTR_SCENE_LABEL: scene_label,
const.ATTR_SCENE_VALUE_ID: scene_value_id,
const.ATTR_SCENE_VALUE_LABEL: scene_value_label,
},
)
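# Illustrative payload for the EVENT_SCENE_ACTIVATED event fired above
# (hypothetical values and key strings; the keys correspond to the const.ATTR_*
# names used in async_handle_scene_activated):
#
#   {
#       "node_id": 7,
#       "scene_id": 1,
#       "scene_label": "Scene 1",
#       "scene_value_id": 3,
#       "scene_value_label": "Pressed 1 Time",
#   }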
|
import io
import os
import re
import sys
__LXML_VERSION = None
def version():
global __LXML_VERSION
if __LXML_VERSION is None:
with open(os.path.join(get_base_dir(), 'src', 'lxml', '__init__.py')) as f:
__LXML_VERSION = re.search(r'__version__\s*=\s*"([^"]+)"', f.read(250)).group(1)
assert __LXML_VERSION
return __LXML_VERSION
def branch_version():
return version()[:3]
def is_pre_release():
version_string = version()
return "a" in version_string or "b" in version_string
def dev_status():
_version = version()
if 'a' in _version:
return 'Development Status :: 3 - Alpha'
elif 'b' in _version or 'c' in _version:
return 'Development Status :: 4 - Beta'
else:
return 'Development Status :: 5 - Production/Stable'
def changes():
"""Extract part of changelog pertaining to version.
"""
_version = version()
with io.open(os.path.join(get_base_dir(), "CHANGES.txt"), 'r', encoding='utf8') as f:
lines = []
for line in f:
if line.startswith('====='):
if len(lines) > 1:
break
if lines:
lines.append(line)
elif line.startswith(_version):
lines.append(line)
return ''.join(lines[:-1])
def create_version_h():
"""Create lxml-version.h
"""
lxml_version = version()
# make sure we have a triple part version number
parts = lxml_version.split('-')
while parts[0].count('.') < 2:
parts[0] += '.0'
lxml_version = '-'.join(parts).replace('a', '.alpha').replace('b', '.beta')
file_path = os.path.join(get_base_dir(), 'src', 'lxml', 'includes', 'lxml-version.h')
# Avoid changing file timestamp if content didn't change.
if os.path.isfile(file_path):
with open(file_path, 'r') as version_h:
if ('"%s"' % lxml_version) in version_h.read(100):
return
with open(file_path, 'w') as version_h:
version_h.write('''\
#ifndef LXML_VERSION_STRING
#define LXML_VERSION_STRING "%s"
#endif
''' % lxml_version)
def get_base_dir():
return os.path.abspath(os.path.dirname(sys.argv[0]))
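# Hedged usage sketch (not part of the original helper): assuming this module
# sits next to setup.py in the lxml source tree, so that CHANGES.txt and
# src/lxml/__init__.py exist relative to get_base_dir(), the helpers above can
# be exercised directly.
if __name__ == '__main__':
    create_version_h()
    print(version(), branch_version(), dev_status())
    print(changes())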
|
import pytest
from redbot.pytest.mod import *
@pytest.mark.asyncio
async def test_modlog_register_casetype(mod):
ct = {"name": "ban", "default_setting": True, "image": ":hammer:", "case_str": "Ban"}
casetype = await mod.register_casetype(**ct)
assert casetype is not None
@pytest.mark.asyncio
async def test_modlog_case_create(mod, ctx, member_factory):
from datetime import datetime, timezone
# Run casetype register test to register casetype in this test too
await test_modlog_register_casetype(mod)
usr = member_factory.get()
guild = ctx.guild
bot = ctx.bot
case_type = "ban"
moderator = ctx.author
reason = "Test 12345"
created_at = datetime.now(timezone.utc)
case = await mod.create_case(bot, guild, created_at, case_type, usr, moderator, reason)
assert case is not None
assert case.user == usr
assert case.action_type == case_type
assert case.moderator == moderator
assert case.reason == reason
assert case.created_at == int(created_at.timestamp())
@pytest.mark.asyncio
async def test_modlog_set_modlog_channel(mod, ctx):
await mod.set_modlog_channel(ctx.guild, ctx.channel)
assert await mod.get_modlog_channel(ctx.guild) == ctx.channel.id
|
import voluptuous as vol
from homeassistant.components.binary_sensor import BinarySensorEntity
from . import DOMAIN, PLATFORM_SCHEMA, XBeeDigitalIn, XBeeDigitalInConfig
CONF_ON_STATE = "on_state"
DEFAULT_ON_STATE = "high"
STATES = ["high", "low"]
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({vol.Optional(CONF_ON_STATE): vol.In(STATES)})
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the XBee Zigbee binary sensor platform."""
zigbee_device = hass.data[DOMAIN]
add_entities([XBeeBinarySensor(XBeeDigitalInConfig(config), zigbee_device)], True)
class XBeeBinarySensor(XBeeDigitalIn, BinarySensorEntity):
"""Use XBeeDigitalIn as binary sensor."""
|
import os
from typing import Dict
from django import template
from django.conf import settings
from django.utils.safestring import mark_safe
from weblate.utils.errors import report_error
register = template.Library()
CACHE: Dict[str, str] = {}
SPIN = '<span class="icon-spin" {} {}>{}</span>'
@register.simple_tag()
def icon(name):
"""Inlines SVG icon.
Inlining is necessary to be able to apply CSS styles on the path.
"""
if not name:
raise ValueError("Empty icon name")
if name not in CACHE:
icon_file = os.path.join(settings.STATIC_ROOT, "icons", name)
try:
with open(icon_file) as handle:
CACHE[name] = mark_safe(handle.read())
except OSError:
report_error(cause="Failed to load icon")
return ""
return CACHE[name]
@register.simple_tag()
def loading_icon(name=None, hidden=True):
return mark_safe(
SPIN.format(
f'id="loading-{name}"' if name else "",
'style="display: none"' if hidden else "",
icon("loading.svg"),
)
)
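# Hedged usage sketch (illustrative only): in a Django template these tags
# would typically be used after loading this tag library, for example:
#
#     {% load icons %}
#     {% icon "plus.svg" %}
#     {% loading_icon name="search" hidden=False %}
#
# The "icons" library name and "plus.svg" file name are assumptions; only
# files present under STATIC_ROOT/icons/ can be inlined.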
|
import posixpath
import urllib2
try:
import json
except ImportError:
import simplejson as json
import diamond.collector
class SolrCollector(diamond.collector.Collector):
def __init__(self, *args, **kwargs):
        super(SolrCollector, self).__init__(*args, **kwargs)
self.config['host'] = self.config['host'].rstrip('/')
def get_default_config_help(self):
config_help = super(SolrCollector, self).get_default_config_help()
config_help.update({
'host': "",
'port': "",
'core': "Which core info should collect (default: all cores)",
'stats': "Available stats: \n"
" - core (Core stats)\n"
" - response (Ping response stats)\n"
" - query (Query Handler stats)\n"
" - update (Update Handler stats)\n"
" - cache (fieldValue, filter,"
" document & queryResult cache stats)\n"
" - jvm (JVM information) \n"
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(SolrCollector, self).get_default_config()
config.update({
'host': 'localhost',
'port': 8983,
'path': 'solr',
'core': None,
'stats': ['jvm', 'core', 'response',
'query', 'update', 'cache'],
})
return config
def _try_convert(self, value):
if isinstance(value, (int, float)):
return value
try:
if '.' in value:
return float(value)
return int(value)
except ValueError:
return value
def _get(self, path):
path = path.lstrip('/')
url = 'http://%s:%i/%s' % (
self.config['host'], int(self.config['port']), path)
try:
response = urllib2.urlopen(url)
except Exception as err:
self.log.error("%s: %s", url, err)
return False
try:
return json.load(response)
except (TypeError, ValueError):
self.log.error("Unable to parse response from solr as a"
" json object")
return False
def collect(self):
if json is None:
self.log.error('Unable to import json')
return {}
cores = []
if self.config['core']:
cores = [self.config['core']]
else:
# If no core is specified, provide statistics for all cores
result = self._get('/solr/admin/cores?action=STATUS&wt=json')
if result:
cores = result['status'].keys()
metrics = {}
for core in cores:
if core:
path = "{}.".format(core)
else:
path = ""
ping_url = posixpath.normpath(
"/solr/{}/admin/ping?wt=json".format(core))
if 'response' in self.config['stats']:
result = self._get(ping_url)
if not result:
continue
metrics.update({
"{}response.QueryTime".format(path):
result["responseHeader"]["QTime"],
"{}response.Status".format(path):
result["responseHeader"]["status"],
})
stats_url = posixpath.normpath(
"/solr/{}/admin/mbeans?stats=true&wt=json".format(core))
result = self._get(stats_url)
if not result:
continue
s = result['solr-mbeans']
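            # 'solr-mbeans' comes back as a flat list alternating category
            # name and stats dict; pair up consecutive items into a mapping.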
stats = dict((s[i], s[i + 1]) for i in xrange(0, len(s), 2))
if 'core' in self.config['stats']:
core_searcher = stats["CORE"]["searcher"]["stats"]
metrics.update([
("{}core.{}".format(path, key),
core_searcher[key])
for key in ("maxDoc", "numDocs", "warmupTime")
])
if 'query' in self.config['stats']:
standard = stats["QUERYHANDLER"]["standard"]["stats"]
update = stats["QUERYHANDLER"]["/update"]["stats"]
metrics.update([
("{}queryhandler.standard.{}".format(path, key),
standard[key])
for key in ("requests", "errors", "timeouts", "totalTime",
"avgTimePerRequest", "avgRequestsPerSecond")
])
metrics.update([
("{}queryhandler.update.{}".format(path, key),
update[key])
for key in ("requests", "errors", "timeouts", "totalTime",
"avgTimePerRequest", "avgRequestsPerSecond")
if update[key] != 'NaN'
])
if 'update' in self.config['stats']:
updatehandler = \
stats["UPDATEHANDLER"]["updateHandler"]["stats"]
metrics.update([
("{}updatehandler.{}".format(path, key),
updatehandler[key])
for key in (
"commits", "autocommits", "optimizes",
"rollbacks", "docsPending", "adds", "errors",
"cumulative_adds", "cumulative_errors")
])
if 'cache' in self.config['stats']:
cache = stats["CACHE"]
metrics.update([
("{}cache.{}.{}".format(path, cache_type, key),
self._try_convert(cache[cache_type]['stats'][key]))
for cache_type in (
'fieldValueCache', 'filterCache',
'documentCache', 'queryResultCache')
for key in (
'lookups', 'hits', 'hitratio', 'inserts',
'evictions', 'size', 'warmupTime',
'cumulative_lookups', 'cumulative_hits',
'cumulative_hitratio', 'cumulative_inserts',
'cumulative_evictions')
])
if 'jvm' in self.config['stats']:
system_url = posixpath.normpath(
"/solr/{}/admin/system?stats=true&wt=json".format(core))
result = self._get(system_url)
if not result:
continue
mem = result['jvm']['memory']
metrics.update([
('{}jvm.mem.{}'.format(path, key),
self._try_convert(mem[key].split()[0]))
for key in ('free', 'total', 'max', 'used')
])
for key in metrics:
self.publish(key, metrics[key])
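# Hedged configuration sketch (illustrative only): a typical stanza for this
# collector under the [collectors] section of diamond.conf (configobj
# syntax), using only keys defined in get_default_config() above. The core
# name "collection1" is an assumption; leaving "core" unset collects stats
# for all cores.
#
#     [[SolrCollector]]
#     enabled = True
#     host = localhost
#     port = 8983
#     core = collection1
#     stats = jvm, core, response, query, update, cache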
|
import os
import json
import time
import logging
import signal
import pytest
from PyQt5.QtCore import QFileSystemWatcher
from qutebrowser.commands import userscripts
from qutebrowser.utils import utils
@pytest.mark.posix
class TestQtFIFOReader:
@pytest.fixture
def reader(self, tmpdir, qapp):
fifo_path = str(tmpdir / 'fifo')
os.mkfifo(fifo_path) # pylint: disable=no-member,useless-suppression
reader = userscripts._QtFIFOReader(fifo_path)
yield reader
if reader._notifier.isEnabled():
reader.cleanup()
def test_single_line(self, reader, qtbot):
"""Test QSocketNotifier with a single line of data."""
with qtbot.waitSignal(reader.got_line) as blocker:
with open(reader._filepath, 'w', encoding='utf-8') as f:
f.write('foobar\n')
assert blocker.args == ['foobar']
def test_cleanup(self, reader):
assert not reader._fifo.closed
reader.cleanup()
assert reader._fifo.closed
@pytest.fixture(params=[
userscripts._POSIXUserscriptRunner,
userscripts._WindowsUserscriptRunner,
])
def runner(request, runtime_tmpdir):
if (not utils.is_posix and
request.param is userscripts._POSIXUserscriptRunner):
pytest.skip("Requires a POSIX os")
raise utils.Unreachable
return request.param()
def test_command(qtbot, py_proc, runner):
cmd, args = py_proc(r"""
import os
with open(os.environ['QUTE_FIFO'], 'w') as f:
f.write('foo\n')
""")
with qtbot.waitSignal(runner.finished, timeout=10000):
with qtbot.waitSignal(runner.got_cmd, timeout=10000) as blocker:
runner.prepare_run(cmd, *args)
runner.store_html('')
runner.store_text('')
assert blocker.args == ['foo']
def test_custom_env(qtbot, monkeypatch, py_proc, runner):
monkeypatch.setenv('QUTEBROWSER_TEST_1', '1')
env = {'QUTEBROWSER_TEST_2': '2'}
cmd, args = py_proc(r"""
import os
import json
env = dict(os.environ)
with open(os.environ['QUTE_FIFO'], 'w') as f:
json.dump(env, f)
f.write('\n')
""")
with qtbot.waitSignal(runner.finished, timeout=10000):
with qtbot.waitSignal(runner.got_cmd, timeout=10000) as blocker:
runner.prepare_run(cmd, *args, env=env)
runner.store_html('')
runner.store_text('')
data = blocker.args[0]
ret_env = json.loads(data)
assert 'QUTEBROWSER_TEST_1' in ret_env
assert 'QUTEBROWSER_TEST_2' in ret_env
def test_source(qtbot, py_proc, runner):
"""Make sure the page source is read and cleaned up correctly."""
cmd, args = py_proc(r"""
import os
import json
data = {
'html_file': os.environ['QUTE_HTML'],
'text_file': os.environ['QUTE_TEXT'],
}
with open(os.environ['QUTE_HTML'], 'r') as f:
data['html'] = f.read()
with open(os.environ['QUTE_TEXT'], 'r') as f:
data['text'] = f.read()
with open(os.environ['QUTE_FIFO'], 'w') as f:
json.dump(data, f)
f.write('\n')
""")
with qtbot.waitSignal(runner.finished, timeout=10000):
with qtbot.waitSignal(runner.got_cmd, timeout=10000) as blocker:
runner.prepare_run(cmd, *args)
runner.store_html('This is HTML')
runner.store_text('This is text')
data = blocker.args[0]
parsed = json.loads(data)
assert parsed['text'] == 'This is text'
assert parsed['html'] == 'This is HTML'
assert not os.path.exists(parsed['text_file'])
assert not os.path.exists(parsed['html_file'])
def test_command_with_error(qtbot, py_proc, runner, caplog):
cmd, args = py_proc(r"""
import sys, os, json
with open(os.environ['QUTE_FIFO'], 'w') as f:
json.dump(os.environ['QUTE_TEXT'], f)
f.write('\n')
sys.exit(1)
""")
with caplog.at_level(logging.ERROR):
with qtbot.waitSignal(runner.finished, timeout=10000):
with qtbot.waitSignal(runner.got_cmd, timeout=10000) as blocker:
runner.prepare_run(cmd, *args)
runner.store_text('Hello World')
runner.store_html('')
data = json.loads(blocker.args[0])
assert not os.path.exists(data)
def test_killed_command(qtbot, tmpdir, py_proc, runner, caplog):
data_file = tmpdir / 'data'
watcher = QFileSystemWatcher()
watcher.addPath(str(tmpdir))
cmd, args = py_proc(r"""
import os
import time
import sys
import json
data = {
'pid': os.getpid(),
'text_file': os.environ['QUTE_TEXT'],
}
# We can't use QUTE_FIFO to transmit the PID because that wouldn't work
# on Windows, where QUTE_FIFO is only monitored after the script has
# exited.
with open(sys.argv[1], 'w') as f:
json.dump(data, f)
time.sleep(30)
""")
args.append(str(data_file))
with qtbot.waitSignal(watcher.directoryChanged, timeout=10000):
runner.prepare_run(cmd, *args)
runner.store_text('Hello World')
runner.store_html('')
# Make sure the PID was written to the file, not just the file created
time.sleep(0.5)
data = json.load(data_file)
with caplog.at_level(logging.ERROR):
with qtbot.waitSignal(runner.finished):
os.kill(int(data['pid']), signal.SIGTERM)
assert not os.path.exists(data['text_file'])
def test_temporary_files_failed_cleanup(caplog, qtbot, py_proc, runner):
"""Delete a temporary file from the script so cleanup fails."""
cmd, args = py_proc(r"""
import os
os.remove(os.environ['QUTE_HTML'])
""")
with caplog.at_level(logging.ERROR):
with qtbot.waitSignal(runner.finished, timeout=10000):
runner.prepare_run(cmd, *args)
runner.store_text('')
runner.store_html('')
assert len(caplog.records) == 1
expected = "Failed to delete tempfile"
assert caplog.messages[0].startswith(expected)
def test_unicode_error(caplog, qtbot, py_proc, runner):
cmd, args = py_proc(r"""
import os
with open(os.environ['QUTE_FIFO'], 'wb') as f:
f.write(b'\x80')
""")
with caplog.at_level(logging.ERROR):
with qtbot.waitSignal(runner.finished, timeout=10000):
runner.prepare_run(cmd, *args)
runner.store_text('')
runner.store_html('')
assert len(caplog.records) == 1
expected = "Invalid unicode in userscript output: "
assert caplog.messages[0].startswith(expected)
@pytest.mark.fake_os('unknown')
def test_unsupported(tabbed_browser_stubs):
with pytest.raises(userscripts.UnsupportedError, match="Userscripts are "
"not supported on this platform!"):
userscripts.run_async(tab=None, cmd=None, win_id=0, env=None)
|
from __future__ import absolute_import
import unittest
from lxml import etree
from lxml.builder import E
from .common_imports import HelperTestCase, _bytes
class BuilderTestCase(HelperTestCase):
etree = etree
def test_build_from_xpath_result(self):
class StringSubclass(str): pass
wrapped = E.b(StringSubclass('Hello'))
self.assertEqual(_bytes('<b>Hello</b>'), etree.tostring(wrapped))
def test_unknown_type_raises(self):
class UnknownType(object):
pass
self.assertRaises(TypeError, E.b, UnknownType())
def test_cdata(self):
wrapped = E.b(etree.CDATA('Hello'))
self.assertEqual(_bytes('<b><![CDATA[Hello]]></b>'), etree.tostring(wrapped))
def test_cdata_solo(self):
self.assertRaises(ValueError, E.b, 'Hello', etree.CDATA('World'))
def test_suite():
suite = unittest.TestSuite()
suite.addTests([unittest.makeSuite(BuilderTestCase)])
return suite
if __name__ == '__main__':
print('to test use test.py %s' % __file__)
|
import re
import sys
import subprocess
import unittest.mock
import pytest
from qutebrowser.misc import checkpyver
TEXT = (r"At least Python 3.6 is required to run qutebrowser, but it's "
r"running with \d+\.\d+\.\d+.")
@pytest.mark.not_frozen
@pytest.mark.parametrize('python', ['python2', 'python3.5'])
def test_old_python(python):
"""Run checkpyver with old python versions."""
try:
proc = subprocess.run(
[python, checkpyver.__file__, '--no-err-windows'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
check=False)
except FileNotFoundError:
pytest.skip(f"{python} not found")
assert not proc.stdout
stderr = proc.stderr.decode('utf-8').rstrip()
assert re.fullmatch(TEXT, stderr), stderr
assert proc.returncode == 1
def test_normal(capfd):
checkpyver.check_python_version()
out, err = capfd.readouterr()
assert not out
assert not err
def test_patched_no_errwindow(capfd, monkeypatch):
"""Test with a patched sys.hexversion and --no-err-windows."""
monkeypatch.setattr(checkpyver.sys, 'argv',
[sys.argv[0], '--no-err-windows'])
monkeypatch.setattr(checkpyver.sys, 'hexversion', 0x03040000)
monkeypatch.setattr(checkpyver.sys, 'exit', lambda status: None)
checkpyver.check_python_version()
stdout, stderr = capfd.readouterr()
stderr = stderr.rstrip()
assert not stdout
assert re.fullmatch(TEXT, stderr), stderr
def test_patched_errwindow(capfd, mocker, monkeypatch):
"""Test with a patched sys.hexversion and a fake Tk."""
monkeypatch.setattr(checkpyver.sys, 'hexversion', 0x03040000)
monkeypatch.setattr(checkpyver.sys, 'exit', lambda status: None)
try:
import tkinter # pylint: disable=unused-import
except ImportError:
tk_mock = mocker.patch('qutebrowser.misc.checkpyver.Tk',
spec=['withdraw'], new_callable=mocker.Mock)
msgbox_mock = mocker.patch('qutebrowser.misc.checkpyver.messagebox',
spec=['showerror'])
else:
tk_mock = mocker.patch('qutebrowser.misc.checkpyver.Tk', autospec=True)
msgbox_mock = mocker.patch('qutebrowser.misc.checkpyver.messagebox',
autospec=True)
checkpyver.check_python_version()
stdout, stderr = capfd.readouterr()
assert not stdout
assert not stderr
tk_mock.assert_called_with()
tk_mock().withdraw.assert_called_with()
msgbox_mock.showerror.assert_called_with("qutebrowser: Fatal error!",
unittest.mock.ANY)
|
import unittest
import mock
from kazoo.client import KazooState
from paasta_tools.deployd.leader import PaastaLeaderElection
class TestPaastaLeaderElection(unittest.TestCase):
def setUp(self):
with mock.patch(
"paasta_tools.deployd.leader.Election.__init__", autospec=False
):
self.mock_client = mock.Mock()
self.mock_control = mock.Mock()
self.election = PaastaLeaderElection(
self.mock_client, control=self.mock_control
)
def test_init(self):
assert not self.election.waiting_for_reconnect
def test_log(self):
self.election.log.info("THING")
def test_run(self):
mock_fun = mock.Mock()
mock_arg = mock.Mock()
with mock.patch(
"paasta_tools.deployd.leader.Election.run", autospec=True
) as mock_kazoo_election:
self.election.run(mock_fun, mock_arg)
mock_kazoo_election.assert_called_with(self.election, mock_fun, mock_arg)
def test_connection_listener(self):
with mock.patch(
"paasta_tools.deployd.leader.PaastaThread", autospec=True
) as mock_paasta_thread:
self.election.connection_listener(KazooState.CONNECTED)
self.election.connection_listener(KazooState.SUSPENDED)
mock_paasta_thread.assert_called_with(
target=self.election.reconnection_listener
)
assert self.election.waiting_for_reconnect
self.election.connection_listener(KazooState.LOST)
self.mock_control.put.assert_called_with("ABORT")
def test_reconnection_listener(self):
self.mock_client.state = KazooState.CONNECTED
self.election.reconnection_listener()
assert not self.election.waiting_for_reconnect
assert not self.mock_control.put.called
self.mock_client.state = KazooState.SUSPENDED
self.election.waiting_for_reconnect = True
with mock.patch("time.sleep", autospec=True):
self.election.reconnection_listener()
assert self.election.waiting_for_reconnect
self.mock_control.put.assert_called_with("ABORT")
|
import asyncio
import logging
from aiohttp import web
from aiohttp.web_exceptions import HTTPServiceUnavailable
from homeassistant.components.http import HomeAssistantView
from homeassistant.const import EVENT_HOMEASSISTANT_START
from homeassistant.core import HomeAssistant, callback
from .const import (
ATTR_ADDON,
ATTR_CONFIG,
ATTR_DISCOVERY,
ATTR_NAME,
ATTR_SERVICE,
ATTR_UUID,
)
from .handler import HassioAPIError
_LOGGER = logging.getLogger(__name__)
@callback
def async_setup_discovery_view(hass: HomeAssistant, hassio):
"""Discovery setup."""
hassio_discovery = HassIODiscovery(hass, hassio)
hass.http.register_view(hassio_discovery)
    # Handle existing discovery messages
async def _async_discovery_start_handler(event):
"""Process all exists discovery on startup."""
try:
data = await hassio.retrieve_discovery_messages()
except HassioAPIError as err:
_LOGGER.error("Can't read discover info: %s", err)
return
jobs = [
hassio_discovery.async_process_new(discovery)
for discovery in data[ATTR_DISCOVERY]
]
if jobs:
await asyncio.wait(jobs)
hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_START, _async_discovery_start_handler
)
class HassIODiscovery(HomeAssistantView):
"""Hass.io view to handle base part."""
name = "api:hassio_push:discovery"
url = "/api/hassio_push/discovery/{uuid}"
    def __init__(self, hass: HomeAssistant, hassio):
"""Initialize WebView."""
self.hass = hass
self.hassio = hassio
async def post(self, request, uuid):
"""Handle new discovery requests."""
# Fetch discovery data and prevent injections
try:
data = await self.hassio.get_discovery_message(uuid)
except HassioAPIError as err:
_LOGGER.error("Can't read discovery data: %s", err)
raise HTTPServiceUnavailable() from None
await self.async_process_new(data)
return web.Response()
async def delete(self, request, uuid):
"""Handle remove discovery requests."""
data = await request.json()
await self.async_process_del(data)
return web.Response()
async def async_process_new(self, data):
"""Process add discovery entry."""
service = data[ATTR_SERVICE]
config_data = data[ATTR_CONFIG]
# Read additional Add-on info
try:
addon_info = await self.hassio.get_addon_info(data[ATTR_ADDON])
except HassioAPIError as err:
_LOGGER.error("Can't read add-on info: %s", err)
return
config_data[ATTR_ADDON] = addon_info[ATTR_NAME]
# Use config flow
await self.hass.config_entries.flow.async_init(
service, context={"source": "hassio"}, data=config_data
)
async def async_process_del(self, data):
"""Process remove discovery entry."""
service = data[ATTR_SERVICE]
uuid = data[ATTR_UUID]
        # Check if the message was really deleted / prevent injections
try:
data = await self.hassio.get_discovery_message(uuid)
except HassioAPIError:
pass
else:
_LOGGER.warning("Retrieve wrong unload for %s", service)
return
# Use config flow
for entry in self.hass.config_entries.async_entries(service):
if entry.source != "hassio":
continue
await self.hass.config_entries.async_remove(entry)
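# Hedged data sketch (illustrative only): async_process_new() above expects a
# discovery message roughly shaped like the dict below. Only the keys
# (service, config, addon, uuid) follow from the handler code; every value is
# a placeholder assumption.
#
#     {
#         "service": "mqtt",
#         "config": {"host": "addon-host", "port": 1883},
#         "addon": "core_mosquitto",
#         "uuid": "abc123",
#     }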
|
from simplepush import send, send_encrypted
import voluptuous as vol
from homeassistant.components.notify import (
ATTR_TITLE,
ATTR_TITLE_DEFAULT,
PLATFORM_SCHEMA,
BaseNotificationService,
)
from homeassistant.const import CONF_PASSWORD
import homeassistant.helpers.config_validation as cv
ATTR_ENCRYPTED = "encrypted"
CONF_DEVICE_KEY = "device_key"
CONF_EVENT = "event"
CONF_SALT = "salt"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_DEVICE_KEY): cv.string,
vol.Optional(CONF_EVENT): cv.string,
vol.Inclusive(CONF_PASSWORD, ATTR_ENCRYPTED): cv.string,
vol.Inclusive(CONF_SALT, ATTR_ENCRYPTED): cv.string,
}
)
def get_service(hass, config, discovery_info=None):
"""Get the Simplepush notification service."""
return SimplePushNotificationService(config)
class SimplePushNotificationService(BaseNotificationService):
"""Implementation of the notification service for Simplepush."""
def __init__(self, config):
"""Initialize the Simplepush notification service."""
self._device_key = config.get(CONF_DEVICE_KEY)
self._event = config.get(CONF_EVENT)
self._password = config.get(CONF_PASSWORD)
self._salt = config.get(CONF_SALT)
def send_message(self, message="", **kwargs):
"""Send a message to a Simplepush user."""
title = kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT)
if self._password:
send_encrypted(
self._device_key,
self._password,
self._salt,
title,
message,
event=self._event,
)
else:
send(self._device_key, title, message, event=self._event)
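# Hedged configuration sketch (illustrative only): a configuration.yaml entry
# matching PLATFORM_SCHEMA above. All values are placeholders; password and
# salt must be supplied together (vol.Inclusive) to enable encrypted sends.
#
#     notify:
#       - platform: simplepush
#         device_key: YOUR_DEVICE_KEY
#         event: alarm
#         password: YOUR_PASSWORD
#         salt: YOUR_SALT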
|
from homeassistant.components import geo_location
from homeassistant.components.geo_json_events.geo_location import (
ATTR_EXTERNAL_ID,
SCAN_INTERVAL,
)
from homeassistant.components.geo_location import ATTR_SOURCE
from homeassistant.const import (
ATTR_FRIENDLY_NAME,
ATTR_LATITUDE,
ATTR_LONGITUDE,
ATTR_UNIT_OF_MEASUREMENT,
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_RADIUS,
CONF_URL,
EVENT_HOMEASSISTANT_START,
LENGTH_KILOMETERS,
)
from homeassistant.helpers.dispatcher import DATA_DISPATCHER
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.async_mock import MagicMock, call, patch
from tests.common import assert_setup_component, async_fire_time_changed
URL = "http://geo.json.local/geo_json_events.json"
CONFIG = {
geo_location.DOMAIN: [
{"platform": "geo_json_events", CONF_URL: URL, CONF_RADIUS: 200}
]
}
CONFIG_WITH_CUSTOM_LOCATION = {
geo_location.DOMAIN: [
{
"platform": "geo_json_events",
CONF_URL: URL,
CONF_RADIUS: 200,
CONF_LATITUDE: 15.1,
CONF_LONGITUDE: 25.2,
}
]
}
def _generate_mock_feed_entry(external_id, title, distance_to_home, coordinates):
"""Construct a mock feed entry for testing purposes."""
feed_entry = MagicMock()
feed_entry.external_id = external_id
feed_entry.title = title
feed_entry.distance_to_home = distance_to_home
feed_entry.coordinates = coordinates
return feed_entry
async def test_setup(hass, legacy_patchable_time):
"""Test the general setup of the platform."""
# Set up some mock feed entries for this test.
mock_entry_1 = _generate_mock_feed_entry("1234", "Title 1", 15.5, (-31.0, 150.0))
mock_entry_2 = _generate_mock_feed_entry("2345", "Title 2", 20.5, (-31.1, 150.1))
mock_entry_3 = _generate_mock_feed_entry("3456", "Title 3", 25.5, (-31.2, 150.2))
mock_entry_4 = _generate_mock_feed_entry("4567", "Title 4", 12.5, (-31.3, 150.3))
# Patching 'utcnow' to gain more control over the timed update.
utcnow = dt_util.utcnow()
with patch("homeassistant.util.dt.utcnow", return_value=utcnow), patch(
"geojson_client.generic_feed.GenericFeed"
) as mock_feed:
mock_feed.return_value.update.return_value = (
"OK",
[mock_entry_1, mock_entry_2, mock_entry_3],
)
with assert_setup_component(1, geo_location.DOMAIN):
assert await async_setup_component(hass, geo_location.DOMAIN, CONFIG)
await hass.async_block_till_done()
# Artificially trigger update.
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
# Collect events.
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 3
state = hass.states.get("geo_location.title_1")
assert state is not None
assert state.name == "Title 1"
assert state.attributes == {
ATTR_EXTERNAL_ID: "1234",
ATTR_LATITUDE: -31.0,
ATTR_LONGITUDE: 150.0,
ATTR_FRIENDLY_NAME: "Title 1",
ATTR_UNIT_OF_MEASUREMENT: LENGTH_KILOMETERS,
ATTR_SOURCE: "geo_json_events",
}
assert round(abs(float(state.state) - 15.5), 7) == 0
state = hass.states.get("geo_location.title_2")
assert state is not None
assert state.name == "Title 2"
assert state.attributes == {
ATTR_EXTERNAL_ID: "2345",
ATTR_LATITUDE: -31.1,
ATTR_LONGITUDE: 150.1,
ATTR_FRIENDLY_NAME: "Title 2",
ATTR_UNIT_OF_MEASUREMENT: LENGTH_KILOMETERS,
ATTR_SOURCE: "geo_json_events",
}
assert round(abs(float(state.state) - 20.5), 7) == 0
state = hass.states.get("geo_location.title_3")
assert state is not None
assert state.name == "Title 3"
assert state.attributes == {
ATTR_EXTERNAL_ID: "3456",
ATTR_LATITUDE: -31.2,
ATTR_LONGITUDE: 150.2,
ATTR_FRIENDLY_NAME: "Title 3",
ATTR_UNIT_OF_MEASUREMENT: LENGTH_KILOMETERS,
ATTR_SOURCE: "geo_json_events",
}
assert round(abs(float(state.state) - 25.5), 7) == 0
# Simulate an update - one existing, one new entry,
# one outdated entry
mock_feed.return_value.update.return_value = (
"OK",
[mock_entry_1, mock_entry_4, mock_entry_3],
)
async_fire_time_changed(hass, utcnow + SCAN_INTERVAL)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 3
# Simulate an update - empty data, but successful update,
# so no changes to entities.
mock_feed.return_value.update.return_value = "OK_NO_DATA", None
async_fire_time_changed(hass, utcnow + 2 * SCAN_INTERVAL)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 3
# Simulate an update - empty data, removes all entities
mock_feed.return_value.update.return_value = "ERROR", None
async_fire_time_changed(hass, utcnow + 3 * SCAN_INTERVAL)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 0
async def test_setup_with_custom_location(hass):
"""Test the setup with a custom location."""
# Set up some mock feed entries for this test.
mock_entry_1 = _generate_mock_feed_entry("1234", "Title 1", 2000.5, (-31.1, 150.1))
with patch("geojson_client.generic_feed.GenericFeed") as mock_feed:
mock_feed.return_value.update.return_value = "OK", [mock_entry_1]
with assert_setup_component(1, geo_location.DOMAIN):
assert await async_setup_component(
hass, geo_location.DOMAIN, CONFIG_WITH_CUSTOM_LOCATION
)
await hass.async_block_till_done()
# Artificially trigger update.
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
# Collect events.
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 1
assert mock_feed.call_args == call((15.1, 25.2), URL, filter_radius=200.0)
async def test_setup_race_condition(hass, legacy_patchable_time):
"""Test a particular race condition experienced."""
# 1. Feed returns 1 entry -> Feed manager creates 1 entity.
# 2. Feed returns error -> Feed manager removes 1 entity.
    # However, the removed entity stayed subscribed and kept listening for dispatcher signals.
# 3. Feed returns 1 entry -> Feed manager creates 1 entity.
# 4. Feed returns 1 entry -> Feed manager updates 1 entity.
# Internally, the previous entity is updating itself, too.
# 5. Feed returns error -> Feed manager removes 1 entity.
# There are now 2 entities trying to remove themselves from HA, but
# the second attempt fails of course.
# Set up some mock feed entries for this test.
mock_entry_1 = _generate_mock_feed_entry("1234", "Title 1", 15.5, (-31.0, 150.0))
delete_signal = "geo_json_events_delete_1234"
update_signal = "geo_json_events_update_1234"
# Patching 'utcnow' to gain more control over the timed update.
utcnow = dt_util.utcnow()
with patch("homeassistant.util.dt.utcnow", return_value=utcnow), patch(
"geojson_client.generic_feed.GenericFeed"
) as mock_feed:
with assert_setup_component(1, geo_location.DOMAIN):
assert await async_setup_component(hass, geo_location.DOMAIN, CONFIG)
await hass.async_block_till_done()
mock_feed.return_value.update.return_value = "OK", [mock_entry_1]
# Artificially trigger update.
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
# Collect events.
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 1
assert len(hass.data[DATA_DISPATCHER][delete_signal]) == 1
assert len(hass.data[DATA_DISPATCHER][update_signal]) == 1
# Simulate an update - empty data, removes all entities
mock_feed.return_value.update.return_value = "ERROR", None
async_fire_time_changed(hass, utcnow + SCAN_INTERVAL)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 0
assert len(hass.data[DATA_DISPATCHER][delete_signal]) == 0
assert len(hass.data[DATA_DISPATCHER][update_signal]) == 0
# Simulate an update - 1 entry
mock_feed.return_value.update.return_value = "OK", [mock_entry_1]
async_fire_time_changed(hass, utcnow + 2 * SCAN_INTERVAL)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 1
assert len(hass.data[DATA_DISPATCHER][delete_signal]) == 1
assert len(hass.data[DATA_DISPATCHER][update_signal]) == 1
# Simulate an update - 1 entry
mock_feed.return_value.update.return_value = "OK", [mock_entry_1]
async_fire_time_changed(hass, utcnow + 3 * SCAN_INTERVAL)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 1
assert len(hass.data[DATA_DISPATCHER][delete_signal]) == 1
assert len(hass.data[DATA_DISPATCHER][update_signal]) == 1
# Simulate an update - empty data, removes all entities
mock_feed.return_value.update.return_value = "ERROR", None
async_fire_time_changed(hass, utcnow + 4 * SCAN_INTERVAL)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 0
# Ensure that delete and update signal targets are now empty.
assert len(hass.data[DATA_DISPATCHER][delete_signal]) == 0
assert len(hass.data[DATA_DISPATCHER][update_signal]) == 0
|
import zeroconf
from homeassistant.components.zeroconf import async_get_instance
from homeassistant.components.zeroconf.usage import install_multiple_zeroconf_catcher
from homeassistant.setup import async_setup_component
from tests.async_mock import Mock, patch
DOMAIN = "zeroconf"
async def test_multiple_zeroconf_instances(hass, mock_zeroconf, caplog):
"""Test creating multiple zeroconf throws without an integration."""
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}})
zeroconf_instance = await async_get_instance(hass)
install_multiple_zeroconf_catcher(zeroconf_instance)
new_zeroconf_instance = zeroconf.Zeroconf()
assert new_zeroconf_instance == zeroconf_instance
assert "Zeroconf" in caplog.text
async def test_multiple_zeroconf_instances_gives_shared(hass, mock_zeroconf, caplog):
"""Test creating multiple zeroconf gives the shared instance to an integration."""
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}})
zeroconf_instance = await async_get_instance(hass)
install_multiple_zeroconf_catcher(zeroconf_instance)
correct_frame = Mock(
filename="/config/custom_components/burncpu/light.py",
lineno="23",
line="self.light.is_on",
)
with patch(
"homeassistant.helpers.frame.extract_stack",
return_value=[
Mock(
filename="/home/dev/homeassistant/core.py",
lineno="23",
line="do_something()",
),
correct_frame,
Mock(
filename="/home/dev/homeassistant/components/zeroconf/usage.py",
lineno="23",
line="self.light.is_on",
),
Mock(
filename="/home/dev/mdns/lights.py",
lineno="2",
line="something()",
),
],
):
assert zeroconf.Zeroconf() == zeroconf_instance
assert "custom_components/burncpu/light.py" in caplog.text
assert "23" in caplog.text
assert "self.light.is_on" in caplog.text
|
import logging
import voluptuous as vol
from xknx import XKNX
from xknx.devices import DateTime, ExposeSensor
from xknx.dpt import DPTArray, DPTBase, DPTBinary
from xknx.exceptions import XKNXException
from xknx.io import (
DEFAULT_MCAST_GRP,
DEFAULT_MCAST_PORT,
ConnectionConfig,
ConnectionType,
)
from xknx.telegram import AddressFilter, GroupAddress, Telegram
from homeassistant.const import (
CONF_ENTITY_ID,
CONF_HOST,
CONF_PORT,
EVENT_HOMEASSISTANT_STOP,
STATE_OFF,
STATE_ON,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
)
from homeassistant.core import callback
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import async_track_state_change_event
from .const import DOMAIN, SupportedPlatforms
from .factory import create_knx_device
from .schema import (
BinarySensorSchema,
ClimateSchema,
ConnectionSchema,
CoverSchema,
ExposeSchema,
LightSchema,
NotifySchema,
SceneSchema,
SensorSchema,
SwitchSchema,
WeatherSchema,
)
_LOGGER = logging.getLogger(__name__)
CONF_KNX_CONFIG = "config_file"
CONF_KNX_ROUTING = "routing"
CONF_KNX_TUNNELING = "tunneling"
CONF_KNX_FIRE_EVENT = "fire_event"
CONF_KNX_FIRE_EVENT_FILTER = "fire_event_filter"
CONF_KNX_INDIVIDUAL_ADDRESS = "individual_address"
CONF_KNX_MCAST_GRP = "multicast_group"
CONF_KNX_MCAST_PORT = "multicast_port"
CONF_KNX_STATE_UPDATER = "state_updater"
CONF_KNX_RATE_LIMIT = "rate_limit"
CONF_KNX_EXPOSE = "expose"
SERVICE_KNX_SEND = "send"
SERVICE_KNX_ATTR_ADDRESS = "address"
SERVICE_KNX_ATTR_PAYLOAD = "payload"
SERVICE_KNX_ATTR_TYPE = "type"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Optional(CONF_KNX_CONFIG): cv.string,
vol.Exclusive(
CONF_KNX_ROUTING, "connection_type"
): ConnectionSchema.ROUTING_SCHEMA,
vol.Exclusive(
CONF_KNX_TUNNELING, "connection_type"
): ConnectionSchema.TUNNELING_SCHEMA,
vol.Inclusive(CONF_KNX_FIRE_EVENT, "fire_ev"): cv.boolean,
vol.Inclusive(CONF_KNX_FIRE_EVENT_FILTER, "fire_ev"): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(
CONF_KNX_INDIVIDUAL_ADDRESS, default=XKNX.DEFAULT_ADDRESS
): cv.string,
vol.Optional(CONF_KNX_MCAST_GRP, default=DEFAULT_MCAST_GRP): cv.string,
vol.Optional(CONF_KNX_MCAST_PORT, default=DEFAULT_MCAST_PORT): cv.port,
vol.Optional(CONF_KNX_STATE_UPDATER, default=True): cv.boolean,
vol.Optional(CONF_KNX_RATE_LIMIT, default=20): vol.All(
vol.Coerce(int), vol.Range(min=1, max=100)
),
vol.Optional(CONF_KNX_EXPOSE): vol.All(
cv.ensure_list, [ExposeSchema.SCHEMA]
),
vol.Optional(SupportedPlatforms.cover.value): vol.All(
cv.ensure_list, [CoverSchema.SCHEMA]
),
vol.Optional(SupportedPlatforms.binary_sensor.value): vol.All(
cv.ensure_list, [BinarySensorSchema.SCHEMA]
),
vol.Optional(SupportedPlatforms.light.value): vol.All(
cv.ensure_list, [LightSchema.SCHEMA]
),
vol.Optional(SupportedPlatforms.climate.value): vol.All(
cv.ensure_list, [ClimateSchema.SCHEMA]
),
vol.Optional(SupportedPlatforms.notify.value): vol.All(
cv.ensure_list, [NotifySchema.SCHEMA]
),
vol.Optional(SupportedPlatforms.switch.value): vol.All(
cv.ensure_list, [SwitchSchema.SCHEMA]
),
vol.Optional(SupportedPlatforms.sensor.value): vol.All(
cv.ensure_list, [SensorSchema.SCHEMA]
),
vol.Optional(SupportedPlatforms.scene.value): vol.All(
cv.ensure_list, [SceneSchema.SCHEMA]
),
vol.Optional(SupportedPlatforms.weather.value): vol.All(
cv.ensure_list, [WeatherSchema.SCHEMA]
),
}
)
},
extra=vol.ALLOW_EXTRA,
)
SERVICE_KNX_SEND_SCHEMA = vol.Schema(
{
vol.Required(SERVICE_KNX_ATTR_ADDRESS): cv.string,
vol.Required(SERVICE_KNX_ATTR_PAYLOAD): vol.Any(
cv.positive_int, [cv.positive_int]
),
vol.Optional(SERVICE_KNX_ATTR_TYPE): vol.Any(int, float, str),
}
)
async def async_setup(hass, config):
"""Set up the KNX component."""
try:
hass.data[DOMAIN] = KNXModule(hass, config)
hass.data[DOMAIN].async_create_exposures()
await hass.data[DOMAIN].start()
except XKNXException as ex:
_LOGGER.warning("Could not connect to KNX interface: %s", ex)
hass.components.persistent_notification.async_create(
f"Could not connect to KNX interface: <br><b>{ex}</b>", title="KNX"
)
for platform in SupportedPlatforms:
if platform.value in config[DOMAIN]:
for device_config in config[DOMAIN][platform.value]:
create_knx_device(platform, hass.data[DOMAIN].xknx, device_config)
    # We need to wait until all entities are loaded into the device list
    # since they could also be created from other platforms.
for platform in SupportedPlatforms:
hass.async_create_task(
discovery.async_load_platform(hass, platform.value, DOMAIN, {}, config)
)
if not hass.data[DOMAIN].xknx.devices:
_LOGGER.warning(
"No KNX devices are configured. Please read "
"https://www.home-assistant.io/blog/2020/09/17/release-115/#breaking-changes"
)
hass.services.async_register(
DOMAIN,
SERVICE_KNX_SEND,
hass.data[DOMAIN].service_send_to_knx_bus,
schema=SERVICE_KNX_SEND_SCHEMA,
)
return True
class KNXModule:
"""Representation of KNX Object."""
def __init__(self, hass, config):
"""Initialize of KNX module."""
self.hass = hass
self.config = config
self.connected = False
self.init_xknx()
self.register_callbacks()
self.exposures = []
def init_xknx(self):
"""Initialize of KNX object."""
self.xknx = XKNX(
config=self.config_file(),
own_address=self.config[DOMAIN][CONF_KNX_INDIVIDUAL_ADDRESS],
rate_limit=self.config[DOMAIN][CONF_KNX_RATE_LIMIT],
multicast_group=self.config[DOMAIN][CONF_KNX_MCAST_GRP],
multicast_port=self.config[DOMAIN][CONF_KNX_MCAST_PORT],
connection_config=self.connection_config(),
state_updater=self.config[DOMAIN][CONF_KNX_STATE_UPDATER],
)
async def start(self):
"""Start KNX object. Connect to tunneling or Routing device."""
await self.xknx.start()
self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, self.stop)
self.connected = True
async def stop(self, event):
"""Stop KNX object. Disconnect from tunneling or Routing device."""
await self.xknx.stop()
def config_file(self):
"""Resolve and return the full path of xknx.yaml if configured."""
config_file = self.config[DOMAIN].get(CONF_KNX_CONFIG)
if not config_file:
return None
if not config_file.startswith("/"):
return self.hass.config.path(config_file)
return config_file
def connection_config(self):
"""Return the connection_config."""
if CONF_KNX_TUNNELING in self.config[DOMAIN]:
return self.connection_config_tunneling()
if CONF_KNX_ROUTING in self.config[DOMAIN]:
return self.connection_config_routing()
# config from xknx.yaml always has priority later on
return ConnectionConfig()
def connection_config_routing(self):
"""Return the connection_config if routing is configured."""
local_ip = self.config[DOMAIN][CONF_KNX_ROUTING].get(
ConnectionSchema.CONF_KNX_LOCAL_IP
)
return ConnectionConfig(
connection_type=ConnectionType.ROUTING, local_ip=local_ip
)
def connection_config_tunneling(self):
"""Return the connection_config if tunneling is configured."""
gateway_ip = self.config[DOMAIN][CONF_KNX_TUNNELING][CONF_HOST]
gateway_port = self.config[DOMAIN][CONF_KNX_TUNNELING][CONF_PORT]
local_ip = self.config[DOMAIN][CONF_KNX_TUNNELING].get(
ConnectionSchema.CONF_KNX_LOCAL_IP
)
return ConnectionConfig(
connection_type=ConnectionType.TUNNELING,
gateway_ip=gateway_ip,
gateway_port=gateway_port,
local_ip=local_ip,
auto_reconnect=True,
)
def register_callbacks(self):
"""Register callbacks within XKNX object."""
if (
CONF_KNX_FIRE_EVENT in self.config[DOMAIN]
and self.config[DOMAIN][CONF_KNX_FIRE_EVENT]
):
address_filters = list(
map(AddressFilter, self.config[DOMAIN][CONF_KNX_FIRE_EVENT_FILTER])
)
self.xknx.telegram_queue.register_telegram_received_cb(
self.telegram_received_cb, address_filters
)
@callback
def async_create_exposures(self):
"""Create exposures."""
if CONF_KNX_EXPOSE not in self.config[DOMAIN]:
return
for to_expose in self.config[DOMAIN][CONF_KNX_EXPOSE]:
expose_type = to_expose.get(ExposeSchema.CONF_KNX_EXPOSE_TYPE)
entity_id = to_expose.get(CONF_ENTITY_ID)
attribute = to_expose.get(ExposeSchema.CONF_KNX_EXPOSE_ATTRIBUTE)
default = to_expose.get(ExposeSchema.CONF_KNX_EXPOSE_DEFAULT)
address = to_expose.get(ExposeSchema.CONF_KNX_EXPOSE_ADDRESS)
if expose_type.lower() in ["time", "date", "datetime"]:
exposure = KNXExposeTime(self.xknx, expose_type, address)
exposure.async_register()
self.exposures.append(exposure)
else:
exposure = KNXExposeSensor(
self.hass,
self.xknx,
expose_type,
entity_id,
attribute,
default,
address,
)
exposure.async_register()
self.exposures.append(exposure)
async def telegram_received_cb(self, telegram):
"""Call invoked after a KNX telegram was received."""
self.hass.bus.async_fire(
"knx_event",
{"address": str(telegram.group_address), "data": telegram.payload.value},
)
# False signals XKNX to proceed with processing telegrams.
return False
async def service_send_to_knx_bus(self, call):
"""Service for sending an arbitrary KNX message to the KNX bus."""
attr_payload = call.data.get(SERVICE_KNX_ATTR_PAYLOAD)
attr_address = call.data.get(SERVICE_KNX_ATTR_ADDRESS)
attr_type = call.data.get(SERVICE_KNX_ATTR_TYPE)
def calculate_payload(attr_payload):
"""Calculate payload depending on type of attribute."""
if attr_type is not None:
transcoder = DPTBase.parse_transcoder(attr_type)
if transcoder is None:
raise ValueError(f"Invalid type for knx.send service: {attr_type}")
return DPTArray(transcoder.to_knx(attr_payload))
if isinstance(attr_payload, int):
return DPTBinary(attr_payload)
return DPTArray(attr_payload)
payload = calculate_payload(attr_payload)
address = GroupAddress(attr_address)
telegram = Telegram(group_address=address, payload=payload)
await self.xknx.telegrams.put(telegram)
class KNXExposeTime:
"""Object to Expose Time/Date object to KNX bus."""
def __init__(self, xknx: XKNX, expose_type: str, address: str):
"""Initialize of Expose class."""
self.xknx = xknx
self.expose_type = expose_type
self.address = address
self.device = None
@callback
def async_register(self):
"""Register listener."""
self.device = DateTime(
self.xknx,
name=self.expose_type.capitalize(),
broadcast_type=self.expose_type.upper(),
localtime=True,
group_address=self.address,
)
class KNXExposeSensor:
"""Object to Expose Home Assistant entity to KNX bus."""
def __init__(self, hass, xknx, expose_type, entity_id, attribute, default, address):
"""Initialize of Expose class."""
self.hass = hass
self.xknx = xknx
self.type = expose_type
self.entity_id = entity_id
self.expose_attribute = attribute
self.expose_default = default
self.address = address
self.device = None
@callback
def async_register(self):
"""Register listener."""
if self.expose_attribute is not None:
_name = self.entity_id + "__" + self.expose_attribute
else:
_name = self.entity_id
self.device = ExposeSensor(
self.xknx,
name=_name,
group_address=self.address,
value_type=self.type,
)
async_track_state_change_event(
self.hass, [self.entity_id], self._async_entity_changed
)
async def _async_entity_changed(self, event):
"""Handle entity change."""
new_state = event.data.get("new_state")
if new_state is None:
return
if new_state.state in (STATE_UNKNOWN, STATE_UNAVAILABLE):
return
if self.expose_attribute is not None:
new_attribute = new_state.attributes.get(self.expose_attribute)
old_state = event.data.get("old_state")
if old_state is not None:
old_attribute = old_state.attributes.get(self.expose_attribute)
if old_attribute == new_attribute:
# don't send same value sequentially
return
await self._async_set_knx_value(new_attribute)
else:
await self._async_set_knx_value(new_state.state)
async def _async_set_knx_value(self, value):
"""Set new value on xknx ExposeSensor."""
if value is None:
if self.expose_default is None:
return
value = self.expose_default
if self.type == "binary":
if value == STATE_ON:
value = True
elif value == STATE_OFF:
value = False
await self.device.set(value)
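# Hedged usage sketch (illustrative only): service data accepted by the
# "knx.send" service registered in async_setup(), per SERVICE_KNX_SEND_SCHEMA
# (payload must be a positive int or a list of positive ints). The group
# addresses and the "temperature" type are placeholder assumptions.
#
#     # switch a group address on (sent as DPTBinary):
#     service: knx.send
#     data: {address: "1/0/15", payload: 1}
#
#     # send a value through a DPT transcoder:
#     service: knx.send
#     data: {address: "5/1/10", payload: 21, type: temperature}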
|
from homeassistant.components.brother.const import DOMAIN
from homeassistant.config_entries import (
ENTRY_STATE_LOADED,
ENTRY_STATE_NOT_LOADED,
ENTRY_STATE_SETUP_RETRY,
)
from homeassistant.const import CONF_HOST, CONF_TYPE, STATE_UNAVAILABLE
from tests.async_mock import patch
from tests.common import MockConfigEntry
from tests.components.brother import init_integration
async def test_async_setup_entry(hass):
"""Test a successful setup entry."""
await init_integration(hass)
state = hass.states.get("sensor.hl_l2340dw_status")
assert state is not None
assert state.state != STATE_UNAVAILABLE
assert state.state == "waiting"
async def test_config_not_ready(hass):
"""Test for setup failure if connection to broker is missing."""
entry = MockConfigEntry(
domain=DOMAIN,
title="HL-L2340DW 0123456789",
unique_id="0123456789",
data={CONF_HOST: "localhost", CONF_TYPE: "laser"},
)
with patch("brother.Brother._get_data", side_effect=ConnectionError()):
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
assert entry.state == ENTRY_STATE_SETUP_RETRY
async def test_unload_entry(hass):
"""Test successful unload of entry."""
entry = await init_integration(hass)
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
assert entry.state == ENTRY_STATE_LOADED
assert await hass.config_entries.async_unload(entry.entry_id)
await hass.async_block_till_done()
assert entry.state == ENTRY_STATE_NOT_LOADED
assert not hass.data.get(DOMAIN)
|
import os
from django.core.management import call_command
from django.test import TestCase
from weblate.accounts.models import Profile
from weblate.auth.models import User
from weblate.lang.models import Language
from weblate.trans.models import Project
from weblate.trans.tests.utils import TempDirMixin, get_test_file
USERDATA_JSON = get_test_file("userdata.json")
class CommandTest(TestCase, TempDirMixin):
"""Test for management commands."""
def test_userdata(self):
# Create test user
language = Language.objects.get(code="cs")
user = User.objects.create_user("testuser", "[email protected]", "x")
user.profile.translated = 1000
user.profile.languages.add(language)
user.profile.secondary_languages.add(language)
user.profile.save()
user.profile.watched.add(Project.objects.create(name="name", slug="name"))
try:
self.create_temp()
output = os.path.join(self.tempdir, "users.json")
call_command("dumpuserdata", output)
user.profile.languages.clear()
user.profile.secondary_languages.clear()
call_command("importuserdata", output)
finally:
self.remove_temp()
profile = Profile.objects.get(user__username="testuser")
self.assertEqual(profile.translated, 2000)
self.assertTrue(profile.languages.filter(code="cs").exists())
self.assertTrue(profile.secondary_languages.filter(code="cs").exists())
self.assertTrue(profile.watched.exists())
def test_userdata_compat(self):
"""Test importing user data from pre 3.6 release."""
User.objects.create_user("test-3.6", "[email protected]", "x")
Project.objects.create(name="test", slug="test")
call_command("importuserdata", USERDATA_JSON)
profile = Profile.objects.get(user__username="test-3.6")
self.assertTrue(profile.languages.filter(code="cs").exists())
self.assertTrue(profile.secondary_languages.filter(code="cs").exists())
self.assertTrue(profile.watched.exists())
|
from typing import Callable
from pyisy.constants import ISY_VALUE_UNKNOWN
from homeassistant.components.cover import (
ATTR_POSITION,
DOMAIN as COVER,
SUPPORT_CLOSE,
SUPPORT_OPEN,
SUPPORT_SET_POSITION,
CoverEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.helpers.typing import HomeAssistantType
from .const import (
_LOGGER,
DOMAIN as ISY994_DOMAIN,
ISY994_NODES,
ISY994_PROGRAMS,
UOM_8_BIT_RANGE,
UOM_BARRIER,
)
from .entity import ISYNodeEntity, ISYProgramEntity
from .helpers import migrate_old_unique_ids
async def async_setup_entry(
hass: HomeAssistantType,
entry: ConfigEntry,
async_add_entities: Callable[[list], None],
) -> bool:
"""Set up the ISY994 cover platform."""
hass_isy_data = hass.data[ISY994_DOMAIN][entry.entry_id]
devices = []
for node in hass_isy_data[ISY994_NODES][COVER]:
devices.append(ISYCoverEntity(node))
for name, status, actions in hass_isy_data[ISY994_PROGRAMS][COVER]:
devices.append(ISYCoverProgramEntity(name, status, actions))
await migrate_old_unique_ids(hass, COVER, devices)
async_add_entities(devices)
class ISYCoverEntity(ISYNodeEntity, CoverEntity):
"""Representation of an ISY994 cover device."""
@property
def current_cover_position(self) -> int:
"""Return the current cover position."""
if self._node.status == ISY_VALUE_UNKNOWN:
return None
if self._node.uom == UOM_8_BIT_RANGE:
return int(self._node.status * 100 / 255)
return sorted((0, self._node.status, 100))[1]
@property
def is_closed(self) -> bool:
"""Get whether the ISY994 cover device is closed."""
if self._node.status == ISY_VALUE_UNKNOWN:
return None
return self._node.status == 0
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_OPEN | SUPPORT_CLOSE | SUPPORT_SET_POSITION
def open_cover(self, **kwargs) -> None:
"""Send the open cover command to the ISY994 cover device."""
val = 100 if self._node.uom == UOM_BARRIER else None
if not self._node.turn_on(val=val):
_LOGGER.error("Unable to open the cover")
def close_cover(self, **kwargs) -> None:
"""Send the close cover command to the ISY994 cover device."""
if not self._node.turn_off():
_LOGGER.error("Unable to close the cover")
def set_cover_position(self, **kwargs):
"""Move the cover to a specific position."""
position = kwargs[ATTR_POSITION]
if self._node.uom == UOM_8_BIT_RANGE:
position = int(position * 255 / 100)
if not self._node.turn_on(val=position):
_LOGGER.error("Unable to set cover position")
class ISYCoverProgramEntity(ISYProgramEntity, CoverEntity):
"""Representation of an ISY994 cover program."""
@property
def is_closed(self) -> bool:
"""Get whether the ISY994 cover program is closed."""
return bool(self._node.status)
def open_cover(self, **kwargs) -> None:
"""Send the open cover command to the ISY994 cover program."""
if not self._actions.run_then():
_LOGGER.error("Unable to open the cover")
def close_cover(self, **kwargs) -> None:
"""Send the close cover command to the ISY994 cover program."""
if not self._actions.run_else():
_LOGGER.error("Unable to close the cover")
|
import datetime
import os
import random
import sys
sys.path = [os.path.abspath(os.path.dirname(__file__))] + sys.path
sys.path = [os.path.abspath(os.path.dirname(os.path.dirname(__file__)))] + sys.path
os.environ['is_test_suite'] = 'True'
# os.environ['KERAS_BACKEND'] = 'theano'
from auto_ml import Predictor
from auto_ml.utils_models import load_ml_model
from nose.tools import assert_equal, assert_not_equal, with_setup
from sklearn.metrics import accuracy_score
import dill
import numpy as np
import pandas as pd
from sklearn.datasets import load_boston
from sklearn.model_selection import train_test_split
def get_boston_regression_dataset():
boston = load_boston()
df_boston = pd.DataFrame(boston.data)
df_boston.columns = boston.feature_names
df_boston['MEDV'] = boston['target']
df_boston_train, df_boston_test = train_test_split(df_boston, test_size=0.33, random_state=42)
return df_boston_train, df_boston_test
def regression_test():
# a random seed of 42 has ExtraTreesRegressor getting the best CV score, and that model doesn't generalize as well as GradientBoostingRegressor.
np.random.seed(0)
model_name = 'LGBMRegressor'
df_boston_train, df_boston_test = get_boston_regression_dataset()
many_dfs = []
for i in range(100):
many_dfs.append(df_boston_train)
df_boston_train = pd.concat(many_dfs)
column_descriptions = {
'MEDV': 'output'
, 'CHAS': 'categorical'
}
ml_predictor = Predictor(type_of_estimator='regressor', column_descriptions=column_descriptions)
ml_predictor.train(df_boston_train, model_names=[model_name], perform_feature_scaling=False)
test_score = ml_predictor.score(df_boston_test, df_boston_test.MEDV)
print('test_score')
print(test_score)
lower_bound = -3.2
if model_name == 'DeepLearningRegressor':
lower_bound = -7.8
if model_name == 'LGBMRegressor':
lower_bound = -4.95
if model_name == 'XGBRegressor':
lower_bound = -3.4
assert lower_bound < test_score < -2.8
def get_titanic_binary_classification_dataset(basic=True):
try:
df_titanic = pd.read_csv(os.path.join('tests', 'titanic.csv'))
except Exception as e:
print('Error')
print(e)
dataset_url = 'http://biostat.mc.vanderbilt.edu/wiki/pub/Main/DataSets/titanic3.csv'
df_titanic = pd.read_csv(dataset_url)
# Do not write the index that pandas automatically creates
df_titanic.to_csv(os.path.join('tests', 'titanic.csv'), index=False)
df_titanic = df_titanic.drop(['boat', 'body'], axis=1)
    if basic:
df_titanic = df_titanic.drop(['name', 'ticket', 'cabin', 'home.dest'], axis=1)
df_titanic_train, df_titanic_test = train_test_split(df_titanic, test_size=0.33, random_state=42)
return df_titanic_train, df_titanic_test
def classification_test():
np.random.seed(0)
# model_name = 'GradientBoostingClassifier'
model_name = 'LGBMClassifier'
df_titanic_train, df_titanic_test = get_titanic_binary_classification_dataset()
df_titanic_train['DELETE_THIS_FIELD'] = 1
column_descriptions = {
'survived': 'output'
, 'embarked': 'categorical'
, 'pclass': 'categorical'
, 'sex': 'categorical'
, 'this_does_not_exist': 'ignore'
, 'DELETE_THIS_FIELD': 'ignore'
}
ml_predictor = Predictor(type_of_estimator='classifier', column_descriptions=column_descriptions)
ml_predictor.train(df_titanic_train, model_names=model_name)
test_score = ml_predictor.score(df_titanic_test, df_titanic_test.survived)
print('test_score')
print(test_score)
lower_bound = -0.16
if model_name == 'DeepLearningClassifier':
lower_bound = -0.245
if model_name == 'LGBMClassifier':
lower_bound = -0.225
assert lower_bound < test_score < -0.135
if __name__ == '__main__':
classification_test()
|
from asyncio import TimeoutError as AsyncIOTimeoutError
import logging
from aiohttp import ClientError, ClientResponseError
from py_nightscout import Api as NightscoutAPI
import voluptuous as vol
from homeassistant import config_entries, exceptions
from homeassistant.const import CONF_API_KEY, CONF_URL
from .const import DOMAIN # pylint:disable=unused-import
from .utils import hash_from_url
_LOGGER = logging.getLogger(__name__)
DATA_SCHEMA = vol.Schema({vol.Required(CONF_URL): str, vol.Optional(CONF_API_KEY): str})
async def _validate_input(data):
"""Validate the user input allows us to connect."""
url = data[CONF_URL]
api_key = data.get(CONF_API_KEY)
try:
api = NightscoutAPI(url, api_secret=api_key)
status = await api.get_server_status()
if status.settings.get("authDefaultRoles") == "status-only":
await api.get_sgvs()
except ClientResponseError as error:
raise InputValidationError("invalid_auth") from error
except (ClientError, AsyncIOTimeoutError, OSError) as error:
raise InputValidationError("cannot_connect") from error
# Return info to be stored in the config entry.
return {"title": status.name}
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Nightscout."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
async def async_step_user(self, user_input=None):
"""Handle the initial step."""
errors = {}
if user_input is not None:
unique_id = hash_from_url(user_input[CONF_URL])
await self.async_set_unique_id(unique_id)
self._abort_if_unique_id_configured()
try:
info = await _validate_input(user_input)
except InputValidationError as error:
errors["base"] = error.base
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return self.async_create_entry(title=info["title"], data=user_input)
return self.async_show_form(
step_id="user", data_schema=DATA_SCHEMA, errors=errors
)
class InputValidationError(exceptions.HomeAssistantError):
"""Error to indicate we cannot proceed due to invalid input."""
def __init__(self, base: str):
"""Initialize with error base."""
super().__init__()
self.base = base
|
from flexx import flx
# Associate CodeMirror's assets with this module so that Flexx will load
# them when (things from) this module are used.
base_url = 'https://cdnjs.cloudflare.com/ajax/libs/codemirror/'
flx.assets.associate_asset(__name__, base_url + '5.21.0/codemirror.min.css')
flx.assets.associate_asset(__name__, base_url + '5.21.0/codemirror.min.js')
flx.assets.associate_asset(__name__, base_url + '5.21.0/mode/python/python.js')
flx.assets.associate_asset(__name__, base_url + '5.21.0/theme/solarized.css')
flx.assets.associate_asset(__name__, base_url + '5.21.0/addon/selection/active-line.js')
flx.assets.associate_asset(__name__, base_url + '5.21.0/addon/edit/matchbrackets.js')
class CodeEditor(flx.Widget):
""" A CodeEditor widget based on CodeMirror.
"""
CSS = """
.flx-CodeEditor > .CodeMirror {
width: 100%;
height: 100%;
}
"""
def init(self):
global window
# https://codemirror.net/doc/manual.html
options = dict(value='import os\n\ndirs = os.walk',
mode='python',
theme='solarized dark',
autofocus=True,
styleActiveLine=True,
matchBrackets=True,
indentUnit=4,
smartIndent=True,
lineWrapping=True,
lineNumbers=True,
firstLineNumber=1,
readOnly=False,
)
self.cm = window.CodeMirror(self.node, options)
@flx.reaction('size')
def __on_size(self, *events):
self.cm.refresh()
if __name__ == '__main__':
flx.launch(CodeEditor, 'app')
flx.run()
|
import random
from difflib import get_close_matches
from hashlib import md5
from typing import Dict, Set
from django.core.cache import cache
from django.core.exceptions import ImproperlyConfigured
from django.utils.functional import cached_property
from requests.exceptions import HTTPError
from weblate.checks.utils import highlight_string
from weblate.lang.models import Language
from weblate.logger import LOGGER
from weblate.utils.errors import report_error
from weblate.utils.hash import calculate_hash
from weblate.utils.requests import request
from weblate.utils.search import Comparer
from weblate.utils.site import get_site_url
def get_machinery_language(language):
if language.code.endswith("_devel"):
return Language.objects.get(code=language.code[:-6])
return language
class MachineTranslationError(Exception):
"""Generic Machine translation error."""
class MissingConfiguration(ImproperlyConfigured):
"""Exception raised when configuraiton is wrong."""
class MachineryRateLimit(MachineTranslationError):
"""Raised when rate limiting is detected."""
class UnsupportedLanguage(MachineTranslationError):
"""Raised when language is not supported."""
class MachineTranslation:
"""Generic object for machine translation services."""
name = "MT"
max_score = 100
rank_boost = 0
cache_translations = True
language_map: Dict[str, str] = {}
same_languages = False
do_cleanup = True
batch_size = 100
@classmethod
def get_rank(cls):
return cls.max_score + cls.rank_boost
def __init__(self):
"""Create new machine translation object."""
self.mtid = self.name.lower().replace(" ", "-")
self.rate_limit_cache = f"{self.mtid}-rate-limit"
self.languages_cache = f"{self.mtid}-languages"
self.comparer = Comparer()
self.supported_languages_error = None
def delete_cache(self):
cache.delete_many([self.rate_limit_cache, self.languages_cache])
def get_identifier(self):
return self.mtid
def get_authentication(self):
"""Hook for backends to allow add authentication headers to request."""
return {}
def request(self, method, url, skip_auth=False, **kwargs):
"""Perform JSON request."""
# Create custom headers
headers = {
"Referer": get_site_url(),
"Accept": "application/json; charset=utf-8",
}
if "headers" in kwargs:
headers.update(kwargs.pop("headers"))
# Optional authentication
if not skip_auth:
headers.update(self.get_authentication())
# Fire request
return request(method, url, headers=headers, timeout=5.0, **kwargs)
def request_status(self, method, url, **kwargs):
response = self.request(method, url, **kwargs)
payload = response.json()
# Check response status
if payload["responseStatus"] != 200:
raise MachineTranslationError(payload["responseDetails"])
# Return data
return payload
def download_languages(self):
"""Download list of supported languages from a service."""
return []
def download_translations(
self,
source,
language,
text: str,
unit,
user,
search: bool,
threshold: int = 75,
):
"""Download list of possible translations from a service.
        Should return an iterable of dicts with the translation text, the
        translation quality, the source of the translation and the source
        string.
        You can use self.name as the source of the translation if you cannot
        give a better hint, and the text parameter as the source string if
        you do no fuzzy matching.
"""
raise NotImplementedError()
def map_language_code(self, code):
"""Map language code to service specific."""
if code == "en_devel":
code = "en"
if code in self.language_map:
return self.language_map[code]
return code
def convert_language(self, language):
"""Convert language to service specific object."""
return self.map_language_code(language.code)
def report_error(self, message):
"""Wrapper for handling error situations."""
report_error(cause="Machinery error")
LOGGER.error(message, self.name)
@cached_property
def supported_languages(self):
"""Return list of supported languages."""
# Try using list from cache
languages = cache.get(self.languages_cache)
if languages is not None:
return languages
if self.is_rate_limited():
return set()
# Download
try:
languages = set(self.download_languages())
except Exception as exc:
self.supported_languages_error = exc
self.report_error("Failed to fetch languages from %s, using defaults")
return set()
# Update cache
cache.set(self.languages_cache, languages, 3600 * 48)
return languages
def is_supported(self, source, language):
"""Check whether given language combination is supported."""
return (
language in self.supported_languages
and source in self.supported_languages
and source != language
)
def is_rate_limited(self):
return cache.get(self.rate_limit_cache, False)
def set_rate_limit(self):
return cache.set(self.rate_limit_cache, True, 1800)
def is_rate_limit_error(self, exc):
if isinstance(exc, MachineryRateLimit):
return True
if not isinstance(exc, HTTPError):
return False
# Apply rate limiting for following status codes:
# HTTP 456 Client Error: Quota Exceeded (DeepL)
# HTTP 429 Too Many Requests
# HTTP 401 Unauthorized
# HTTP 403 Forbidden
# HTTP 503 Service Unavailable
if exc.response.status_code in (456, 429, 401, 403, 503):
return True
return False
def translate_cache_key(self, source, language, text, threshold):
if not self.cache_translations:
return None
return "mt:{}:{}:{}:{}".format(
self.mtid,
calculate_hash(source, language),
calculate_hash(text),
threshold,
)
def cleanup_text(self, unit):
"""Removes placeholder to avoid confusing the machine translation."""
text = unit.source_string
replacements = {}
if not self.do_cleanup:
return text, replacements
highlights = highlight_string(text, unit)
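        # For example (hypothetical input): for the text "Hello %s" with a
        # single highlight covering "%s" at offset 6, the loop below yields
        # "Hello [6]" and replacements == {"[6]": "%s"}.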
parts = []
start = 0
for h_start, h_end, h_text in highlights:
parts.append(text[start:h_start])
placeholder = f"[{h_start}]"
replacements[placeholder] = h_text
parts.append(placeholder)
start = h_end
parts.append(text[start:])
return "".join(parts), replacements
def uncleanup_results(self, replacements, results):
"""Reverts replacements done by cleanup_text."""
keys = ["text", "source"]
for result in results:
for key in keys:
text = result[key]
for source, target in replacements.items():
text = text.replace(source, target)
result[key] = text
def get_languages(self, source_language, target_language):
def get_variants(language):
code = self.convert_language(language)
yield code
if not isinstance(code, str):
return
code = code.replace("-", "_")
if "_" in code:
yield code.split("_")[0]
if source_language == target_language and not self.same_languages:
raise UnsupportedLanguage("Same languages")
        # Materialize the variants so the inner loop can run for every source.
        source_variants = list(get_variants(source_language))
        target_variants = list(get_variants(target_language))
for source in source_variants:
for target in target_variants:
if self.is_supported(source, target):
return source, target
if self.supported_languages_error:
raise MachineTranslationError(repr(self.supported_languages_error))
raise UnsupportedLanguage("Not supported")
def get_cached(self, source, language, text, threshold, replacements):
cache_key = self.translate_cache_key(source, language, text, threshold)
if cache_key:
result = cache.get(cache_key)
if result and replacements:
self.uncleanup_results(replacements, result)
return cache_key, result
return cache_key, None
def translate(self, unit, user=None, search=None, threshold: int = 75):
"""Return list of machine translations."""
try:
source, language = self.get_languages(
unit.translation.component.source_language, unit.translation.language
)
except UnsupportedLanguage:
return []
return self._translate(source, language, unit, user, search, threshold)
def _translate(
self, source, language, unit, user=None, search=None, threshold: int = 75
):
if search:
replacements = {}
text = search
else:
text, replacements = self.cleanup_text(unit)
if not text or self.is_rate_limited():
return []
cache_key, result = self.get_cached(
source, language, text, threshold, replacements
)
if result is not None:
return result
try:
result = list(
self.download_translations(
source,
language,
text,
unit,
user,
search=bool(search),
threshold=threshold,
)
)
if replacements:
self.uncleanup_results(replacements, result)
if cache_key:
cache.set(cache_key, result, 30 * 86400)
return result
except Exception as exc:
if self.is_rate_limit_error(exc):
self.set_rate_limit()
self.report_error("Failed to fetch translations from %s")
if isinstance(exc, MachineTranslationError):
raise
raise MachineTranslationError(self.get_error_message(exc))
def get_error_message(self, exc):
return f"{exc.__class__.__name__}: {exc}"
def signed_salt(self, appid, secret, text):
"""Generates salt and sign as used by Chinese services."""
salt = str(random.randint(0, 10000000000))
payload = appid + text + salt + secret
digest = md5(payload.encode()).hexdigest() # nosec
return salt, digest
def batch_translate(self, units, user=None, threshold: int = 75):
try:
source, language = self.get_languages(
units[0].translation.component.source_language,
units[0].translation.language,
)
except (UnsupportedLanguage, IndexError):
return
self._batch_translate(source, language, units, user=user, threshold=threshold)
def _batch_translate(self, source, language, units, user=None, threshold: int = 75):
for unit in units:
result = unit.machinery
if result["best"] >= self.max_score:
continue
for item in self._translate(
source, language, unit, user=user, threshold=threshold
):
if result["best"] > item["quality"]:
continue
result["best"] = item["quality"]
result["translation"] = item["text"]
class BatchStringMachineTranslation(MachineTranslation):
# Cleanup is not handled in batch mode
do_cleanup = False
def download_batch_strings(
self, source, language, units, texts: Set[str], user=None, threshold: int = 75
):
raise NotImplementedError()
def _batch_translate(self, source, language, units, user=None, threshold: int = 75):
# Get strings we need to translate
lookups = {
unit.source_string: unit
for unit in units
if unit.machinery["best"] < self.max_score
}
lookup_strings = set(lookups.keys())
cutoff = threshold / 100
for source_str, translation in self.download_batch_strings(
source, language, units, lookup_strings, user, threshold
):
for match in get_close_matches(source_str, lookup_strings, cutoff=cutoff):
quality = self.comparer.similarity(match, source_str)
result = lookups[match].machinery
if quality > result["best"]:
result["best"] = quality
result["translation"] = translation
|
from django.db import migrations
def migrate_source_language(apps, schema_editor):
Project = apps.get_model("trans", "Project")
db_alias = schema_editor.connection.alias
for project in Project.objects.using(db_alias).iterator():
project.component_set.update(source_language=project.source_language)
class Migration(migrations.Migration):
dependencies = [
("trans", "0097_component_source_language"),
]
operations = [migrations.RunPython(migrate_source_language, elidable=True)]
|
from abc import ABC, abstractmethod
from datetime import timedelta
from functools import partial
import logging
import broadlink as blk
from broadlink.exceptions import (
AuthorizationError,
BroadlinkException,
CommandNotSupportedError,
NetworkTimeoutError,
StorageError,
)
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util import dt
_LOGGER = logging.getLogger(__name__)
def get_update_manager(device):
"""Return an update manager for a given Broadlink device."""
if device.api.model.startswith("RM mini"):
return BroadlinkRMMini3UpdateManager(device)
update_managers = {
"A1": BroadlinkA1UpdateManager,
"MP1": BroadlinkMP1UpdateManager,
"RM2": BroadlinkRMUpdateManager,
"RM4": BroadlinkRMUpdateManager,
"SP1": BroadlinkSP1UpdateManager,
"SP2": BroadlinkSP2UpdateManager,
}
return update_managers[device.api.type](device)
class BroadlinkUpdateManager(ABC):
"""Representation of a Broadlink update manager.
Implement this class to manage fetching data from the device and to
monitor device availability.
"""
def __init__(self, device):
"""Initialize the update manager."""
self.device = device
self.coordinator = DataUpdateCoordinator(
device.hass,
_LOGGER,
name=f"{device.name} ({device.api.model} at {device.api.host[0]})",
update_method=self.async_update,
update_interval=timedelta(minutes=1),
)
self.available = None
self.last_update = None
async def async_update(self):
"""Fetch data from the device and update availability."""
try:
data = await self.async_fetch_data()
except (BroadlinkException, OSError) as err:
if self.available and (
dt.utcnow() - self.last_update > timedelta(minutes=3)
or isinstance(err, (AuthorizationError, OSError))
):
self.available = False
_LOGGER.warning(
"Disconnected from %s (%s at %s)",
self.device.name,
self.device.api.model,
self.device.api.host[0],
)
raise UpdateFailed(err) from err
else:
if self.available is False:
_LOGGER.warning(
"Connected to %s (%s at %s)",
self.device.name,
self.device.api.model,
self.device.api.host[0],
)
self.available = True
self.last_update = dt.utcnow()
return data
@abstractmethod
async def async_fetch_data(self):
"""Fetch data from the device."""
class BroadlinkA1UpdateManager(BroadlinkUpdateManager):
"""Manages updates for Broadlink A1 devices."""
async def async_fetch_data(self):
"""Fetch data from the device."""
return await self.device.async_request(self.device.api.check_sensors_raw)
class BroadlinkMP1UpdateManager(BroadlinkUpdateManager):
"""Manages updates for Broadlink MP1 devices."""
async def async_fetch_data(self):
"""Fetch data from the device."""
return await self.device.async_request(self.device.api.check_power)
class BroadlinkRMMini3UpdateManager(BroadlinkUpdateManager):
"""Manages updates for Broadlink RM mini 3 devices."""
async def async_fetch_data(self):
"""Fetch data from the device."""
hello = partial(
blk.discover,
discover_ip_address=self.device.api.host[0],
timeout=self.device.api.timeout,
)
devices = await self.device.hass.async_add_executor_job(hello)
if not devices:
raise NetworkTimeoutError("The device is offline")
return {}
class BroadlinkRMUpdateManager(BroadlinkUpdateManager):
"""Manages updates for Broadlink RM2 and RM4 devices."""
async def async_fetch_data(self):
"""Fetch data from the device."""
return await self.device.async_request(self.device.api.check_sensors)
class BroadlinkSP1UpdateManager(BroadlinkUpdateManager):
"""Manages updates for Broadlink SP1 devices."""
async def async_fetch_data(self):
"""Fetch data from the device."""
return None
class BroadlinkSP2UpdateManager(BroadlinkUpdateManager):
"""Manages updates for Broadlink SP2 devices."""
async def async_fetch_data(self):
"""Fetch data from the device."""
data = {}
data["state"] = await self.device.async_request(self.device.api.check_power)
try:
data["load_power"] = await self.device.async_request(
self.device.api.get_energy
)
except (CommandNotSupportedError, StorageError):
data["load_power"] = None
return data
|
from homeassistant.const import ATTR_ENTITY_ID, ATTR_NAME
from homeassistant.core import callback
from . import DOMAIN, EVENT_SCRIPT_STARTED
@callback
def async_describe_events(hass, async_describe_event):
"""Describe logbook events."""
@callback
def async_describe_logbook_event(event):
"""Describe the logbook event."""
data = event.data
return {
"name": data.get(ATTR_NAME),
"message": "started",
"entity_id": data.get(ATTR_ENTITY_ID),
}
async_describe_event(DOMAIN, EVENT_SCRIPT_STARTED, async_describe_logbook_event)
|
from decimal import Decimal, InvalidOperation
from json import loads
from operator import add, sub
from time import gmtime
from datetime import timedelta
from calendar import timegm
from urllib.parse import quote, urlparse
from ast import literal_eval
from dateutil import parser
from meza.compat import decode
from riko.dates import TODAY, gen_tzinfos, get_date, normalize_date, get_tt
from riko.currencies import CURRENCY_CODES
from riko.locations import LOCATIONS
URL_SAFE = "%/:=&?~#+!$,;'@()*[]"
MATH_WORDS = {'seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years'}
TEXT_WORDS = {'last', 'next', 'week', 'month', 'year'}
TT_KEYS = (
'year', 'month', 'day', 'hour', 'minute', 'second', 'day_of_week',
'day_of_year', 'daylight_savings')
DATES = {
'today': TODAY,
'now': TODAY,
'tomorrow': TODAY + timedelta(days=1),
'yesterday': TODAY - timedelta(days=1)}
TZINFOS = dict(gen_tzinfos())
url_quote = lambda url: quote(url, safe=URL_SAFE)
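# literal_parse (below) coerces a string into a Python value where possible.
# For example (hypothetical inputs): literal_parse('true') -> True,
# literal_parse('1.5') -> 1.5, and literal_parse('foo') falls back to the
# original string 'foo'.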
def literal_parse(string):
if string.lower() in {'true', 'false'}:
parsed = loads(string.lower())
else:
try:
parsed = literal_eval(string)
except (ValueError, SyntaxError):
parsed = string
return parsed
def cast_url(url_str):
url = 'http://%s' % url_str if '://' not in url_str else url_str
quoted = url_quote(url)
parsed = urlparse(quoted)
response = parsed._asdict()
response['url'] = parsed.geturl()
return response
def lookup_street_address(address):
location = {
'lat': 0, 'lon': 0, 'country': 'United States', 'admin1': 'state',
'admin2': 'county', 'admin3': 'city', 'city': 'city',
'street': 'street', 'postal': '61605'}
return location
def lookup_ip_address(address):
location = {
'country': 'United States', 'admin1': 'state', 'admin2': 'county',
'admin3': 'city', 'city': 'city'}
return location
def lookup_coordinates(lat, lon):
location = {
'lat': lat, 'lon': lon, 'country': 'United States', 'admin1': 'state',
'admin2': 'county', 'admin3': 'city', 'city': 'city',
'street': 'street', 'postal': '61605'}
return location
def cast_location(address, loc_type='street_address'):
GEOLOCATERS = {
'coordinates': lambda x: lookup_coordinates(*x),
'street_address': lambda x: lookup_street_address(x),
'ip_address': lambda x: lookup_ip_address(x),
'currency': lambda x: CURRENCY_CODES.get(x, {}),
}
result = GEOLOCATERS[loc_type](address)
if result.get('location'):
extra = LOCATIONS.get(result['location'], {})
result.update(extra)
return result
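# cast_date (below) accepts several date spellings. For example (a sketch):
# cast_date('tomorrow') resolves to TODAY + 1 day via the DATES lookup and
# returns a dict containing 'utime', 'timezone', 'date' and the broken-down
# TT_KEYS fields.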
def cast_date(date_str):
try:
words = date_str.split(' ')
except AttributeError:
date = gmtime(date_str) if hasattr(date_str, 'real') else date_str
else:
mathish = set(words).intersection(MATH_WORDS)
textish = set(words).intersection(TEXT_WORDS)
if date_str[0] in {'+', '-'} and len(mathish) == 1:
op = sub if date_str.startswith('-') else add
date = get_date(mathish, words[0][1:], op)
elif len(textish) == 2:
date = get_date('%ss' % words[1], 1, add)
elif date_str in DATES:
date = DATES.get(date_str)
else:
date = parser.parse(date_str, tzinfos=TZINFOS)
if date:
normal = normalize_date(date)
tt = get_tt(normal)
# Make Sunday the first day of the week
day_of_w = 0 if tt[6] == 6 else tt[6] + 1
isdst = None if tt[8] == -1 else bool(tt[8])
result = {'utime': timegm(tt), 'timezone': 'UTC', 'date': normal}
result.update(zip(TT_KEYS, tt)) # pylint: disable=W1637
result.update({'day_of_week': day_of_w, 'daylight_savings': isdst})
else:
result = {}
return result
CAST_SWITCH = {
'float': {'default': float('nan'), 'func': float},
'decimal': {'default': Decimal('NaN'), 'func': Decimal},
'int': {'default': 0, 'func': lambda i: int(float(i))},
'text': {'default': '', 'func': decode},
'date': {'default': {'date': TODAY}, 'func': cast_date},
'url': {'default': {}, 'func': cast_url},
'location': {'default': {}, 'func': cast_location},
'bool': {'default': False, 'func': lambda i: bool(literal_parse(i))},
'pass': {'default': None, 'func': lambda i: i},
'none': {'default': None, 'func': lambda _: None},
}
def cast(content, _type='text', **kwargs):
""" Convert content from one type to another
Args:
content: The entry to convert
Kwargs:
_type (str): The type to convert to
Returns:
any: The converted content
Examples:
>>> content = '12.25'
>>> cast(content, 'float')
12.25
>>> cast(content, 'decimal')
Decimal('12.25')
>>> cast(content, 'int')
12
>>> cast(content, 'text')
'12.25'
>>> cast(content, 'bool')
True
>>> cast('foo', 'float')
nan
>>> cast('foo', 'decimal')
Decimal('NaN')
>>> cast('foo', 'int')
0
"""
if content is None:
value = CAST_SWITCH[_type]['default']
elif kwargs:
value = CAST_SWITCH[_type]['func'](content, **kwargs)
else:
try:
value = CAST_SWITCH[_type]['func'](content)
except (InvalidOperation, ValueError):
value = 0 if _type == 'int' else CAST_SWITCH[_type]['func']('NaN')
return value
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
slim = tf.contrib.slim
trunc_normal = lambda stddev: tf.truncated_normal_initializer(stddev=stddev)
def cifarnet(images, num_classes=10, is_training=False,
dropout_keep_prob=0.5,
prediction_fn=slim.softmax,
scope='CifarNet'):
"""Creates a variant of the CifarNet model.
Note that since the output is a set of 'logits', the values fall in the
interval of (-infinity, infinity). Consequently, to convert the outputs to a
probability distribution over the characters, one will need to convert them
using the softmax function:
logits = cifarnet.cifarnet(images, is_training=False)
probabilities = tf.nn.softmax(logits)
predictions = tf.argmax(logits, 1)
Args:
images: A batch of `Tensors` of size [batch_size, height, width, channels].
num_classes: the number of classes in the dataset.
is_training: specifies whether or not we're currently training the model.
This variable will determine the behaviour of the dropout layer.
dropout_keep_prob: the percentage of activation values that are retained.
prediction_fn: a function to get predictions out of logits.
scope: Optional variable_scope.
Returns:
logits: the pre-softmax activations, a tensor of size
[batch_size, `num_classes`]
end_points: a dictionary from components of the network to the corresponding
activation.
"""
end_points = {}
with tf.variable_scope(scope, 'CifarNet', [images, num_classes]):
net = slim.conv2d(images, 64, [5, 5], scope='conv1')
end_points['conv1'] = net
net = slim.max_pool2d(net, [2, 2], 2, scope='pool1')
end_points['pool1'] = net
net = tf.nn.lrn(net, 4, bias=1.0, alpha=0.001/9.0, beta=0.75, name='norm1')
net = slim.conv2d(net, 64, [5, 5], scope='conv2')
end_points['conv2'] = net
net = tf.nn.lrn(net, 4, bias=1.0, alpha=0.001/9.0, beta=0.75, name='norm2')
net = slim.max_pool2d(net, [2, 2], 2, scope='pool2')
end_points['pool2'] = net
net = slim.flatten(net)
end_points['Flatten'] = net
net = slim.fully_connected(net, 384, scope='fc3')
end_points['fc3'] = net
net = slim.dropout(net, dropout_keep_prob, is_training=is_training,
scope='dropout3')
net = slim.fully_connected(net, 192, scope='fc4')
end_points['fc4'] = net
logits = slim.fully_connected(net, num_classes,
biases_initializer=tf.zeros_initializer(),
weights_initializer=trunc_normal(1/192.0),
weights_regularizer=None,
activation_fn=None,
scope='logits')
end_points['Logits'] = logits
end_points['Predictions'] = prediction_fn(logits, scope='Predictions')
return logits, end_points
cifarnet.default_image_size = 32
def cifarnet_arg_scope(weight_decay=0.004):
"""Defines the default cifarnet argument scope.
Args:
weight_decay: The weight decay to use for regularizing the model.
Returns:
    An `arg_scope` to use for the cifarnet model.
"""
with slim.arg_scope(
[slim.conv2d],
weights_initializer=tf.truncated_normal_initializer(stddev=5e-2),
activation_fn=tf.nn.relu):
with slim.arg_scope(
[slim.fully_connected],
biases_initializer=tf.constant_initializer(0.1),
weights_initializer=trunc_normal(0.04),
weights_regularizer=slim.l2_regularizer(weight_decay),
activation_fn=tf.nn.relu) as sc:
return sc
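# Typical usage (a sketch, not part of this file): build the network inside
# the argument scope so the conv/fc defaults defined above are applied:
#
#   with slim.arg_scope(cifarnet_arg_scope(weight_decay=0.004)):
#     logits, end_points = cifarnet(images, num_classes=10, is_training=True)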
|
from datetime import timedelta
import logging
import voluptuous as vol
from homeassistant.components.light import (
ATTR_PROFILE,
ATTR_TRANSITION,
DOMAIN as DOMAIN_LIGHT,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
EVENT_HOMEASSISTANT_START,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_HOME,
STATE_NOT_HOME,
SUN_EVENT_SUNRISE,
SUN_EVENT_SUNSET,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import (
async_track_point_in_utc_time,
async_track_state_change,
)
from homeassistant.helpers.sun import get_astral_event_next, is_up
import homeassistant.util.dt as dt_util
DOMAIN = "device_sun_light_trigger"
CONF_DEVICE_GROUP = "device_group"
CONF_DISABLE_TURN_OFF = "disable_turn_off"
CONF_LIGHT_GROUP = "light_group"
CONF_LIGHT_PROFILE = "light_profile"
DEFAULT_DISABLE_TURN_OFF = False
DEFAULT_LIGHT_PROFILE = "relax"
LIGHT_TRANSITION_TIME = timedelta(minutes=15)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Optional(CONF_DEVICE_GROUP): cv.entity_id,
vol.Optional(
CONF_DISABLE_TURN_OFF, default=DEFAULT_DISABLE_TURN_OFF
): cv.boolean,
vol.Optional(CONF_LIGHT_GROUP): cv.string,
vol.Optional(
CONF_LIGHT_PROFILE, default=DEFAULT_LIGHT_PROFILE
): cv.string,
}
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, config):
"""Set up the triggers to control lights based on device presence."""
conf = config[DOMAIN]
disable_turn_off = conf[CONF_DISABLE_TURN_OFF]
light_group = conf.get(CONF_LIGHT_GROUP)
light_profile = conf[CONF_LIGHT_PROFILE]
device_group = conf.get(CONF_DEVICE_GROUP)
async def activate_on_start(_):
"""Activate automation."""
await activate_automation(
hass, device_group, light_group, light_profile, disable_turn_off
)
if hass.is_running:
await activate_on_start(None)
else:
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, activate_on_start)
return True
async def activate_automation(
hass, device_group, light_group, light_profile, disable_turn_off
):
"""Activate the automation."""
logger = logging.getLogger(__name__)
device_tracker = hass.components.device_tracker
group = hass.components.group
light = hass.components.light
person = hass.components.person
if device_group is None:
device_entity_ids = hass.states.async_entity_ids(device_tracker.DOMAIN)
else:
device_entity_ids = group.get_entity_ids(device_group, device_tracker.DOMAIN)
device_entity_ids.extend(group.get_entity_ids(device_group, person.DOMAIN))
if not device_entity_ids:
logger.error("No devices found to track")
return
# Get the light IDs from the specified group
if light_group is None:
light_ids = hass.states.async_entity_ids(light.DOMAIN)
else:
light_ids = group.get_entity_ids(light_group, light.DOMAIN)
if not light_ids:
logger.error("No lights found to turn on")
return
@callback
def anyone_home():
"""Test if anyone is home."""
return any(device_tracker.is_on(dt_id) for dt_id in device_entity_ids)
@callback
def any_light_on():
"""Test if any light on."""
return any(light.is_on(light_id) for light_id in light_ids)
def calc_time_for_light_when_sunset():
"""Calculate the time when to start fading lights in when sun sets.
Returns None if no next_setting data available.
Async friendly.
"""
next_setting = get_astral_event_next(hass, SUN_EVENT_SUNSET)
if not next_setting:
return None
return next_setting - LIGHT_TRANSITION_TIME * len(light_ids)
async def async_turn_on_before_sunset(light_id):
"""Turn on lights."""
if not anyone_home() or light.is_on(light_id):
return
await hass.services.async_call(
DOMAIN_LIGHT,
SERVICE_TURN_ON,
{
ATTR_ENTITY_ID: light_id,
ATTR_TRANSITION: LIGHT_TRANSITION_TIME.seconds,
ATTR_PROFILE: light_profile,
},
)
@callback
def async_turn_on_factory(light_id):
"""Generate turn on callbacks as factory."""
async def async_turn_on_light(now):
"""Turn on specific light."""
await async_turn_on_before_sunset(light_id)
return async_turn_on_light
    # Track every time the sun rises so we can schedule a time-based
    # pre-sunset event
@callback
def schedule_light_turn_on(now):
"""Turn on all the lights at the moment sun sets.
We will schedule to have each light start after one another
and slowly transition in.
"""
start_point = calc_time_for_light_when_sunset()
if not start_point:
return
for index, light_id in enumerate(light_ids):
async_track_point_in_utc_time(
hass,
async_turn_on_factory(light_id),
start_point + index * LIGHT_TRANSITION_TIME,
)
async_track_point_in_utc_time(
hass, schedule_light_turn_on, get_astral_event_next(hass, SUN_EVENT_SUNRISE)
)
    # If the sun is already above the horizon, schedule the time-based
    # pre-sunset event.
if is_up(hass):
schedule_light_turn_on(None)
@callback
def check_light_on_dev_state_change(entity, old_state, new_state):
"""Handle tracked device state changes."""
lights_are_on = any_light_on()
light_needed = not (lights_are_on or is_up(hass))
# These variables are needed for the elif check
now = dt_util.utcnow()
start_point = calc_time_for_light_when_sunset()
# Do we need lights?
if light_needed:
logger.info("Home coming event for %s. Turning lights on", entity)
hass.async_create_task(
hass.services.async_call(
DOMAIN_LIGHT,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: light_ids, ATTR_PROFILE: light_profile},
)
)
        # Are we in the time span where we would turn on the lights
        # if someone were home?
        # Check this by seeing if the current time is later than the
        # point in time when we would start putting the lights on.
elif start_point and start_point < now < get_astral_event_next(
hass, SUN_EVENT_SUNSET
):
# Check for every light if it would be on if someone was home
# when the fading in started and turn it on if so
for index, light_id in enumerate(light_ids):
if now > start_point + index * LIGHT_TRANSITION_TIME:
hass.async_create_task(
hass.services.async_call(
DOMAIN_LIGHT, SERVICE_TURN_ON, {ATTR_ENTITY_ID: light_id}
)
)
else:
                    # If this light is not yet due to be turned on, neither
                    # are any of the following, so stop here.
break
async_track_state_change(
hass,
device_entity_ids,
check_light_on_dev_state_change,
STATE_NOT_HOME,
STATE_HOME,
)
if disable_turn_off:
return
@callback
def turn_off_lights_when_all_leave(entity, old_state, new_state):
"""Handle device group state change."""
        # Make sure nobody is home
if anyone_home():
return
# Check if any light is on
if not any_light_on():
return
logger.info("Everyone has left but there are lights on. Turning them off")
hass.async_create_task(
hass.services.async_call(
DOMAIN_LIGHT, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: light_ids}
)
)
async_track_state_change(
hass,
device_entity_ids,
turn_off_lights_when_all_leave,
STATE_HOME,
STATE_NOT_HOME,
)
return
|
import logging
import voluptuous as vol
from zoneminder.zm import ZoneMinder
from homeassistant.const import (
ATTR_ID,
ATTR_NAME,
CONF_HOST,
CONF_PASSWORD,
CONF_PATH,
CONF_SSL,
CONF_USERNAME,
CONF_VERIFY_SSL,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.discovery import async_load_platform
_LOGGER = logging.getLogger(__name__)
CONF_PATH_ZMS = "path_zms"
DEFAULT_PATH = "/zm/"
DEFAULT_PATH_ZMS = "/zm/cgi-bin/nph-zms"
DEFAULT_SSL = False
DEFAULT_TIMEOUT = 10
DEFAULT_VERIFY_SSL = True
DOMAIN = "zoneminder"
HOST_CONFIG_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_PASSWORD): cv.string,
vol.Optional(CONF_PATH, default=DEFAULT_PATH): cv.string,
vol.Optional(CONF_PATH_ZMS, default=DEFAULT_PATH_ZMS): cv.string,
vol.Optional(CONF_SSL, default=DEFAULT_SSL): cv.boolean,
vol.Optional(CONF_USERNAME): cv.string,
vol.Optional(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): cv.boolean,
}
)
CONFIG_SCHEMA = vol.Schema(
{DOMAIN: vol.All(cv.ensure_list, [HOST_CONFIG_SCHEMA])}, extra=vol.ALLOW_EXTRA
)
SERVICE_SET_RUN_STATE = "set_run_state"
SET_RUN_STATE_SCHEMA = vol.Schema(
{vol.Required(ATTR_ID): cv.string, vol.Required(ATTR_NAME): cv.string}
)
def setup(hass, config):
"""Set up the ZoneMinder component."""
hass.data[DOMAIN] = {}
success = True
for conf in config[DOMAIN]:
protocol = "https" if conf[CONF_SSL] else "http"
host_name = conf[CONF_HOST]
server_origin = f"{protocol}://{host_name}"
zm_client = ZoneMinder(
server_origin,
conf.get(CONF_USERNAME),
conf.get(CONF_PASSWORD),
conf.get(CONF_PATH),
conf.get(CONF_PATH_ZMS),
conf.get(CONF_VERIFY_SSL),
)
hass.data[DOMAIN][host_name] = zm_client
success = zm_client.login() and success
def set_active_state(call):
"""Set the ZoneMinder run state to the given state name."""
zm_id = call.data[ATTR_ID]
state_name = call.data[ATTR_NAME]
        if zm_id not in hass.data[DOMAIN]:
            _LOGGER.error("Invalid ZoneMinder host provided: %s", zm_id)
            return
if not hass.data[DOMAIN][zm_id].set_active_state(state_name):
_LOGGER.error(
"Unable to change ZoneMinder state. Host: %s, state: %s",
zm_id,
state_name,
)
hass.services.register(
DOMAIN, SERVICE_SET_RUN_STATE, set_active_state, schema=SET_RUN_STATE_SCHEMA
)
hass.async_create_task(
async_load_platform(hass, "binary_sensor", DOMAIN, {}, config)
)
return success
|
from lark import Lark, Transformer, v_args
try:
input = raw_input # For Python2 compatibility
except NameError:
pass
calc_grammar = """
?start: sum
| NAME "=" sum -> assign_var
?sum: product
| sum "+" product -> add
| sum "-" product -> sub
?product: atom
| product "*" atom -> mul
| product "/" atom -> div
?atom: NUMBER -> number
| "-" atom -> neg
| NAME -> var
| "(" sum ")"
%import common.CNAME -> NAME
%import common.NUMBER
%import common.WS_INLINE
%ignore WS_INLINE
"""
@v_args(inline=True) # Affects the signatures of the methods
class CalculateTree(Transformer):
from operator import add, sub, mul, truediv as div, neg
number = float
def __init__(self):
self.vars = {}
def assign_var(self, name, value):
self.vars[name] = value
return value
def var(self, name):
try:
return self.vars[name]
except KeyError:
raise Exception("Variable not found: %s" % name)
calc_parser = Lark(calc_grammar, parser='lalr', transformer=CalculateTree())
calc = calc_parser.parse
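# For example (a sketch): calc("x = 4") stores 4.0 in the transformer's vars
# and calc("x / 2 + 1") then evaluates to 3.0, since `number = float` turns
# every literal into a float.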
def main():
while True:
try:
s = input('> ')
except EOFError:
break
print(calc(s))
def test():
print(calc("a = 1+2"))
print(calc("1+a*-3"))
if __name__ == '__main__':
# test()
main()
|
import functools
import pytest
from PyQt5.QtGui import QIcon, QPixmap
from qutebrowser.mainwindow import tabwidget
from qutebrowser.utils import usertypes
class TestTabWidget:
"""Tests for TabWidget."""
@pytest.fixture
def widget(self, qtbot, monkeypatch, config_stub):
w = tabwidget.TabWidget(0)
qtbot.addWidget(w)
monkeypatch.setattr(tabwidget.objects, 'backend',
usertypes.Backend.QtWebKit)
w.show()
return w
def test_small_icon_doesnt_crash(self, widget, qtbot, fake_web_tab):
"""Test that setting a small icon doesn't produce a crash.
Regression test for #1015.
"""
# Size taken from issue report
pixmap = QPixmap(72, 1)
icon = QIcon(pixmap)
tab = fake_web_tab()
widget.addTab(tab, icon, 'foobar')
with qtbot.waitExposed(widget):
widget.show()
# Sizing tests
def test_tab_size_same(self, widget, fake_web_tab):
"""Ensure by default, all tab sizes are the same."""
num_tabs = 10
for i in range(num_tabs):
widget.addTab(fake_web_tab(), 'foobar' + str(i))
first_size = widget.tabBar().tabSizeHint(0)
first_size_min = widget.tabBar().minimumTabSizeHint(0)
for i in range(num_tabs):
assert first_size == widget.tabBar().tabSizeHint(i)
assert first_size_min == widget.tabBar().minimumTabSizeHint(i)
@pytest.mark.parametrize("shrink_pinned", [True, False])
@pytest.mark.parametrize("vertical", [True, False])
def test_pinned_size(self, widget, fake_web_tab, config_stub,
shrink_pinned, vertical):
"""Ensure by default, pinned min sizes are forced to title.
        If pinned.shrink is not true, then all tabs should be the same size.
        If tabs are vertical, all tabs should be the same size."""
num_tabs = 10
for i in range(num_tabs):
widget.addTab(fake_web_tab(), 'foobar' + str(i))
# Set pinned title format longer than unpinned
config_stub.val.tabs.title.format_pinned = "_" * 10
config_stub.val.tabs.title.format = "_" * 2
config_stub.val.tabs.pinned.shrink = shrink_pinned
if vertical:
# Use pixel width so we don't need to mock main-window
config_stub.val.tabs.width = 50
config_stub.val.tabs.position = "left"
pinned_num = [1, num_tabs - 1]
for num in pinned_num:
tab = widget.widget(num)
tab.set_pinned(True)
first_size = widget.tabBar().tabSizeHint(0)
first_size_min = widget.tabBar().minimumTabSizeHint(0)
for i in range(num_tabs):
if i in pinned_num and shrink_pinned and not vertical:
assert (first_size.width() >
widget.tabBar().tabSizeHint(i).width())
assert (first_size_min.width() <
widget.tabBar().minimumTabSizeHint(i).width())
else:
assert first_size == widget.tabBar().tabSizeHint(i)
assert first_size_min == widget.tabBar().minimumTabSizeHint(i)
@pytest.mark.parametrize("num_tabs", [4, 10, 50, 100])
def test_update_tab_titles_benchmark(self, benchmark, widget,
qtbot, fake_web_tab, num_tabs):
"""Benchmark for update_tab_titles."""
for i in range(num_tabs):
widget.addTab(fake_web_tab(), 'foobar' + str(i))
with qtbot.waitExposed(widget):
widget.show()
benchmark(widget.update_tab_titles)
def test_tab_min_width(self, widget, fake_web_tab, config_stub, qtbot):
widget.addTab(fake_web_tab(), 'foobar')
widget.addTab(fake_web_tab(), 'foobar1')
min_size = widget.tabBar().tabRect(0).width() + 10
config_stub.val.tabs.min_width = min_size
assert widget.tabBar().tabRect(0).width() == min_size
def test_tab_max_width(self, widget, fake_web_tab, config_stub, qtbot):
widget.addTab(fake_web_tab(), 'foobar')
max_size = widget.tabBar().tabRect(0).width() - 10
config_stub.val.tabs.max_width = max_size
assert widget.tabBar().tabRect(0).width() == max_size
def test_tab_stays_hidden(self, widget, fake_web_tab, config_stub):
assert widget.tabBar().isVisible()
config_stub.val.tabs.show = "never"
assert not widget.tabBar().isVisible()
for i in range(12):
widget.addTab(fake_web_tab(), 'foobar' + str(i))
assert not widget.tabBar().isVisible()
@pytest.mark.parametrize("num_tabs", [4, 70])
@pytest.mark.parametrize("rev", [True, False])
def test_add_remove_tab_benchmark(self, benchmark, widget,
qtbot, fake_web_tab, num_tabs, rev):
"""Benchmark for addTab and removeTab."""
def _run_bench():
with qtbot.wait_exposed(widget):
widget.show()
for i in range(num_tabs):
idx = i if rev else 0
widget.insertTab(idx, fake_web_tab(), 'foobar' + str(i))
to_del = range(num_tabs)
if rev:
to_del = reversed(to_del)
for i in to_del:
widget.removeTab(i)
benchmark(_run_bench)
def test_tab_pinned_benchmark(self, benchmark, widget, fake_web_tab):
"""Benchmark for _tab_pinned."""
widget.addTab(fake_web_tab(), 'foobar')
tab_bar = widget.tabBar()
benchmark(functools.partial(tab_bar._tab_pinned, 0))
|
from homeassistant.components.binary_sensor import BinarySensorEntity
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from . import ATTR_NEWEST_VERSION, ATTR_RELEASE_NOTES, DOMAIN as UPDATER_DOMAIN
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the updater binary sensors."""
if discovery_info is None:
return
async_add_entities([UpdaterBinary(hass.data[UPDATER_DOMAIN])])
class UpdaterBinary(CoordinatorEntity, BinarySensorEntity):
"""Representation of an updater binary sensor."""
@property
def name(self) -> str:
"""Return the name of the binary sensor, if any."""
return "Updater"
@property
def unique_id(self) -> str:
"""Return a unique ID."""
return "updater"
@property
def is_on(self) -> bool:
"""Return true if the binary sensor is on."""
if not self.coordinator.data:
return None
return self.coordinator.data.update_available
@property
def device_state_attributes(self) -> dict:
"""Return the optional state attributes."""
if not self.coordinator.data:
return None
data = {}
if self.coordinator.data.release_notes:
data[ATTR_RELEASE_NOTES] = self.coordinator.data.release_notes
if self.coordinator.data.newest_version:
data[ATTR_NEWEST_VERSION] = self.coordinator.data.newest_version
return data
|
from total_connect_client import TotalConnectClient
from homeassistant.components.totalconnect import DOMAIN
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
from tests.common import MockConfigEntry
LOCATION_INFO_BASIC_NORMAL = {
"LocationID": "123456",
"LocationName": "test",
"SecurityDeviceID": "987654",
"PhotoURL": "http://www.example.com/some/path/to/file.jpg",
"LocationModuleFlags": "Security=1,Video=0,Automation=0,GPS=0,VideoPIR=0",
"DeviceList": None,
}
LOCATIONS = {"LocationInfoBasic": [LOCATION_INFO_BASIC_NORMAL]}
MODULE_FLAGS = "Some=0,Fake=1,Flags=2"
USER = {
"UserID": "1234567",
"Username": "username",
"UserFeatureList": "Master=0,User Administration=0,Configuration Administration=0",
}
RESPONSE_AUTHENTICATE = {
"ResultCode": 0,
"SessionID": 1,
"Locations": LOCATIONS,
"ModuleFlags": MODULE_FLAGS,
"UserInfo": USER,
}
PARTITION_DISARMED = {
"PartitionID": "1",
"ArmingState": TotalConnectClient.TotalConnectLocation.DISARMED,
}
PARTITION_ARMED_STAY = {
"PartitionID": "1",
"ArmingState": TotalConnectClient.TotalConnectLocation.ARMED_STAY,
}
PARTITION_ARMED_AWAY = {
"PartitionID": "1",
"ArmingState": TotalConnectClient.TotalConnectLocation.ARMED_AWAY,
}
PARTITION_INFO_DISARMED = {0: PARTITION_DISARMED}
PARTITION_INFO_ARMED_STAY = {0: PARTITION_ARMED_STAY}
PARTITION_INFO_ARMED_AWAY = {0: PARTITION_ARMED_AWAY}
PARTITIONS_DISARMED = {"PartitionInfo": PARTITION_INFO_DISARMED}
PARTITIONS_ARMED_STAY = {"PartitionInfo": PARTITION_INFO_ARMED_STAY}
PARTITIONS_ARMED_AWAY = {"PartitionInfo": PARTITION_INFO_ARMED_AWAY}
ZONE_NORMAL = {
"ZoneID": "1",
"ZoneDescription": "Normal",
"ZoneStatus": TotalConnectClient.ZONE_STATUS_NORMAL,
"PartitionId": "1",
}
ZONE_INFO = [ZONE_NORMAL]
ZONES = {"ZoneInfo": ZONE_INFO}
METADATA_DISARMED = {
"Partitions": PARTITIONS_DISARMED,
"Zones": ZONES,
"PromptForImportSecuritySettings": False,
"IsInACLoss": False,
"IsCoverTampered": False,
"Bell1SupervisionFailure": False,
"Bell2SupervisionFailure": False,
"IsInLowBattery": False,
}
METADATA_ARMED_STAY = METADATA_DISARMED.copy()
METADATA_ARMED_STAY["Partitions"] = PARTITIONS_ARMED_STAY
METADATA_ARMED_AWAY = METADATA_DISARMED.copy()
METADATA_ARMED_AWAY["Partitions"] = PARTITIONS_ARMED_AWAY
RESPONSE_DISARMED = {"ResultCode": 0, "PanelMetadataAndStatus": METADATA_DISARMED}
RESPONSE_ARMED_STAY = {"ResultCode": 0, "PanelMetadataAndStatus": METADATA_ARMED_STAY}
RESPONSE_ARMED_AWAY = {"ResultCode": 0, "PanelMetadataAndStatus": METADATA_ARMED_AWAY}
RESPONSE_ARM_SUCCESS = {"ResultCode": TotalConnectClient.TotalConnectClient.ARM_SUCCESS}
RESPONSE_ARM_FAILURE = {
"ResultCode": TotalConnectClient.TotalConnectClient.COMMAND_FAILED
}
RESPONSE_DISARM_SUCCESS = {
"ResultCode": TotalConnectClient.TotalConnectClient.DISARM_SUCCESS
}
RESPONSE_DISARM_FAILURE = {
"ResultCode": TotalConnectClient.TotalConnectClient.COMMAND_FAILED,
"ResultData": "Command Failed",
}
async def setup_platform(hass, platform):
"""Set up the TotalConnect platform."""
# first set up a config entry and add it to hass
mock_entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_USERNAME: "[email protected]", CONF_PASSWORD: "password"},
)
mock_entry.add_to_hass(hass)
responses = [RESPONSE_AUTHENTICATE, RESPONSE_DISARMED]
with patch("homeassistant.components.totalconnect.PLATFORMS", [platform]), patch(
"zeep.Client", autospec=True
), patch(
"homeassistant.components.totalconnect.TotalConnectClient.TotalConnectClient.request",
side_effect=responses,
) as mock_request, patch(
"homeassistant.components.totalconnect.TotalConnectClient.TotalConnectClient.get_zone_details",
return_value=True,
):
assert await async_setup_component(hass, DOMAIN, {})
assert mock_request.call_count == 2
await hass.async_block_till_done()
return mock_entry
|
import json
from pkg_resources import resource_string
from ...stepper.util import parse_duration
def periodic_schedule(batch_size, period, limit):
return {
"LimiterType": "periodic",
"Parameters": {
"BatchSize": float(batch_size),
"MaxCount": float(limit),
"Period": float(period),
},
}
def linear_schedule(start_rps, end_rps, period):
return {
"LimiterType": "linear",
"Parameters": {
"StartRps": float(start_rps),
"EndRps": float(end_rps),
"Period": parse_duration(period) / 1000.0,
},
}
def unlimited_schedule(*args):
return {
"LimiterType": "unlimited",
"Parameters": {},
}
step_producers = {
"periodic": periodic_schedule,
"linear": linear_schedule,
"unlimited": unlimited_schedule,
}
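# parse_schedule (below) turns a schedule string into one of the limiter
# configs above. For example (a sketch): parse_schedule("periodic(100, 100, 100)")
# yields {"LimiterType": "periodic", "Parameters": {"BatchSize": 100.0,
# "MaxCount": 100.0, "Period": 100.0}}.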
def parse_schedule(schedule):
steps = [
step.strip() for step in " ".join(schedule.split("\n")).split(')')
if step.strip()
]
if len(steps) > 1:
raise NotImplementedError("Composite schedules not implemented yet")
schedule_type, params = steps[0].split('(')
params = [p.strip() for p in params.split(',')]
if schedule_type in step_producers:
return step_producers[schedule_type](*params)
else:
raise NotImplementedError(
"Step of type %s is not implemented" % schedule_type)
class PandoraConfig(object):
def __init__(self):
self.pools = []
def data(self):
return {"Pools": [p.data() for p in self.pools]}
def json(self):
return json.dumps(self.data(), indent=2)
def add_pool(self, pool_config):
self.pools.append(pool_config)
class PoolConfig(object):
def __init__(self):
self.config = json.loads(
resource_string(__name__, 'config/pandora_pool_default.json'))
def set_ammo(self, ammo):
self.config["AmmoProvider"]["AmmoSource"] = ammo
def set_ammo_type(self, ammo_type):
self.config["AmmoProvider"]["AmmoType"] = ammo_type
def set_loop(self, loop):
self.config["AmmoProvider"]["Passes"] = loop
def set_report_file(self, report_file):
self.config["ResultListener"]["Destination"] = report_file
def set_startup_schedule(self, startup_schedule):
self.config["StartupLimiter"] = startup_schedule
def set_user_schedule(self, user_schedule):
self.config["UserLimiter"] = user_schedule
def set_shared_schedule(self, shared_schedule):
self.config["SharedSchedule"] = shared_schedule
def set_target(self, target):
self.config["Gun"]["Parameters"]["Target"] = target
def set_ssl(self, ssl):
self.config["Gun"]["Parameters"]["SSL"] = ssl
def set_gun_type(self, gun_type):
self.config["Gun"]["GunType"] = gun_type
def data(self):
return self.config
def main():
pool_config = PoolConfig()
pool_config.set_loop(1)
pool_config.set_startup_schedule(parse_schedule("periodic(100, 100, 100)"))
pool_config.set_target("example.org:443")
pandora_config = PandoraConfig()
pandora_config.add_pool(pool_config)
print(pandora_config.json())
if __name__ == '__main__':
main()
|
from typing import List
import voluptuous as vol
from homeassistant.components.alarm_control_panel.const import (
SUPPORT_ALARM_ARM_AWAY,
SUPPORT_ALARM_ARM_HOME,
SUPPORT_ALARM_ARM_NIGHT,
)
from homeassistant.components.automation import AutomationActionType
from homeassistant.components.device_automation import TRIGGER_BASE_SCHEMA
from homeassistant.components.homeassistant.triggers import state as state_trigger
from homeassistant.const import (
ATTR_SUPPORTED_FEATURES,
CONF_DEVICE_ID,
CONF_DOMAIN,
CONF_ENTITY_ID,
CONF_PLATFORM,
CONF_TYPE,
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_ARMING,
STATE_ALARM_DISARMED,
STATE_ALARM_PENDING,
STATE_ALARM_TRIGGERED,
)
from homeassistant.core import CALLBACK_TYPE, HomeAssistant
from homeassistant.helpers import config_validation as cv, entity_registry
from homeassistant.helpers.typing import ConfigType
from . import DOMAIN
TRIGGER_TYPES = {
"triggered",
"disarmed",
"arming",
"armed_home",
"armed_away",
"armed_night",
}
TRIGGER_SCHEMA = TRIGGER_BASE_SCHEMA.extend(
{
vol.Required(CONF_ENTITY_ID): cv.entity_id,
vol.Required(CONF_TYPE): vol.In(TRIGGER_TYPES),
}
)
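# A config accepted by TRIGGER_SCHEMA looks like this (hypothetical values):
#   {
#       "platform": "device",
#       "domain": "alarm_control_panel",
#       "device_id": "1234567890abcdef",
#       "entity_id": "alarm_control_panel.home_alarm",
#       "type": "armed_away",
#   }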
async def async_get_triggers(hass: HomeAssistant, device_id: str) -> List[dict]:
"""List device triggers for Alarm control panel devices."""
registry = await entity_registry.async_get_registry(hass)
triggers = []
# Get all the integrations entities for this device
for entry in entity_registry.async_entries_for_device(registry, device_id):
if entry.domain != DOMAIN:
continue
entity_state = hass.states.get(entry.entity_id)
        # We need a state or else we can't check the supported features.
if entity_state is None:
continue
supported_features = entity_state.attributes[ATTR_SUPPORTED_FEATURES]
# Add triggers for each entity that belongs to this integration
triggers += [
{
CONF_PLATFORM: "device",
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "disarmed",
},
{
CONF_PLATFORM: "device",
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "triggered",
},
{
CONF_PLATFORM: "device",
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "arming",
},
]
if supported_features & SUPPORT_ALARM_ARM_HOME:
triggers.append(
{
CONF_PLATFORM: "device",
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "armed_home",
}
)
if supported_features & SUPPORT_ALARM_ARM_AWAY:
triggers.append(
{
CONF_PLATFORM: "device",
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "armed_away",
}
)
if supported_features & SUPPORT_ALARM_ARM_NIGHT:
triggers.append(
{
CONF_PLATFORM: "device",
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "armed_night",
}
)
return triggers
async def async_attach_trigger(
hass: HomeAssistant,
config: ConfigType,
action: AutomationActionType,
automation_info: dict,
) -> CALLBACK_TYPE:
"""Attach a trigger."""
config = TRIGGER_SCHEMA(config)
from_state = None
if config[CONF_TYPE] == "triggered":
to_state = STATE_ALARM_TRIGGERED
elif config[CONF_TYPE] == "disarmed":
to_state = STATE_ALARM_DISARMED
elif config[CONF_TYPE] == "arming":
from_state = STATE_ALARM_DISARMED
to_state = STATE_ALARM_ARMING
elif config[CONF_TYPE] == "armed_home":
from_state = STATE_ALARM_PENDING or STATE_ALARM_ARMING
to_state = STATE_ALARM_ARMED_HOME
elif config[CONF_TYPE] == "armed_away":
from_state = STATE_ALARM_PENDING or STATE_ALARM_ARMING
to_state = STATE_ALARM_ARMED_AWAY
elif config[CONF_TYPE] == "armed_night":
from_state = STATE_ALARM_PENDING or STATE_ALARM_ARMING
to_state = STATE_ALARM_ARMED_NIGHT
state_config = {
state_trigger.CONF_PLATFORM: "state",
CONF_ENTITY_ID: config[CONF_ENTITY_ID],
state_trigger.CONF_TO: to_state,
}
if from_state:
state_config[state_trigger.CONF_FROM] = from_state
state_config = state_trigger.TRIGGER_SCHEMA(state_config)
return await state_trigger.async_attach_trigger(
hass, state_config, action, automation_info, platform_type="device"
)
|
from __future__ import absolute_import, unicode_literals
import time
import kaptan
from tmuxp import config
from tmuxp.workspacebuilder import WorkspaceBuilder, freeze
from .fixtures._util import loadfixture
def test_freeze_config(session):
yaml_config = loadfixture("workspacefreezer/sampleconfig.yaml")
sconfig = kaptan.Kaptan(handler='yaml')
sconfig = sconfig.import_config(yaml_config).get()
builder = WorkspaceBuilder(sconf=sconfig)
builder.build(session=session)
assert session == builder.session
time.sleep(0.50)
sconf = freeze(session)
config.validate_schema(sconf)
sconf = config.inline(sconf)
kaptanconf = kaptan.Kaptan()
kaptanconf = kaptanconf.import_config(sconf)
kaptanconf.export('json', indent=2)
kaptanconf.export('yaml', indent=2, default_flow_style=False, safe=True)
|
from __future__ import division
import chainer
import chainer.functions as F
import chainer.links as L
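# A sketch of the idea behind this module: a large ksize x ksize context
# window is approximated by two separable branches, a (ksize, 1) followed by
# a (1, ksize) convolution and the transposed pair, whose outputs are summed
# and passed through a ReLU in __call__.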
class GlobalContextModule(chainer.Chain):
def __init__(
self, in_channels, mid_channels, out_channels,
ksize, initialW=None
):
super(GlobalContextModule, self).__init__()
with self.init_scope():
padsize = (ksize - 1) // 2
self.col_max = L.Convolution2D(
in_channels, mid_channels, (ksize, 1), 1, (padsize, 0),
initialW=initialW)
self.col = L.Convolution2D(
mid_channels, out_channels, (1, ksize), 1, (0, padsize),
initialW=initialW)
self.row_max = L.Convolution2D(
in_channels, mid_channels, (1, ksize), 1, (0, padsize),
initialW=initialW)
self.row = L.Convolution2D(
mid_channels, out_channels, (ksize, 1), 1, (padsize, 0),
initialW=initialW)
def __call__(self, x):
h_col = self.col(self.col_max(x))
h_row = self.row(self.row_max(x))
return F.relu(h_col + h_row)
|
import diamond.collector
import re
from urlparse import urljoin
from urllib import quote
import urllib2
from base64 import b64encode
try:
import json
except ImportError:
import simplejson as json
class RabbitMQClient(object):
"""
Tiny interface into the rabbit http api
"""
def __init__(self, log, host, user, password, timeout=5, scheme="http"):
self.log = log
self.base_url = '%s://%s/api/' % (scheme, host)
self.timeout = timeout
self._authorization = 'Basic ' + b64encode('%s:%s' % (user, password))
def do_call(self, path):
url = urljoin(self.base_url, path)
req = urllib2.Request(url)
req.add_header('Authorization', self._authorization)
return json.load(urllib2.urlopen(req, timeout=self.timeout))
def get_all_vhosts(self):
return self.do_call('vhosts')
def get_vhost_names(self):
return [i['name'] for i in self.get_all_vhosts()]
def get_queue(self, vhost, queue_name):
path = 'queues'
if vhost:
vhost = quote(vhost, '')
queue_name = quote(queue_name, '')
path += '/%s/%s' % (vhost, queue_name)
try:
queue = self.do_call(path)
return queue or None
except Exception as e:
self.log.error('Error querying queue %s/%s: %s' % (
vhost, queue_name, e
))
return None
def get_queues(self, vhost):
path = 'queues'
vhost = quote(vhost, '')
path += '/%s' % vhost
try:
queues = self.do_call(path)
return queues or []
except Exception as e:
self.log.error('Error querying queues %s: %s' % (
vhost, e
))
return []
def get_overview(self):
return self.do_call('overview')
def get_nodes(self):
return self.do_call('nodes')
def get_node(self, node):
return self.do_call('nodes/%s' % node)
class RabbitMQCollector(diamond.collector.Collector):
def get_default_config_help(self):
config_help = super(RabbitMQCollector, self).get_default_config_help()
config_help.update({
'host': 'Hostname and port to collect from',
'user': 'Username',
'password': 'Password',
'replace_dot':
'A value to replace dot in queue names and vhosts names by',
'replace_slash':
'A value to replace a slash in queue names and vhosts names by',
'queues': 'Queues to publish. Leave empty to publish all.',
'vhosts':
'A list of vhosts and queues for which we want to collect',
'queues_ignored':
'A list of queues or regexes for queue names not to report on.',
'cluster':
'If this node is part of a cluster, will collect metrics on the'
' cluster health',
'query_individual_queues':
'If specific queues are set, query their metrics individually.'
' When this is False, queue metrics will be queried in bulk and'
' filtered, which can time out for vhosts with many queues.'
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(RabbitMQCollector, self).get_default_config()
config.update({
'path': 'rabbitmq',
'host': 'localhost:55672',
'user': 'guest',
'password': 'guest',
'replace_dot': False,
'replace_slash': False,
'queues_ignored': '',
'cluster': False,
'scheme': 'http',
'query_individual_queues': False,
})
return config
def collect_health(self):
health_metrics = [
'fd_used',
'fd_total',
'mem_used',
'mem_limit',
'sockets_used',
'sockets_total',
'disk_free_limit',
'disk_free',
'proc_used',
'proc_total',
]
try:
client = RabbitMQClient(self.log,
self.config['host'],
self.config['user'],
self.config['password'],
scheme=self.config['scheme'])
node_name = client.get_overview()['node']
node_data = client.get_node(node_name)
for metric in health_metrics:
self.publish('health.{}'.format(metric), node_data[metric])
if self.config['cluster']:
self.publish('cluster.partitions',
len(node_data['partitions']))
content = client.get_nodes()
self.publish('cluster.nodes', len(content))
        except Exception:
            self.log.exception("Couldn't connect to rabbitmq")
return {}
def get_queue_metrics(self, client, vhost, queues):
        # Allow the use of an asterisk to glob the queues, but replace it
        # with an empty string to match how the legacy config behaved.
if queues == "*":
queues = ""
allowed_queues = queues.split()
matchers = []
if self.config['queues_ignored']:
for reg in self.config['queues_ignored'].split():
matchers.append(re.compile(reg))
if len(allowed_queues) and self.config['query_individual_queues']:
for queue_name in allowed_queues:
if matchers and any(
[m.match(queue_name) for m in matchers]):
continue
queue = client.get_queue(vhost, queue_name)
if queue is not None:
yield queue
else:
for queue in client.get_queues(vhost):
# If queues are defined and it doesn't match, then skip.
if ((queue['name'] not in allowed_queues and
len(allowed_queues) > 0)):
continue
if matchers and any(
[m.match(queue['name']) for m in matchers]):
continue
yield queue
def get_vhost_conf(self, vhost_names):
legacy = False
if 'vhosts' in self.config:
vhost_conf = self.config['vhosts']
else:
# Legacy configurations, those that don't include the [vhosts]
# section require special care so that we do not break metric
# gathering for people that were using this collector before
# the update to support vhosts.
legacy = True
if 'queues' in self.config:
vhost_conf = {"*": self.config['queues']}
else:
vhost_conf = {"*": ""}
if "*" in vhost_conf:
for vhost in vhost_names:
# Copy the glob queue list to each vhost not
# specifically defined in the configuration.
if vhost not in vhost_conf:
vhost_conf[vhost] = vhost_conf['*']
del vhost_conf["*"]
return vhost_conf, legacy
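    # Worked example (added for clarity): with vhost_names == ['/', 'orders'] and
    # a configured vhosts section of {'*': 'queue_a queue_b', 'orders': ''}, the
    # '*' entry is copied to every vhost without an explicit entry, yielding
    # {'/': 'queue_a queue_b', 'orders': ''}. legacy is True only when no vhosts
    # section exists at all, which preserves the old 'queues.<name>' metric prefix.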
def collect(self):
self.collect_health()
try:
client = RabbitMQClient(self.log,
self.config['host'],
self.config['user'],
self.config['password'],
scheme=self.config['scheme'])
vhost_names = client.get_vhost_names()
vhost_conf, legacy = self.get_vhost_conf(vhost_names)
# Iterate all vhosts in our vhosts configurations
for vhost, queues in vhost_conf.iteritems():
vhost_name = vhost
if self.config['replace_dot']:
vhost_name = vhost_name.replace(
'.', self.config['replace_dot'])
if self.config['replace_slash']:
vhost_name = vhost_name.replace(
'/', self.config['replace_slash'])
for queue in self.get_queue_metrics(
client, vhost, queues
):
for key in queue:
prefix = "queues"
if not legacy:
prefix = "vhosts.%s.%s" % (vhost_name, "queues")
queue_name = queue['name']
if self.config['replace_dot']:
queue_name = queue_name.replace(
'.', self.config['replace_dot'])
if self.config['replace_slash']:
queue_name = queue_name.replace(
'/', self.config['replace_slash'])
name = '{}.{}'.format(prefix, queue_name)
self._publish_metrics(name, [], key, queue)
overview = client.get_overview()
for key in overview:
self._publish_metrics('', [], key, overview)
        except Exception:
self.log.exception('An error occurred collecting from RabbitMQ')
return {}
def _publish_metrics(self, name, prev_keys, key, data):
"""Recursively publish keys"""
value = data[key]
keys = prev_keys + [key]
if isinstance(value, dict):
for new_key in value:
self._publish_metrics(name, keys, new_key, value)
elif isinstance(value, (float, int, long)):
joined_keys = '.'.join(keys)
if name:
publish_key = '{}.{}'.format(name, joined_keys)
else:
publish_key = joined_keys
if isinstance(value, bool):
value = int(value)
self.publish(publish_key, value)
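# Minimal sketch (illustrative only, not used by the collector) of how
# _publish_metrics flattens a nested stats document into dotted metric names.
# The sample payload in the usage note is hypothetical; real queue documents
# come from the RabbitMQ management API.
def _example_flatten(name, data, publish):
    """Recursively emit (metric_name, value) pairs for numeric leaves."""
    for key in data:
        value = data[key]
        if isinstance(value, dict):
            _example_flatten('%s.%s' % (name, key), value, publish)
        elif isinstance(value, (int, float)):
            publish('%s.%s' % (name, key), value)
# Example: _example_flatten('queues.myqueue',
#                           {'messages': 5, 'message_stats': {'ack': 2}},
#                           lambda k, v: None)
# visits 'queues.myqueue.messages' -> 5 and 'queues.myqueue.message_stats.ack' -> 2.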
|
import os
import os.path as op
import shutil
import pytest
from numpy.testing import assert_array_equal
import mne
from mne.datasets.testing import data_path, requires_testing_data
from mne.io import read_raw_persyst
from mne.io.tests.test_raw import _test_raw_reader
from mne.utils import run_tests_if_main
fname_lay = op.join(
data_path(download=False), 'Persyst',
'sub-pt1_ses-02_task-monitor_acq-ecog_run-01_clip2.lay')
fname_dat = op.join(
data_path(download=False), 'Persyst',
'sub-pt1_ses-02_task-monitor_acq-ecog_run-01_clip2.dat')
@requires_testing_data
def test_persyst_lay_load():
"""Test reading Persyst files using path to header file."""
raw = read_raw_persyst(fname_lay, preload=False)
# Test data import
assert raw.info['sfreq'] == 200
assert raw.preload is False
# load raw data
raw.load_data()
assert raw._data.shape == (83, 847)
assert raw.preload is True
# defaults channels to EEG
raw = raw.pick_types(eeg=True)
assert len(raw.ch_names) == 83
# no "-Ref" in channel names
assert all(['-ref' not in ch.lower()
for ch in raw.ch_names])
# test with preload True
raw = read_raw_persyst(fname_lay, preload=True)
@requires_testing_data
def test_persyst_raw():
"""Test reading Persyst files using path to header file."""
raw = read_raw_persyst(fname_lay, preload=False)
# defaults channels to EEG
raw = raw.pick_types(eeg=True)
# get data
data, times = raw.get_data(start=200, return_times=True)
assert data.shape == (83, 647)
# seconds should match up to what is in the file
assert times.min() == 1.0
assert times.max() == 4.23
# get data
data = raw.get_data(start=200, stop=400)
assert data.shape == (83, 200)
# data should have been set correctly
assert not data.min() == 0 and not data.max() == 0
first_ch_data = raw.get_data(picks=[0], start=200, stop=400)
assert_array_equal(first_ch_data.squeeze(), data[0, :])
@requires_testing_data
def test_persyst_dates():
"""Test different Persyst date formats for meas date."""
# now test what if you change contents of the lay file
out_dir = mne.utils._TempDir()
new_fname_lay = op.join(out_dir, op.basename(fname_lay))
new_fname_dat = op.join(out_dir, op.basename(fname_dat))
shutil.copy(fname_dat, new_fname_dat)
# reformat the lay file to have testdate with
# "/" character
with open(fname_lay, "r") as fin:
with open(new_fname_lay, 'w') as fout:
# for each line in the input file
for idx, line in enumerate(fin):
if line.startswith('TestDate'):
line = 'TestDate=01/23/2000\n'
fout.write(line)
# file should update correctly with datetime
raw = read_raw_persyst(new_fname_lay)
assert raw.info['meas_date'].month == 1
assert raw.info['meas_date'].day == 23
assert raw.info['meas_date'].year == 2000
# reformat the lay file to have testdate with
# "-" character
os.remove(new_fname_lay)
with open(fname_lay, "r") as fin:
with open(new_fname_lay, 'w') as fout:
# for each line in the input file
for idx, line in enumerate(fin):
if line.startswith('TestDate'):
line = 'TestDate=24-01-2000\n'
fout.write(line)
# file should update correctly with datetime
raw = read_raw_persyst(new_fname_lay)
assert raw.info['meas_date'].month == 1
assert raw.info['meas_date'].day == 24
assert raw.info['meas_date'].year == 2000
@requires_testing_data
def test_persyst_wrong_file(tmpdir):
"""Test reading Persyst files when passed in wrong file path."""
with pytest.raises(FileNotFoundError, match='The path you'):
read_raw_persyst(fname_dat, preload=True)
    out_dir = str(tmpdir)
new_fname_lay = op.join(out_dir, op.basename(fname_lay))
new_fname_dat = op.join(out_dir, op.basename(fname_dat))
shutil.copy(fname_lay, new_fname_lay)
# without a .dat file, reader should break
desired_err_msg = \
'The data path you specified does ' \
'not exist for the lay path, ' \
'sub-pt1_ses-02_task-monitor_acq-ecog_run-01_clip2.lay'
with pytest.raises(FileNotFoundError, match=desired_err_msg):
read_raw_persyst(new_fname_lay, preload=True)
# once you copy over the .dat file things should work
shutil.copy(fname_dat, new_fname_dat)
read_raw_persyst(new_fname_lay, preload=True)
@requires_testing_data
def test_persyst_standard():
"""Test standard operations."""
_test_raw_reader(read_raw_persyst, fname=fname_lay)
@requires_testing_data
def test_persyst_errors():
"""Test reading Persyst files when passed in wrong file path."""
out_dir = mne.utils._TempDir()
new_fname_lay = op.join(out_dir, op.basename(fname_lay))
new_fname_dat = op.join(out_dir, op.basename(fname_dat))
shutil.copy(fname_dat, new_fname_dat)
# reformat the lay file
with open(fname_lay, "r") as fin:
with open(new_fname_lay, 'w') as fout:
# for each line in the input file
for idx, line in enumerate(fin):
if idx == 1:
line = line.replace('=', ',')
fout.write(line)
# file should break
with pytest.raises(RuntimeError, match='The line'):
read_raw_persyst(new_fname_lay)
# reformat the lay file
os.remove(new_fname_lay)
with open(fname_lay, "r") as fin:
with open(new_fname_lay, 'w') as fout:
# for each line in the input file
for idx, line in enumerate(fin):
if line.startswith('WaveformCount'):
line = 'WaveformCount=1\n'
fout.write(line)
# file should break
with pytest.raises(RuntimeError, match='Channels in lay '
'file do not'):
read_raw_persyst(new_fname_lay)
# reformat the lay file
os.remove(new_fname_lay)
with open(fname_lay, "r") as fin:
with open(new_fname_lay, 'w') as fout:
# for each line in the input file
for idx, line in enumerate(fin):
if line.startswith('File'):
line = f'File=/{op.basename(fname_dat)}\n'
fout.write(line)
# file should break
with pytest.raises(FileNotFoundError, match='The data path '
'you specified'):
read_raw_persyst(new_fname_lay)
# reformat the lay file to have testdate
# improperly specified
os.remove(new_fname_lay)
with open(fname_lay, "r") as fin:
with open(new_fname_lay, 'w') as fout:
# for each line in the input file
for idx, line in enumerate(fin):
if line.startswith('TestDate'):
line = 'TestDate=Jan 23rd 2000\n'
fout.write(line)
# file should not read in meas date
with pytest.warns(RuntimeWarning,
match='Cannot read in the measurement date'):
raw = read_raw_persyst(new_fname_lay)
assert raw.info['meas_date'] is None
run_tests_if_main()
|
from flask import current_app
from flask_restful import inputs
from flask_restful.reqparse import RequestParser
from marshmallow import fields, validate, validates_schema, post_load, pre_load, post_dump
from marshmallow.exceptions import ValidationError
from lemur.authorities.schemas import AuthorityNestedOutputSchema
from lemur.certificates import utils as cert_utils
from lemur.common import missing, utils, validators
from lemur.common.fields import ArrowDateTime, Hex
from lemur.common.schema import LemurInputSchema, LemurOutputSchema
from lemur.constants import CERTIFICATE_KEY_TYPES, CRLReason
from lemur.destinations.schemas import DestinationNestedOutputSchema
from lemur.dns_providers.schemas import DnsProvidersNestedOutputSchema
from lemur.domains.schemas import DomainNestedOutputSchema
from lemur.notifications import service as notification_service
from lemur.notifications.schemas import NotificationNestedOutputSchema
from lemur.policies.schemas import RotationPolicyNestedOutputSchema
from lemur.roles import service as roles_service
from lemur.roles.schemas import RoleNestedOutputSchema
from lemur.schemas import (
AssociatedAuthoritySchema,
AssociatedDestinationSchema,
AssociatedCertificateSchema,
AssociatedNotificationSchema,
AssociatedDnsProviderSchema,
PluginInputSchema,
ExtensionSchema,
AssociatedRoleSchema,
EndpointNestedOutputSchema,
AssociatedRotationPolicySchema,
)
from lemur.users.schemas import UserNestedOutputSchema
class CertificateSchema(LemurInputSchema):
owner = fields.Email(required=True)
description = fields.String(missing="", allow_none=True)
class CertificateCreationSchema(CertificateSchema):
@post_load
def default_notification(self, data):
if not data["notifications"]:
data[
"notifications"
] += notification_service.create_default_expiration_notifications(
"DEFAULT_{0}".format(data["owner"].split("@")[0].upper()),
[data["owner"]],
)
data[
"notifications"
] += notification_service.create_default_expiration_notifications(
"DEFAULT_SECURITY",
current_app.config.get("LEMUR_SECURITY_TEAM_EMAIL"),
current_app.config.get("LEMUR_SECURITY_TEAM_EMAIL_INTERVALS", None),
)
return data
class CertificateInputSchema(CertificateCreationSchema):
name = fields.String()
common_name = fields.String(required=True, validate=validators.common_name)
authority = fields.Nested(AssociatedAuthoritySchema, required=True)
validity_start = ArrowDateTime(allow_none=True)
validity_end = ArrowDateTime(allow_none=True)
validity_years = fields.Integer(allow_none=True)
destinations = fields.Nested(AssociatedDestinationSchema, missing=[], many=True)
notifications = fields.Nested(AssociatedNotificationSchema, missing=[], many=True)
replaces = fields.Nested(AssociatedCertificateSchema, missing=[], many=True)
replacements = fields.Nested(
AssociatedCertificateSchema, missing=[], many=True
) # deprecated
roles = fields.Nested(AssociatedRoleSchema, missing=[], many=True)
dns_provider = fields.Nested(
AssociatedDnsProviderSchema, missing=None, allow_none=True, required=False
)
csr = fields.String(allow_none=True, validate=validators.csr)
key_type = fields.String(
validate=validate.OneOf(CERTIFICATE_KEY_TYPES), missing="RSA2048"
)
notify = fields.Boolean(default=True)
rotation = fields.Boolean()
rotation_policy = fields.Nested(
AssociatedRotationPolicySchema,
missing={"name": "default"},
allow_none=True,
default={"name": "default"},
)
# certificate body fields
organizational_unit = fields.String(
missing=lambda: current_app.config.get("LEMUR_DEFAULT_ORGANIZATIONAL_UNIT")
)
organization = fields.String(
missing=lambda: current_app.config.get("LEMUR_DEFAULT_ORGANIZATION")
)
location = fields.String()
country = fields.String(
missing=lambda: current_app.config.get("LEMUR_DEFAULT_COUNTRY")
)
state = fields.String(missing=lambda: current_app.config.get("LEMUR_DEFAULT_STATE"))
extensions = fields.Nested(ExtensionSchema)
@validates_schema
def validate_authority(self, data):
if 'authority' not in data:
raise ValidationError("Missing Authority.")
if isinstance(data["authority"], str):
raise ValidationError("Authority not found.")
if not data["authority"].active:
raise ValidationError("The authority is inactive.", ["authority"])
@validates_schema
def validate_dates(self, data):
validators.dates(data)
@pre_load
def load_data(self, data):
if data.get("replacements"):
data["replaces"] = data[
"replacements"
] # TODO remove when field is deprecated
if data.get("csr"):
csr_sans = cert_utils.get_sans_from_csr(data["csr"])
if not data.get("extensions"):
data["extensions"] = {"subAltNames": {"names": []}}
elif not data["extensions"].get("subAltNames"):
data["extensions"]["subAltNames"] = {"names": []}
elif not data["extensions"]["subAltNames"].get("names"):
data["extensions"]["subAltNames"]["names"] = []
data["extensions"]["subAltNames"]["names"] = csr_sans
common_name = cert_utils.get_cn_from_csr(data["csr"])
if common_name:
data["common_name"] = common_name
key_type = cert_utils.get_key_type_from_csr(data["csr"])
if key_type:
data["key_type"] = key_type
# This code will be exercised for certificate import (without CSR)
if data.get("key_type") is None:
if data.get("body"):
data["key_type"] = utils.get_key_type_from_certificate(data["body"])
else:
data["key_type"] = "RSA2048" # default value
return missing.convert_validity_years(data)
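    # Illustrative note (added for clarity): when a CSR is supplied, values parsed
    # from it win over what the client posted. For example, a CSR containing the
    # SAN 'a.example.com' (a made-up hostname) always ends up as
    # data['extensions']['subAltNames']['names'] == ['a.example.com'], and any
    # common name or key type found in the CSR overrides the submitted fields.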
class CertificateEditInputSchema(CertificateSchema):
owner = fields.String()
notify = fields.Boolean()
rotation = fields.Boolean()
destinations = fields.Nested(AssociatedDestinationSchema, missing=[], many=True)
notifications = fields.Nested(AssociatedNotificationSchema, missing=[], many=True)
replaces = fields.Nested(AssociatedCertificateSchema, missing=[], many=True)
replacements = fields.Nested(
AssociatedCertificateSchema, missing=[], many=True
) # deprecated
roles = fields.Nested(AssociatedRoleSchema, missing=[], many=True)
@pre_load
def load_data(self, data):
if data.get("replacements"):
data["replaces"] = data[
"replacements"
] # TODO remove when field is deprecated
if data.get("owner"):
# Check if role already exists. This avoids adding duplicate role.
if data.get("roles") and any(r.get("name") == data["owner"] for r in data["roles"]):
return data
# Add required role
owner_role = roles_service.get_or_create(
data["owner"],
description=f"Auto generated role based on owner: {data['owner']}"
)
# Put role info in correct format using RoleNestedOutputSchema
owner_role_dict = RoleNestedOutputSchema().dump(owner_role).data
if data.get("roles"):
data["roles"].append(owner_role_dict)
else:
data["roles"] = [owner_role_dict]
return data
@post_load
def enforce_notifications(self, data):
"""
Add default notification for current owner if none exist.
This ensures that the default notifications are added in the event of owner change.
Old owner notifications are retained unless explicitly removed later in the code path.
:param data:
:return:
"""
if data.get("owner"):
notification_name = "DEFAULT_{0}".format(
data["owner"].split("@")[0].upper()
)
            # Even if one default notification exists, return.
            # This allows a user to remove unwanted default notifications for the current owner.
if any(n.label.startswith(notification_name) for n in data["notifications"]):
return data
data[
"notifications"
] += notification_service.create_default_expiration_notifications(
notification_name, [data["owner"]]
)
return data
class CertificateNestedOutputSchema(LemurOutputSchema):
__envelope__ = False
id = fields.Integer()
name = fields.String()
owner = fields.Email()
creator = fields.Nested(UserNestedOutputSchema)
description = fields.String()
status = fields.String()
bits = fields.Integer()
body = fields.String()
chain = fields.String()
csr = fields.String()
active = fields.Boolean()
rotation = fields.Boolean()
notify = fields.Boolean()
rotation_policy = fields.Nested(RotationPolicyNestedOutputSchema)
# Note aliasing is the first step in deprecating these fields.
cn = fields.String() # deprecated
common_name = fields.String(attribute="cn")
not_after = fields.DateTime() # deprecated
validity_end = ArrowDateTime(attribute="not_after")
not_before = fields.DateTime() # deprecated
validity_start = ArrowDateTime(attribute="not_before")
issuer = fields.Nested(AuthorityNestedOutputSchema)
class CertificateCloneSchema(LemurOutputSchema):
__envelope__ = False
description = fields.String()
common_name = fields.String()
class CertificateOutputSchema(LemurOutputSchema):
id = fields.Integer()
external_id = fields.String()
bits = fields.Integer()
body = fields.String()
chain = fields.String()
csr = fields.String()
deleted = fields.Boolean(default=False)
description = fields.String()
issuer = fields.String()
name = fields.String()
dns_provider_id = fields.Integer(required=False, allow_none=True)
date_created = ArrowDateTime()
resolved = fields.Boolean(required=False, allow_none=True)
resolved_cert_id = fields.Integer(required=False, allow_none=True)
rotation = fields.Boolean()
# Note aliasing is the first step in deprecating these fields.
notify = fields.Boolean()
active = fields.Boolean(attribute="notify")
has_private_key = fields.Boolean()
cn = fields.String()
common_name = fields.String(attribute="cn")
distinguished_name = fields.String()
not_after = fields.DateTime()
validity_end = ArrowDateTime(attribute="not_after")
not_before = fields.DateTime()
validity_start = ArrowDateTime(attribute="not_before")
owner = fields.Email()
san = fields.Boolean()
serial = fields.String()
serial_hex = Hex(attribute="serial")
signing_algorithm = fields.String()
key_type = fields.String(allow_none=True)
status = fields.String()
user = fields.Nested(UserNestedOutputSchema)
extensions = fields.Nested(ExtensionSchema)
# associated objects
domains = fields.Nested(DomainNestedOutputSchema, many=True)
destinations = fields.Nested(DestinationNestedOutputSchema, many=True)
notifications = fields.Nested(NotificationNestedOutputSchema, many=True)
replaces = fields.Nested(CertificateNestedOutputSchema, many=True)
authority = fields.Nested(AuthorityNestedOutputSchema)
dns_provider = fields.Nested(DnsProvidersNestedOutputSchema)
roles = fields.Nested(RoleNestedOutputSchema, many=True)
endpoints = fields.Nested(EndpointNestedOutputSchema, many=True, missing=[])
replaced_by = fields.Nested(
CertificateNestedOutputSchema, many=True, attribute="replaced"
)
rotation_policy = fields.Nested(RotationPolicyNestedOutputSchema)
country = fields.String()
location = fields.String()
state = fields.String()
organization = fields.String()
organizational_unit = fields.String()
@post_dump
def handle_subject_details(self, data):
subject_details = ["country", "state", "location", "organization", "organizational_unit"]
        # Remove subject details if the authority is CA/Browser Forum compliant; the code
        # will use the default set of values in that case. If the CA/Browser Forum compliance
        # of an authority is unknown (None), it is safe to fall back to the default values,
        # so the condition below checks for 'not False', i.e. True or None.
if data.get("authority"):
is_cab_compliant = data.get("authority").get("isCabCompliant")
if is_cab_compliant is not False:
for field in subject_details:
data.pop(field, None)
# Removing subject fields if None, else it complains in de-serialization
for field in subject_details:
if field in data and data[field] is None:
                data.pop(field)
        return data
class CertificateShortOutputSchema(LemurOutputSchema):
id = fields.Integer()
name = fields.String()
owner = fields.Email()
notify = fields.Boolean()
authority = fields.Nested(AuthorityNestedOutputSchema)
issuer = fields.String()
cn = fields.String()
class CertificateUploadInputSchema(CertificateCreationSchema):
name = fields.String()
authority = fields.Nested(AssociatedAuthoritySchema, required=False)
notify = fields.Boolean(missing=True)
external_id = fields.String(missing=None, allow_none=True)
private_key = fields.String()
body = fields.String(required=True)
chain = fields.String(missing=None, allow_none=True)
csr = fields.String(required=False, allow_none=True, validate=validators.csr)
key_type = fields.String()
destinations = fields.Nested(AssociatedDestinationSchema, missing=[], many=True)
notifications = fields.Nested(AssociatedNotificationSchema, missing=[], many=True)
replaces = fields.Nested(AssociatedCertificateSchema, missing=[], many=True)
roles = fields.Nested(AssociatedRoleSchema, missing=[], many=True)
@validates_schema
def keys(self, data):
if data.get("destinations"):
if not data.get("private_key"):
raise ValidationError("Destinations require private key.")
@validates_schema
def validate_cert_private_key_chain(self, data):
cert = None
key = None
if data.get("body"):
try:
cert = utils.parse_certificate(data["body"])
except ValueError:
raise ValidationError(
"Public certificate presented is not valid.", field_names=["body"]
)
if data.get("private_key"):
try:
key = utils.parse_private_key(data["private_key"])
except ValueError:
raise ValidationError(
"Private key presented is not valid.", field_names=["private_key"]
)
if cert and key:
# Throws ValidationError
validators.verify_private_key_match(key, cert)
if data.get("chain"):
try:
chain = utils.parse_cert_chain(data["chain"])
except ValueError:
raise ValidationError(
"Invalid certificate in certificate chain.", field_names=["chain"]
)
# Throws ValidationError
validators.verify_cert_chain([cert] + chain)
@pre_load
def load_data(self, data):
if data.get("body"):
try:
data["key_type"] = utils.get_key_type_from_certificate(data["body"])
except ValueError:
raise ValidationError(
"Public certificate presented is not valid.", field_names=["body"]
                )
        return data
class CertificateExportInputSchema(LemurInputSchema):
plugin = fields.Nested(PluginInputSchema)
class CertificateNotificationOutputSchema(LemurOutputSchema):
description = fields.String()
issuer = fields.String()
name = fields.String()
owner = fields.Email()
user = fields.Nested(UserNestedOutputSchema)
validity_end = ArrowDateTime(attribute="not_after")
replaced_by = fields.Nested(
CertificateNestedOutputSchema, many=True, attribute="replaced"
)
endpoints = fields.Nested(EndpointNestedOutputSchema, many=True, missing=[])
class CertificateRevokeSchema(LemurInputSchema):
comments = fields.String()
crl_reason = fields.String(validate=validate.OneOf(CRLReason.__members__), missing="unspecified")
certificates_list_request_parser = RequestParser()
certificates_list_request_parser.add_argument("short", type=inputs.boolean, default=False, location="args")
def certificates_list_output_schema_factory():
args = certificates_list_request_parser.parse_args()
if args["short"]:
return certificates_short_output_schema
else:
return certificates_output_schema
certificate_input_schema = CertificateInputSchema()
certificate_output_schema = CertificateOutputSchema()
certificates_output_schema = CertificateOutputSchema(many=True)
certificates_short_output_schema = CertificateShortOutputSchema(many=True)
certificate_upload_input_schema = CertificateUploadInputSchema()
certificate_export_input_schema = CertificateExportInputSchema()
certificate_edit_input_schema = CertificateEditInputSchema()
certificate_notification_output_schema = CertificateNotificationOutputSchema()
certificate_revoke_schema = CertificateRevokeSchema()
|
from collections.abc import Mapping
from itertools import chain
import logging
import unittest
import codecs
import os
import os.path
import scipy.sparse
import gensim
from gensim.corpora import Dictionary
from gensim.utils import to_utf8
from gensim.test.utils import get_tmpfile, common_texts
class TestDictionary(unittest.TestCase):
def setUp(self):
self.texts = common_texts
def testDocFreqOneDoc(self):
texts = [['human', 'interface', 'computer']]
d = Dictionary(texts)
expected = {0: 1, 1: 1, 2: 1}
self.assertEqual(d.dfs, expected)
def testDocFreqAndToken2IdForSeveralDocsWithOneWord(self):
# two docs
texts = [['human'], ['human']]
d = Dictionary(texts)
expected = {0: 2}
self.assertEqual(d.dfs, expected)
# only one token (human) should exist
expected = {'human': 0}
self.assertEqual(d.token2id, expected)
# three docs
texts = [['human'], ['human'], ['human']]
d = Dictionary(texts)
expected = {0: 3}
self.assertEqual(d.dfs, expected)
# only one token (human) should exist
expected = {'human': 0}
self.assertEqual(d.token2id, expected)
# four docs
texts = [['human'], ['human'], ['human'], ['human']]
d = Dictionary(texts)
expected = {0: 4}
self.assertEqual(d.dfs, expected)
# only one token (human) should exist
expected = {'human': 0}
self.assertEqual(d.token2id, expected)
def testDocFreqForOneDocWithSeveralWord(self):
# two words
texts = [['human', 'cat']]
d = Dictionary(texts)
expected = {0: 1, 1: 1}
self.assertEqual(d.dfs, expected)
# three words
texts = [['human', 'cat', 'minors']]
d = Dictionary(texts)
expected = {0: 1, 1: 1, 2: 1}
self.assertEqual(d.dfs, expected)
def testDocFreqAndCollectionFreq(self):
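        # dfs is the document frequency (number of documents containing a token),
        # while cfs is the collection frequency (total occurrences across the
        # corpus) -- hence cfs stays 3 below while dfs depends on how the
        # occurrences are split over documents.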
# one doc
texts = [['human', 'human', 'human']]
d = Dictionary(texts)
self.assertEqual(d.cfs, {0: 3})
self.assertEqual(d.dfs, {0: 1})
# two docs
texts = [['human', 'human'], ['human']]
d = Dictionary(texts)
self.assertEqual(d.cfs, {0: 3})
self.assertEqual(d.dfs, {0: 2})
# three docs
texts = [['human'], ['human'], ['human']]
d = Dictionary(texts)
self.assertEqual(d.cfs, {0: 3})
self.assertEqual(d.dfs, {0: 3})
def testBuild(self):
d = Dictionary(self.texts)
# Since we don't specify the order in which dictionaries are built,
# we cannot reliably test for the mapping; only the keys and values.
expected_keys = list(range(12))
expected_values = [2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
self.assertEqual(sorted(d.dfs.keys()), expected_keys)
self.assertEqual(sorted(d.dfs.values()), expected_values)
expected_keys = sorted([
'computer', 'eps', 'graph', 'human', 'interface',
'minors', 'response', 'survey', 'system', 'time', 'trees', 'user'
])
expected_values = list(range(12))
self.assertEqual(sorted(d.token2id.keys()), expected_keys)
self.assertEqual(sorted(d.token2id.values()), expected_values)
def testMerge(self):
d = Dictionary(self.texts)
f = Dictionary(self.texts[:3])
g = Dictionary(self.texts[3:])
f.merge_with(g)
self.assertEqual(sorted(d.token2id.keys()), sorted(f.token2id.keys()))
def testFilter(self):
d = Dictionary(self.texts)
d.filter_extremes(no_below=2, no_above=1.0, keep_n=4)
dfs_expected = {0: 3, 1: 3, 2: 3, 3: 3}
cfs_expected = {0: 4, 1: 3, 2: 3, 3: 3}
self.assertEqual(d.dfs, dfs_expected)
self.assertEqual(d.cfs, cfs_expected)
def testFilterKeepTokens_keepTokens(self):
# provide keep_tokens argument, keep the tokens given
d = Dictionary(self.texts)
d.filter_extremes(no_below=3, no_above=1.0, keep_tokens=['human', 'survey'])
expected = {'graph', 'trees', 'human', 'system', 'user', 'survey'}
self.assertEqual(set(d.token2id.keys()), expected)
def testFilterKeepTokens_unchangedFunctionality(self):
# do not provide keep_tokens argument, filter_extremes functionality is unchanged
d = Dictionary(self.texts)
d.filter_extremes(no_below=3, no_above=1.0)
expected = {'graph', 'trees', 'system', 'user'}
self.assertEqual(set(d.token2id.keys()), expected)
def testFilterKeepTokens_unseenToken(self):
# do provide keep_tokens argument with unseen tokens, filter_extremes functionality is unchanged
d = Dictionary(self.texts)
d.filter_extremes(no_below=3, no_above=1.0, keep_tokens=['unknown_token'])
expected = {'graph', 'trees', 'system', 'user'}
self.assertEqual(set(d.token2id.keys()), expected)
def testFilterKeepTokens_keepn(self):
        # keep_tokens should also work if the keep_n parameter is used, but only
        # up to a maximum of n tokens (so if keep_n < len(keep_tokens), some of the
        # tokens to keep still get removed to reduce the size to keep_n!)
d = Dictionary(self.texts)
        # Note: there are four tokens with freq 3, all the others have frequency 2
# in self.texts. In order to make the test result deterministic, we add
# 2 tokens of frequency one
d.add_documents([['worda'], ['wordb']])
# this should keep the 3 tokens with freq 3 and the one we want to keep
d.filter_extremes(keep_n=5, no_below=0, no_above=1.0, keep_tokens=['worda'])
expected = {'graph', 'trees', 'system', 'user', 'worda'}
self.assertEqual(set(d.token2id.keys()), expected)
def testFilterMostFrequent(self):
d = Dictionary(self.texts)
d.filter_n_most_frequent(4)
expected = {0: 2, 1: 2, 2: 2, 3: 2, 4: 2, 5: 2, 6: 2, 7: 2}
self.assertEqual(d.dfs, expected)
def testFilterTokens(self):
self.maxDiff = 10000
d = Dictionary(self.texts)
removed_word = d[0]
d.filter_tokens([0])
expected = {
'computer': 0, 'eps': 8, 'graph': 10, 'human': 1,
'interface': 2, 'minors': 11, 'response': 3, 'survey': 4,
'system': 5, 'time': 6, 'trees': 9, 'user': 7
}
del expected[removed_word]
self.assertEqual(sorted(d.token2id.keys()), sorted(expected.keys()))
expected[removed_word] = len(expected)
d.add_documents([[removed_word]])
self.assertEqual(sorted(d.token2id.keys()), sorted(expected.keys()))
def test_doc2bow(self):
d = Dictionary([["žluťoučký"], ["žluťoučký"]])
# pass a utf8 string
self.assertEqual(d.doc2bow(["žluťoučký"]), [(0, 1)])
        # doc2bow must raise a TypeError if passed a string instead of an array of strings by accident
self.assertRaises(TypeError, d.doc2bow, "žluťoučký")
# unicode must be converted to utf8
self.assertEqual(d.doc2bow([u'\u017elu\u0165ou\u010dk\xfd']), [(0, 1)])
def test_saveAsText(self):
"""`Dictionary` can be saved as textfile. """
tmpf = get_tmpfile('save_dict_test.txt')
small_text = [
["prvé", "slovo"],
["slovo", "druhé"],
["druhé", "slovo"]
]
d = Dictionary(small_text)
d.save_as_text(tmpf)
with codecs.open(tmpf, 'r', encoding='utf-8') as file:
serialized_lines = file.readlines()
self.assertEqual(serialized_lines[0], u"3\n")
self.assertEqual(len(serialized_lines), 4)
        # We do not know which word will have which index
self.assertEqual(serialized_lines[1][1:], u"\tdruhé\t2\n")
self.assertEqual(serialized_lines[2][1:], u"\tprvé\t1\n")
self.assertEqual(serialized_lines[3][1:], u"\tslovo\t3\n")
d.save_as_text(tmpf, sort_by_word=False)
with codecs.open(tmpf, 'r', encoding='utf-8') as file:
serialized_lines = file.readlines()
self.assertEqual(serialized_lines[0], u"3\n")
self.assertEqual(len(serialized_lines), 4)
self.assertEqual(serialized_lines[1][1:], u"\tslovo\t3\n")
self.assertEqual(serialized_lines[2][1:], u"\tdruhé\t2\n")
self.assertEqual(serialized_lines[3][1:], u"\tprvé\t1\n")
def test_loadFromText_legacy(self):
"""
`Dictionary` can be loaded from textfile in legacy format.
Legacy format does not have num_docs on the first line.
"""
tmpf = get_tmpfile('load_dict_test_legacy.txt')
no_num_docs_serialization = to_utf8("1\tprvé\t1\n2\tslovo\t2\n")
with open(tmpf, "wb") as file:
file.write(no_num_docs_serialization)
d = Dictionary.load_from_text(tmpf)
self.assertEqual(d.token2id[u"prvé"], 1)
self.assertEqual(d.token2id[u"slovo"], 2)
self.assertEqual(d.dfs[1], 1)
self.assertEqual(d.dfs[2], 2)
self.assertEqual(d.num_docs, 0)
def test_loadFromText(self):
"""`Dictionary` can be loaded from textfile."""
tmpf = get_tmpfile('load_dict_test.txt')
no_num_docs_serialization = to_utf8("2\n1\tprvé\t1\n2\tslovo\t2\n")
with open(tmpf, "wb") as file:
file.write(no_num_docs_serialization)
d = Dictionary.load_from_text(tmpf)
self.assertEqual(d.token2id[u"prvé"], 1)
self.assertEqual(d.token2id[u"slovo"], 2)
self.assertEqual(d.dfs[1], 1)
self.assertEqual(d.dfs[2], 2)
self.assertEqual(d.num_docs, 2)
def test_saveAsText_and_loadFromText(self):
"""`Dictionary` can be saved as textfile and loaded again from textfile. """
tmpf = get_tmpfile('dict_test.txt')
for sort_by_word in [True, False]:
d = Dictionary(self.texts)
d.save_as_text(tmpf, sort_by_word=sort_by_word)
self.assertTrue(os.path.exists(tmpf))
d_loaded = Dictionary.load_from_text(tmpf)
self.assertNotEqual(d_loaded, None)
self.assertEqual(d_loaded.token2id, d.token2id)
def test_from_corpus(self):
"""build `Dictionary` from an existing corpus"""
documents = [
"Human machine interface for lab abc computer applications",
"A survey of user opinion of computer system response time",
"The EPS user interface management system",
"System and human system engineering testing of EPS",
"Relation of user perceived response time to error measurement",
"The generation of random binary unordered trees",
"The intersection graph of paths in trees",
"Graph minors IV Widths of trees and well quasi ordering",
"Graph minors A survey"
]
stoplist = set('for a of the and to in'.split())
texts = [
[word for word in document.lower().split() if word not in stoplist]
for document in documents]
# remove words that appear only once
all_tokens = list(chain.from_iterable(texts))
tokens_once = set(word for word in set(all_tokens) if all_tokens.count(word) == 1)
texts = [[word for word in text if word not in tokens_once] for text in texts]
dictionary = Dictionary(texts)
corpus = [dictionary.doc2bow(text) for text in texts]
# Create dictionary from corpus without a token map
dictionary_from_corpus = Dictionary.from_corpus(corpus)
dict_token2id_vals = sorted(dictionary.token2id.values())
dict_from_corpus_vals = sorted(dictionary_from_corpus.token2id.values())
self.assertEqual(dict_token2id_vals, dict_from_corpus_vals)
self.assertEqual(dictionary.dfs, dictionary_from_corpus.dfs)
self.assertEqual(dictionary.num_docs, dictionary_from_corpus.num_docs)
self.assertEqual(dictionary.num_pos, dictionary_from_corpus.num_pos)
self.assertEqual(dictionary.num_nnz, dictionary_from_corpus.num_nnz)
# Create dictionary from corpus with an id=>token map
dictionary_from_corpus_2 = Dictionary.from_corpus(corpus, id2word=dictionary)
self.assertEqual(dictionary.token2id, dictionary_from_corpus_2.token2id)
self.assertEqual(dictionary.dfs, dictionary_from_corpus_2.dfs)
self.assertEqual(dictionary.num_docs, dictionary_from_corpus_2.num_docs)
self.assertEqual(dictionary.num_pos, dictionary_from_corpus_2.num_pos)
self.assertEqual(dictionary.num_nnz, dictionary_from_corpus_2.num_nnz)
# Ensure Sparse2Corpus is compatible with from_corpus
bow = gensim.matutils.Sparse2Corpus(scipy.sparse.rand(10, 100))
dictionary = Dictionary.from_corpus(bow)
self.assertEqual(dictionary.num_docs, 100)
def test_dict_interface(self):
"""Test Python 2 dict-like interface in both Python 2 and 3."""
d = Dictionary(self.texts)
self.assertTrue(isinstance(d, Mapping))
self.assertEqual(list(zip(d.keys(), d.values())), list(d.items()))
# Even in Py3, we want the iter* members.
self.assertEqual(list(d.items()), list(d.iteritems()))
self.assertEqual(list(d.keys()), list(d.iterkeys()))
self.assertEqual(list(d.values()), list(d.itervalues()))
def test_patch_with_special_tokens(self):
special_tokens = {'pad': 0, 'space': 1, 'quake': 3}
corpus = [["máma", "mele", "maso"], ["ema", "má", "máma"]]
d = Dictionary(corpus)
self.assertEqual(len(d.token2id), 5)
d.patch_with_special_tokens(special_tokens)
self.assertEqual(d.token2id['pad'], 0)
self.assertEqual(d.token2id['space'], 1)
self.assertEqual(d.token2id['quake'], 3)
self.assertEqual(len(d.token2id), 8)
self.assertNotIn((0, 1), d.doc2bow(corpus[0]))
self.assertIn((0, 1), d.doc2bow(['pad'] + corpus[0]))
corpus_with_special_tokens = [["máma", "mele", "maso"], ["ema", "má", "máma", "space"]]
d = Dictionary(corpus_with_special_tokens)
self.assertEqual(len(d.token2id), 6)
self.assertNotEqual(d.token2id['space'], 1)
d.patch_with_special_tokens(special_tokens)
self.assertEqual(len(d.token2id), 8)
self.assertEqual(max(d.token2id.values()), 7)
self.assertEqual(d.token2id['space'], 1)
self.assertNotIn((1, 1), d.doc2bow(corpus_with_special_tokens[0]))
self.assertIn((1, 1), d.doc2bow(corpus_with_special_tokens[1]))
# endclass TestDictionary
if __name__ == '__main__':
logging.basicConfig(level=logging.WARNING)
unittest.main()
|
from unittest.mock import Mock
import pandas as pd
import pytest
import pytz
from qstrader.alpha_model.single_signal import SingleSignalAlphaModel
@pytest.mark.parametrize(
'signal,expected_signals',
[
(0.75, {'EQ:SPY': 0.75, 'EQ:AGG': 0.75, 'EQ:GLD': 0.75}),
(-0.25, {'EQ:SPY': -0.25, 'EQ:AGG': -0.25, 'EQ:GLD': -0.25})
]
)
def test_single_signal_alpha_model(signal, expected_signals):
"""
Checks that the single signal alpha model correctly produces
the same signal for each asset in the universe.
"""
universe = Mock()
universe.get_assets.return_value = ['EQ:SPY', 'EQ:AGG', 'EQ:GLD']
alpha = SingleSignalAlphaModel(universe=universe, signal=signal)
dt = pd.Timestamp('2019-01-01 15:00:00', tz=pytz.utc)
assert alpha(dt) == expected_signals
|
from regenmaschine.errors import RainMachineError
from homeassistant import data_entry_flow
from homeassistant.components.rainmachine import CONF_ZONE_RUN_TIME, DOMAIN, config_flow
from homeassistant.config_entries import SOURCE_USER
from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD, CONF_PORT, CONF_SSL
from tests.async_mock import patch
from tests.common import MockConfigEntry
async def test_duplicate_error(hass):
"""Test that errors are shown when duplicates are added."""
conf = {
CONF_IP_ADDRESS: "192.168.1.100",
CONF_PASSWORD: "password",
CONF_PORT: 8080,
CONF_SSL: True,
}
MockConfigEntry(domain=DOMAIN, unique_id="192.168.1.100", data=conf).add_to_hass(
hass
)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data=conf
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_invalid_password(hass):
"""Test that an invalid password throws an error."""
conf = {
CONF_IP_ADDRESS: "192.168.1.100",
CONF_PASSWORD: "bad_password",
CONF_PORT: 8080,
CONF_SSL: True,
}
flow = config_flow.RainMachineFlowHandler()
flow.hass = hass
flow.context = {"source": SOURCE_USER}
with patch(
"regenmaschine.client.Client.load_local",
side_effect=RainMachineError,
):
result = await flow.async_step_user(user_input=conf)
assert result["errors"] == {CONF_PASSWORD: "invalid_auth"}
async def test_options_flow(hass):
"""Test config flow options."""
conf = {
CONF_IP_ADDRESS: "192.168.1.100",
CONF_PASSWORD: "password",
CONF_PORT: 8080,
CONF_SSL: True,
}
config_entry = MockConfigEntry(
domain=DOMAIN,
unique_id="abcde12345",
data=conf,
options={CONF_ZONE_RUN_TIME: 900},
)
config_entry.add_to_hass(hass)
with patch(
"homeassistant.components.rainmachine.async_setup_entry", return_value=True
):
await hass.config_entries.async_setup(config_entry.entry_id)
result = await hass.config_entries.options.async_init(config_entry.entry_id)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "init"
result = await hass.config_entries.options.async_configure(
result["flow_id"], user_input={CONF_ZONE_RUN_TIME: 600}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert config_entry.options == {CONF_ZONE_RUN_TIME: 600}
async def test_show_form(hass):
"""Test that the form is served with no input."""
flow = config_flow.RainMachineFlowHandler()
flow.hass = hass
flow.context = {"source": SOURCE_USER}
result = await flow.async_step_user(user_input=None)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
async def test_step_user(hass):
"""Test that the user step works."""
conf = {
CONF_IP_ADDRESS: "192.168.1.100",
CONF_PASSWORD: "password",
CONF_PORT: 8080,
CONF_SSL: True,
}
flow = config_flow.RainMachineFlowHandler()
flow.hass = hass
flow.context = {"source": SOURCE_USER}
with patch(
"regenmaschine.client.Client.load_local",
return_value=True,
):
result = await flow.async_step_user(user_input=conf)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "192.168.1.100"
assert result["data"] == {
CONF_IP_ADDRESS: "192.168.1.100",
CONF_PASSWORD: "password",
CONF_PORT: 8080,
CONF_SSL: True,
CONF_ZONE_RUN_TIME: 600,
}
|
import os.path as op
import numpy as np
from shutil import copyfile
from datetime import datetime, timezone
import pytest
from numpy.testing import assert_allclose, assert_array_equal
from mne.annotations import events_from_annotations
from mne.bem import _fit_sphere
from mne.datasets import testing
from mne.event import find_events
from mne.io import _loc_to_coil_trans
from mne.io.constants import FIFF
from mne.io.edf import read_raw_bdf
from mne.io.bti import read_raw_bti
from mne.io.curry import read_raw_curry
from mne.utils import check_version, run_tests_if_main, catch_logging
from mne.annotations import read_annotations
from mne.io.curry.curry import (_get_curry_version, _get_curry_file_structure,
_read_events_curry, FILE_EXTENSIONS)
data_dir = testing.data_path(download=False)
curry_dir = op.join(data_dir, "curry")
bdf_file = op.join(data_dir, 'BDF', 'test_bdf_stim_channel.bdf')
bti_rfDC_file = op.join(data_dir, 'BTi', 'erm_HFH', 'c,rfDC')
curry7_rfDC_file = op.join(curry_dir, "c,rfDC Curry 7.dat")
curry8_rfDC_file = op.join(curry_dir, "c,rfDC Curry 8.cdt")
curry7_bdf_file = op.join(curry_dir, "test_bdf_stim_channel Curry 7.dat")
curry7_bdf_ascii_file = op.join(curry_dir,
"test_bdf_stim_channel Curry 7 ASCII.dat")
curry8_bdf_file = op.join(curry_dir, "test_bdf_stim_channel Curry 8.cdt")
curry8_bdf_ascii_file = op.join(curry_dir,
"test_bdf_stim_channel Curry 8 ASCII.cdt")
missing_event_file = op.join(curry_dir, "test_sfreq_0.dat")
Ref_chan_omitted_file = op.join(curry_dir, 'Ref_channel_omitted Curry7.dat')
Ref_chan_omitted_reordered_file = op.join(curry_dir, 'Ref_channel_omitted '
'reordered Curry7.dat')
@pytest.fixture(scope='session')
def bdf_curry_ref():
"""Return a view of the reference bdf used to create the curry files."""
raw = read_raw_bdf(bdf_file, preload=True).drop_channels(['Status'])
return raw
@testing.requires_testing_data
@pytest.mark.parametrize('fname,tol', [
pytest.param(curry7_bdf_file, 1e-7, id='curry 7'),
pytest.param(curry8_bdf_file, 1e-7, id='curry 8'),
pytest.param(curry7_bdf_ascii_file, 1e-4, id='curry 7 ascii'),
pytest.param(curry8_bdf_ascii_file, 1e-4, id='curry 8 ascii'),
])
@pytest.mark.parametrize('preload', [True, False])
def test_read_raw_curry(fname, tol, preload, bdf_curry_ref):
"""Test reading CURRY files."""
with pytest.warns(None) as wrn:
raw = read_raw_curry(fname, preload=preload)
if not check_version('numpy', '1.16') and preload and fname.endswith(
'ASCII.dat'):
assert len(wrn) > 0
else:
assert len(wrn) == 0
assert hasattr(raw, '_data') == preload
assert raw.n_times == bdf_curry_ref.n_times
assert raw.info['sfreq'] == bdf_curry_ref.info['sfreq']
for field in ['kind', 'ch_name']:
assert_array_equal([ch[field] for ch in raw.info['chs']],
[ch[field] for ch in bdf_curry_ref.info['chs']])
raw.verbose = 'error' # don't emit warnings about slow reading
assert_allclose(raw.get_data(), bdf_curry_ref.get_data(), atol=tol)
picks, start, stop = ["C3", "C4"], 200, 800
assert_allclose(
raw.get_data(picks=picks, start=start, stop=stop),
bdf_curry_ref.get_data(picks=picks, start=start, stop=stop),
rtol=tol)
assert raw.info['dev_head_t'] is None
# These values were taken from a different recording but allow us to test
# using our existing files
HPI_CONTENT = """\
FileVersion: 804
NumCoils: 10
0 1 -50.67 50.98 133.15 0.006406 1 46.45 51.51 143.15 0.006789 1 39.38 -26.67 155.51 0.008034 1 -36.72 -39.95 142.83 0.007700 1 1.61 16.95 172.76 0.001788 0 0.00 0.00 0.00 0.000000 0 0.00 0.00 0.00 0.000000 0 0.00 0.00 0.00 0.000000 0 0.00 0.00 0.00 0.000000 0 0.00 0.00 0.00 0.000000
""" # noqa: E501
LM_CONTENT = """
LANDMARKS_MAG1 START
ListDescription = functional landmark positions
ListUnits = mm
ListNrColumns = 3
ListNrRows = 8
ListNrTimepts = 1
ListNrBlocks = 1
ListBinary = 0
ListType = 1
ListTrafoType = 1
ListGridType = 2
ListFirstColumn = 1
ListIndexMin = -1
ListIndexMax = -1
ListIndexAbsMax = -1
LANDMARKS_MAG1 END
LANDMARKS_MAG1 START_LIST # Do not edit!
75.4535 5.32907e-15 2.91434e-16
1.42109e-14 -75.3212 9.71445e-16
-74.4568 -1.42109e-14 2.51188e-15
-59.7558 35.5804 66.822
43.15 43.4107 78.0027
38.8415 -41.1884 81.9941
-36.683 -59.5119 66.4338
-1.07259 -1.88025 103.747
LANDMARKS_MAG1 END_LIST
LM_INDICES_MAG1 START
ListDescription = functional landmark PAN info
ListUnits =
ListNrColumns = 1
ListNrRows = 3
ListNrTimepts = 1
ListNrBlocks = 1
ListBinary = 0
ListType = 0
ListTrafoType = 0
ListGridType = 2
ListFirstColumn = 1
ListIndexMin = -1
ListIndexMax = -1
ListIndexAbsMax = -1
LM_INDICES_MAG1 END
LM_INDICES_MAG1 START_LIST # Do not edit!
2
1
3
LM_INDICES_MAG1 END_LIST
LM_REMARKS_MAG1 START
ListDescription = functional landmark labels
ListUnits =
ListNrColumns = 40
ListNrRows = 8
ListNrTimepts = 1
ListNrBlocks = 1
ListBinary = 0
ListType = 5
ListTrafoType = 0
ListGridType = 2
ListFirstColumn = 1
ListIndexMin = -1
ListIndexMax = -1
ListIndexAbsMax = -1
LM_REMARKS_MAG1 END
LM_REMARKS_MAG1 START_LIST # Do not edit!
Left ear
Nasion
Right ear
HPI1
HPI2
HPI3
HPI4
HPI5
LM_REMARKS_MAG1 END_LIST
"""
WANT_TRANS = np.array(
[[0.99729224, -0.07353067, -0.00119791, 0.00126953],
[0.07319243, 0.99085848, 0.11332405, 0.02670814],
[-0.00714583, -0.11310488, 0.99355736, 0.04721836],
[0., 0., 0., 1.]])
@testing.requires_testing_data
@pytest.mark.parametrize('fname,tol', [
pytest.param(curry7_rfDC_file, 1e-6, id='curry 7'),
pytest.param(curry8_rfDC_file, 1e-3, id='curry 8'),
])
@pytest.mark.parametrize('mock_dev_head_t', [True, False])
def test_read_raw_curry_rfDC(fname, tol, mock_dev_head_t, tmpdir):
"""Test reading CURRY files."""
if mock_dev_head_t:
if 'Curry 7' in fname: # not supported yet
return
# copy files to tmpdir
base = op.splitext(fname)[0]
for ext in ('.cdt', '.cdt.dpa'):
src = base + ext
dst = op.join(tmpdir, op.basename(base) + ext)
copyfile(src, dst)
if ext == '.cdt.dpa':
with open(dst, 'a') as fid:
fid.write(LM_CONTENT)
fname = op.join(tmpdir, op.basename(fname))
with open(fname + '.hpi', 'w') as fid:
fid.write(HPI_CONTENT)
# check data
bti_rfDC = read_raw_bti(pdf_fname=bti_rfDC_file, head_shape_fname=None)
with catch_logging() as log:
raw = read_raw_curry(fname, verbose=True)
log = log.getvalue()
if mock_dev_head_t:
assert 'Composing device' in log
else:
assert 'Leaving device' in log
assert 'no landmark' in log
# test on the eeg chans, since these were not renamed by curry
eeg_names = [ch["ch_name"] for ch in raw.info["chs"]
if ch["kind"] == FIFF.FIFFV_EEG_CH]
assert_allclose(raw.get_data(eeg_names),
bti_rfDC.get_data(eeg_names), rtol=tol)
assert bti_rfDC.info['dev_head_t'] is not None # XXX probably a BTI bug
if mock_dev_head_t:
assert raw.info['dev_head_t'] is not None
assert_allclose(raw.info['dev_head_t']['trans'], WANT_TRANS, atol=1e-5)
else:
assert raw.info['dev_head_t'] is None
# check that most MEG sensors are approximately oriented outward from
# the device origin
n_meg = n_eeg = n_other = 0
pos = list()
nn = list()
for ch in raw.info['chs']:
if ch['kind'] == FIFF.FIFFV_MEG_CH:
assert ch['coil_type'] == FIFF.FIFFV_COIL_CTF_GRAD
t = _loc_to_coil_trans(ch['loc'])
pos.append(t[:3, 3])
nn.append(t[:3, 2])
assert_allclose(np.linalg.norm(nn[-1]), 1.)
n_meg += 1
elif ch['kind'] == FIFF.FIFFV_EEG_CH:
assert ch['coil_type'] == FIFF.FIFFV_COIL_EEG
n_eeg += 1
else:
assert ch['coil_type'] == FIFF.FIFFV_COIL_NONE
n_other += 1
assert n_meg == 148
assert n_eeg == 31
assert n_other == 15
pos = np.array(pos)
nn = np.array(nn)
rad, origin = _fit_sphere(pos, disp=False)
assert 0.11 < rad < 0.13
pos -= origin
pos /= np.linalg.norm(pos, axis=1, keepdims=True)
angles = np.abs(np.rad2deg(np.arccos((pos * nn).sum(-1))))
assert (angles < 20).sum() > 100
@testing.requires_testing_data
@pytest.mark.parametrize('fname', [
pytest.param(curry7_bdf_file, id='curry 7'),
pytest.param(curry8_bdf_file, id='curry 8'),
])
def test_read_events_curry_are_same_as_bdf(fname):
"""Test events from curry annotations recovers the right events."""
EVENT_ID = {str(ii): ii for ii in range(5)}
REF_EVENTS = find_events(read_raw_bdf(bdf_file, preload=True))
raw = read_raw_curry(fname)
events, _ = events_from_annotations(raw, event_id=EVENT_ID)
assert_allclose(events, REF_EVENTS)
assert raw.info['dev_head_t'] is None
@testing.requires_testing_data
def test_check_missing_files():
"""Test checking for missing curry files (smoke test)."""
invalid_fname = "/invalid/path/name.xy"
with pytest.raises(IOError, match="file type .*? must end with"):
_read_events_curry(invalid_fname)
with pytest.raises(FileNotFoundError, match='does not exist'):
_get_curry_file_structure(invalid_fname)
with pytest.raises(FileNotFoundError, match="files cannot be found"):
_get_curry_file_structure(missing_event_file,
required=["info", "events"])
def _mock_info_file(src, dst, sfreq, time_step):
with open(src, 'r') as in_file, open(dst, 'w') as out_file:
for line in in_file:
if 'SampleFreqHz' in line:
out_file.write(line.replace('500', str(sfreq)))
elif 'SampleTimeUsec' in line:
out_file.write(line.replace('2000', str(time_step)))
else:
out_file.write(line)
@pytest.fixture(params=[
pytest.param(dict(sfreq=500, time_step=0), id='correct sfreq'),
pytest.param(dict(sfreq=0, time_step=2000), id='correct time_step'),
pytest.param(dict(sfreq=500, time_step=2000), id='both correct'),
pytest.param(dict(sfreq=0, time_step=0), id='both 0',
marks=pytest.mark.xfail(raises=ValueError)),
pytest.param(dict(sfreq=500, time_step=42), id='mismatch',
marks=pytest.mark.xfail(raises=ValueError)),
])
def sfreq_testing_data(tmpdir, request):
"""Generate different sfreq, time_step scenarios to be tested."""
sfreq, time_step = request.param['sfreq'], request.param['time_step']
in_base_name = curry7_bdf_file.strip('dat')
out_base_name = str(tmpdir.join('curry.'))
# create dummy empty files for 'dat' and 'rs3'
for fname in [out_base_name + ext for ext in ['dat', 'rs3']]:
open(fname, 'a').close()
_mock_info_file(src=in_base_name + 'dap', dst=out_base_name + 'dap',
sfreq=sfreq, time_step=time_step)
_mock_info_file(src=in_base_name + 'rs3', dst=out_base_name + 'rs3',
sfreq=sfreq, time_step=time_step)
return out_base_name + 'dat'
@testing.requires_testing_data
def test_sfreq(sfreq_testing_data):
"""Test sfreq and time_step."""
raw = read_raw_curry(sfreq_testing_data, preload=False)
assert raw.info['sfreq'] == 500
@testing.requires_testing_data
@pytest.mark.parametrize('fname', [
pytest.param(curry_dir + '/test_bdf_stim_channel Curry 7.cef', id='7'),
pytest.param(curry_dir + '/test_bdf_stim_channel Curry 8.cdt.cef', id='8'),
pytest.param(curry_dir + '/test_bdf_stim_channel Curry 7 ASCII.cef',
id='7 ascii'),
pytest.param(curry_dir + '/test_bdf_stim_channel Curry 8 ASCII.cdt.cef',
id='8 ascii'),
])
def test_read_curry_annotations(fname):
"""Test reading for Curry events file."""
EXPECTED_ONSET = [0.484, 0.486, 0.62, 0.622, 1.904, 1.906, 3.212, 3.214,
4.498, 4.5, 5.8, 5.802, 7.074, 7.076, 8.324, 8.326, 9.58,
9.582]
EXPECTED_DURATION = np.zeros_like(EXPECTED_ONSET)
EXPECTED_DESCRIPTION = ['4', '50000', '2', '50000', '1', '50000', '1',
'50000', '1', '50000', '1', '50000', '1', '50000',
'1', '50000', '1', '50000']
annot = read_annotations(fname, sfreq='auto')
assert annot.orig_time is None
assert_array_equal(annot.onset, EXPECTED_ONSET)
assert_array_equal(annot.duration, EXPECTED_DURATION)
assert_array_equal(annot.description, EXPECTED_DESCRIPTION)
def _get_read_annotations_mock_info(name_part, mock_dir):
original, modified = dict(), dict()
original['event'] = curry_dir + '/test_bdf_stim_channel ' + name_part
original['base'], ext = original['event'].split(".", maxsplit=1)
version = _get_curry_version(ext)
original['info'] = original['base'] + FILE_EXTENSIONS[version]["info"]
modified['base'] = str(mock_dir.join('curry'))
modified['event'] = modified['base'] + FILE_EXTENSIONS[version]["events"]
modified['info'] = modified['base'] + FILE_EXTENSIONS[version]["info"]
return original, modified
@testing.requires_testing_data
@pytest.mark.parametrize('name_part', [
pytest.param('7.cef', id='7'),
pytest.param('8.cdt.cef', id='8'),
pytest.param('7 ASCII.cef', id='7 (ascii)'),
pytest.param('8 ASCII.cdt.cef', id='8 (ascii)'),
])
def test_read_curry_annotations_using_mocked_info(tmpdir, name_part):
"""Test reading for Curry events file."""
EXPECTED_ONSET = [0.484, 0.486, 0.62, 0.622, 1.904, 1.906, 3.212, 3.214,
4.498, 4.5, 5.8, 5.802, 7.074, 7.076, 8.324, 8.326, 9.58,
9.582]
EXPECTED_DURATION = np.zeros_like(EXPECTED_ONSET)
EXPECTED_DESCRIPTION = ['4', '50000', '2', '50000', '1', '50000', '1',
'50000', '1', '50000', '1', '50000', '1', '50000',
'1', '50000', '1', '50000']
original, fname = _get_read_annotations_mock_info("Curry " + name_part,
tmpdir)
copyfile(src=original['event'], dst=fname['event'])
_msg = 'required files cannot be found'
with pytest.raises(FileNotFoundError, match=_msg):
read_annotations(fname['event'], sfreq='auto')
_mock_info_file(src=original['info'], dst=fname['info'],
sfreq=0, time_step=2000)
annot = read_annotations(fname['event'], sfreq='auto')
assert annot.orig_time is None
assert_array_equal(annot.onset, EXPECTED_ONSET)
assert_array_equal(annot.duration, EXPECTED_DURATION)
assert_array_equal(annot.description, EXPECTED_DESCRIPTION)
@testing.requires_testing_data
@pytest.mark.parametrize('fname,expected_channel_list', [
pytest.param(Ref_chan_omitted_file,
['FP1', 'FPZ', 'FP2', 'VEO', 'EKG', 'Trigger'],
id='Ref omitted, normal order'),
pytest.param(Ref_chan_omitted_reordered_file,
['FP2', 'FPZ', 'FP1', 'VEO', 'EKG', 'Trigger'],
id='Ref omitted, reordered')
])
def test_read_files_missing_channel(fname, expected_channel_list):
"""Test reading data files that has an omitted channel."""
    # This is for GitHub issue #8391. In some cases, the 'labels' (.rs3) file
    # will list channels that are not actually saved in the data file (such as
    # the 'Ref' channel). These channels are denoted in the 'info' (.dap) file
    # in the CHAN_IN_FILE section with a '0' as their index.
    # If the CHAN_IN_FILE section is present, the code also ensures that the
    # channels are sorted in the prescribed order.
    # This test makes sure the data loads correctly, and that we end up with
    # the proper channel list.
raw = read_raw_curry(fname, preload=True)
assert raw.ch_names == expected_channel_list
@testing.requires_testing_data
@pytest.mark.parametrize('fname,expected_meas_date', [
pytest.param(Ref_chan_omitted_file,
datetime(2018, 11, 21, 12, 53, 48,
525000, tzinfo=timezone.utc),
id='valid start date'),
pytest.param(curry7_rfDC_file,
None,
id='start date year is 0'),
pytest.param(curry7_bdf_file,
None,
id='start date seconds invalid')
])
def test_meas_date(fname, expected_meas_date):
"""Test reading acquisition start datetime info info['meas_date']."""
# This for Git issue #8398. The 'info' (.dap) file includes acquisition
# start date & time. Test that this goes into raw.info['meas_date'].
# If the information is not valid, raw.info['meas_date'] should be None
raw = read_raw_curry(fname, preload=False)
assert raw.info['meas_date'] == expected_meas_date
run_tests_if_main()
|
from functools import wraps
import os
from platform import system as _curos
import re
import subprocess
CUR_OS = _curos()
IS_WIN = CUR_OS in ['Windows', 'cli']
IS_NIX = (not IS_WIN) and any(
CUR_OS.startswith(i) for i in
['CYGWIN', 'MSYS', 'Linux', 'Darwin', 'SunOS',
'FreeBSD', 'NetBSD', 'OpenBSD'])
RE_ANSI = re.compile(r"\x1b\[[;\d]*[A-Za-z]")
# Py2/3 compat. Empty conditional to avoid coverage
if True: # pragma: no cover
try:
_range = xrange
except NameError:
_range = range
try:
_unich = unichr
except NameError:
_unich = chr
try:
_unicode = unicode
except NameError:
_unicode = str
try:
if IS_WIN:
import colorama
else:
raise ImportError
except ImportError:
colorama = None
else:
try:
colorama.init(strip=False)
except TypeError:
colorama.init()
try:
from weakref import WeakSet
except ImportError:
WeakSet = set
try:
_basestring = basestring
except NameError:
_basestring = str
try: # py>=2.7,>=3.1
from collections import OrderedDict as _OrderedDict
except ImportError:
try: # older Python versions with backported ordereddict lib
from ordereddict import OrderedDict as _OrderedDict
except ImportError: # older Python versions without ordereddict lib
# Py2.6,3.0 compat, from PEP 372
from collections import MutableMapping
class _OrderedDict(dict, MutableMapping):
# Methods with direct access to underlying attributes
def __init__(self, *args, **kwds):
if len(args) > 1:
                    raise TypeError('expected at most 1 argument, got %d'
                                    % len(args))
if not hasattr(self, '_keys'):
self._keys = []
self.update(*args, **kwds)
def clear(self):
del self._keys[:]
dict.clear(self)
def __setitem__(self, key, value):
if key not in self:
self._keys.append(key)
dict.__setitem__(self, key, value)
def __delitem__(self, key):
dict.__delitem__(self, key)
self._keys.remove(key)
def __iter__(self):
return iter(self._keys)
def __reversed__(self):
return reversed(self._keys)
def popitem(self):
if not self:
raise KeyError
key = self._keys.pop()
value = dict.pop(self, key)
return key, value
def __reduce__(self):
items = [[k, self[k]] for k in self]
inst_dict = vars(self).copy()
inst_dict.pop('_keys', None)
return self.__class__, (items,), inst_dict
# Methods with indirect access via the above methods
setdefault = MutableMapping.setdefault
update = MutableMapping.update
pop = MutableMapping.pop
keys = MutableMapping.keys
values = MutableMapping.values
items = MutableMapping.items
def __repr__(self):
pairs = ', '.join(map('%r: %r'.__mod__, self.items()))
return '%s({%s})' % (self.__class__.__name__, pairs)
def copy(self):
return self.__class__(self)
@classmethod
def fromkeys(cls, iterable, value=None):
d = cls()
for key in iterable:
d[key] = value
return d
class FormatReplace(object):
"""
>>> a = FormatReplace('something')
>>> "{:5d}".format(a)
'something'
"""
def __init__(self, replace=''):
self.replace = replace
self.format_called = 0
def __format__(self, _):
self.format_called += 1
return self.replace
class Comparable(object):
"""Assumes child has self._comparable attr/@property"""
def __lt__(self, other):
return self._comparable < other._comparable
def __le__(self, other):
return (self < other) or (self == other)
def __eq__(self, other):
return self._comparable == other._comparable
def __ne__(self, other):
return not self == other
def __gt__(self, other):
return not self <= other
def __ge__(self, other):
return not self < other
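# Illustrative sketch (not part of the original module): a minimal subclass
# showing what Comparable expects from children, namely a `_comparable`
# attribute; the class name and field are assumptions made for this example.
class _ExampleVersion(Comparable):
    def __init__(self, number):
        self._comparable = number
# e.g. _ExampleVersion(1) < _ExampleVersion(2) is True via Comparable.__lt__.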
class ObjectWrapper(object):
def __getattr__(self, name):
return getattr(self._wrapped, name)
def __setattr__(self, name, value):
return setattr(self._wrapped, name, value)
def wrapper_getattr(self, name):
"""Actual `self.getattr` rather than self._wrapped.getattr"""
try:
return object.__getattr__(self, name)
except AttributeError: # py2
return getattr(self, name)
def wrapper_setattr(self, name, value):
"""Actual `self.setattr` rather than self._wrapped.setattr"""
return object.__setattr__(self, name, value)
def __init__(self, wrapped):
"""
Thin wrapper around a given object
"""
self.wrapper_setattr('_wrapped', wrapped)
class SimpleTextIOWrapper(ObjectWrapper):
"""
Change only `.write()` of the wrapped object by encoding the passed
value and passing the result to the wrapped object's `.write()` method.
"""
# pylint: disable=too-few-public-methods
def __init__(self, wrapped, encoding):
super(SimpleTextIOWrapper, self).__init__(wrapped)
self.wrapper_setattr('encoding', encoding)
def write(self, s):
"""
Encode `s` and pass to the wrapped object's `.write()` method.
"""
return self._wrapped.write(s.encode(self.wrapper_getattr('encoding')))
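# Illustrative sketch (not part of the original module): SimpleTextIOWrapper
# encodes text before delegating to a byte stream's write(); io.BytesIO is an
# assumed stand-in for any binary file-like object.
def _example_simple_text_io_wrapper():
    import io
    buf = io.BytesIO()
    wrapped = SimpleTextIOWrapper(buf, encoding='utf-8')
    wrapped.write(u'100%|\u2588\u2589')  # text in, UTF-8 bytes out
    return buf.getvalue()  # b'100%|\xe2\x96\x88\xe2\x96\x89'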
class CallbackIOWrapper(ObjectWrapper):
def __init__(self, callback, stream, method="read"):
"""
Wrap a given `file`-like object's `read()` or `write()` to report
lengths to the given `callback`
"""
super(CallbackIOWrapper, self).__init__(stream)
func = getattr(stream, method)
if method == "write":
@wraps(func)
def write(data, *args, **kwargs):
res = func(data, *args, **kwargs)
callback(len(data))
return res
self.wrapper_setattr('write', write)
elif method == "read":
@wraps(func)
def read(*args, **kwargs):
data = func(*args, **kwargs)
callback(len(data))
return data
self.wrapper_setattr('read', read)
else:
raise KeyError("Can only wrap read/write methods")
def _is_utf(encoding):
try:
u'\u2588\u2589'.encode(encoding)
except UnicodeEncodeError: # pragma: no cover
return False
except Exception: # pragma: no cover
try:
return encoding.lower().startswith('utf-') or ('U8' == encoding)
except:
return False
else:
return True
def _supports_unicode(fp):
try:
return _is_utf(fp.encoding)
except AttributeError:
return False
def _is_ascii(s):
if isinstance(s, str):
for c in s:
if ord(c) > 255:
return False
return True
return _supports_unicode(s)
def _environ_cols_wrapper(): # pragma: no cover
"""
Return a function which gets the width (number of columns) of the console
(linux, osx, windows, cygwin).
"""
_environ_cols = None
if IS_WIN:
_environ_cols = _environ_cols_windows
if _environ_cols is None:
_environ_cols = _environ_cols_tput
if IS_NIX:
_environ_cols = _environ_cols_linux
return _environ_cols
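# Illustrative sketch (not part of the original module): the wrapper returns a
# platform-specific function (or None), which callers then probe with a stream
# such as sys.stderr; the stream choice here is an assumption for the demo.
def _example_environ_cols():
    import sys
    get_cols = _environ_cols_wrapper()
    return get_cols(sys.stderr) if get_cols is not None else None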
def _environ_cols_windows(fp): # pragma: no cover
try:
from ctypes import windll, create_string_buffer
import struct
from sys import stdin, stdout
io_handle = -12 # assume stderr
if fp == stdin:
io_handle = -10
elif fp == stdout:
io_handle = -11
h = windll.kernel32.GetStdHandle(io_handle)
csbi = create_string_buffer(22)
res = windll.kernel32.GetConsoleScreenBufferInfo(h, csbi)
if res:
(_bufx, _bufy, _curx, _cury, _wattr, left, _top, right, _bottom,
_maxx, _maxy) = struct.unpack("hhhhHhhhhhh", csbi.raw)
# nlines = bottom - top + 1
return right - left # +1
except:
pass
return None
def _environ_cols_tput(*_): # pragma: no cover
"""cygwin xterm (windows)"""
try:
import shlex
cols = int(subprocess.check_output(shlex.split('tput cols')))
# rows = int(subprocess.check_output(shlex.split('tput lines')))
return cols
except:
pass
return None
def _environ_cols_linux(fp): # pragma: no cover
try:
from termios import TIOCGWINSZ
from fcntl import ioctl
from array import array
except ImportError:
return None
else:
try:
return array('h', ioctl(fp, TIOCGWINSZ, '\0' * 8))[1]
except:
try:
return int(os.environ["COLUMNS"]) - 1
except KeyError:
return None
def _term_move_up(): # pragma: no cover
return '' if (os.name == 'nt') and (colorama is None) else '\x1b[A'
try:
# TODO consider using wcswidth third-party package for 0-width characters
from unicodedata import east_asian_width
except ImportError:
_text_width = len
else:
def _text_width(s):
return sum(
2 if east_asian_width(ch) in 'FW' else 1 for ch in _unicode(s))
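# Illustrative sketch (not part of the original module): _text_width counts
# East Asian wide characters as two columns, unlike plain len(); the sample
# string is an arbitrary assumption for the demo.
def _example_text_width():
    s = u'abc\u3042'  # 'abc' plus HIRAGANA LETTER A (full-width)
    return len(s), _text_width(s)  # (4, 5) when unicodedata is available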
|
import logging
from typing import List
from homeassistant.components.water_heater import (
SUPPORT_AWAY_MODE,
SUPPORT_OPERATION_MODE,
WaterHeaterEntity,
)
from homeassistant.const import PRECISION_TENTHS, PRECISION_WHOLE, STATE_OFF, STATE_ON
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
import homeassistant.util.dt as dt_util
from . import EvoChild
from .const import DOMAIN, EVO_FOLLOW, EVO_PERMOVER
_LOGGER = logging.getLogger(__name__)
STATE_AUTO = "auto"
HA_STATE_TO_EVO = {STATE_AUTO: "", STATE_ON: "On", STATE_OFF: "Off"}
EVO_STATE_TO_HA = {v: k for k, v in HA_STATE_TO_EVO.items() if k != ""}
STATE_ATTRS_DHW = ["dhwId", "activeFaults", "stateStatus", "temperatureStatus"]
async def async_setup_platform(
hass: HomeAssistantType, config: ConfigType, async_add_entities, discovery_info=None
) -> None:
"""Create a DHW controller."""
if discovery_info is None:
return
broker = hass.data[DOMAIN]["broker"]
_LOGGER.debug(
"Adding: DhwController (%s), id=%s",
broker.tcs.hotwater.zone_type,
broker.tcs.hotwater.zoneId,
)
new_entity = EvoDHW(broker, broker.tcs.hotwater)
async_add_entities([new_entity], update_before_add=True)
class EvoDHW(EvoChild, WaterHeaterEntity):
"""Base for a Honeywell TCC DHW controller (aka boiler)."""
def __init__(self, evo_broker, evo_device) -> None:
"""Initialize an evohome DHW controller."""
super().__init__(evo_broker, evo_device)
self._unique_id = evo_device.dhwId
self._name = "DHW controller"
self._icon = "mdi:thermometer-lines"
self._precision = PRECISION_TENTHS if evo_broker.client_v1 else PRECISION_WHOLE
self._supported_features = SUPPORT_AWAY_MODE | SUPPORT_OPERATION_MODE
@property
def state(self):
"""Return the current state."""
return EVO_STATE_TO_HA[self._evo_device.stateStatus["state"]]
@property
def current_operation(self) -> str:
"""Return the current operating mode (Auto, On, or Off)."""
if self._evo_device.stateStatus["mode"] == EVO_FOLLOW:
return STATE_AUTO
return EVO_STATE_TO_HA[self._evo_device.stateStatus["state"]]
@property
def operation_list(self) -> List[str]:
"""Return the list of available operations."""
return list(HA_STATE_TO_EVO)
@property
def is_away_mode_on(self):
"""Return True if away mode is on."""
is_off = EVO_STATE_TO_HA[self._evo_device.stateStatus["state"]] == STATE_OFF
is_permanent = self._evo_device.stateStatus["mode"] == EVO_PERMOVER
return is_off and is_permanent
async def async_set_operation_mode(self, operation_mode: str) -> None:
"""Set new operation mode for a DHW controller.
Except for Auto, the mode is only until the next SetPoint.
"""
if operation_mode == STATE_AUTO:
await self._evo_broker.call_client_api(self._evo_device.set_dhw_auto())
else:
await self._update_schedule()
until = dt_util.parse_datetime(self.setpoints.get("next_sp_from", ""))
until = dt_util.as_utc(until) if until else None
if operation_mode == STATE_ON:
await self._evo_broker.call_client_api(
self._evo_device.set_dhw_on(until=until)
)
else: # STATE_OFF
await self._evo_broker.call_client_api(
self._evo_device.set_dhw_off(until=until)
)
async def async_turn_away_mode_on(self):
"""Turn away mode on."""
await self._evo_broker.call_client_api(self._evo_device.set_dhw_off())
async def async_turn_away_mode_off(self):
"""Turn away mode off."""
await self._evo_broker.call_client_api(self._evo_device.set_dhw_auto())
async def async_update(self) -> None:
"""Get the latest state data for a DHW controller."""
await super().async_update()
for attr in STATE_ATTRS_DHW:
self._device_state_attrs[attr] = getattr(self._evo_device, attr)
|
from threading import Thread
from kalliope.core import SignalModule, MissingParameter
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.cron import CronTrigger
from kalliope.core.ConfigurationManager import BrainLoader
from kalliope.core.SynapseLauncher import SynapseLauncher
from kalliope.core import Utils
class Event(SignalModule, Thread):
def __init__(self, **kwargs):
super(Event, self).__init__(**kwargs)
Thread.__init__(self, name="Event")
Utils.print_info('[Event] Starting manager')
self.scheduler = BackgroundScheduler()
self.list_synapses_with_event = list(super(Event, self).get_list_synapse())
self.load_events()
def run(self):
self.scheduler.start()
def load_events(self):
"""
For each received synapse that has an event as a signal, add a new scheduled job
that launches the synapse.
"""
for synapse in self.list_synapses_with_event:
for signal in synapse.signals:
# We need to loop here again if the synapse has multiple event signals.
# if the signal is an event we add it to the task list.
if signal.name == "event":
my_cron = CronTrigger(year=self.get_parameter_from_dict("year", signal.parameters),
month=self.get_parameter_from_dict("month", signal.parameters),
day=self.get_parameter_from_dict("day", signal.parameters),
week=self.get_parameter_from_dict("week", signal.parameters),
day_of_week=self.get_parameter_from_dict("day_of_week",
signal.parameters),
hour=self.get_parameter_from_dict("hour", signal.parameters),
minute=self.get_parameter_from_dict("minute", signal.parameters),
second=self.get_parameter_from_dict("second", signal.parameters), )
Utils.print_info("Add synapse name \"%s\" to the scheduler: %s" % (synapse.name, my_cron))
self.scheduler.add_job(self.run_synapse_by_name, my_cron, args=[synapse.name])
@staticmethod
def run_synapse_by_name(synapse_name):
"""
This method will run the synapse
"""
Utils.print_info("[Event] triggered, running synapse: %s" % synapse_name)
# get a brain
brain_loader = BrainLoader()
brain = brain_loader.brain
SynapseLauncher.start_synapse_by_list_name([synapse_name], brain=brain)
@staticmethod
def get_parameter_from_dict(parameter_name, parameters_dict):
"""
Return the value from the dict parameters_dict for the key parameter_name.
Return None if the key does not exist.
:param parameter_name: name of the key
:param parameters_dict: dict
:return: string
"""
try:
return parameters_dict[parameter_name]
except KeyError:
return None
@staticmethod
def check_parameters(parameters):
"""
Check that the received event parameters dictionary is valid.
:return: True if the event parameters are ok
:rtype: Boolean
"""
def get_key(key_name):
try:
return parameters[key_name]
except KeyError:
return None
if parameters is None or parameters == "":
raise MissingParameter("Event must contain at least one of those elements: "
"year, month, day, week, day_of_week, hour, minute, second")
# check the content has at least one key
year = get_key("year")
month = get_key("month")
day = get_key("day")
week = get_key("week")
day_of_week = get_key("day_of_week")
hour = get_key("hour")
minute = get_key("minute")
second = get_key("second")
list_to_check = [year, month, day, week, day_of_week, hour, minute, second]
number_of_none_object = list_to_check.count(None)
list_size = len(list_to_check)
if number_of_none_object >= list_size:
raise MissingParameter("Event must contain at least one of those elements: "
"year, month, day, week, day_of_week, hour, minute, second")
return True
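# Illustrative sketch (not part of the original module): how an event signal's
# parameters dictionary maps onto an APScheduler CronTrigger, mirroring what
# load_events() does; the parameter values below are assumptions for the demo.
def _example_event_trigger():
    parameters = {"hour": "7", "minute": "30", "day_of_week": "mon-fri"}
    Event.check_parameters(parameters)  # raises MissingParameter if empty
    return CronTrigger(hour=Event.get_parameter_from_dict("hour", parameters),
                       minute=Event.get_parameter_from_dict("minute", parameters),
                       day_of_week=Event.get_parameter_from_dict("day_of_week", parameters))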
|
import copy
import json
import pytest
from homeassistant.components import alarm_control_panel
from homeassistant.const import (
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_CUSTOM_BYPASS,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_ARMING,
STATE_ALARM_DISARMED,
STATE_ALARM_DISARMING,
STATE_ALARM_PENDING,
STATE_ALARM_TRIGGERED,
STATE_UNKNOWN,
)
from homeassistant.setup import async_setup_component
from .test_common import (
help_test_availability_when_connection_lost,
help_test_availability_without_topic,
help_test_custom_availability_payload,
help_test_default_availability_payload,
help_test_discovery_broken,
help_test_discovery_removal,
help_test_discovery_update,
help_test_discovery_update_attr,
help_test_discovery_update_unchanged,
help_test_entity_debug_info_message,
help_test_entity_device_info_remove,
help_test_entity_device_info_update,
help_test_entity_device_info_with_connection,
help_test_entity_device_info_with_identifier,
help_test_entity_id_update_discovery_update,
help_test_entity_id_update_subscriptions,
help_test_setting_attribute_via_mqtt_json_message,
help_test_setting_attribute_with_template,
help_test_unique_id,
help_test_update_with_json_attrs_bad_JSON,
help_test_update_with_json_attrs_not_dict,
)
from tests.async_mock import patch
from tests.common import assert_setup_component, async_fire_mqtt_message
from tests.components.alarm_control_panel import common
CODE_NUMBER = "1234"
CODE_TEXT = "HELLO_CODE"
DEFAULT_CONFIG = {
alarm_control_panel.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "alarm/state",
"command_topic": "alarm/command",
}
}
DEFAULT_CONFIG_CODE = {
alarm_control_panel.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "alarm/state",
"command_topic": "alarm/command",
"code": "0123",
"code_arm_required": True,
}
}
async def test_fail_setup_without_state_topic(hass, mqtt_mock):
"""Test for failing with no state topic."""
with assert_setup_component(0) as config:
assert await async_setup_component(
hass,
alarm_control_panel.DOMAIN,
{
alarm_control_panel.DOMAIN: {
"platform": "mqtt",
"command_topic": "alarm/command",
}
},
)
assert not config[alarm_control_panel.DOMAIN]
async def test_fail_setup_without_command_topic(hass, mqtt_mock):
"""Test failing with no command topic."""
with assert_setup_component(0):
assert await async_setup_component(
hass,
alarm_control_panel.DOMAIN,
{
alarm_control_panel.DOMAIN: {
"platform": "mqtt",
"state_topic": "alarm/state",
}
},
)
async def test_update_state_via_state_topic(hass, mqtt_mock):
"""Test updating with via state topic."""
assert await async_setup_component(
hass,
alarm_control_panel.DOMAIN,
DEFAULT_CONFIG,
)
await hass.async_block_till_done()
entity_id = "alarm_control_panel.test"
assert hass.states.get(entity_id).state == STATE_UNKNOWN
for state in (
STATE_ALARM_DISARMED,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_ARMED_CUSTOM_BYPASS,
STATE_ALARM_PENDING,
STATE_ALARM_ARMING,
STATE_ALARM_DISARMING,
STATE_ALARM_TRIGGERED,
):
async_fire_mqtt_message(hass, "alarm/state", state)
assert hass.states.get(entity_id).state == state
async def test_ignore_update_state_if_unknown_via_state_topic(hass, mqtt_mock):
"""Test ignoring updates via state topic."""
assert await async_setup_component(
hass,
alarm_control_panel.DOMAIN,
DEFAULT_CONFIG,
)
await hass.async_block_till_done()
entity_id = "alarm_control_panel.test"
assert hass.states.get(entity_id).state == STATE_UNKNOWN
async_fire_mqtt_message(hass, "alarm/state", "unsupported state")
assert hass.states.get(entity_id).state == STATE_UNKNOWN
async def test_arm_home_publishes_mqtt(hass, mqtt_mock):
"""Test publishing of MQTT messages while armed."""
assert await async_setup_component(
hass,
alarm_control_panel.DOMAIN,
DEFAULT_CONFIG,
)
await hass.async_block_till_done()
await common.async_alarm_arm_home(hass)
mqtt_mock.async_publish.assert_called_once_with(
"alarm/command", "ARM_HOME", 0, False
)
async def test_arm_home_not_publishes_mqtt_with_invalid_code_when_req(hass, mqtt_mock):
"""Test not publishing of MQTT messages with invalid.
When code_arm_required = True
"""
assert await async_setup_component(
hass,
alarm_control_panel.DOMAIN,
DEFAULT_CONFIG_CODE,
)
call_count = mqtt_mock.async_publish.call_count
await common.async_alarm_arm_home(hass, "abcd")
assert mqtt_mock.async_publish.call_count == call_count
async def test_arm_home_publishes_mqtt_when_code_not_req(hass, mqtt_mock):
"""Test publishing of MQTT messages.
When code_arm_required = False
"""
config = copy.deepcopy(DEFAULT_CONFIG_CODE)
config[alarm_control_panel.DOMAIN]["code_arm_required"] = False
assert await async_setup_component(
hass,
alarm_control_panel.DOMAIN,
config,
)
await hass.async_block_till_done()
await common.async_alarm_arm_home(hass)
mqtt_mock.async_publish.assert_called_once_with(
"alarm/command", "ARM_HOME", 0, False
)
async def test_arm_away_publishes_mqtt(hass, mqtt_mock):
"""Test publishing of MQTT messages while armed."""
assert await async_setup_component(
hass,
alarm_control_panel.DOMAIN,
DEFAULT_CONFIG,
)
await hass.async_block_till_done()
await common.async_alarm_arm_away(hass)
mqtt_mock.async_publish.assert_called_once_with(
"alarm/command", "ARM_AWAY", 0, False
)
async def test_arm_away_not_publishes_mqtt_with_invalid_code_when_req(hass, mqtt_mock):
"""Test not publishing of MQTT messages with invalid code.
When code_arm_required = True
"""
assert await async_setup_component(
hass,
alarm_control_panel.DOMAIN,
DEFAULT_CONFIG_CODE,
)
call_count = mqtt_mock.async_publish.call_count
await common.async_alarm_arm_away(hass, "abcd")
assert mqtt_mock.async_publish.call_count == call_count
async def test_arm_away_publishes_mqtt_when_code_not_req(hass, mqtt_mock):
"""Test publishing of MQTT messages.
When code_arm_required = False
"""
config = copy.deepcopy(DEFAULT_CONFIG_CODE)
config[alarm_control_panel.DOMAIN]["code_arm_required"] = False
assert await async_setup_component(
hass,
alarm_control_panel.DOMAIN,
config,
)
await hass.async_block_till_done()
await common.async_alarm_arm_away(hass)
mqtt_mock.async_publish.assert_called_once_with(
"alarm/command", "ARM_AWAY", 0, False
)
async def test_arm_night_publishes_mqtt(hass, mqtt_mock):
"""Test publishing of MQTT messages while armed."""
assert await async_setup_component(
hass,
alarm_control_panel.DOMAIN,
DEFAULT_CONFIG,
)
await hass.async_block_till_done()
await common.async_alarm_arm_night(hass)
mqtt_mock.async_publish.assert_called_once_with(
"alarm/command", "ARM_NIGHT", 0, False
)
async def test_arm_night_not_publishes_mqtt_with_invalid_code_when_req(hass, mqtt_mock):
"""Test not publishing of MQTT messages with invalid code.
When code_arm_required = True
"""
assert await async_setup_component(
hass,
alarm_control_panel.DOMAIN,
DEFAULT_CONFIG_CODE,
)
call_count = mqtt_mock.async_publish.call_count
await common.async_alarm_arm_night(hass, "abcd")
assert mqtt_mock.async_publish.call_count == call_count
async def test_arm_night_publishes_mqtt_when_code_not_req(hass, mqtt_mock):
"""Test publishing of MQTT messages.
When code_arm_required = False
"""
config = copy.deepcopy(DEFAULT_CONFIG_CODE)
config[alarm_control_panel.DOMAIN]["code_arm_required"] = False
assert await async_setup_component(
hass,
alarm_control_panel.DOMAIN,
config,
)
await hass.async_block_till_done()
await common.async_alarm_arm_night(hass)
mqtt_mock.async_publish.assert_called_once_with(
"alarm/command", "ARM_NIGHT", 0, False
)
async def test_arm_custom_bypass_publishes_mqtt(hass, mqtt_mock):
"""Test publishing of MQTT messages while armed."""
assert await async_setup_component(
hass,
alarm_control_panel.DOMAIN,
{
alarm_control_panel.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "alarm/state",
"command_topic": "alarm/command",
}
},
)
await hass.async_block_till_done()
await common.async_alarm_arm_custom_bypass(hass)
mqtt_mock.async_publish.assert_called_once_with(
"alarm/command", "ARM_CUSTOM_BYPASS", 0, False
)
async def test_arm_custom_bypass_not_publishes_mqtt_with_invalid_code_when_req(
hass, mqtt_mock
):
"""Test not publishing of MQTT messages with invalid code.
When code_arm_required = True
"""
assert await async_setup_component(
hass,
alarm_control_panel.DOMAIN,
{
alarm_control_panel.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "alarm/state",
"command_topic": "alarm/command",
"code": "1234",
"code_arm_required": True,
}
},
)
await hass.async_block_till_done()
call_count = mqtt_mock.async_publish.call_count
await common.async_alarm_arm_custom_bypass(hass, "abcd")
assert mqtt_mock.async_publish.call_count == call_count
async def test_arm_custom_bypass_publishes_mqtt_when_code_not_req(hass, mqtt_mock):
"""Test publishing of MQTT messages.
When code_arm_required = False
"""
assert await async_setup_component(
hass,
alarm_control_panel.DOMAIN,
{
alarm_control_panel.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "alarm/state",
"command_topic": "alarm/command",
"code": "1234",
"code_arm_required": False,
}
},
)
await hass.async_block_till_done()
await common.async_alarm_arm_custom_bypass(hass)
mqtt_mock.async_publish.assert_called_once_with(
"alarm/command", "ARM_CUSTOM_BYPASS", 0, False
)
async def test_disarm_publishes_mqtt(hass, mqtt_mock):
"""Test publishing of MQTT messages while disarmed."""
assert await async_setup_component(
hass,
alarm_control_panel.DOMAIN,
DEFAULT_CONFIG,
)
await hass.async_block_till_done()
await common.async_alarm_disarm(hass)
mqtt_mock.async_publish.assert_called_once_with("alarm/command", "DISARM", 0, False)
async def test_disarm_publishes_mqtt_with_template(hass, mqtt_mock):
"""Test publishing of MQTT messages while disarmed.
When command_template set to output json
"""
config = copy.deepcopy(DEFAULT_CONFIG_CODE)
config[alarm_control_panel.DOMAIN]["code"] = "0123"
config[alarm_control_panel.DOMAIN][
"command_template"
] = '{"action":"{{ action }}","code":"{{ code }}"}'
assert await async_setup_component(
hass,
alarm_control_panel.DOMAIN,
config,
)
await hass.async_block_till_done()
await common.async_alarm_disarm(hass, "0123")
mqtt_mock.async_publish.assert_called_once_with(
"alarm/command", '{"action":"DISARM","code":"0123"}', 0, False
)
async def test_disarm_publishes_mqtt_when_code_not_req(hass, mqtt_mock):
"""Test publishing of MQTT messages while disarmed.
When code_disarm_required = False
"""
config = copy.deepcopy(DEFAULT_CONFIG_CODE)
config[alarm_control_panel.DOMAIN]["code"] = "1234"
config[alarm_control_panel.DOMAIN]["code_disarm_required"] = False
assert await async_setup_component(
hass,
alarm_control_panel.DOMAIN,
config,
)
await hass.async_block_till_done()
await common.async_alarm_disarm(hass)
mqtt_mock.async_publish.assert_called_once_with("alarm/command", "DISARM", 0, False)
async def test_disarm_not_publishes_mqtt_with_invalid_code_when_req(hass, mqtt_mock):
"""Test not publishing of MQTT messages with invalid code.
When code_disarm_required = True
"""
assert await async_setup_component(
hass,
alarm_control_panel.DOMAIN,
DEFAULT_CONFIG_CODE,
)
call_count = mqtt_mock.async_publish.call_count
await common.async_alarm_disarm(hass, "abcd")
assert mqtt_mock.async_publish.call_count == call_count
async def test_update_state_via_state_topic_template(hass, mqtt_mock):
"""Test updating with template_value via state topic."""
assert await async_setup_component(
hass,
alarm_control_panel.DOMAIN,
{
alarm_control_panel.DOMAIN: {
"platform": "mqtt",
"name": "test",
"command_topic": "test-topic",
"state_topic": "test-topic",
"value_template": "\
{% if (value | int) == 100 %}\
armed_away\
{% else %}\
disarmed\
{% endif %}",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("alarm_control_panel.test")
assert state.state == STATE_UNKNOWN
async_fire_mqtt_message(hass, "test-topic", "100")
state = hass.states.get("alarm_control_panel.test")
assert state.state == STATE_ALARM_ARMED_AWAY
async def test_attributes_code_number(hass, mqtt_mock):
"""Test attributes which are not supported by the vacuum."""
config = copy.deepcopy(DEFAULT_CONFIG)
config[alarm_control_panel.DOMAIN]["code"] = CODE_NUMBER
assert await async_setup_component(hass, alarm_control_panel.DOMAIN, config)
await hass.async_block_till_done()
state = hass.states.get("alarm_control_panel.test")
assert (
state.attributes.get(alarm_control_panel.ATTR_CODE_FORMAT)
== alarm_control_panel.FORMAT_NUMBER
)
async def test_attributes_code_text(hass, mqtt_mock):
"""Test attributes which are not supported by the vacuum."""
config = copy.deepcopy(DEFAULT_CONFIG)
config[alarm_control_panel.DOMAIN]["code"] = CODE_TEXT
assert await async_setup_component(hass, alarm_control_panel.DOMAIN, config)
await hass.async_block_till_done()
state = hass.states.get("alarm_control_panel.test")
assert (
state.attributes.get(alarm_control_panel.ATTR_CODE_FORMAT)
== alarm_control_panel.FORMAT_TEXT
)
async def test_availability_when_connection_lost(hass, mqtt_mock):
"""Test availability after MQTT disconnection."""
await help_test_availability_when_connection_lost(
hass, mqtt_mock, alarm_control_panel.DOMAIN, DEFAULT_CONFIG_CODE
)
async def test_availability_without_topic(hass, mqtt_mock):
"""Test availability without defined availability topic."""
await help_test_availability_without_topic(
hass, mqtt_mock, alarm_control_panel.DOMAIN, DEFAULT_CONFIG_CODE
)
async def test_default_availability_payload(hass, mqtt_mock):
"""Test availability by default payload with defined topic."""
await help_test_default_availability_payload(
hass, mqtt_mock, alarm_control_panel.DOMAIN, DEFAULT_CONFIG_CODE
)
async def test_custom_availability_payload(hass, mqtt_mock):
"""Test availability by custom payload with defined topic."""
await help_test_custom_availability_payload(
hass, mqtt_mock, alarm_control_panel.DOMAIN, DEFAULT_CONFIG_CODE
)
async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock):
"""Test the setting of attribute via MQTT with JSON payload."""
await help_test_setting_attribute_via_mqtt_json_message(
hass, mqtt_mock, alarm_control_panel.DOMAIN, DEFAULT_CONFIG
)
async def test_setting_attribute_with_template(hass, mqtt_mock):
"""Test the setting of attribute via MQTT with JSON payload."""
await help_test_setting_attribute_with_template(
hass, mqtt_mock, alarm_control_panel.DOMAIN, DEFAULT_CONFIG
)
async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog):
"""Test attributes get extracted from a JSON result."""
await help_test_update_with_json_attrs_not_dict(
hass, mqtt_mock, caplog, alarm_control_panel.DOMAIN, DEFAULT_CONFIG
)
async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog):
"""Test attributes get extracted from a JSON result."""
await help_test_update_with_json_attrs_bad_JSON(
hass, mqtt_mock, caplog, alarm_control_panel.DOMAIN, DEFAULT_CONFIG
)
async def test_discovery_update_attr(hass, mqtt_mock, caplog):
"""Test update of discovered MQTTAttributes."""
await help_test_discovery_update_attr(
hass, mqtt_mock, caplog, alarm_control_panel.DOMAIN, DEFAULT_CONFIG
)
async def test_unique_id(hass, mqtt_mock):
"""Test unique id option only creates one alarm per unique_id."""
config = {
alarm_control_panel.DOMAIN: [
{
"platform": "mqtt",
"name": "Test 1",
"state_topic": "test-topic",
"command_topic": "command-topic",
"unique_id": "TOTALLY_UNIQUE",
},
{
"platform": "mqtt",
"name": "Test 2",
"state_topic": "test-topic",
"command_topic": "command-topic",
"unique_id": "TOTALLY_UNIQUE",
},
]
}
await help_test_unique_id(hass, mqtt_mock, alarm_control_panel.DOMAIN, config)
async def test_discovery_removal_alarm(hass, mqtt_mock, caplog):
"""Test removal of discovered alarm_control_panel."""
data = json.dumps(DEFAULT_CONFIG[alarm_control_panel.DOMAIN])
await help_test_discovery_removal(
hass, mqtt_mock, caplog, alarm_control_panel.DOMAIN, data
)
async def test_discovery_update_alarm_topic_and_template(hass, mqtt_mock, caplog):
"""Test update of discovered alarm_control_panel."""
config1 = copy.deepcopy(DEFAULT_CONFIG[alarm_control_panel.DOMAIN])
config2 = copy.deepcopy(DEFAULT_CONFIG[alarm_control_panel.DOMAIN])
config1["name"] = "Beer"
config2["name"] = "Milk"
config1["state_topic"] = "alarm/state1"
config2["state_topic"] = "alarm/state2"
config1["value_template"] = "{{ value_json.state1.state }}"
config2["value_template"] = "{{ value_json.state2.state }}"
state_data1 = [
([("alarm/state1", '{"state1":{"state":"armed_away"}}')], "armed_away", None),
]
state_data2 = [
([("alarm/state1", '{"state1":{"state":"triggered"}}')], "armed_away", None),
([("alarm/state1", '{"state2":{"state":"triggered"}}')], "armed_away", None),
([("alarm/state2", '{"state1":{"state":"triggered"}}')], "armed_away", None),
([("alarm/state2", '{"state2":{"state":"triggered"}}')], "triggered", None),
]
data1 = json.dumps(config1)
data2 = json.dumps(config2)
await help_test_discovery_update(
hass,
mqtt_mock,
caplog,
alarm_control_panel.DOMAIN,
data1,
data2,
state_data1=state_data1,
state_data2=state_data2,
)
async def test_discovery_update_alarm_template(hass, mqtt_mock, caplog):
"""Test update of discovered alarm_control_panel."""
config1 = copy.deepcopy(DEFAULT_CONFIG[alarm_control_panel.DOMAIN])
config2 = copy.deepcopy(DEFAULT_CONFIG[alarm_control_panel.DOMAIN])
config1["name"] = "Beer"
config2["name"] = "Milk"
config1["state_topic"] = "alarm/state1"
config2["state_topic"] = "alarm/state1"
config1["value_template"] = "{{ value_json.state1.state }}"
config2["value_template"] = "{{ value_json.state2.state }}"
state_data1 = [
([("alarm/state1", '{"state1":{"state":"armed_away"}}')], "armed_away", None),
]
state_data2 = [
([("alarm/state1", '{"state1":{"state":"triggered"}}')], "armed_away", None),
([("alarm/state1", '{"state2":{"state":"triggered"}}')], "triggered", None),
]
data1 = json.dumps(config1)
data2 = json.dumps(config2)
await help_test_discovery_update(
hass,
mqtt_mock,
caplog,
alarm_control_panel.DOMAIN,
data1,
data2,
state_data1=state_data1,
state_data2=state_data2,
)
async def test_discovery_update_unchanged_alarm(hass, mqtt_mock, caplog):
"""Test update of discovered alarm_control_panel."""
config1 = copy.deepcopy(DEFAULT_CONFIG[alarm_control_panel.DOMAIN])
config1["name"] = "Beer"
data1 = json.dumps(config1)
with patch(
"homeassistant.components.mqtt.alarm_control_panel.MqttAlarm.discovery_update"
) as discovery_update:
await help_test_discovery_update_unchanged(
hass, mqtt_mock, caplog, alarm_control_panel.DOMAIN, data1, discovery_update
)
@pytest.mark.no_fail_on_log_exception
async def test_discovery_broken(hass, mqtt_mock, caplog):
"""Test handling of bad discovery message."""
data1 = '{ "name": "Beer" }'
data2 = (
'{ "name": "Milk",'
' "state_topic": "test_topic",'
' "command_topic": "test_topic" }'
)
await help_test_discovery_broken(
hass, mqtt_mock, caplog, alarm_control_panel.DOMAIN, data1, data2
)
async def test_entity_device_info_with_connection(hass, mqtt_mock):
"""Test MQTT alarm control panel device registry integration."""
await help_test_entity_device_info_with_connection(
hass, mqtt_mock, alarm_control_panel.DOMAIN, DEFAULT_CONFIG
)
async def test_entity_device_info_with_identifier(hass, mqtt_mock):
"""Test MQTT alarm control panel device registry integration."""
await help_test_entity_device_info_with_identifier(
hass, mqtt_mock, alarm_control_panel.DOMAIN, DEFAULT_CONFIG
)
async def test_entity_device_info_update(hass, mqtt_mock):
"""Test device registry update."""
await help_test_entity_device_info_update(
hass, mqtt_mock, alarm_control_panel.DOMAIN, DEFAULT_CONFIG
)
async def test_entity_device_info_remove(hass, mqtt_mock):
"""Test device registry remove."""
await help_test_entity_device_info_remove(
hass, mqtt_mock, alarm_control_panel.DOMAIN, DEFAULT_CONFIG
)
async def test_entity_id_update_subscriptions(hass, mqtt_mock):
"""Test MQTT subscriptions are managed when entity_id is updated."""
await help_test_entity_id_update_subscriptions(
hass, mqtt_mock, alarm_control_panel.DOMAIN, DEFAULT_CONFIG
)
async def test_entity_id_update_discovery_update(hass, mqtt_mock):
"""Test MQTT discovery update when entity_id is updated."""
await help_test_entity_id_update_discovery_update(
hass, mqtt_mock, alarm_control_panel.DOMAIN, DEFAULT_CONFIG
)
async def test_entity_debug_info_message(hass, mqtt_mock):
"""Test MQTT debug info."""
await help_test_entity_debug_info_message(
hass, mqtt_mock, alarm_control_panel.DOMAIN, DEFAULT_CONFIG
)
|
from django.http import Http404
from django.utils.translation import gettext as _
class EntryPreviewMixin(object):
"""
Mixin implementing the preview of Entries.
"""
def get_object(self, queryset=None):
"""
If the status of the entry is not PUBLISHED,
a preview is requested, so we check if the user
has the 'zinnia.can_view_all' permission or if
it's an author of the entry.
"""
obj = super(EntryPreviewMixin, self).get_object(queryset)
if obj.is_visible:
return obj
if (self.request.user.has_perm('zinnia.can_view_all') or
self.request.user.pk in [
author.pk for author in obj.authors.all()]):
return obj
raise Http404(_('No entry found matching the query'))
|
import json
import os
import shutil
import sys
import tempfile
import yaml
from .serializers import yamlserializer, jsonserializer
from .serialize import serialize
from . import request
from .stubs.compat import get_httpmessage
# Use the libYAML versions if possible
try:
from yaml import CLoader as Loader
except ImportError:
from yaml import Loader
def preprocess_yaml(cassette):
# this is the hack that makes the whole thing work. The old version used
# to deserialize to Request objects automatically using pyYaml's !!python
# tag system. This made it difficult to deserialize old cassettes on new
# versions. So this just strips the tags before deserializing.
STRINGS_TO_NUKE = [
"!!python/object:vcr.request.Request",
"!!python/object/apply:__builtin__.frozenset",
"!!python/object/apply:builtins.frozenset",
]
for s in STRINGS_TO_NUKE:
cassette = cassette.replace(s, "")
return cassette
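# Illustrative sketch (not part of the original module): the tag stripping
# above turns a pyYaml-tagged line from an old cassette into plain YAML that
# the default loader can parse; the one-line cassette is an assumed example.
def _example_preprocess_yaml():
    old = "- !!python/object:vcr.request.Request {method: GET}"
    return preprocess_yaml(old)  # "-  {method: GET}"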
PARTS = ["protocol", "host", "port", "path"]
def build_uri(**parts):
port = parts["port"]
scheme = parts["protocol"]
default_port = {"https": 443, "http": 80}[scheme]
parts["port"] = ":{}".format(port) if port != default_port else ""
return "{protocol}://{host}{port}{path}".format(**parts)
def _migrate(data):
interactions = []
for item in data:
req = item["request"]
res = item["response"]
uri = {k: req.pop(k) for k in PARTS}
req["uri"] = build_uri(**uri)
# convert headers to dict of lists
headers = req["headers"]
for k in headers:
headers[k] = [headers[k]]
response_headers = {}
for k, v in get_httpmessage(b"".join(h.encode("utf-8") for h in res["headers"])).items():
response_headers.setdefault(k, [])
response_headers[k].append(v)
res["headers"] = response_headers
interactions.append({"request": req, "response": res})
return {
"requests": [request.Request._from_dict(i["request"]) for i in interactions],
"responses": [i["response"] for i in interactions],
}
def migrate_json(in_fp, out_fp):
data = json.load(in_fp)
if _already_migrated(data):
return False
interactions = _migrate(data)
out_fp.write(serialize(interactions, jsonserializer))
return True
def _list_of_tuples_to_dict(fs):
return {k: v for k, v in fs[0]}
def _already_migrated(data):
try:
if data.get("version") == 1:
return True
except AttributeError:
return False
def migrate_yml(in_fp, out_fp):
data = yaml.load(preprocess_yaml(in_fp.read()), Loader=Loader)
if _already_migrated(data):
return False
for i in range(len(data)):
data[i]["request"]["headers"] = _list_of_tuples_to_dict(data[i]["request"]["headers"])
interactions = _migrate(data)
out_fp.write(serialize(interactions, yamlserializer))
return True
def migrate(file_path, migration_fn):
# Because we assume the original files can be reverted, we copy the content
# back into place instead of renaming (os.rename is not needed).
with tempfile.TemporaryFile(mode="w+") as out_fp:
with open(file_path, "r") as in_fp:
if not migration_fn(in_fp, out_fp):
return False
with open(file_path, "w") as in_fp:
out_fp.seek(0)
shutil.copyfileobj(out_fp, in_fp)
return True
def try_migrate(path):
if path.endswith(".json"):
return migrate(path, migrate_json)
elif path.endswith(".yaml") or path.endswith(".yml"):
return migrate(path, migrate_yml)
return False
def main():
if len(sys.argv) != 2:
raise SystemExit(
"Please provide path to cassettes directory or file. " "Usage: python -m vcr.migration PATH"
)
path = sys.argv[1]
if not os.path.isabs(path):
path = os.path.abspath(path)
files = [path]
if os.path.isdir(path):
files = (os.path.join(root, name) for (root, dirs, files) in os.walk(path) for name in files)
for file_path in files:
migrated = try_migrate(file_path)
status = "OK" if migrated else "FAIL"
sys.stderr.write("[{}] {}\n".format(status, file_path))
sys.stderr.write("Done.\n")
if __name__ == "__main__":
main()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
from absl.testing import absltest
class ClassA(absltest.TestCase):
"""Helper test case A for absltest_sharding_test."""
def testA(self):
sys.stderr.write('\nclass A test A\n')
def testB(self):
sys.stderr.write('\nclass A test B\n')
def testC(self):
sys.stderr.write('\nclass A test C\n')
class ClassB(absltest.TestCase):
"""Helper test case B for absltest_sharding_test."""
def testA(self):
sys.stderr.write('\nclass B test A\n')
def testB(self):
sys.stderr.write('\nclass B test B\n')
def testC(self):
sys.stderr.write('\nclass B test C\n')
def testD(self):
sys.stderr.write('\nclass B test D\n')
def testE(self):
sys.stderr.write('\nclass B test E\n')
self.fail('Force failure')
if __name__ == '__main__':
absltest.main()
|
import snappy
from autobahn.websocket.compress_base import PerMessageCompressOffer, \
PerMessageCompressOfferAccept, \
PerMessageCompressResponse, \
PerMessageCompressResponseAccept, \
PerMessageCompress
__all__ = (
'PerMessageSnappyMixin',
'PerMessageSnappyOffer',
'PerMessageSnappyOfferAccept',
'PerMessageSnappyResponse',
'PerMessageSnappyResponseAccept',
'PerMessageSnappy',
)
class PerMessageSnappyMixin(object):
"""
Mixin class for this extension.
"""
EXTENSION_NAME = "permessage-snappy"
"""
Name of this WebSocket extension.
"""
class PerMessageSnappyOffer(PerMessageCompressOffer, PerMessageSnappyMixin):
"""
Set of extension parameters for `permessage-snappy` WebSocket extension
offered by a client to a server.
"""
@classmethod
def parse(cls, params):
"""
Parses a WebSocket extension offer for `permessage-snappy` provided by a client to a server.
:param params: Output from :func:`autobahn.websocket.WebSocketProtocol._parseExtensionsHeader`.
:type params: list
:returns: A new instance of :class:`autobahn.compress.PerMessageSnappyOffer`.
:rtype: obj
"""
# extension parameter defaults
accept_no_context_takeover = False
request_no_context_takeover = False
# verify/parse client ("client-to-server direction") parameters of permessage-snappy offer
for p in params:
if len(params[p]) > 1:
raise Exception("multiple occurrence of extension parameter '%s' for extension '%s'" % (p, cls.EXTENSION_NAME))
val = params[p][0]
if p == 'client_no_context_takeover':
# noinspection PySimplifyBooleanCheck
if val is not True:
raise Exception("illegal extension parameter value '%s' for parameter '%s' of extension '%s'" % (val, p, cls.EXTENSION_NAME))
else:
accept_no_context_takeover = True
elif p == 'server_no_context_takeover':
# noinspection PySimplifyBooleanCheck
if val is not True:
raise Exception("illegal extension parameter value '%s' for parameter '%s' of extension '%s'" % (val, p, cls.EXTENSION_NAME))
else:
request_no_context_takeover = True
else:
raise Exception("illegal extension parameter '%s' for extension '%s'" % (p, cls.EXTENSION_NAME))
offer = cls(accept_no_context_takeover,
request_no_context_takeover)
return offer
def __init__(self,
accept_no_context_takeover=True,
request_no_context_takeover=False):
"""
:param accept_no_context_takeover: Iff true, client accepts "no context takeover" feature.
:type accept_no_context_takeover: bool
:param request_no_context_takeover: Iff true, client requests "no context takeover" feature.
:type request_no_context_takeover: bool
"""
if type(accept_no_context_takeover) != bool:
raise Exception("invalid type %s for accept_no_context_takeover" % type(accept_no_context_takeover))
self.accept_no_context_takeover = accept_no_context_takeover
if type(request_no_context_takeover) != bool:
raise Exception("invalid type %s for request_no_context_takeover" % type(request_no_context_takeover))
self.request_no_context_takeover = request_no_context_takeover
def get_extension_string(self):
"""
Returns the WebSocket extension configuration string as sent to the server.
:returns: PMCE configuration string.
:rtype: str
"""
pmce_string = self.EXTENSION_NAME
if self.accept_no_context_takeover:
pmce_string += "; client_no_context_takeover"
if self.request_no_context_takeover:
pmce_string += "; server_no_context_takeover"
return pmce_string
def __json__(self):
"""
Returns a JSON serializable object representation.
:returns: JSON serializable representation.
:rtype: dict
"""
return {'extension': self.EXTENSION_NAME,
'accept_no_context_takeover': self.accept_no_context_takeover,
'request_no_context_takeover': self.request_no_context_takeover}
def __repr__(self):
"""
Returns Python object representation that can be eval'ed to reconstruct the object.
:returns: Python string representation.
:rtype: str
"""
return "PerMessageSnappyOffer(accept_no_context_takeover = %s, request_no_context_takeover = %s)" % (self.accept_no_context_takeover, self.request_no_context_takeover)
class PerMessageSnappyOfferAccept(PerMessageCompressOfferAccept, PerMessageSnappyMixin):
"""
Set of parameters with which to accept a `permessage-snappy` offer
from a client by a server.
"""
def __init__(self,
offer,
request_no_context_takeover=False,
no_context_takeover=None):
"""
:param offer: The offer being accepted.
:type offer: Instance of :class:`autobahn.compress.PerMessageSnappyOffer`.
:param request_no_context_takeover: Iff true, server requests "no context takeover" feature.
:type request_no_context_takeover: bool
:param no_context_takeover: Override server ("server-to-client direction") context takeover (this must be compatible with offer).
:type no_context_takeover: bool
"""
if not isinstance(offer, PerMessageSnappyOffer):
raise Exception("invalid type %s for offer" % type(offer))
self.offer = offer
if type(request_no_context_takeover) != bool:
raise Exception("invalid type %s for request_no_context_takeover" % type(request_no_context_takeover))
if request_no_context_takeover and not offer.accept_no_context_takeover:
raise Exception("invalid value %s for request_no_context_takeover - feature unsupported by client" % request_no_context_takeover)
self.request_no_context_takeover = request_no_context_takeover
if no_context_takeover is not None:
if type(no_context_takeover) != bool:
raise Exception("invalid type %s for no_context_takeover" % type(no_context_takeover))
if offer.request_no_context_takeover and not no_context_takeover:
raise Exception("invalid value %s for no_context_takeover - client requested feature" % no_context_takeover)
self.no_context_takeover = no_context_takeover
def get_extension_string(self):
"""
Returns the WebSocket extension configuration string as sent to the client.
:returns: PMCE configuration string.
:rtype: str
"""
pmce_string = self.EXTENSION_NAME
if self.offer.request_no_context_takeover:
pmce_string += "; server_no_context_takeover"
if self.request_no_context_takeover:
pmce_string += "; client_no_context_takeover"
return pmce_string
def __json__(self):
"""
Returns a JSON serializable object representation.
:returns: JSON serializable representation.
:rtype: dict
"""
return {'extension': self.EXTENSION_NAME,
'offer': self.offer.__json__(),
'request_no_context_takeover': self.request_no_context_takeover,
'no_context_takeover': self.no_context_takeover}
def __repr__(self):
"""
Returns Python object representation that can be eval'ed to reconstruct the object.
:returns: Python string representation.
:rtype: str
"""
return "PerMessageSnappyAccept(offer = %s, request_no_context_takeover = %s, no_context_takeover = %s)" % (self.offer.__repr__(), self.request_no_context_takeover, self.no_context_takeover)
class PerMessageSnappyResponse(PerMessageCompressResponse, PerMessageSnappyMixin):
"""
Set of parameters for `permessage-snappy` responded by server.
"""
@classmethod
def parse(cls, params):
"""
Parses a WebSocket extension response for `permessage-snappy` provided by a server to a client.
:param params: Output from :func:`autobahn.websocket.WebSocketProtocol._parseExtensionsHeader`.
:type params: list
:returns: A new instance of :class:`autobahn.compress.PerMessageSnappyResponse`.
:rtype: obj
"""
client_no_context_takeover = False
server_no_context_takeover = False
for p in params:
if len(params[p]) > 1:
raise Exception("multiple occurrence of extension parameter '%s' for extension '%s'" % (p, cls.EXTENSION_NAME))
val = params[p][0]
if p == 'client_no_context_takeover':
# noinspection PySimplifyBooleanCheck
if val is not True:
raise Exception("illegal extension parameter value '%s' for parameter '%s' of extension '%s'" % (val, p, cls.EXTENSION_NAME))
else:
client_no_context_takeover = True
elif p == 'server_no_context_takeover':
# noinspection PySimplifyBooleanCheck
if val is not True:
raise Exception("illegal extension parameter value '%s' for parameter '%s' of extension '%s'" % (val, p, cls.EXTENSION_NAME))
else:
server_no_context_takeover = True
else:
raise Exception("illegal extension parameter '%s' for extension '%s'" % (p, cls.EXTENSION_NAME))
response = cls(client_no_context_takeover,
server_no_context_takeover)
return response
def __init__(self,
client_no_context_takeover,
server_no_context_takeover):
self.client_no_context_takeover = client_no_context_takeover
self.server_no_context_takeover = server_no_context_takeover
def __json__(self):
"""
Returns a JSON serializable object representation.
:returns: JSON serializable representation.
:rtype: dict
"""
return {'extension': self.EXTENSION_NAME,
'client_no_context_takeover': self.client_no_context_takeover,
'server_no_context_takeover': self.server_no_context_takeover}
def __repr__(self):
"""
Returns Python object representation that can be eval'ed to reconstruct the object.
:returns: Python string representation.
:rtype: str
"""
return "PerMessageSnappyResponse(client_no_context_takeover = %s, server_no_context_takeover = %s)" % (self.client_no_context_takeover, self.server_no_context_takeover)
class PerMessageSnappyResponseAccept(PerMessageCompressResponseAccept, PerMessageSnappyMixin):
"""
Set of parameters with which to accept a `permessage-snappy` response
from a server by a client.
"""
def __init__(self,
response,
no_context_takeover=None):
"""
:param response: The response being accepted.
:type response: Instance of :class:`autobahn.compress.PerMessageSnappyResponse`.
:param no_context_takeover: Override client ("client-to-server direction") context takeover (this must be compatible with response).
:type no_context_takeover: bool
"""
if not isinstance(response, PerMessageSnappyResponse):
raise Exception("invalid type %s for response" % type(response))
self.response = response
if no_context_takeover is not None:
if type(no_context_takeover) != bool:
raise Exception("invalid type %s for no_context_takeover" % type(no_context_takeover))
if response.client_no_context_takeover and not no_context_takeover:
raise Exception("invalid value %s for no_context_takeover - server requested feature" % no_context_takeover)
self.no_context_takeover = no_context_takeover
def __json__(self):
"""
Returns a JSON serializable object representation.
:returns: JSON serializable representation.
:rtype: dict
"""
return {'extension': self.EXTENSION_NAME,
'response': self.response.__json__(),
'no_context_takeover': self.no_context_takeover}
def __repr__(self):
"""
Returns Python object representation that can be eval'ed to reconstruct the object.
:returns: Python string representation.
:rtype: str
"""
return "PerMessageSnappyResponseAccept(response = %s, no_context_takeover = %s)" % (self.response.__repr__(), self.no_context_takeover)
class PerMessageSnappy(PerMessageCompress, PerMessageSnappyMixin):
"""
`permessage-snappy` WebSocket extension processor.
"""
@classmethod
def create_from_response_accept(cls, is_server, accept):
pmce = cls(is_server,
accept.response.server_no_context_takeover,
accept.no_context_takeover if accept.no_context_takeover is not None else accept.response.client_no_context_takeover)
return pmce
@classmethod
def create_from_offer_accept(cls, is_server, accept):
pmce = cls(is_server,
accept.no_context_takeover if accept.no_context_takeover is not None else accept.offer.request_no_context_takeover,
accept.request_no_context_takeover)
return pmce
def __init__(self,
is_server,
server_no_context_takeover,
client_no_context_takeover):
self._is_server = is_server
self.server_no_context_takeover = server_no_context_takeover
self.client_no_context_takeover = client_no_context_takeover
self._compressor = None
self._decompressor = None
def __json__(self):
return {'extension': self.EXTENSION_NAME,
'server_no_context_takeover': self.server_no_context_takeover,
'client_no_context_takeover': self.client_no_context_takeover}
def __repr__(self):
return "PerMessageSnappy(is_server = %s, server_no_context_takeover = %s, client_no_context_takeover = %s)" % (self._is_server, self.server_no_context_takeover, self.client_no_context_takeover)
def start_compress_message(self):
if self._is_server:
if self._compressor is None or self.server_no_context_takeover:
self._compressor = snappy.StreamCompressor()
else:
if self._compressor is None or self.client_no_context_takeover:
self._compressor = snappy.StreamCompressor()
def compress_message_data(self, data):
return self._compressor.add_chunk(data)
def end_compress_message(self):
return b""
def start_decompress_message(self):
if self._is_server:
if self._decompressor is None or self.client_no_context_takeover:
self._decompressor = snappy.StreamDecompressor()
else:
if self._decompressor is None or self.server_no_context_takeover:
self._decompressor = snappy.StreamDecompressor()
def decompress_message_data(self, data):
return self._decompressor.decompress(data)
def end_decompress_message(self):
pass
|
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN
class JuiceNetDevice(CoordinatorEntity):
"""Represent a base JuiceNet device."""
def __init__(self, device, sensor_type, coordinator):
"""Initialise the sensor."""
super().__init__(coordinator)
self.device = device
self.type = sensor_type
@property
def name(self):
"""Return the name of the device."""
return self.device.name
@property
def unique_id(self):
"""Return a unique ID."""
return f"{self.device.id}-{self.type}"
@property
def device_info(self):
"""Return device information about this JuiceNet Device."""
return {
"identifiers": {(DOMAIN, self.device.id)},
"name": self.device.name,
"manufacturer": "JuiceNet",
}
|
import os
import random
import string
import base64
from ast import literal_eval
_basedir = os.path.abspath(os.path.dirname(__file__))
CORS = os.environ.get("CORS") == "True"
debug = os.environ.get("DEBUG") == "True"
def get_random_secret(length):
secret_key = ''.join(random.choice(string.ascii_uppercase) for x in range(round(length / 4)))
secret_key = secret_key + ''.join(random.choice("~!@#$%^&*()_+") for x in range(round(length / 4)))
secret_key = secret_key + ''.join(random.choice(string.ascii_lowercase) for x in range(round(length / 4)))
return secret_key + ''.join(random.choice(string.digits) for x in range(round(length / 4)))
SECRET_KEY = repr(os.environ.get('SECRET_KEY', get_random_secret(32).encode('utf8')))
LEMUR_TOKEN_SECRET = repr(os.environ.get('LEMUR_TOKEN_SECRET',
base64.b64encode(get_random_secret(32).encode('utf8'))))
LEMUR_ENCRYPTION_KEYS = repr(os.environ.get('LEMUR_ENCRYPTION_KEYS',
base64.b64encode(get_random_secret(32).encode('utf8'))))
LEMUR_ALLOWED_DOMAINS = []
LEMUR_EMAIL = ''
LEMUR_SECURITY_TEAM_EMAIL = []
ALLOW_CERT_DELETION = os.environ.get('ALLOW_CERT_DELETION') == "True"
LEMUR_DEFAULT_COUNTRY = str(os.environ.get('LEMUR_DEFAULT_COUNTRY',''))
LEMUR_DEFAULT_STATE = str(os.environ.get('LEMUR_DEFAULT_STATE',''))
LEMUR_DEFAULT_LOCATION = str(os.environ.get('LEMUR_DEFAULT_LOCATION',''))
LEMUR_DEFAULT_ORGANIZATION = str(os.environ.get('LEMUR_DEFAULT_ORGANIZATION',''))
LEMUR_DEFAULT_ORGANIZATIONAL_UNIT = str(os.environ.get('LEMUR_DEFAULT_ORGANIZATIONAL_UNIT',''))
LEMUR_DEFAULT_ISSUER_PLUGIN = str(os.environ.get('LEMUR_DEFAULT_ISSUER_PLUGIN',''))
LEMUR_DEFAULT_AUTHORITY = str(os.environ.get('LEMUR_DEFAULT_AUTHORITY',''))
ACTIVE_PROVIDERS = []
METRIC_PROVIDERS = []
LOG_LEVEL = str(os.environ.get('LOG_LEVEL','DEBUG'))
LOG_FILE = str(os.environ.get('LOG_FILE','/home/lemur/.lemur/lemur.log'))
SQLALCHEMY_DATABASE_URI = os.environ.get('SQLALCHEMY_DATABASE_URI','postgresql://lemur:lemur@localhost:5432/lemur')
LDAP_DEBUG = os.environ.get('LDAP_DEBUG') == "True"
LDAP_AUTH = os.environ.get('LDAP_AUTH') == "True"
LDAP_IS_ACTIVE_DIRECTORY = os.environ.get('LDAP_IS_ACTIVE_DIRECTORY') == "True"
LDAP_BIND_URI = str(os.environ.get('LDAP_BIND_URI',''))
LDAP_BASE_DN = str(os.environ.get('LDAP_BASE_DN',''))
LDAP_EMAIL_DOMAIN = str(os.environ.get('LDAP_EMAIL_DOMAIN',''))
LDAP_USE_TLS = str(os.environ.get('LDAP_USE_TLS',''))
LDAP_REQUIRED_GROUP = str(os.environ.get('LDAP_REQUIRED_GROUP',''))
LDAP_GROUPS_TO_ROLES = literal_eval(os.environ.get('LDAP_GROUPS_TO_ROLES') or "{}")
|
import io
import logging
import os
import traceback
import google.auth
_LOG_TO_FILE_ENV = os.getenv("KAGGLE_LOG_TO_FILE")
class _LogFormatter(logging.Formatter):
"""A logging formatter which truncates long messages."""
_MAX_LOG_LENGTH = 10000  # Be generous, so as not to truncate long backtraces.
def format(self, record):
msg = super(_LogFormatter, self).format(record)
return msg[:_LogFormatter._MAX_LOG_LENGTH] if msg else msg
# TODO(vimota): Clean this up once we're using python 3.8 and can use
# (https://github.com/python/cpython/commit/dde9fdbe453925279ac3d2a6a72102f6f9ef247c)
# Right now, making the logging module display the intended frame's information
# when the logging calls (info, warn, ...) are wrapped (as is the case in our
# Log class) involves fragile logic.
class _Logger(logging.Logger):
# This is a copy of logging.Logger.findCaller with the filename ignore
# set expanded to include the current filename (".../log.py").
# Copyright 2001-2015 by Vinay Sajip. All Rights Reserved.
# License: https://github.com/python/cpython/blob/ce9e62544571e7ade7186697d5dd065fb4c5243f/LICENSE
def findCaller(self, stack_info=False):
f = logging.currentframe()
f = f.f_back
rv = "(unknown file)", 0, "(unknown function)", None
while hasattr(f, "f_code"):
co = f.f_code
filename = os.path.normcase(co.co_filename)
if filename in _ignore_srcfiles:
f = f.f_back
continue
sinfo = None
if stack_info:
sio = io.StringIO()
sio.write('Stack (most recent call last):\n')
traceback.print_stack(f, file=sio)
sinfo = sio.getvalue()
if sinfo[-1] == '\n':
sinfo = sinfo[:-1]
sio.close()
rv = (co.co_filename, f.f_lineno, co.co_name, sinfo)
break
return rv
_srcfile = os.path.normcase(_Logger.findCaller.__code__.co_filename)
_ignore_srcfiles = (_srcfile, logging._srcfile)
class Log:
""" Helper aggregate for all things related to logging activity. """
_GLOBAL_LOG = logging.getLogger("")
_initialized = False
# These are convenience helpers. For performance, consider saving Log.get_logger() and using that
@staticmethod
def critical(msg, *args, **kwargs):
Log._GLOBAL_LOG.critical(msg, *args, **kwargs)
@staticmethod
def fatal(msg, *args, **kwargs):
Log._GLOBAL_LOG.fatal(msg, *args, **kwargs)
@staticmethod
def exception(msg, *args, **kwargs):
Log._GLOBAL_LOG.exception(msg, *args, **kwargs)
@staticmethod
def error(msg, *args, **kwargs):
Log._GLOBAL_LOG.error(msg, *args, **kwargs)
@staticmethod
def warn(msg, *args, **kwargs):
Log._GLOBAL_LOG.warn(msg, *args, **kwargs)
@staticmethod
def warning(msg, *args, **kwargs):
Log._GLOBAL_LOG.warning(msg, *args, **kwargs)
@staticmethod
def debug(msg, *args, **kwargs):
Log._GLOBAL_LOG.debug(msg, *args, **kwargs)
@staticmethod
def info(msg, *args, **kwargs):
Log._GLOBAL_LOG.info(msg, *args, **kwargs)
@staticmethod
def set_level(loglevel):
if isinstance(loglevel, int):
Log._GLOBAL_LOG.setLevel(loglevel)
return
elif isinstance(loglevel, str):
# idea from https://docs.python.org/3.5/howto/logging.html#logging-to-a-file
numeric_level = getattr(logging, loglevel.upper(), None)
if isinstance(numeric_level, int):
Log._GLOBAL_LOG.setLevel(numeric_level)
return
raise ValueError('Invalid log level: %s' % loglevel)
@staticmethod
def _static_init():
if Log._initialized:
return
logging.setLoggerClass(_Logger)
# The root logger's type is unfortunately (and surprisingly) not affected by
# `setLoggerClass`. Monkey patch it instead. TODO(vimota): Remove this, see the TODO
# associated with _Logger.
logging.RootLogger.findCaller = _Logger.findCaller
log_to_file = _LOG_TO_FILE_ENV.lower() in ("yes", "true", "t", "1") if _LOG_TO_FILE_ENV is not None else True
if log_to_file:
handler = logging.FileHandler(filename='/tmp/kaggle.log', mode='w')
else:
handler = logging.StreamHandler()
# ".1s" is for the first letter: http://stackoverflow.com/a/27453084/1869.
format_string = "%(asctime)s %(levelname).1s %(process)d %(filename)s:%(lineno)d] %(message)s"
handler.setFormatter(_LogFormatter(format_string))
logging.basicConfig(level=logging.INFO, handlers=[handler])
Log._initialized = True
Log._static_init()
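# Hedged usage sketch (not part of the original module): the static helpers above proxy to
# the root logger configured by _static_init(), so a quick smoke test looks like this.
if __name__ == "__main__":
    Log.set_level("DEBUG")
    Log.info("log module smoke test, pid=%s", os.getpid())
    try:
        1 / 0
    except ZeroDivisionError:
        # exception() logs at ERROR level and appends the traceback to the record.
        Log.exception("expected demo failure")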
|
import sys
from markups import MarkdownMarkup, ReStructuredTextMarkup
from PyQt5.QtGui import QTextCursor
LARGER_THAN_ANYTHING = sys.maxsize
class Row:
def __init__(self, block=None, text=None, separatorline=False, paddingchar=' '):
self.block = block
self.text = text
self.separatorline = separatorline
self.paddingchar = paddingchar
def __repr__(self):
return "<Row '%s' %s '%s'>" % (self.text, self.separatorline, self.paddingchar)
def _getTableLines(doc, pos, markupClass):
startblock = doc.findBlock(pos)
editedlineindex = 0
offset = pos - startblock.position()
rows = [ Row(block = startblock,
text = startblock.text()) ]
block = startblock.previous()
while any(c in block.text() for c in '+|'):
rows.insert(0, Row(block = block,
text = block.text()))
editedlineindex += 1
block = block.previous()
block = startblock.next()
while any(c in block.text() for c in '+|'):
rows.append(Row(block = block,
text = block.text()))
block = block.next()
if markupClass == MarkdownMarkup:
for i, row in enumerate(rows):
if i == 1:
row.separatorline = True
row.paddingchar = '-'
elif markupClass == ReStructuredTextMarkup:
for row in rows:
if row.text.strip().startswith(('+-','+=')):
row.separatorline = True
row.paddingchar = row.text.strip()[1]
row.text = row.text.replace('+', '|')
return rows, editedlineindex, offset
# Modify the edited line to put the table borders after the edition in their original positions.
# It does not matter that this function changes the position of table borders before the edition,
# because table editing mode only ever changes the table to the right of the cursor position.
def _sortaUndoEdit(rows, editedlineindex, offset, editsize):
aftertext = rows[editedlineindex].text
if editsize < 0:
beforetext = ' ' * -editsize + aftertext
else:
beforetext = aftertext[:offset] + aftertext[offset + editsize:]
rows[editedlineindex].text = beforetext
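# Illustrative sketch (hypothetical values, not executed): an insertion of editsize=2 at
# offset=4 turns
#     rows[i].text == '| ab12 | c |'
# back into
#     rows[i].text == '| ab | c |'
# while a deletion (editsize=-2) re-adds the removed width as two leading spaces, which is
# acceptable because only the borders to the right of the cursor are adjusted afterwards.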
# Given text and the position of the n-th edge, returns n - 1
def _getEdgeIndex(text, edge):
return text[:edge].count('|')
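# For example (hypothetical value, not exercised elsewhere in this file):
#     _getEdgeIndex('| a | b |', 4) == 1
# because text[:4] == '| a ' contains a single '|'; the edge at offset 4 is the second edge
# in the line, so its zero-based index is returned.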
def _determineRoomInCell(row, edge, edgeIndex, shrinking, startposition=0):
if len(row.text) > edge and row.text[edge] == '|' and \
(not edgeIndex or _getEdgeIndex(row.text, edge) == edgeIndex):
clearance = 0
cellwidth = 0
afterContent = True
for i in range(edge - 1, startposition - 1, -1):
if row.text[i] == '|':
break
else:
if row.text[i] == row.paddingchar and afterContent:
clearance += 1
else:
afterContent = False
cellwidth += 1
if row.separatorline:
if shrinking:
# do not shrink separator cells below 3
room = max(0, cellwidth - 3)
else:
# start expanding the cell if only the space for a right-align marker is left
room = max(0, cellwidth - 1)
else:
room = clearance
else:
room = LARGER_THAN_ANYTHING
return room
# Add an edit for a row to match the specified shift if it has an edge on the
# specified position
def _performShift(row, rowShift, edge, edgeIndex, shift):
editlist = []
# Any row that has an edge on the specified position and that doesn't
# already have edits that shift it 'shift' positions, will get an
# additional edit
if len(row.text) > edge and row.text[edge] == '|' and rowShift != shift and \
(not edgeIndex or _getEdgeIndex(row.text, edge) == edgeIndex):
editsize = -(rowShift - shift)
rowShift = shift
        # Insert one position further to the left on separator lines, because
        # there may be a space (for aesthetic reasons) or an alignment marker
        # on the last position before the edge and that should stay next to the
        # edge.
if row.separatorline:
edge -= 1
editlist.append((edge, editsize))
return editlist, rowShift
# Finds the next edge position starting at offset in any row that is shifting.
# Rows that are not shifting when we search for an edge starting at offset are
# rows that (up to offset) did not have any edges aligned with shifting edges
# on other rows.
def _determineNextEdge(rows, rowShifts, offset):
nextedge = None
nextedgerow = None
for row, rowShift in zip(rows, rowShifts):
if rowShift != 0:
edge = row.text.find('|', offset)
if edge != -1 and (nextedge is None or edge < nextedge):
nextedge = edge
nextedgerow = row
    return nextedge, _getEdgeIndex(nextedgerow.text, nextedge) if nextedge is not None else None
# Return a list of edits to be made in other lines to adapt the table lines to
# a single edit in the edited line.
def _determineEditLists(rows, editedlineindex, offset, editsize, alignWithAnyEdge):
# rowShift represents how much the characters on a line will shift as a
# result of the already collected edits to be made.
rowShifts = [0 for _ in rows]
rowShifts[editedlineindex] = editsize
editLists = [[] for _ in rows]
# Find the next edge position on the edited row
currentedge, currentedgeindex = _determineNextEdge(rows, rowShifts, offset)
firstEdge = True
    while currentedge is not None:
if alignWithAnyEdge:
# Ignore what column the edge belongs to
currentedgeindex = None
if editsize < 0:
# How much an edge shifts to the left depends on how much room
# there is in the cells on any row that shares this edge.
leastLeftShift = min((-rowShift + _determineRoomInCell(row, currentedge, currentedgeindex, True)
for row, rowShift in zip(rows, rowShifts)))
shift = max(editsize, -leastLeftShift)
else:
# When shifting right, determine how much only once based on how
# much the edited cell needs to expand
if firstEdge:
room = _determineRoomInCell(rows[editedlineindex], currentedge, currentedgeindex, False, offset)
shift = max(0, editsize - room)
for i, row in enumerate(rows):
editList, newRowShift = _performShift(row, rowShifts[i], currentedge, currentedgeindex, shift)
rowShifts[i] = newRowShift
editLists[i].extend(editList)
currentedge, currentedgeindex = _determineNextEdge(rows, rowShifts, currentedge + 1)
firstEdge = False
return editLists
def _performEdits(cursor, rows, editLists, linewithoffset, offset):
cursor.joinPreviousEditBlock()
for i, (row, editList) in enumerate(zip(rows, editLists)):
for editpos, editsize in sorted(editList, reverse=True):
if i == linewithoffset:
editpos += offset
cursor.setPosition(row.block.position() + editpos)
if editsize > 0:
cursor.insertText(editsize * row.paddingchar)
else:
for _ in range(-editsize):
cursor.deletePreviousChar()
cursor.endEditBlock()
def adjustTableToChanges(doc, pos, editsize, markupClass):
if markupClass in (MarkdownMarkup, ReStructuredTextMarkup):
        # This is needed because in reST cells can span multiple columns,
        # so we cannot determine which edges in other rows are supposed to
        # be aligned with the edges in the edited row.
alignWithAnyEdge = (markupClass == ReStructuredTextMarkup)
rows, editedlineindex, offset = _getTableLines(doc, pos, markupClass)
_sortaUndoEdit(rows, editedlineindex, offset, editsize)
editLists = _determineEditLists(rows, editedlineindex, offset, editsize, alignWithAnyEdge)
cursor = QTextCursor(doc)
_performEdits(cursor, rows, editLists, editedlineindex, editsize)
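# Hedged usage sketch (editor and variable names are hypothetical): after the user has typed
# two characters into a Markdown table cell, the caller would invoke
#     adjustTableToChanges(editor.document(), positionBeforeEdit, 2, MarkdownMarkup)
# which widens the edited column in every other table row so the '|' edges line up again,
# joining all adjustments to the user's own edit in a single undo block.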
def handleReturn(cursor, markupClass, newRow):
if markupClass not in (MarkdownMarkup, ReStructuredTextMarkup):
return False
positionInBlock = cursor.positionInBlock()
cursor.select(QTextCursor.BlockUnderCursor)
oldLine = cursor.selectedText().lstrip('\u2029')
if not ('| ' in oldLine or ' |' in oldLine):
cursor.setPosition(cursor.block().position() + positionInBlock)
return False
indent = 0
while oldLine[indent] in ' \t':
indent += 1
indentChars, oldLine = oldLine[:indent], oldLine[indent:]
newLine = ''.join('|' if c in '+|' else ' ' for c in oldLine).rstrip()
cursor.movePosition(QTextCursor.EndOfBlock)
if newRow and markupClass == MarkdownMarkup:
sepLine = ''.join(c if c in ' |' else '-' for c in oldLine)
cursor.insertText('\n' + indentChars + sepLine)
elif newRow:
sepLine = ''.join('+' if c in '+|' else '-' for c in oldLine)
cursor.insertText('\n' + indentChars + sepLine)
cursor.insertText('\n' + indentChars + newLine)
positionInBlock = min(positionInBlock, len(indentChars + newLine))
cursor.setPosition(cursor.block().position() + positionInBlock)
return True
|
import pickle
import pytest
import sys
from collections import defaultdict
from unittest.mock import Mock, patch
from kombu import Connection, Consumer, Producer, Exchange, Queue
from kombu.exceptions import MessageStateError
from kombu.utils import json
from kombu.utils.functional import ChannelPromise
from t.mocks import Transport
class test_Producer:
def setup(self):
self.exchange = Exchange('foo', 'direct')
self.connection = Connection(transport=Transport)
self.connection.connect()
assert self.connection.connection.connected
assert not self.exchange.is_bound
def test_repr(self):
p = Producer(self.connection)
assert repr(p)
def test_pickle(self):
chan = Mock()
producer = Producer(chan, serializer='pickle')
p2 = pickle.loads(pickle.dumps(producer))
assert p2.serializer == producer.serializer
def test_no_channel(self):
p = Producer(None)
assert not p._channel
@patch('kombu.messaging.maybe_declare')
def test_maybe_declare(self, maybe_declare):
p = self.connection.Producer()
q = Queue('foo')
p.maybe_declare(q)
maybe_declare.assert_called_with(q, p.channel, False)
@patch('kombu.common.maybe_declare')
def test_maybe_declare_when_entity_false(self, maybe_declare):
p = self.connection.Producer()
p.maybe_declare(None)
maybe_declare.assert_not_called()
def test_auto_declare(self):
channel = self.connection.channel()
p = Producer(channel, self.exchange, auto_declare=True)
# creates Exchange clone at bind
assert p.exchange is not self.exchange
assert p.exchange.is_bound
        # auto_declare declares exchange
assert 'exchange_declare' not in channel
p.publish('foo')
assert 'exchange_declare' in channel
def test_manual_declare(self):
channel = self.connection.channel()
p = Producer(channel, self.exchange, auto_declare=False)
assert p.exchange.is_bound
# auto_declare=False does not declare exchange
assert 'exchange_declare' not in channel
        # p.declare() declares exchange
p.declare()
assert 'exchange_declare' in channel
def test_prepare(self):
message = {'the quick brown fox': 'jumps over the lazy dog'}
channel = self.connection.channel()
p = Producer(channel, self.exchange, serializer='json')
m, ctype, cencoding = p._prepare(message, headers={})
assert json.loads(m) == message
assert ctype == 'application/json'
assert cencoding == 'utf-8'
def test_prepare_compression(self):
message = {'the quick brown fox': 'jumps over the lazy dog'}
channel = self.connection.channel()
p = Producer(channel, self.exchange, serializer='json')
headers = {}
m, ctype, cencoding = p._prepare(message, compression='zlib',
headers=headers)
assert ctype == 'application/json'
assert cencoding == 'utf-8'
assert headers['compression'] == 'application/x-gzip'
import zlib
assert json.loads(zlib.decompress(m).decode('utf-8')) == message
def test_prepare_custom_content_type(self):
message = b'the quick brown fox'
channel = self.connection.channel()
p = Producer(channel, self.exchange, serializer='json')
m, ctype, cencoding = p._prepare(message, content_type='custom')
assert m == message
assert ctype == 'custom'
assert cencoding == 'binary'
m, ctype, cencoding = p._prepare(message, content_type='custom',
content_encoding='alien')
assert m == message
assert ctype == 'custom'
assert cencoding == 'alien'
def test_prepare_is_already_unicode(self):
message = 'the quick brown fox'
channel = self.connection.channel()
p = Producer(channel, self.exchange, serializer='json')
m, ctype, cencoding = p._prepare(message, content_type='text/plain')
assert m == message.encode('utf-8')
assert ctype == 'text/plain'
assert cencoding == 'utf-8'
m, ctype, cencoding = p._prepare(message, content_type='text/plain',
content_encoding='utf-8')
assert m == message.encode('utf-8')
assert ctype == 'text/plain'
assert cencoding == 'utf-8'
def test_publish_with_Exchange_instance(self):
p = self.connection.Producer()
p.channel = Mock()
p.channel.connection.client.declared_entities = set()
p.publish('hello', exchange=Exchange('foo'), delivery_mode='transient')
assert p._channel.basic_publish.call_args[1]['exchange'] == 'foo'
def test_publish_with_expiration(self):
p = self.connection.Producer()
p.channel = Mock()
p.channel.connection.client.declared_entities = set()
p.publish('hello', exchange=Exchange('foo'), expiration=10)
properties = p._channel.prepare_message.call_args[0][5]
assert properties['expiration'] == '10000'
def test_publish_with_timeout(self):
p = self.connection.Producer()
p.channel = Mock()
p.channel.connection.client.declared_entities = set()
p.publish('test_timeout', exchange=Exchange('foo'), timeout=1)
timeout = p._channel.basic_publish.call_args[1]['timeout']
assert timeout == 1
def test_publish_with_reply_to(self):
p = self.connection.Producer()
p.channel = Mock()
p.channel.connection.client.declared_entities = set()
assert not p.exchange.name
p.publish('hello', exchange=Exchange('foo'), reply_to=Queue('foo'))
properties = p._channel.prepare_message.call_args[0][5]
assert properties['reply_to'] == 'foo'
def test_set_on_return(self):
chan = Mock()
chan.events = defaultdict(Mock)
p = Producer(ChannelPromise(lambda: chan), on_return='on_return')
p.channel
chan.events['basic_return'].add.assert_called_with('on_return')
def test_publish_retry_calls_ensure(self):
p = Producer(Mock())
p._connection = Mock()
p._connection.declared_entities = set()
ensure = p.connection.ensure = Mock()
p.publish('foo', exchange='foo', retry=True)
ensure.assert_called()
def test_publish_retry_with_declare(self):
p = self.connection.Producer()
p.maybe_declare = Mock()
p.connection.ensure = Mock()
ex = Exchange('foo')
p._publish('hello', 0, '', '', {}, {}, 'rk', 0, 0, ex, declare=[ex])
p.maybe_declare.assert_called_with(ex)
def test_revive_when_channel_is_connection(self):
p = self.connection.Producer()
p.exchange = Mock()
new_conn = Connection('memory://')
defchan = new_conn.default_channel
p.revive(new_conn)
assert p.channel is defchan
p.exchange.revive.assert_called_with(defchan)
def test_enter_exit(self):
p = self.connection.Producer()
p.release = Mock()
assert p.__enter__() is p
p.__exit__()
p.release.assert_called_with()
def test_connection_property_handles_AttributeError(self):
p = self.connection.Producer()
p.channel = object()
p.__connection__ = None
assert p.connection is None
def test_publish(self):
channel = self.connection.channel()
p = Producer(channel, self.exchange, serializer='json')
message = {'the quick brown fox': 'jumps over the lazy dog'}
ret = p.publish(message, routing_key='process')
assert 'prepare_message' in channel
assert 'basic_publish' in channel
m, exc, rkey = ret
assert json.loads(m['body']) == message
assert m['content_type'] == 'application/json'
assert m['content_encoding'] == 'utf-8'
assert m['priority'] == 0
assert m['properties']['delivery_mode'] == 2
assert exc == p.exchange.name
assert rkey == 'process'
def test_no_exchange(self):
chan = self.connection.channel()
p = Producer(chan)
assert not p.exchange.name
def test_revive(self):
chan = self.connection.channel()
p = Producer(chan)
chan2 = self.connection.channel()
p.revive(chan2)
assert p.channel is chan2
assert p.exchange.channel is chan2
def test_on_return(self):
chan = self.connection.channel()
def on_return(exception, exchange, routing_key, message):
pass
p = Producer(chan, on_return=on_return)
assert on_return in chan.events['basic_return']
assert p.on_return
class test_Consumer:
def setup(self):
self.connection = Connection(transport=Transport)
self.connection.connect()
assert self.connection.connection.connected
self.exchange = Exchange('foo', 'direct')
def test_accept(self):
a = Consumer(self.connection)
assert a.accept is None
b = Consumer(self.connection, accept=['json', 'pickle'])
assert b.accept == {
'application/json', 'application/x-python-serialize',
}
c = Consumer(self.connection, accept=b.accept)
assert b.accept == c.accept
def test_enter_exit_cancel_raises(self):
c = Consumer(self.connection)
c.cancel = Mock(name='Consumer.cancel')
c.cancel.side_effect = KeyError('foo')
with c:
pass
c.cancel.assert_called_with()
def test_enter_exit_cancel_not_called_on_connection_error(self):
c = Consumer(self.connection)
c.cancel = Mock(name='Consumer.cancel')
assert self.connection.connection_errors
with pytest.raises(self.connection.connection_errors[0]):
with c:
raise self.connection.connection_errors[0]()
c.cancel.assert_not_called()
def test_receive_callback_accept(self):
message = Mock(name='Message')
message.errors = []
callback = Mock(name='on_message')
c = Consumer(self.connection, accept=['json'], on_message=callback)
c.on_decode_error = None
c.channel = Mock(name='channel')
c.channel.message_to_python = None
c._receive_callback(message)
callback.assert_called_with(message)
assert message.accept == c.accept
def test_accept__content_disallowed(self):
conn = Connection('memory://')
q = Queue('foo', exchange=self.exchange)
p = conn.Producer()
p.publish(
{'complex': object()},
declare=[q], exchange=self.exchange, serializer='pickle',
)
callback = Mock(name='callback')
with conn.Consumer(queues=[q], callbacks=[callback]) as consumer:
with pytest.raises(consumer.ContentDisallowed):
conn.drain_events(timeout=1)
callback.assert_not_called()
def test_accept__content_allowed(self):
conn = Connection('memory://')
q = Queue('foo', exchange=self.exchange)
p = conn.Producer()
p.publish(
{'complex': object()},
declare=[q], exchange=self.exchange, serializer='pickle',
)
callback = Mock(name='callback')
with conn.Consumer(queues=[q], accept=['pickle'],
callbacks=[callback]):
conn.drain_events(timeout=1)
callback.assert_called()
body, message = callback.call_args[0]
assert body['complex']
def test_set_no_channel(self):
c = Consumer(None)
assert c.channel is None
c.revive(Mock())
assert c.channel
def test_set_no_ack(self):
channel = self.connection.channel()
queue = Queue('qname', self.exchange, 'rkey')
consumer = Consumer(channel, queue, auto_declare=True, no_ack=True)
assert consumer.no_ack
def test_add_queue_when_auto_declare(self):
consumer = self.connection.Consumer(auto_declare=True)
q = Mock()
q.return_value = q
consumer.add_queue(q)
assert q in consumer.queues
q.declare.assert_called_with()
def test_add_queue_when_not_auto_declare(self):
consumer = self.connection.Consumer(auto_declare=False)
q = Mock()
q.return_value = q
consumer.add_queue(q)
assert q in consumer.queues
assert not q.declare.call_count
def test_consume_without_queues_returns(self):
consumer = self.connection.Consumer()
consumer.queues[:] = []
assert consumer.consume() is None
def test_consuming_from(self):
consumer = self.connection.Consumer()
consumer.queues[:] = [Queue('a'), Queue('b'), Queue('d')]
consumer._active_tags = {'a': 1, 'b': 2}
assert not consumer.consuming_from(Queue('c'))
assert not consumer.consuming_from('c')
assert not consumer.consuming_from(Queue('d'))
assert not consumer.consuming_from('d')
assert consumer.consuming_from(Queue('a'))
assert consumer.consuming_from(Queue('b'))
assert consumer.consuming_from('b')
def test_receive_callback_without_m2p(self):
channel = self.connection.channel()
c = channel.Consumer()
m2p = getattr(channel, 'message_to_python')
channel.message_to_python = None
try:
message = Mock()
message.errors = []
message.decode.return_value = 'Hello'
recv = c.receive = Mock()
c._receive_callback(message)
recv.assert_called_with('Hello', message)
finally:
channel.message_to_python = m2p
def test_receive_callback__message_errors(self):
channel = self.connection.channel()
channel.message_to_python = None
c = channel.Consumer()
message = Mock()
try:
raise KeyError('foo')
except KeyError:
message.errors = [sys.exc_info()]
message._reraise_error.side_effect = KeyError()
with pytest.raises(KeyError):
c._receive_callback(message)
def test_set_callbacks(self):
channel = self.connection.channel()
queue = Queue('qname', self.exchange, 'rkey')
callbacks = [lambda x, y: x,
lambda x, y: x]
consumer = Consumer(channel, queue, auto_declare=True,
callbacks=callbacks)
assert consumer.callbacks == callbacks
def test_auto_declare(self):
channel = self.connection.channel()
queue = Queue('qname', self.exchange, 'rkey')
consumer = Consumer(channel, queue, auto_declare=True)
consumer.consume()
consumer.consume() # twice is a noop
assert consumer.queues[0] is not queue
assert consumer.queues[0].is_bound
assert consumer.queues[0].exchange.is_bound
assert consumer.queues[0].exchange is not self.exchange
for meth in ('exchange_declare',
'queue_declare',
'queue_bind',
'basic_consume'):
assert meth in channel
assert channel.called.count('basic_consume') == 1
assert consumer._active_tags
consumer.cancel_by_queue(queue.name)
consumer.cancel_by_queue(queue.name)
assert not consumer._active_tags
def test_consumer_tag_prefix(self):
channel = self.connection.channel()
queue = Queue('qname', self.exchange, 'rkey')
consumer = Consumer(channel, queue, tag_prefix='consumer_')
consumer.consume()
assert consumer._active_tags[queue.name].startswith('consumer_')
def test_manual_declare(self):
channel = self.connection.channel()
queue = Queue('qname', self.exchange, 'rkey')
consumer = Consumer(channel, queue, auto_declare=False)
assert consumer.queues[0] is not queue
assert consumer.queues[0].is_bound
assert consumer.queues[0].exchange.is_bound
assert consumer.queues[0].exchange is not self.exchange
for meth in ('exchange_declare',
'queue_declare',
'basic_consume'):
assert meth not in channel
consumer.declare()
for meth in ('exchange_declare',
'queue_declare',
'queue_bind'):
assert meth in channel
assert 'basic_consume' not in channel
consumer.consume()
assert 'basic_consume' in channel
def test_consume__cancel(self):
channel = self.connection.channel()
queue = Queue('qname', self.exchange, 'rkey')
consumer = Consumer(channel, queue, auto_declare=True)
consumer.consume()
consumer.cancel()
assert 'basic_cancel' in channel
assert not consumer._active_tags
def test___enter____exit__(self):
channel = self.connection.channel()
queue = Queue('qname', self.exchange, 'rkey')
consumer = Consumer(channel, queue, auto_declare=True)
context = consumer.__enter__()
assert context is consumer
assert consumer._active_tags
res = consumer.__exit__(None, None, None)
assert not res
assert 'basic_cancel' in channel
assert not consumer._active_tags
def test_flow(self):
channel = self.connection.channel()
queue = Queue('qname', self.exchange, 'rkey')
consumer = Consumer(channel, queue, auto_declare=True)
consumer.flow(False)
assert 'flow' in channel
def test_qos(self):
channel = self.connection.channel()
queue = Queue('qname', self.exchange, 'rkey')
consumer = Consumer(channel, queue, auto_declare=True)
consumer.qos(30, 10, False)
assert 'basic_qos' in channel
def test_purge(self):
channel = self.connection.channel()
b1 = Queue('qname1', self.exchange, 'rkey')
b2 = Queue('qname2', self.exchange, 'rkey')
b3 = Queue('qname3', self.exchange, 'rkey')
b4 = Queue('qname4', self.exchange, 'rkey')
consumer = Consumer(channel, [b1, b2, b3, b4], auto_declare=True)
consumer.purge()
assert channel.called.count('queue_purge') == 4
def test_multiple_queues(self):
channel = self.connection.channel()
b1 = Queue('qname1', self.exchange, 'rkey')
b2 = Queue('qname2', self.exchange, 'rkey')
b3 = Queue('qname3', self.exchange, 'rkey')
b4 = Queue('qname4', self.exchange, 'rkey')
consumer = Consumer(channel, [b1, b2, b3, b4])
consumer.consume()
assert channel.called.count('exchange_declare') == 4
assert channel.called.count('queue_declare') == 4
assert channel.called.count('queue_bind') == 4
assert channel.called.count('basic_consume') == 4
assert len(consumer._active_tags) == 4
consumer.cancel()
assert channel.called.count('basic_cancel') == 4
assert not len(consumer._active_tags)
def test_receive_callback(self):
channel = self.connection.channel()
b1 = Queue('qname1', self.exchange, 'rkey')
consumer = Consumer(channel, [b1])
received = []
def callback(message_data, message):
received.append(message_data)
message.ack()
message.payload # trigger cache
consumer.register_callback(callback)
consumer._receive_callback({'foo': 'bar'})
assert 'basic_ack' in channel
assert 'message_to_python' in channel
assert received[0] == {'foo': 'bar'}
def test_basic_ack_twice(self):
channel = self.connection.channel()
b1 = Queue('qname1', self.exchange, 'rkey')
consumer = Consumer(channel, [b1])
def callback(message_data, message):
message.ack()
message.ack()
consumer.register_callback(callback)
with pytest.raises(MessageStateError):
consumer._receive_callback({'foo': 'bar'})
def test_basic_reject(self):
channel = self.connection.channel()
b1 = Queue('qname1', self.exchange, 'rkey')
consumer = Consumer(channel, [b1])
def callback(message_data, message):
message.reject()
consumer.register_callback(callback)
consumer._receive_callback({'foo': 'bar'})
assert 'basic_reject' in channel
def test_basic_reject_twice(self):
channel = self.connection.channel()
b1 = Queue('qname1', self.exchange, 'rkey')
consumer = Consumer(channel, [b1])
def callback(message_data, message):
message.reject()
message.reject()
consumer.register_callback(callback)
with pytest.raises(MessageStateError):
consumer._receive_callback({'foo': 'bar'})
assert 'basic_reject' in channel
def test_basic_reject__requeue(self):
channel = self.connection.channel()
b1 = Queue('qname1', self.exchange, 'rkey')
consumer = Consumer(channel, [b1])
def callback(message_data, message):
message.requeue()
consumer.register_callback(callback)
consumer._receive_callback({'foo': 'bar'})
assert 'basic_reject:requeue' in channel
def test_basic_reject__requeue_twice(self):
channel = self.connection.channel()
b1 = Queue('qname1', self.exchange, 'rkey')
consumer = Consumer(channel, [b1])
def callback(message_data, message):
message.requeue()
message.requeue()
consumer.register_callback(callback)
with pytest.raises(MessageStateError):
consumer._receive_callback({'foo': 'bar'})
assert 'basic_reject:requeue' in channel
def test_receive_without_callbacks_raises(self):
channel = self.connection.channel()
b1 = Queue('qname1', self.exchange, 'rkey')
consumer = Consumer(channel, [b1])
with pytest.raises(NotImplementedError):
consumer.receive(1, 2)
def test_decode_error(self):
channel = self.connection.channel()
b1 = Queue('qname1', self.exchange, 'rkey')
consumer = Consumer(channel, [b1])
consumer.channel.throw_decode_error = True
with pytest.raises(ValueError):
consumer._receive_callback({'foo': 'bar'})
def test_on_decode_error_callback(self):
channel = self.connection.channel()
b1 = Queue('qname1', self.exchange, 'rkey')
thrown = []
def on_decode_error(msg, exc):
thrown.append((msg.body, exc))
consumer = Consumer(channel, [b1], on_decode_error=on_decode_error)
consumer.channel.throw_decode_error = True
consumer._receive_callback({'foo': 'bar'})
assert thrown
m, exc = thrown[0]
assert json.loads(m) == {'foo': 'bar'}
assert isinstance(exc, ValueError)
def test_recover(self):
channel = self.connection.channel()
b1 = Queue('qname1', self.exchange, 'rkey')
consumer = Consumer(channel, [b1])
consumer.recover()
assert 'basic_recover' in channel
def test_revive(self):
channel = self.connection.channel()
b1 = Queue('qname1', self.exchange, 'rkey')
consumer = Consumer(channel, [b1])
channel2 = self.connection.channel()
consumer.revive(channel2)
assert consumer.channel is channel2
assert consumer.queues[0].channel is channel2
assert consumer.queues[0].exchange.channel is channel2
def test_revive__with_prefetch_count(self):
channel = Mock(name='channel')
b1 = Queue('qname1', self.exchange, 'rkey')
Consumer(channel, [b1], prefetch_count=14)
channel.basic_qos.assert_called_with(0, 14, False)
def test__repr__(self):
channel = self.connection.channel()
b1 = Queue('qname1', self.exchange, 'rkey')
assert repr(Consumer(channel, [b1]))
def test_connection_property_handles_AttributeError(self):
p = self.connection.Consumer()
p.channel = object()
assert p.connection is None
|
import numpy as np
class Transaction(object):
"""
Handles the transaction of an asset, as used in the
Position class.
Parameters
----------
asset : `str`
The asset symbol of the transaction
quantity : `int`
Whole number quantity of shares in the transaction
dt : `pd.Timestamp`
The date/time of the transaction
price : `float`
The transaction price carried out
order_id : `int`
The unique order identifier
commission : `float`, optional
The trading commission
"""
def __init__(
self,
asset,
quantity,
dt,
price,
order_id,
commission=0.0
):
self.asset = asset
self.quantity = quantity
self.direction = np.copysign(1, self.quantity)
self.dt = dt
self.price = price
self.order_id = order_id
self.commission = commission
def __repr__(self):
"""
Provides a representation of the Transaction
to allow full recreation of the object.
Returns
-------
`str`
The string representation of the Transaction.
"""
return "%s(asset=%s, quantity=%s, dt=%s, " \
"price=%s, order_id=%s)" % (
type(self).__name__, self.asset,
self.quantity, self.dt,
self.price, self.order_id
)
@property
def cost_without_commission(self):
"""
Calculate the cost of the transaction without including
any commission costs.
Returns
-------
`float`
The transaction cost without commission.
"""
return self.quantity * self.price
@property
def cost_with_commission(self):
"""
Calculate the cost of the transaction including
any commission costs.
Returns
-------
`float`
The transaction cost with commission.
"""
if self.commission == 0.0:
return self.cost_without_commission
else:
return self.cost_without_commission + self.commission
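# Hedged usage sketch (example values are hypothetical; pandas is assumed to be available,
# since the docstring above types `dt` as pd.Timestamp).
if __name__ == "__main__":
    import pandas as pd
    txn = Transaction(
        asset='EQ:AAPL',
        quantity=100,
        dt=pd.Timestamp('2021-01-04 14:30:00', tz='UTC'),
        price=129.50,
        order_id=1,
        commission=1.29,
    )
    print(repr(txn))
    print(txn.direction)                # +1.0 for buys, -1.0 for sells
    print(txn.cost_without_commission)  # 100 * 129.50 == 12950.0
    print(txn.cost_with_commission)     # 12950.0 + 1.29 == 12951.29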
|
"""
HTML PRE-based UI implementation
"""
from __future__ import division, print_function
from urwid import util
from urwid.main_loop import ExitMainLoop
from urwid.display_common import AttrSpec, BaseScreen
# replace control characters with ?'s
_trans_table = "?" * 32 + "".join([chr(x) for x in range(32, 256)])
_default_foreground = 'black'
_default_background = 'light gray'
class HtmlGeneratorSimulationError(Exception):
pass
class HtmlGenerator(BaseScreen):
# class variables
fragments = []
sizes = []
keys = []
started = True
def __init__(self):
super(HtmlGenerator, self).__init__()
self.colors = 16
self.bright_is_bold = False # ignored
self.has_underline = True # ignored
self.register_palette_entry(None,
_default_foreground, _default_background)
def set_terminal_properties(self, colors=None, bright_is_bold=None,
has_underline=None):
if colors is None:
colors = self.colors
if bright_is_bold is None:
bright_is_bold = self.bright_is_bold
if has_underline is None:
has_underline = self.has_underline
self.colors = colors
self.bright_is_bold = bright_is_bold
self.has_underline = has_underline
def set_mouse_tracking(self, enable=True):
"""Not yet implemented"""
pass
def set_input_timeouts(self, *args):
pass
def reset_default_terminal_palette(self, *args):
pass
def draw_screen(self, size, r ):
"""Create an html fragment from the render object.
Append it to HtmlGenerator.fragments list.
"""
# collect output in l
l = []
cols, rows = size
assert r.rows() == rows
if r.cursor is not None:
cx, cy = r.cursor
else:
cx = cy = None
y = -1
for row in r.content():
y += 1
col = 0
for a, cs, run in row:
                if str is not bytes:
run = run.decode()
run = run.translate(_trans_table)
if isinstance(a, AttrSpec):
aspec = a
else:
aspec = self._palette[a][
{1: 1, 16: 0, 88:2, 256:3}[self.colors]]
if y == cy and col <= cx:
run_width = util.calc_width(run, 0,
len(run))
if col+run_width > cx:
l.append(html_span(run,
aspec, cx-col))
else:
l.append(html_span(run, aspec))
                    col += run_width
else:
l.append(html_span(run, aspec))
l.append("\n")
# add the fragment to the list
self.fragments.append( "<pre>%s</pre>" % "".join(l) )
def clear(self):
"""
Force the screen to be completely repainted on the next
call to draw_screen().
(does nothing for html_fragment)
"""
pass
def get_cols_rows(self):
"""Return the next screen size in HtmlGenerator.sizes."""
if not self.sizes:
raise HtmlGeneratorSimulationError("Ran out of screen sizes to return!")
return self.sizes.pop(0)
def get_input(self, raw_keys=False):
"""Return the next list of keypresses in HtmlGenerator.keys."""
if not self.keys:
raise ExitMainLoop()
if raw_keys:
return (self.keys.pop(0), [])
return self.keys.pop(0)
_default_aspec = AttrSpec(_default_foreground, _default_background)
(_d_fg_r, _d_fg_g, _d_fg_b, _d_bg_r, _d_bg_g, _d_bg_b) = (
_default_aspec.get_rgb_values())
def html_span(s, aspec, cursor = -1):
fg_r, fg_g, fg_b, bg_r, bg_g, bg_b = aspec.get_rgb_values()
# use real colours instead of default fg/bg
if fg_r is None:
fg_r, fg_g, fg_b = _d_fg_r, _d_fg_g, _d_fg_b
if bg_r is None:
bg_r, bg_g, bg_b = _d_bg_r, _d_bg_g, _d_bg_b
html_fg = "#%02x%02x%02x" % (fg_r, fg_g, fg_b)
html_bg = "#%02x%02x%02x" % (bg_r, bg_g, bg_b)
if aspec.standout:
html_fg, html_bg = html_bg, html_fg
extra = (";text-decoration:underline" * aspec.underline +
";font-weight:bold" * aspec.bold)
def html_span(fg, bg, s):
if not s: return ""
return ('<span style="color:%s;'
'background:%s%s">%s</span>' %
(fg, bg, extra, html_escape(s)))
if cursor >= 0:
c_off, _ign = util.calc_text_pos(s, 0, len(s), cursor)
c2_off = util.move_next_char(s, c_off, len(s))
return (html_span(html_fg, html_bg, s[:c_off]) +
html_span(html_bg, html_fg, s[c_off:c2_off]) +
html_span(html_fg, html_bg, s[c2_off:]))
else:
return html_span(html_fg, html_bg, s)
def html_escape(text):
"""Escape text so that it will be displayed safely within HTML"""
    text = text.replace('&','&amp;')
    text = text.replace('<','&lt;')
    text = text.replace('>','&gt;')
return text
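# Illustrative sketch (colour values depend on the active palette): html_span("hi", aspec)
# yields a string of the form '<span style="color:#rrggbb;background:#rrggbb">hi</span>';
# when cursor >= 0 the single character under the cursor is emitted with foreground and
# background swapped to simulate a cursor.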
def screenshot_init( sizes, keys ):
"""
Replace curses_display.Screen and raw_display.Screen class with
HtmlGenerator.
Call this function before executing an application that uses
curses_display.Screen to have that code use HtmlGenerator instead.
sizes -- list of ( columns, rows ) tuples to be returned by each call
to HtmlGenerator.get_cols_rows()
keys -- list of lists of keys to be returned by each call to
HtmlGenerator.get_input()
Lists of keys may include "window resize" to force the application to
call get_cols_rows and read a new screen size.
For example, the following call will prepare an application to:
1. start in 80x25 with its first call to get_cols_rows()
2. take a screenshot when it calls draw_screen(..)
3. simulate 5 "down" keys from get_input()
4. take a screenshot when it calls draw_screen(..)
5. simulate keys "a", "b", "c" and a "window resize"
6. resize to 20x10 on its second call to get_cols_rows()
7. take a screenshot when it calls draw_screen(..)
8. simulate a "Q" keypress to quit the application
screenshot_init( [ (80,25), (20,10) ],
[ ["down"]*5, ["a","b","c","window resize"], ["Q"] ] )
"""
try:
        for (cols, rows) in sizes:
            assert type(cols) == int and type(rows) == int
            assert cols > 0 and rows > 0
except (AssertionError, ValueError):
raise Exception("sizes must be in the form [ (col1,row1), (col2,row2), ...]")
try:
for l in keys:
assert type(l) == list
for k in l:
assert type(k) == str
except (AssertionError, ValueError):
raise Exception("keys must be in the form [ [keyA1, keyA2, ..], [keyB1, ..], ...]")
from . import curses_display
curses_display.Screen = HtmlGenerator
from . import raw_display
raw_display.Screen = HtmlGenerator
HtmlGenerator.sizes = sizes
HtmlGenerator.keys = keys
def screenshot_collect():
"""Return screenshots as a list of HTML fragments."""
l = HtmlGenerator.fragments
HtmlGenerator.fragments = []
return l
|
import pytest
from homeassistant.components.humidifier.const import (
ATTR_HUMIDITY,
ATTR_MODE,
DOMAIN,
MODE_AWAY,
MODE_ECO,
MODE_NORMAL,
SERVICE_SET_HUMIDITY,
SERVICE_SET_MODE,
)
from homeassistant.components.humidifier.reproduce_state import async_reproduce_states
from homeassistant.const import SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON
from homeassistant.core import Context, State
from tests.common import async_mock_service
ENTITY_1 = "humidifier.test1"
ENTITY_2 = "humidifier.test2"
async def test_reproducing_on_off_states(hass, caplog):
"""Test reproducing humidifier states."""
hass.states.async_set(ENTITY_1, "off", {ATTR_MODE: MODE_NORMAL, ATTR_HUMIDITY: 45})
hass.states.async_set(ENTITY_2, "on", {ATTR_MODE: MODE_NORMAL, ATTR_HUMIDITY: 45})
turn_on_calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_ON)
turn_off_calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_OFF)
mode_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_MODE)
humidity_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_HUMIDITY)
# These calls should do nothing as entities already in desired state
await hass.helpers.state.async_reproduce_state(
[
State(ENTITY_1, "off", {ATTR_MODE: MODE_NORMAL, ATTR_HUMIDITY: 45}),
State(ENTITY_2, "on", {ATTR_MODE: MODE_NORMAL, ATTR_HUMIDITY: 45}),
],
)
assert len(turn_on_calls) == 0
assert len(turn_off_calls) == 0
assert len(mode_calls) == 0
assert len(humidity_calls) == 0
# Test invalid state is handled
await hass.helpers.state.async_reproduce_state([State(ENTITY_1, "not_supported")])
assert "not_supported" in caplog.text
assert len(turn_on_calls) == 0
assert len(turn_off_calls) == 0
assert len(mode_calls) == 0
assert len(humidity_calls) == 0
# Make sure correct services are called
await hass.helpers.state.async_reproduce_state(
[
State(ENTITY_2, "off"),
State(ENTITY_1, "on", {}),
# Should not raise
State("humidifier.non_existing", "on"),
]
)
assert len(turn_on_calls) == 1
assert turn_on_calls[0].domain == "humidifier"
assert turn_on_calls[0].data == {"entity_id": ENTITY_1}
assert len(turn_off_calls) == 1
assert turn_off_calls[0].domain == "humidifier"
assert turn_off_calls[0].data == {"entity_id": ENTITY_2}
# Make sure we didn't call services for missing attributes
assert len(mode_calls) == 0
assert len(humidity_calls) == 0
async def test_multiple_attrs(hass):
"""Test turn on with multiple attributes."""
hass.states.async_set(ENTITY_1, STATE_OFF, {})
turn_on_calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_ON)
turn_off_calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_OFF)
mode_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_MODE)
humidity_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_HUMIDITY)
await async_reproduce_states(
hass, [State(ENTITY_1, STATE_ON, {ATTR_MODE: MODE_NORMAL, ATTR_HUMIDITY: 45})]
)
await hass.async_block_till_done()
assert len(turn_on_calls) == 1
assert turn_on_calls[0].data == {"entity_id": ENTITY_1}
assert len(turn_off_calls) == 0
assert len(mode_calls) == 1
assert mode_calls[0].data == {"entity_id": ENTITY_1, "mode": "normal"}
assert len(humidity_calls) == 1
assert humidity_calls[0].data == {"entity_id": ENTITY_1, "humidity": 45}
async def test_turn_off_multiple_attrs(hass):
"""Test set mode and humidity for off state."""
hass.states.async_set(ENTITY_1, STATE_ON, {})
turn_on_calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_ON)
turn_off_calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_OFF)
mode_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_MODE)
humidity_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_HUMIDITY)
await async_reproduce_states(
hass, [State(ENTITY_1, STATE_OFF, {ATTR_MODE: MODE_NORMAL, ATTR_HUMIDITY: 45})]
)
await hass.async_block_till_done()
assert len(turn_on_calls) == 0
assert len(turn_off_calls) == 1
assert turn_off_calls[0].data == {"entity_id": ENTITY_1}
assert len(mode_calls) == 0
assert len(humidity_calls) == 0
async def test_multiple_modes(hass):
"""Test that multiple states gets calls."""
hass.states.async_set(ENTITY_1, STATE_OFF, {})
hass.states.async_set(ENTITY_2, STATE_OFF, {})
turn_on_calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_ON)
turn_off_calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_OFF)
mode_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_MODE)
humidity_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_HUMIDITY)
await async_reproduce_states(
hass,
[
State(ENTITY_1, STATE_ON, {ATTR_MODE: MODE_ECO, ATTR_HUMIDITY: 40}),
State(ENTITY_2, STATE_ON, {ATTR_MODE: MODE_NORMAL, ATTR_HUMIDITY: 50}),
],
)
await hass.async_block_till_done()
assert len(turn_on_calls) == 2
assert len(turn_off_calls) == 0
assert len(mode_calls) == 2
# order is not guaranteed
assert any(
call.data == {"entity_id": ENTITY_1, "mode": MODE_ECO} for call in mode_calls
)
assert any(
call.data == {"entity_id": ENTITY_2, "mode": MODE_NORMAL} for call in mode_calls
)
assert len(humidity_calls) == 2
# order is not guaranteed
assert any(
call.data == {"entity_id": ENTITY_1, "humidity": 40} for call in humidity_calls
)
assert any(
call.data == {"entity_id": ENTITY_2, "humidity": 50} for call in humidity_calls
)
async def test_state_with_none(hass):
"""Test that none is not a humidifier state."""
hass.states.async_set(ENTITY_1, STATE_OFF, {})
turn_on_calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_ON)
turn_off_calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_OFF)
mode_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_MODE)
humidity_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_HUMIDITY)
await async_reproduce_states(hass, [State(ENTITY_1, None)])
await hass.async_block_till_done()
assert len(turn_on_calls) == 0
assert len(turn_off_calls) == 0
assert len(mode_calls) == 0
assert len(humidity_calls) == 0
async def test_state_with_context(hass):
"""Test that context is forwarded."""
hass.states.async_set(ENTITY_1, STATE_OFF, {})
turn_on_calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_ON)
turn_off_calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_OFF)
mode_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_MODE)
humidity_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_HUMIDITY)
context = Context()
await async_reproduce_states(
hass,
[State(ENTITY_1, STATE_ON, {ATTR_MODE: MODE_AWAY, ATTR_HUMIDITY: 45})],
context=context,
)
await hass.async_block_till_done()
assert len(turn_on_calls) == 1
assert turn_on_calls[0].data == {"entity_id": ENTITY_1}
assert turn_on_calls[0].context == context
assert len(turn_off_calls) == 0
assert len(mode_calls) == 1
assert mode_calls[0].data == {"entity_id": ENTITY_1, "mode": "away"}
assert mode_calls[0].context == context
assert len(humidity_calls) == 1
assert humidity_calls[0].data == {"entity_id": ENTITY_1, "humidity": 45}
assert humidity_calls[0].context == context
@pytest.mark.parametrize(
"service,attribute",
[(SERVICE_SET_MODE, ATTR_MODE), (SERVICE_SET_HUMIDITY, ATTR_HUMIDITY)],
)
async def test_attribute(hass, service, attribute):
"""Test that service call is made for each attribute."""
hass.states.async_set(ENTITY_1, STATE_ON, {})
turn_on_calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_ON)
turn_off_calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_OFF)
calls_1 = async_mock_service(hass, DOMAIN, service)
value = "dummy"
await async_reproduce_states(hass, [State(ENTITY_1, STATE_ON, {attribute: value})])
await hass.async_block_till_done()
assert len(turn_on_calls) == 0
assert len(turn_off_calls) == 0
assert len(calls_1) == 1
assert calls_1[0].data == {"entity_id": ENTITY_1, attribute: value}
|
import logging
from ..DataUploader import Plugin as DataUploaderPlugin
from .reader import AndroidReader, AndroidStatsReader
from ...common.interfaces import AbstractPlugin
try:
from volta.core.core import Core as VoltaCore
except Exception:
raise RuntimeError("Please install volta. https://github.com/yandex-load/volta")
logger = logging.getLogger(__name__)
class Plugin(AbstractPlugin):
SECTION = "android"
SECTION_META = "meta"
def __init__(self, core, cfg, name):
self.stats_reader = None
self.reader = None
super(Plugin, self).__init__(core, cfg, name)
self.device = None
try:
self.cfg = cfg['volta_options']
for key, value in self.cfg.items():
if not isinstance(value, dict):
logger.debug('Malformed VoltaConfig key: %s value %s', key, value)
                    raise RuntimeError('Malformed VoltaConfig passed, key: %s. Should be a dict' % key)
except AttributeError:
logger.error('Failed to read Volta config', exc_info=True)
self.volta_core = VoltaCore(self.cfg)
@staticmethod
def get_key():
return __file__
def get_available_options(self):
opts = ["volta_options"]
return opts
def configure(self):
self.volta_core.configure()
def get_reader(self):
if self.reader is None:
self.reader = AndroidReader()
return self.reader
def get_stats_reader(self):
if self.stats_reader is None:
self.stats_reader = AndroidStatsReader()
return self.stats_reader
def prepare_test(self):
self.core.add_artifact_file(self.volta_core.currents_fname)
        for fname in self.volta_core.event_fnames.values():
            self.core.add_artifact_file(fname)
def start_test(self):
try:
self.volta_core.start_test()
# FIXME raise/catch appropriate exception here
except: # noqa: E722
logger.info('Failed to start test of Android plugin', exc_info=True)
return 1
def is_test_finished(self):
try:
if hasattr(self.volta_core, 'phone'):
if hasattr(self.volta_core.phone, 'test_performer'):
if not self.volta_core.phone.test_performer:
logger.warning('There is no test performer process on the phone, interrupting test')
return 1
if not self.volta_core.phone.test_performer.is_finished():
logger.debug('Waiting for phone test to finish...')
return -1
else:
return self.volta_core.phone.test_performer.retcode
# FIXME raise/catch appropriate exception here
except: # noqa: E722
logger.error('Unknown exception of Android plugin. Interrupting test', exc_info=True)
return 1
def end_test(self, retcode):
try:
self.volta_core.end_test()
uploaders = self.core.get_plugins_of_type(DataUploaderPlugin)
for uploader in uploaders:
response = uploader.lp_job.api_client.link_mobile_job(
lp_key=uploader.lp_job.number,
mobile_key=self.volta_core.uploader.jobno
)
logger.info(
'Linked mobile job %s to %s for plugin: %s. Response: %s',
self.volta_core.uploader.jobno, uploader.lp_job.number, uploader.backend_type, response
)
# FIXME raise/catch appropriate exception here
except: # noqa: E722
logger.error('Failed to complete end_test of Android plugin', exc_info=True)
retcode = 1
return retcode
def get_info(self):
return AndroidInfo()
def post_process(self, retcode):
try:
self.volta_core.post_process()
# FIXME raise/catch appropriate exception here
except: # noqa: E722
logger.error('Failed to complete post_process of Android plugin', exc_info=True)
retcode = 1
return retcode
class AndroidInfo(object):
def __init__(self):
self.address = ''
self.port = 80
self.ammo_file = ''
self.duration = 0
self.loop_count = 1
self.instances = 1
self.rps_schedule = ''
|
import os
import pytest
import kombu
from time import sleep
from .common import (
BasicFunctionality, BaseExchangeTypes, BasePriority
)
def get_connection(
hostname, port, vhost):
return kombu.Connection(f'redis://{hostname}:{port}')
@pytest.fixture()
def connection(request):
    # this fixture returns a plain (non-TLS) connection to the Redis broker
return get_connection(
hostname=os.environ.get('REDIS_HOST', 'localhost'),
port=os.environ.get('REDIS_6379_TCP', '6379'),
vhost=getattr(
request.config, "slaveinput", {}
).get("slaveid", None),
)
@pytest.fixture()
def invalid_connection():
return kombu.Connection('redis://localhost:12345')
@pytest.mark.env('redis')
@pytest.mark.flaky(reruns=5, reruns_delay=2)
class test_RedisBasicFunctionality(BasicFunctionality):
pass
@pytest.mark.env('redis')
@pytest.mark.flaky(reruns=5, reruns_delay=2)
class test_RedisBaseExchangeTypes(BaseExchangeTypes):
pass
@pytest.mark.env('redis')
@pytest.mark.flaky(reruns=5, reruns_delay=2)
class test_RedisPriority(BasePriority):
    # Compared to the py-amqp transport, the Redis transport has several
    # differences:
    # 1. The order of priorities is reversed: lower numbers mean higher priority
    # 2. drain_events() consumes only a single message per call
PRIORITY_ORDER = 'desc'
def test_publish_consume(self, connection):
test_queue = kombu.Queue(
'priority_test', routing_key='priority_test', max_priority=10
)
received_messages = []
def callback(body, message):
received_messages.append(body)
message.ack()
with connection as conn:
with conn.channel() as channel:
producer = kombu.Producer(channel)
for msg, prio in [
[{'msg': 'first'}, 6],
[{'msg': 'second'}, 3],
[{'msg': 'third'}, 6],
]:
producer.publish(
msg,
retry=True,
exchange=test_queue.exchange,
routing_key=test_queue.routing_key,
declare=[test_queue],
serializer='pickle',
priority=prio
)
                # Sleep to make sure that the queue is sorted by priority
sleep(0.5)
consumer = kombu.Consumer(
conn, [test_queue], accept=['pickle']
)
consumer.register_callback(callback)
with consumer:
                    # drain_events() returns just one message per call on
                    # virtual transports
conn.drain_events(timeout=1)
conn.drain_events(timeout=1)
conn.drain_events(timeout=1)
# Second message must be received first
assert received_messages[0] == {'msg': 'second'}
assert received_messages[1] == {'msg': 'first'}
assert received_messages[2] == {'msg': 'third'}
|
import argparse
import matplotlib.pyplot as plt
import chainer
from chainercv.datasets import ade20k_semantic_segmentation_label_colors
from chainercv.datasets import ade20k_semantic_segmentation_label_names
from chainercv.datasets import cityscapes_semantic_segmentation_label_colors
from chainercv.datasets import cityscapes_semantic_segmentation_label_names
from chainercv.experimental.links import PSPNetResNet101
from chainercv.utils import read_image
from chainercv.visualizations import vis_image
from chainercv.visualizations import vis_semantic_segmentation
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--gpu', '-g', type=int, default=-1)
parser.add_argument('--pretrained-model')
parser.add_argument('--input-size', type=int, default=713)
parser.add_argument(
'--dataset', choices=('cityscapes', 'ade20k'), default='cityscapes')
parser.add_argument('image')
args = parser.parse_args()
if args.dataset == 'cityscapes':
if args.pretrained_model is None:
args.pretrained_model = 'cityscapes'
label_names = cityscapes_semantic_segmentation_label_names
colors = cityscapes_semantic_segmentation_label_colors
elif args.dataset == 'ade20k':
if args.pretrained_model is None:
args.pretrained_model = 'ade20k'
label_names = ade20k_semantic_segmentation_label_names
colors = ade20k_semantic_segmentation_label_colors
input_size = (args.input_size, args.input_size)
model = PSPNetResNet101(
n_class=len(label_names),
pretrained_model=args.pretrained_model, input_size=input_size)
if args.gpu >= 0:
chainer.cuda.get_device_from_id(args.gpu).use()
model.to_gpu(args.gpu)
img = read_image(args.image)
labels = model.predict([img])
label = labels[0]
fig = plt.figure()
ax1 = fig.add_subplot(1, 2, 1)
vis_image(img, ax=ax1)
ax2 = fig.add_subplot(1, 2, 2)
ax2, legend_handles = vis_semantic_segmentation(
img, label, label_names, colors, ax=ax2)
ax2.legend(handles=legend_handles, bbox_to_anchor=(1, 1), loc=2)
plt.show()
if __name__ == '__main__':
main()
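# Hedged usage sketch (script and image names are hypothetical):
#     python demo.py --dataset cityscapes --gpu 0 path/to/street_scene.png
# When --pretrained-model is omitted, the matching pretrained weights for the chosen dataset
# are used; the script runs model.predict() on the image and shows the input next to the
# predicted label map with a class-name legend.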
|
from json import loads
from homeassistant.components.advantage_air.const import DOMAIN as ADVANTAGE_AIR_DOMAIN
from homeassistant.components.advantage_air.sensor import (
ADVANTAGE_AIR_SERVICE_SET_TIME_TO,
ADVANTAGE_AIR_SET_COUNTDOWN_VALUE,
)
from homeassistant.const import ATTR_ENTITY_ID
from tests.components.advantage_air import (
TEST_SET_RESPONSE,
TEST_SET_URL,
TEST_SYSTEM_DATA,
TEST_SYSTEM_URL,
add_mock_config,
)
async def test_sensor_platform(hass, aioclient_mock):
"""Test sensor platform."""
aioclient_mock.get(
TEST_SYSTEM_URL,
text=TEST_SYSTEM_DATA,
)
aioclient_mock.get(
TEST_SET_URL,
text=TEST_SET_RESPONSE,
)
await add_mock_config(hass)
registry = await hass.helpers.entity_registry.async_get_registry()
assert len(aioclient_mock.mock_calls) == 1
# Test First TimeToOn Sensor
entity_id = "sensor.ac_one_time_to_on"
state = hass.states.get(entity_id)
assert state
assert int(state.state) == 0
entry = registry.async_get(entity_id)
assert entry
assert entry.unique_id == "uniqueid-ac1-timetoOn"
value = 20
await hass.services.async_call(
ADVANTAGE_AIR_DOMAIN,
ADVANTAGE_AIR_SERVICE_SET_TIME_TO,
{ATTR_ENTITY_ID: [entity_id], ADVANTAGE_AIR_SET_COUNTDOWN_VALUE: value},
blocking=True,
)
assert len(aioclient_mock.mock_calls) == 3
assert aioclient_mock.mock_calls[-2][0] == "GET"
assert aioclient_mock.mock_calls[-2][1].path == "/setAircon"
data = loads(aioclient_mock.mock_calls[-2][1].query["json"])
assert data["ac1"]["info"]["countDownToOn"] == value
assert aioclient_mock.mock_calls[-1][0] == "GET"
assert aioclient_mock.mock_calls[-1][1].path == "/getSystemData"
# Test First TimeToOff Sensor
entity_id = "sensor.ac_one_time_to_off"
state = hass.states.get(entity_id)
assert state
assert int(state.state) == 10
entry = registry.async_get(entity_id)
assert entry
assert entry.unique_id == "uniqueid-ac1-timetoOff"
value = 0
await hass.services.async_call(
ADVANTAGE_AIR_DOMAIN,
ADVANTAGE_AIR_SERVICE_SET_TIME_TO,
{ATTR_ENTITY_ID: [entity_id], ADVANTAGE_AIR_SET_COUNTDOWN_VALUE: value},
blocking=True,
)
assert len(aioclient_mock.mock_calls) == 5
assert aioclient_mock.mock_calls[-2][0] == "GET"
assert aioclient_mock.mock_calls[-2][1].path == "/setAircon"
data = loads(aioclient_mock.mock_calls[-2][1].query["json"])
assert data["ac1"]["info"]["countDownToOff"] == value
assert aioclient_mock.mock_calls[-1][0] == "GET"
assert aioclient_mock.mock_calls[-1][1].path == "/getSystemData"
# Test First Zone Vent Sensor
entity_id = "sensor.zone_open_with_sensor_vent"
state = hass.states.get(entity_id)
assert state
assert int(state.state) == 100
entry = registry.async_get(entity_id)
assert entry
assert entry.unique_id == "uniqueid-ac1-z01-vent"
# Test Second Zone Vent Sensor
entity_id = "sensor.zone_closed_with_sensor_vent"
state = hass.states.get(entity_id)
assert state
assert int(state.state) == 0
entry = registry.async_get(entity_id)
assert entry
assert entry.unique_id == "uniqueid-ac1-z02-vent"
# Test First Zone Signal Sensor
entity_id = "sensor.zone_open_with_sensor_signal"
state = hass.states.get(entity_id)
assert state
assert int(state.state) == 40
entry = registry.async_get(entity_id)
assert entry
assert entry.unique_id == "uniqueid-ac1-z01-signal"
# Test Second Zone Signal Sensor
entity_id = "sensor.zone_closed_with_sensor_signal"
state = hass.states.get(entity_id)
assert state
assert int(state.state) == 10
entry = registry.async_get(entity_id)
assert entry
assert entry.unique_id == "uniqueid-ac1-z02-signal"
|
import http.server
import threading
import logging
import pytest
from PyQt5.QtCore import QUrl
from PyQt5.QtNetwork import (QNetworkProxy, QNetworkProxyQuery, QHostInfo,
QHostAddress)
from qutebrowser.browser.network import pac
pytestmark = pytest.mark.usefixtures('qapp')
def _pac_common_test(test_str):
fun_str_f = """
function FindProxyForURL(domain, host) {{
{}
return "DIRECT; PROXY 127.0.0.1:8080; SOCKS 192.168.1.1:4444";
}}
"""
fun_str = fun_str_f.format(test_str)
res = pac.PACResolver(fun_str)
proxies = res.resolve(QNetworkProxyQuery(QUrl("https://example.com/test")))
assert len(proxies) == 3
assert proxies[0].type() == QNetworkProxy.NoProxy
assert proxies[1].type() == QNetworkProxy.HttpProxy
assert proxies[1].hostName() == "127.0.0.1"
assert proxies[1].port() == 8080
assert proxies[2].type() == QNetworkProxy.Socks5Proxy
assert proxies[2].hostName() == "192.168.1.1"
assert proxies[2].port() == 4444
def _pac_equality_test(call, expected):
test_str_f = """
var res = ({0});
var expected = ({1});
if(res !== expected) {{
throw new Error("failed test {0}: got '" + res + "', expected '" + expected + "'");
}}
"""
_pac_common_test(test_str_f.format(call, expected))
def _pac_except_test(caplog, call):
test_str_f = """
var thrown = false;
try {{
var res = ({0});
}} catch(e) {{
thrown = true;
}}
if(!thrown) {{
throw new Error("failed test {0}: got '" + res + "', expected exception");
}}
"""
with caplog.at_level(logging.ERROR):
_pac_common_test(test_str_f.format(call))
def _pac_noexcept_test(call):
test_str_f = """
var res = ({0});
"""
_pac_common_test(test_str_f.format(call))
# pylint: disable=invalid-name
@pytest.mark.parametrize("domain, expected", [
("known.domain", "'1.2.3.4'"),
("bogus.domain.foobar", "null")
])
def test_dnsResolve(monkeypatch, domain, expected):
def mock_fromName(host):
info = QHostInfo()
if host == "known.domain":
info.setAddresses([QHostAddress("1.2.3.4")])
return info
monkeypatch.setattr(QHostInfo, 'fromName', mock_fromName)
_pac_equality_test("dnsResolve('{}')".format(domain), expected)
def test_myIpAddress():
_pac_equality_test("isResolvable(myIpAddress())", "true")
@pytest.mark.parametrize("host, expected", [
("example", "true"),
("example.com", "false"),
("www.example.com", "false"),
])
def test_isPlainHostName(host, expected):
_pac_equality_test("isPlainHostName('{}')".format(host), expected)
def test_proxyBindings():
_pac_equality_test("JSON.stringify(ProxyConfig.bindings)", "'{}'")
def test_invalid_port():
test_str = """
function FindProxyForURL(domain, host) {
return "PROXY 127.0.0.1:FOO";
}
"""
res = pac.PACResolver(test_str)
with pytest.raises(pac.ParseProxyError):
res.resolve(QNetworkProxyQuery(QUrl("https://example.com/test")))
@pytest.mark.parametrize('string', ["", "{"])
def test_wrong_pac_string(string):
with pytest.raises(pac.EvalProxyError):
pac.PACResolver(string)
@pytest.mark.parametrize("value", [
"",
"DIRECT FOO",
"PROXY",
"SOCKS",
"FOOBAR",
])
def test_fail_parse(value):
test_str_f = """
function FindProxyForURL(domain, host) {{
return "{}";
}}
"""
res = pac.PACResolver(test_str_f.format(value))
with pytest.raises(pac.ParseProxyError):
res.resolve(QNetworkProxyQuery(QUrl("https://example.com/test")))
def test_fail_return():
test_str = """
function FindProxyForURL(domain, host) {
return null;
}
"""
res = pac.PACResolver(test_str)
with pytest.raises(pac.EvalProxyError):
res.resolve(QNetworkProxyQuery(QUrl("https://example.com/test")))
@pytest.mark.parametrize('url, has_secret', [
('http://example.com/secret', True), # path passed with HTTP
('http://example.com?secret=yes', True), # query passed with HTTP
('http://[email protected]', False), # user stripped with HTTP
('http://user:[email protected]', False), # password stripped with HTTP
('https://example.com/secret', False), # path stripped with HTTPS
('https://example.com?secret=yes', False), # query stripped with HTTPS
('https://[email protected]', False), # user stripped with HTTPS
('https://user:[email protected]', False), # password stripped with HTTPS
])
@pytest.mark.parametrize('from_file', [True, False])
def test_secret_url(url, has_secret, from_file):
"""Make sure secret parts in a URL are stripped correctly.
The following parts are considered secret:
- If the PAC info is loaded from a local file, nothing.
- If the URL to resolve is a HTTP URL, the username/password.
- If the URL to resolve is a HTTPS URL, the username/password, query
and path.
"""
test_str = """
function FindProxyForURL(domain, host) {{
has_secret = domain.indexOf("secret") !== -1;
expected_secret = {};
if (has_secret !== expected_secret) {{
throw new Error("Expected secret: " + expected_secret + ", found: " + has_secret + " in " + domain);
}}
return "DIRECT";
}}
""".format('true' if (has_secret or from_file) else 'false')
res = pac.PACResolver(test_str)
res.resolve(QNetworkProxyQuery(QUrl(url)), from_file=from_file)
def test_logging(qtlog):
"""Make sure console.log() works for PAC files."""
test_str = """
function FindProxyForURL(domain, host) {
console.log("logging test");
return "DIRECT";
}
"""
res = pac.PACResolver(test_str)
res.resolve(QNetworkProxyQuery(QUrl("https://example.com/test")))
assert len(qtlog.records) == 1
assert qtlog.records[0].message == 'logging test'
def fetcher_test(test_str):
class PACHandler(http.server.BaseHTTPRequestHandler):
def do_GET(self):
self.send_response(200)
self.send_header('Content-type', 'application/x-ns-proxy-autoconfig')
self.end_headers()
self.wfile.write(test_str.encode("ascii"))
ready_event = threading.Event()
def serve():
httpd = http.server.HTTPServer(("127.0.0.1", 8081), PACHandler)
ready_event.set()
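# handle_request() serves exactly one request, so the server thread exits
# on its own once the PAC file has been fetched.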
httpd.handle_request()
httpd.server_close()
serve_thread = threading.Thread(target=serve, daemon=True)
serve_thread.start()
try:
ready_event.wait()
fetcher = pac.PACFetcher(QUrl("pac+http://127.0.0.1:8081"))
fetcher.fetch()
assert fetcher.fetch_error() is None
finally:
serve_thread.join()
return fetcher
def test_fetch_success():
test_str = """
function FindProxyForURL(domain, host) {
return "DIRECT; PROXY 127.0.0.1:8080; SOCKS 192.168.1.1:4444";
}
"""
res = fetcher_test(test_str)
proxies = res.resolve(QNetworkProxyQuery(QUrl("https://example.com/test")))
assert len(proxies) == 3
def test_fetch_evalerror(caplog):
test_str = """
function FindProxyForURL(domain, host) {
return "FOO";
}
"""
res = fetcher_test(test_str)
with caplog.at_level(logging.ERROR):
proxies = res.resolve(QNetworkProxyQuery(QUrl("https://example.com/test")))
assert len(proxies) == 1
assert proxies[0].port() == 9
|
import os
import pytest
@pytest.fixture(autouse=True)
def ensure_chdir():
old_dir = os.getcwd()
try:
yield
finally:
os.chdir(old_dir)
@pytest.fixture(scope="module")
def test_dir():
"""
Absolute path to the directory with the tests.
"""
return os.path.abspath(os.path.dirname(__file__))
@pytest.fixture(scope="session")
def default_locale() -> str:
return os.environ.get("NIKOLA_LOCALE_DEFAULT", "en")
|
from __future__ import print_function
import argparse
import os
import smtplib
import sys
from email import encoders
from email.utils import formatdate
from six.moves import input
from six.moves.configparser import RawConfigParser
from six.moves.email_mime_base import MIMEBase
from six.moves.email_mime_multipart import MIMEMultipart
from six.moves.email_mime_text import MIMEText
APP_DIR = os.environ['STASH_ROOT']
class Mail(object):
def __init__(self, cfg_file='', verbose=False):
#from config
self.cfg_file = cfg_file
self.verbose = verbose
self.user = ''
self.passwd = ''
self.auth = False
self.mailfrom = ''
self.host = 'smtp.fakehost.com'
self.port = 537
self.tls = False
self.read_cfg()
def _print(self, msg):
if self.verbose:
print(msg)
def read_cfg(self):
parser = RawConfigParser()
parser.read(self.cfg_file)
if not parser.has_section('mail'):
print('Creating cfg file.')
self.make_cfg()
self.auth = parser.get('mail', 'auth')
self.user = parser.get('mail', 'username')
self.passwd = parser.get('mail', 'password')
self.mailfrom = parser.get('mail', 'mailfrom')
self.host = parser.get('mail', 'host')
self.port = parser.get('mail', 'port')
self.tls = parser.get('mail', 'tls')
def edit_cfg(self):
global _stash
_stash('edit -t %s' % self.cfg_file)
sys.exit(0)
def make_cfg(self):
cfg = '''[mail]
host = smtp.mailserver.com
port = 587
mailfrom = Your email
tls = false
auth = true
username = Your user name
password = Your user password
'''
with open(self.cfg_file, 'w') as f:
f.write(cfg)
self.edit_cfg()
def send(self, sendto='', subject='', attach='', body=' '):
print('Sending email')
msg = MIMEMultipart()
msg["From"] = self.mailfrom
msg["To"] = sendto
msg["Subject"] = subject
msg['Date'] = formatdate(localtime=True)
# add the message body
self._print('Attaching msg: %s' % body)
message = MIMEText('text', "plain")
message.set_payload(body + '\n')
msg.attach(message)
# attach a file
if attach:
self._print('Attachment found: %s' % attach)
part = MIMEBase('application', "octet-stream")
part.set_payload(open(attach, "rb").read())
encoders.encode_base64(part)
part.add_header('Content-Disposition', 'attachment; filename="%s"' % attach)
msg.attach(part)
self._print('Creating SMTP')
server = smtplib.SMTP(self.host, int(self.port))
if self.tls.lower() == 'true':
server.starttls()
self._print('tls started.')
if self.auth.lower() == 'true':
try:
self._print('Logging into to SMTP: %s %s' % (self.user, self.passwd))
server.login(self.user, self.passwd) # optional
except Exception as e:
print('Failed Login %s' % e)
sys.exit(0)
else:
try:
self._print('Connecting to SMTP')
server.connect()
except Exception as e:
print('Failed to connect %s' % e)
sys.exit(0)
try:
self._print('Sending mail to: %s' % sendto)
server.sendmail(self.mailfrom, sendto, msg.as_string())
print('mail sent.')
server.close()
except Exception as e:
print("Unable to send email. Error: %s" % str(e))
if __name__ == "__main__":
CONFIG = APP_DIR + '/.mailrc'
ap = argparse.ArgumentParser()
ap.add_argument('-s', '--subject', default='', action='store', dest='subject', help='Email Subject.')
ap.add_argument('-v', '--verbose', action='store_true', help='Verbose print')
ap.add_argument('-f', '--file', action='store', default='', help='Attachment to send.')
ap.add_argument('-e', action='store_true', help='Edit .mailrc', default=False)
ap.add_argument('sendto', action='store', default='', nargs='?', help='Send email to.')
ap.add_argument(
'message',
action='store',
default='',
nargs='?',
help='Email Message. Passing \'-\' will pass stdin from pipe.'
)
args = ap.parse_args()
smail = Mail(CONFIG, args.verbose)
if args.e:
smail.edit_cfg()
elif (args.message or args.file) and args.sendto:
if args.message == '-':
args.message = sys.stdin.read()
smail.send(sendto=args.sendto, subject=args.subject, attach=args.file, body=args.message)
else:
# try/except blocks used due to StaSh reading EOF on no input
sendto = input('Send to: ') or None
if not sendto:
sys.exit(0)
subject = input('Subject: ') or ''
msg = input('Message: ') or ''
file = input('Attachment: ') or ''
smail.send(sendto=sendto, subject=subject, attach=file, body=msg)
|
from functools import partial
import os.path as op
import inspect
import numpy as np
from numpy.testing import (assert_array_almost_equal, assert_array_equal,
assert_equal, assert_allclose)
from scipy.io import loadmat
import pytest
from mne import pick_types, Annotations
from mne.datasets import testing
from mne.fixes import nullcontext
from mne.utils import requires_pandas
from mne.io import read_raw_edf, read_raw_bdf, read_raw_fif, edf, read_raw_gdf
from mne.io.tests.test_raw import _test_raw_reader
from mne.io.edf.edf import (_get_edf_default_event_id, _read_annotations_edf,
_read_ch, _parse_prefilter_string, _edf_str,
_read_edf_header, _read_header)
from mne.io.pick import channel_indices_by_type, get_channel_type_constants
from mne.annotations import events_from_annotations, read_annotations
td_mark = testing._pytest_mark()
FILE = inspect.getfile(inspect.currentframe())
data_dir = op.join(op.dirname(op.abspath(FILE)), 'data')
montage_path = op.join(data_dir, 'biosemi.hpts') # XXX: missing reader
bdf_path = op.join(data_dir, 'test.bdf')
edf_path = op.join(data_dir, 'test.edf')
duplicate_channel_labels_path = op.join(data_dir,
'duplicate_channel_labels.edf')
edf_uneven_path = op.join(data_dir, 'test_uneven_samp.edf')
bdf_eeglab_path = op.join(data_dir, 'test_bdf_eeglab.mat')
edf_eeglab_path = op.join(data_dir, 'test_edf_eeglab.mat')
edf_uneven_eeglab_path = op.join(data_dir, 'test_uneven_samp.mat')
edf_stim_channel_path = op.join(data_dir, 'test_edf_stim_channel.edf')
edf_txt_stim_channel_path = op.join(data_dir, 'test_edf_stim_channel.txt')
data_path = testing.data_path(download=False)
edf_stim_resamp_path = op.join(data_path, 'EDF', 'test_edf_stim_resamp.edf')
edf_overlap_annot_path = op.join(data_path, 'EDF',
'test_edf_overlapping_annotations.edf')
edf_reduced = op.join(data_path, 'EDF', 'test_reduced.edf')
bdf_stim_channel_path = op.join(data_path, 'BDF', 'test_bdf_stim_channel.bdf')
bdf_multiple_annotations_path = op.join(data_path, 'BDF',
'multiple_annotation_chans.bdf')
test_generator_bdf = op.join(data_path, 'BDF', 'test_generator_2.bdf')
test_generator_edf = op.join(data_path, 'EDF', 'test_generator_2.edf')
edf_annot_sub_s_path = op.join(data_path, 'EDF', 'subsecond_starttime.edf')
eog = ['REOG', 'LEOG', 'IEOG']
misc = ['EXG1', 'EXG5', 'EXG8', 'M1', 'M2']
def test_orig_units():
"""Test exposure of original channel units."""
raw = read_raw_edf(edf_path, preload=True)
# Test original units
orig_units = raw._orig_units
assert len(orig_units) == len(raw.ch_names)
assert orig_units['A1'] == 'µV' # formerly 'uV' edit by _check_orig_units
def test_subject_info(tmpdir):
"""Test exposure of original channel units."""
raw = read_raw_edf(edf_path)
assert raw.info['subject_info'] is None # XXX this is arguably a bug
edf_info = raw._raw_extras[0]
assert edf_info['subject_info'] is not None
want = {'id': 'X', 'sex': 'X', 'birthday': 'X', 'name': 'X'}
for key, val in want.items():
assert edf_info['subject_info'][key] == val, key
fname = tmpdir.join('test_raw.fif')
raw.save(fname)
raw = read_raw_fif(fname)
assert raw.info['subject_info'] is None # XXX should eventually round-trip
def test_bdf_data():
"""Test reading raw bdf files."""
# XXX BDF data for these is around 0.01 when it should be in the uV range,
# probably some bug
test_scaling = False
raw_py = _test_raw_reader(read_raw_bdf, input_fname=bdf_path,
eog=eog, misc=misc,
exclude=['M2', 'IEOG'],
test_scaling=test_scaling,
)
assert len(raw_py.ch_names) == 71
raw_py = _test_raw_reader(read_raw_bdf, input_fname=bdf_path,
montage='biosemi64', eog=eog, misc=misc,
exclude=['M2', 'IEOG'],
test_scaling=test_scaling)
assert len(raw_py.ch_names) == 71
assert 'RawEDF' in repr(raw_py)
picks = pick_types(raw_py.info, meg=False, eeg=True, exclude='bads')
data_py, _ = raw_py[picks]
# this .mat was generated using the EEG Lab Biosemi Reader
raw_eeglab = loadmat(bdf_eeglab_path)
raw_eeglab = raw_eeglab['data'] * 1e-6 # data are stored in microvolts
data_eeglab = raw_eeglab[picks]
# BDF saved as single precision; resolution to seven decimal places in MATLAB
assert_array_almost_equal(data_py, data_eeglab, 8)
# Manually checking that float coordinates are imported
assert (raw_py.info['chs'][0]['loc']).any()
assert (raw_py.info['chs'][25]['loc']).any()
assert (raw_py.info['chs'][63]['loc']).any()
@testing.requires_testing_data
def test_bdf_crop_save_stim_channel(tmpdir):
"""Test EDF with various sampling rates."""
raw = read_raw_bdf(bdf_stim_channel_path)
raw.save(tmpdir.join('test-raw.fif'), tmin=1.2, tmax=4.0, overwrite=True)
@testing.requires_testing_data
@pytest.mark.parametrize('fname', [
edf_reduced,
edf_overlap_annot_path,
])
@pytest.mark.parametrize('stim_channel', (None, False, 'auto'))
def test_edf_others(fname, stim_channel):
"""Test EDF with various sampling rates and overlapping annotations."""
_test_raw_reader(
read_raw_edf, input_fname=fname, stim_channel=stim_channel,
verbose='error')
def test_edf_data_broken(tmpdir):
"""Test edf files."""
raw = _test_raw_reader(read_raw_edf, input_fname=edf_path,
exclude=['Ergo-Left', 'H10'], verbose='error')
raw_py = read_raw_edf(edf_path)
data = raw_py.get_data()
assert_equal(len(raw.ch_names) + 2, len(raw_py.ch_names))
# Test with number of records not in header (-1).
broken_fname = op.join(tmpdir, 'broken.edf')
with open(edf_path, 'rb') as fid_in:
fid_in.seek(0, 2)
n_bytes = fid_in.tell()
fid_in.seek(0, 0)
rbytes = fid_in.read()
with open(broken_fname, 'wb') as fid_out:
fid_out.write(rbytes[:236])
fid_out.write(b'-1 ')
fid_out.write(rbytes[244:244 + int(n_bytes * 0.4)])
with pytest.warns(RuntimeWarning,
match='records .* not match the file size'):
raw = read_raw_edf(broken_fname, preload=True)
read_raw_edf(broken_fname, exclude=raw.ch_names[:132], preload=True)
# Test with \x00's in the data
with open(broken_fname, 'wb') as fid_out:
fid_out.write(rbytes[:184])
assert rbytes[184:192] == b'36096 '
fid_out.write(rbytes[184:192].replace(b' ', b'\x00'))
fid_out.write(rbytes[192:])
raw_py = read_raw_edf(broken_fname)
data_new = raw_py.get_data()
assert_allclose(data, data_new)
def test_duplicate_channel_labels_edf():
"""Test reading edf file with duplicate channel names."""
EXPECTED_CHANNEL_NAMES = ['EEG F1-Ref-0', 'EEG F2-Ref', 'EEG F1-Ref-1']
with pytest.warns(RuntimeWarning, match='Channel names are not unique'):
raw = read_raw_edf(duplicate_channel_labels_path, preload=False)
assert raw.ch_names == EXPECTED_CHANNEL_NAMES
def test_parse_annotation(tmpdir):
"""Test parsing the tal channel."""
# test the parser
annot = (b'+180\x14Lights off\x14Close door\x14\x00\x00\x00\x00\x00'
b'+180\x14Lights off\x14\x00\x00\x00\x00\x00\x00\x00\x00'
b'+180\x14Close door\x14\x00\x00\x00\x00\x00\x00\x00\x00'
b'+3.14\x1504.20\x14nothing\x14\x00\x00\x00\x00'
b'+1800.2\x1525.5\x14Apnea\x14\x00\x00\x00\x00\x00\x00\x00'
b'+123\x14\x14\x00\x00\x00\x00\x00\x00\x00')
annot_file = tmpdir.join('annotations.txt')
annot_file.write(annot)
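# Rebuild the TAL channel as little-endian 16-bit samples: each byte pair
# (low, high) from the raw annotation bytes is combined into one integer.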
annot = [a for a in bytes(annot)]
annot[1::2] = [a * 256 for a in annot[1::2]]
tal_channel_A = np.array(list(map(sum, zip(annot[0::2], annot[1::2]))),
dtype=np.int64)
with open(str(annot_file), 'rb') as fid:
# ch_data = np.fromfile(fid, dtype='<i2', count=len(annot))
tal_channel_B = _read_ch(fid, subtype='EDF', dtype='<i2',
samp=(len(annot) - 1) // 2,
dtype_byte='This_parameter_is_not_used')
for tal_channel in [tal_channel_A, tal_channel_B]:
onset, duration, description = _read_annotations_edf([tal_channel])
assert_equal(np.column_stack((onset, duration, description)),
[[180., 0., 'Lights off'], [180., 0., 'Close door'],
[180., 0., 'Lights off'], [180., 0., 'Close door'],
[3.14, 4.2, 'nothing'], [1800.2, 25.5, 'Apnea']])
def test_find_events_backward_compatibility():
"""Test if events are detected correctly in a typical MNE workflow."""
EXPECTED_EVENTS = [[68, 0, 2],
[199, 0, 2],
[1024, 0, 3],
[1280, 0, 2]]
# test an actual file
raw = read_raw_edf(edf_path, preload=True)
event_id = _get_edf_default_event_id(raw.annotations.description)
event_id.pop('start')
events_from_EFA, _ = events_from_annotations(raw, event_id=event_id,
use_rounding=False)
assert_array_equal(events_from_EFA, EXPECTED_EVENTS)
@requires_pandas
@pytest.mark.parametrize('fname', [edf_path, bdf_path])
def test_to_data_frame(fname):
"""Test EDF/BDF Raw Pandas exporter."""
ext = op.splitext(fname)[1].lstrip('.').lower()
if ext == 'edf':
raw = read_raw_edf(fname, preload=True, verbose='error')
elif ext == 'bdf':
raw = read_raw_bdf(fname, preload=True, verbose='error')
_, times = raw[0, :10]
df = raw.to_data_frame(index='time')
assert (df.columns == raw.ch_names).all()
assert_array_equal(np.round(times * 1e3), df.index.values[:10])
df = raw.to_data_frame(index=None, scalings={'eeg': 1e13})
assert 'time' in df.columns
assert_array_equal(df.values[:, 1], raw._data[0] * 1e13)
def test_read_raw_edf_stim_channel_input_parameters():
"""Test edf raw reader deprecation."""
_MSG = "`read_raw_edf` is not supposed to trigger a deprecation warning"
with pytest.warns(None) as recwarn:
read_raw_edf(edf_path)
assert all([w.category != DeprecationWarning for w in recwarn.list]), _MSG
for invalid_stim_parameter in ['EDF Annotations', 'BDF Annotations']:
with pytest.raises(ValueError,
match="stim channel is not supported"):
read_raw_edf(edf_path, stim_channel=invalid_stim_parameter)
def _assert_annotations_equal(a, b):
assert_array_equal(a.onset, b.onset)
assert_array_equal(a.duration, b.duration)
assert_array_equal(a.description, b.description)
assert a.orig_time == b.orig_time
def test_read_annot(tmpdir):
"""Test parsing the tal channel."""
EXPECTED_ANNOTATIONS = [[180.0, 0, 'Lights off'], [180.0, 0, 'Close door'],
[180.0, 0, 'Lights off'], [180.0, 0, 'Close door'],
[3.14, 4.2, 'nothing'], [1800.2, 25.5, 'Apnea']]
EXPECTED_ONSET = [180.0, 180.0, 180.0, 180.0, 3.14, 1800.2]
EXPECTED_DURATION = [0, 0, 0, 0, 4.2, 25.5]
EXPECTED_DESC = ['Lights off', 'Close door', 'Lights off', 'Close door',
'nothing', 'Apnea']
EXPECTED_ANNOTATIONS = Annotations(onset=EXPECTED_ONSET,
duration=EXPECTED_DURATION,
description=EXPECTED_DESC,
orig_time=None)
annot = (b'+180\x14Lights off\x14Close door\x14\x00\x00\x00\x00\x00'
b'+180\x14Lights off\x14\x00\x00\x00\x00\x00\x00\x00\x00'
b'+180\x14Close door\x14\x00\x00\x00\x00\x00\x00\x00\x00'
b'+3.14\x1504.20\x14nothing\x14\x00\x00\x00\x00'
b'+1800.2\x1525.5\x14Apnea\x14\x00\x00\x00\x00\x00\x00\x00'
b'+123\x14\x14\x00\x00\x00\x00\x00\x00\x00')
annot_file = tmpdir.join('annotations.txt')
annot_file.write(annot)
onset, duration, desc = _read_annotations_edf(annotations=str(annot_file))
annotation = Annotations(onset=onset, duration=duration, description=desc,
orig_time=None)
_assert_annotations_equal(annotation, EXPECTED_ANNOTATIONS)
# Now test when reading from buffer of data
with open(str(annot_file), 'rb') as fid:
ch_data = np.fromfile(fid, dtype='<i2', count=len(annot))
onset, duration, desc = _read_annotations_edf([ch_data])
annotation = Annotations(onset=onset, duration=duration, description=desc,
orig_time=None)
_assert_annotations_equal(annotation, EXPECTED_ANNOTATIONS)
@testing.requires_testing_data
@pytest.mark.parametrize('fname', [test_generator_edf, test_generator_bdf])
def test_read_annotations(fname, recwarn):
"""Test IO of annotations from edf and bdf files via regexp."""
annot = read_annotations(fname)
assert len(annot.onset) == 2
def test_edf_prefilter_parse():
"""Test prefilter strings from header are parsed correctly."""
prefilter_basic = ["HP: 0Hz LP: 0Hz"]
highpass, lowpass = _parse_prefilter_string(prefilter_basic)
assert_array_equal(highpass, ["0"])
assert_array_equal(lowpass, ["0"])
prefilter_normal_multi_ch = ["HP: 1Hz LP: 30Hz"] * 10
highpass, lowpass = _parse_prefilter_string(prefilter_normal_multi_ch)
assert_array_equal(highpass, ["1"] * 10)
assert_array_equal(lowpass, ["30"] * 10)
prefilter_unfiltered_ch = prefilter_normal_multi_ch + [""]
highpass, lowpass = _parse_prefilter_string(prefilter_unfiltered_ch)
assert_array_equal(highpass, ["1"] * 10)
assert_array_equal(lowpass, ["30"] * 10)
prefilter_edf_specs_doc = ["HP:0.1Hz LP:75Hz N:50Hz"]
highpass, lowpass = _parse_prefilter_string(prefilter_edf_specs_doc)
assert_array_equal(highpass, ["0.1"])
assert_array_equal(lowpass, ["75"])
@testing.requires_testing_data
@pytest.mark.parametrize('fname', [test_generator_edf, test_generator_bdf])
def test_load_generator(fname, recwarn):
"""Test IO of annotations from edf and bdf files with raw info."""
ext = op.splitext(fname)[1][1:].lower()
if ext == 'edf':
raw = read_raw_edf(fname)
elif ext == 'bdf':
raw = read_raw_bdf(fname)
assert len(raw.annotations.onset) == 2
found_types = [k for k, v in
channel_indices_by_type(raw.info, picks=None).items()
if v]
assert len(found_types) == 1
events, event_id = events_from_annotations(raw)
ch_names = ['squarewave', 'ramp', 'pulse', 'ECG', 'noise', 'sine 1 Hz',
'sine 8 Hz', 'sine 8.5 Hz', 'sine 15 Hz', 'sine 17 Hz',
'sine 50 Hz']
assert raw.get_data().shape == (11, 120000)
assert raw.ch_names == ch_names
assert event_id == {'RECORD START': 2, 'REC STOP': 1}
assert_array_equal(events, [[0, 0, 2], [120000, 0, 1]])
@pytest.mark.parametrize('EXPECTED, test_input', [
pytest.param({'stAtUs': 'stim', 'tRigGer': 'stim', 'sine 1 Hz': 'eeg'},
'auto', id='auto'),
pytest.param({'stAtUs': 'eeg', 'tRigGer': 'eeg', 'sine 1 Hz': 'eeg'},
None, id='None'),
pytest.param({'stAtUs': 'eeg', 'tRigGer': 'eeg', 'sine 1 Hz': 'stim'},
'sine 1 Hz', id='single string'),
pytest.param({'stAtUs': 'eeg', 'tRigGer': 'eeg', 'sine 1 Hz': 'stim'},
2, id='single int'),
pytest.param({'stAtUs': 'eeg', 'tRigGer': 'eeg', 'sine 1 Hz': 'stim'},
-1, id='single int (reverse indexing)'),
pytest.param({'stAtUs': 'stim', 'tRigGer': 'stim', 'sine 1 Hz': 'eeg'},
[0, 1], id='int list')])
def test_edf_stim_ch_pick_up(test_input, EXPECTED):
"""Test stim_channel."""
# This is fragile for EEG/EEG-CSD, so just omit csd
KIND_DICT = get_channel_type_constants()
TYPE_LUT = {v['kind']: k for k, v in KIND_DICT.items() if k not in
('csd', 'chpi')} # chpi not needed, and unhashable (a list)
fname = op.join(data_dir, 'test_stim_channel.edf')
raw = read_raw_edf(fname, stim_channel=test_input)
ch_types = {ch['ch_name']: TYPE_LUT[ch['kind']] for ch in raw.info['chs']}
assert ch_types == EXPECTED
@testing.requires_testing_data
def test_bdf_multiple_annotation_channels():
"""Test BDF with multiple annotation channels."""
raw = read_raw_bdf(bdf_multiple_annotations_path)
assert len(raw.annotations) == 10
descriptions = np.array(['signal_start', 'EEG-check#1', 'TestStim#1',
'TestStim#2', 'TestStim#3', 'TestStim#4',
'TestStim#5', 'TestStim#6', 'TestStim#7',
'Ligths-Off#1'], dtype='<U12')
assert_array_equal(descriptions, raw.annotations.description)
@testing.requires_testing_data
def test_edf_lowpass_zero():
"""Test if a lowpass filter of 0Hz is mapped to the Nyquist frequency."""
with pytest.warns(RuntimeWarning, match='too long.*truncated'):
raw = read_raw_edf(edf_stim_resamp_path)
assert_allclose(raw.info["lowpass"], raw.info["sfreq"] / 2)
@testing.requires_testing_data
def test_edf_annot_sub_s_onset():
"""Test reading of sub-second annotation onsets."""
raw = read_raw_edf(edf_annot_sub_s_path)
assert_allclose(raw.annotations.onset, [1.951172, 3.492188])
def test_invalid_date(tmpdir):
"""Test handling of invalid date in EDF header."""
with open(edf_path, 'rb') as f: # read valid test file
edf = bytearray(f.read())
# original date in header is 29.04.14 (2014-04-29) at pos 168:176
# but we also use Startdate if available,
# which starts at byte 88 and is b'Startdate 29-APR-2014 X X X'
# create invalid date 29.02.14 (2014 is not a leap year)
# one wrong: no warning
edf[101:104] = b'FEB'
assert edf[172] == ord('4')
fname = op.join(str(tmpdir), "temp.edf")
with open(fname, "wb") as f:
f.write(edf)
read_raw_edf(fname)
# other wrong: no warning
edf[101:104] = b'APR'
edf[172] = ord('2')
with open(fname, "wb") as f:
f.write(edf)
read_raw_edf(fname)
# both wrong: warning
edf[101:104] = b'FEB'
edf[172] = ord('2')
with open(fname, "wb") as f:
f.write(edf)
with pytest.warns(RuntimeWarning, match='Invalid date'):
read_raw_edf(fname)
# another invalid date 29.00.14 (0 is not a month)
assert edf[101:104] == b'FEB'
edf[172] = ord('0')
with open(fname, "wb") as f:
f.write(edf)
with pytest.warns(RuntimeWarning, match='Invalid date'):
read_raw_edf(fname)
def test_empty_chars():
"""Test blank char support."""
assert int(_edf_str(b'1819\x00 ')) == 1819
def _hp_lp_rev(*args, **kwargs):
out, orig_units = _read_edf_header(*args, **kwargs)
out['lowpass'], out['highpass'] = out['highpass'], out['lowpass']
# this will happen for test_edf_stim_resamp.edf
if len(out['lowpass']) and out['lowpass'][0] == '0.000' and \
len(out['highpass']) and out['highpass'][0] == '0.0':
out['highpass'][0] = '10.0'
return out, orig_units
@pytest.mark.filterwarnings('ignore:.*too long.*:RuntimeWarning')
@pytest.mark.parametrize('fname, lo, hi, warns', [
(edf_path, 256, 0, False),
(edf_uneven_path, 50, 0, False),
(edf_stim_channel_path, 64, 0, False),
pytest.param(edf_overlap_annot_path, 64, 0, False, marks=td_mark),
pytest.param(edf_reduced, 256, 0, False, marks=td_mark),
pytest.param(test_generator_edf, 100, 0, False, marks=td_mark),
pytest.param(edf_stim_resamp_path, 256, 0, True, marks=td_mark),
])
def test_hp_lp_reversed(fname, lo, hi, warns, monkeypatch):
"""Test HP/LP reversed (gh-8584)."""
fname = str(fname)
raw = read_raw_edf(fname)
assert raw.info['lowpass'] == lo
assert raw.info['highpass'] == hi
monkeypatch.setattr(edf.edf, '_read_edf_header', _hp_lp_rev)
if warns:
ctx = pytest.warns(RuntimeWarning, match='greater than lowpass')
new_lo, new_hi = raw.info['sfreq'] / 2., 0.
else:
ctx = nullcontext()
new_lo, new_hi = lo, hi
with ctx:
raw = read_raw_edf(fname)
assert raw.info['lowpass'] == new_lo
assert raw.info['highpass'] == new_hi
def test_degenerate():
"""Test checking of some bad inputs."""
for func in (read_raw_edf, read_raw_bdf, read_raw_gdf,
partial(_read_header, exclude=())):
with pytest.raises(NotImplementedError, match='Only.*txt.*'):
func(edf_txt_stim_channel_path)
|
from datetime import timedelta
import logging
from alpha_vantage.foreignexchange import ForeignExchange
from alpha_vantage.timeseries import TimeSeries
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ATTR_ATTRIBUTION, CONF_API_KEY, CONF_CURRENCY, CONF_NAME
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
ATTR_CLOSE = "close"
ATTR_HIGH = "high"
ATTR_LOW = "low"
ATTRIBUTION = "Stock market information provided by Alpha Vantage"
CONF_FOREIGN_EXCHANGE = "foreign_exchange"
CONF_FROM = "from"
CONF_SYMBOL = "symbol"
CONF_SYMBOLS = "symbols"
CONF_TO = "to"
ICONS = {
"BTC": "mdi:currency-btc",
"EUR": "mdi:currency-eur",
"GBP": "mdi:currency-gbp",
"INR": "mdi:currency-inr",
"RUB": "mdi:currency-rub",
"TRY": "mdi:currency-try",
"USD": "mdi:currency-usd",
}
SCAN_INTERVAL = timedelta(minutes=5)
SYMBOL_SCHEMA = vol.Schema(
{
vol.Required(CONF_SYMBOL): cv.string,
vol.Optional(CONF_CURRENCY): cv.string,
vol.Optional(CONF_NAME): cv.string,
}
)
CURRENCY_SCHEMA = vol.Schema(
{
vol.Required(CONF_FROM): cv.string,
vol.Required(CONF_TO): cv.string,
vol.Optional(CONF_NAME): cv.string,
}
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Optional(CONF_FOREIGN_EXCHANGE): vol.All(cv.ensure_list, [CURRENCY_SCHEMA]),
vol.Optional(CONF_SYMBOLS): vol.All(cv.ensure_list, [SYMBOL_SCHEMA]),
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Alpha Vantage sensor."""
api_key = config[CONF_API_KEY]
symbols = config.get(CONF_SYMBOLS, [])
conversions = config.get(CONF_FOREIGN_EXCHANGE, [])
if not symbols and not conversions:
msg = "No symbols or currencies configured."
hass.components.persistent_notification.create(msg, "Sensor alpha_vantage")
_LOGGER.warning(msg)
return
timeseries = TimeSeries(key=api_key)
dev = []
for symbol in symbols:
try:
_LOGGER.debug("Configuring timeseries for symbols: %s", symbol[CONF_SYMBOL])
timeseries.get_intraday(symbol[CONF_SYMBOL])
except ValueError:
_LOGGER.error("API Key is not valid or symbol '%s' not known", symbol)
dev.append(AlphaVantageSensor(timeseries, symbol))
forex = ForeignExchange(key=api_key)
for conversion in conversions:
from_cur = conversion.get(CONF_FROM)
to_cur = conversion.get(CONF_TO)
try:
_LOGGER.debug("Configuring forex %s - %s", from_cur, to_cur)
forex.get_currency_exchange_rate(from_currency=from_cur, to_currency=to_cur)
except ValueError as error:
_LOGGER.error(
"API Key is not valid or currencies '%s'/'%s' not known",
from_cur,
to_cur,
)
_LOGGER.debug(str(error))
dev.append(AlphaVantageForeignExchange(forex, conversion))
add_entities(dev, True)
_LOGGER.debug("Setup completed")
class AlphaVantageSensor(Entity):
"""Representation of a Alpha Vantage sensor."""
def __init__(self, timeseries, symbol):
"""Initialize the sensor."""
self._symbol = symbol[CONF_SYMBOL]
self._name = symbol.get(CONF_NAME, self._symbol)
self._timeseries = timeseries
self.values = None
self._unit_of_measurement = symbol.get(CONF_CURRENCY, self._symbol)
self._icon = ICONS.get(symbol.get(CONF_CURRENCY, "USD"))
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement
@property
def state(self):
"""Return the state of the sensor."""
return self.values["1. open"]
@property
def device_state_attributes(self):
"""Return the state attributes."""
if self.values is not None:
return {
ATTR_ATTRIBUTION: ATTRIBUTION,
ATTR_CLOSE: self.values["4. close"],
ATTR_HIGH: self.values["2. high"],
ATTR_LOW: self.values["3. low"],
}
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return self._icon
def update(self):
"""Get the latest data and updates the states."""
_LOGGER.debug("Requesting new data for symbol %s", self._symbol)
all_values, _ = self._timeseries.get_intraday(self._symbol)
self.values = next(iter(all_values.values()))
_LOGGER.debug("Received new values for symbol %s", self._symbol)
class AlphaVantageForeignExchange(Entity):
"""Sensor for foreign exchange rates."""
def __init__(self, foreign_exchange, config):
"""Initialize the sensor."""
self._foreign_exchange = foreign_exchange
self._from_currency = config[CONF_FROM]
self._to_currency = config[CONF_TO]
if CONF_NAME in config:
self._name = config.get(CONF_NAME)
else:
self._name = f"{self._to_currency}/{self._from_currency}"
self._unit_of_measurement = self._to_currency
self._icon = ICONS.get(self._from_currency, "mdi:currency-usd")
self.values = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement
@property
def state(self):
"""Return the state of the sensor."""
return round(float(self.values["5. Exchange Rate"]), 4)
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return self._icon
@property
def device_state_attributes(self):
"""Return the state attributes."""
if self.values is not None:
return {
ATTR_ATTRIBUTION: ATTRIBUTION,
CONF_FROM: self._from_currency,
CONF_TO: self._to_currency,
}
def update(self):
"""Get the latest data and updates the states."""
_LOGGER.debug(
"Requesting new data for forex %s - %s",
self._from_currency,
self._to_currency,
)
self.values, _ = self._foreign_exchange.get_currency_exchange_rate(
from_currency=self._from_currency, to_currency=self._to_currency
)
_LOGGER.debug(
"Received new data for forex %s - %s",
self._from_currency,
self._to_currency,
)
|
import os
import pytest
from homeassistant.auth.const import GROUP_ID_ADMIN
from homeassistant.components import frontend
from homeassistant.components.hassio import STORAGE_KEY
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"}
@pytest.fixture(autouse=True)
def mock_all(aioclient_mock):
"""Mock all setup requests."""
aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"})
aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"})
aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"})
aioclient_mock.get(
"http://127.0.0.1/info",
json={
"result": "ok",
"data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None},
},
)
aioclient_mock.get(
"http://127.0.0.1/host/info",
json={
"result": "ok",
"data": {
"result": "ok",
"data": {
"chassis": "vm",
"operating_system": "Debian GNU/Linux 10 (buster)",
"kernel": "4.19.0-6-amd64",
},
},
},
)
aioclient_mock.get(
"http://127.0.0.1/core/info",
json={"result": "ok", "data": {"version_latest": "1.0.0"}},
)
aioclient_mock.get(
"http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}}
)
async def test_setup_api_ping(hass, aioclient_mock):
"""Test setup with API ping."""
with patch.dict(os.environ, MOCK_ENVIRON):
result = await async_setup_component(hass, "hassio", {})
assert result
assert aioclient_mock.call_count == 7
assert hass.components.hassio.get_core_info()["version_latest"] == "1.0.0"
assert hass.components.hassio.is_hassio()
async def test_setup_api_panel(hass, aioclient_mock):
"""Test setup with API ping."""
assert await async_setup_component(hass, "frontend", {})
with patch.dict(os.environ, MOCK_ENVIRON):
result = await async_setup_component(hass, "hassio", {})
assert result
panels = hass.data[frontend.DATA_PANELS]
assert panels.get("hassio").to_response() == {
"component_name": "custom",
"icon": "hass:home-assistant",
"title": "Supervisor",
"url_path": "hassio",
"require_admin": True,
"config": {
"_panel_custom": {
"embed_iframe": True,
"js_url": "/api/hassio/app/entrypoint.js",
"name": "hassio-main",
"trust_external": False,
}
},
}
async def test_setup_api_push_api_data(hass, aioclient_mock):
"""Test setup with API push."""
with patch.dict(os.environ, MOCK_ENVIRON):
result = await async_setup_component(
hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}}
)
assert result
assert aioclient_mock.call_count == 7
assert not aioclient_mock.mock_calls[1][2]["ssl"]
assert aioclient_mock.mock_calls[1][2]["port"] == 9999
assert aioclient_mock.mock_calls[1][2]["watchdog"]
async def test_setup_api_push_api_data_server_host(hass, aioclient_mock):
"""Test setup with API push with active server host."""
with patch.dict(os.environ, MOCK_ENVIRON):
result = await async_setup_component(
hass,
"hassio",
{"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}},
)
assert result
assert aioclient_mock.call_count == 7
assert not aioclient_mock.mock_calls[1][2]["ssl"]
assert aioclient_mock.mock_calls[1][2]["port"] == 9999
assert not aioclient_mock.mock_calls[1][2]["watchdog"]
async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage):
"""Test setup with API push default data."""
with patch.dict(os.environ, MOCK_ENVIRON):
result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}})
assert result
assert aioclient_mock.call_count == 7
assert not aioclient_mock.mock_calls[1][2]["ssl"]
assert aioclient_mock.mock_calls[1][2]["port"] == 8123
refresh_token = aioclient_mock.mock_calls[1][2]["refresh_token"]
hassio_user = await hass.auth.async_get_user(
hass_storage[STORAGE_KEY]["data"]["hassio_user"]
)
assert hassio_user is not None
assert hassio_user.system_generated
assert len(hassio_user.groups) == 1
assert hassio_user.groups[0].id == GROUP_ID_ADMIN
for token in hassio_user.refresh_tokens.values():
if token.token == refresh_token:
break
else:
assert False, "refresh token not found"
async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage):
"""Test setup with API push default data."""
# Create user without admin
user = await hass.auth.async_create_system_user("Hass.io")
assert not user.is_admin
await hass.auth.async_create_refresh_token(user)
hass_storage[STORAGE_KEY] = {
"data": {"hassio_user": user.id},
"key": STORAGE_KEY,
"version": 1,
}
with patch.dict(os.environ, MOCK_ENVIRON):
result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}})
assert result
assert user.is_admin
async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage):
"""Test setup with API push default data."""
user = await hass.auth.async_create_system_user("Hass.io test")
token = await hass.auth.async_create_refresh_token(user)
hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}}
with patch.dict(os.environ, MOCK_ENVIRON):
result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}})
assert result
assert aioclient_mock.call_count == 7
assert not aioclient_mock.mock_calls[1][2]["ssl"]
assert aioclient_mock.mock_calls[1][2]["port"] == 8123
assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token
async def test_setup_core_push_timezone(hass, aioclient_mock):
"""Test setup with API push default data."""
hass.config.time_zone = "testzone"
with patch.dict(os.environ, MOCK_ENVIRON):
result = await async_setup_component(hass, "hassio", {"hassio": {}})
assert result
assert aioclient_mock.call_count == 7
assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone"
with patch("homeassistant.util.dt.set_default_time_zone"):
await hass.config.async_update(time_zone="America/New_York")
await hass.async_block_till_done()
assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York"
async def test_setup_hassio_no_additional_data(hass, aioclient_mock):
"""Test setup with API push default data."""
with patch.dict(os.environ, MOCK_ENVIRON), patch.dict(
os.environ, {"HASSIO_TOKEN": "123456"}
):
result = await async_setup_component(hass, "hassio", {"hassio": {}})
assert result
assert aioclient_mock.call_count == 7
assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456"
async def test_fail_setup_without_environ_var(hass):
"""Fail setup if no environ variable set."""
with patch.dict(os.environ, {}, clear=True):
result = await async_setup_component(hass, "hassio", {})
assert not result
async def test_warn_when_cannot_connect(hass, caplog):
"""Fail warn when we cannot connect."""
with patch.dict(os.environ, MOCK_ENVIRON), patch(
"homeassistant.components.hassio.HassIO.is_connected",
return_value=None,
):
result = await async_setup_component(hass, "hassio", {})
assert result
assert hass.components.hassio.is_hassio()
assert "Not connected with Hass.io / system too busy!" in caplog.text
async def test_service_register(hassio_env, hass):
"""Check if service will be setup."""
assert await async_setup_component(hass, "hassio", {})
assert hass.services.has_service("hassio", "addon_start")
assert hass.services.has_service("hassio", "addon_stop")
assert hass.services.has_service("hassio", "addon_restart")
assert hass.services.has_service("hassio", "addon_stdin")
assert hass.services.has_service("hassio", "host_shutdown")
assert hass.services.has_service("hassio", "host_reboot")
assert hass.services.has_service("hassio", "host_reboot")
assert hass.services.has_service("hassio", "snapshot_full")
assert hass.services.has_service("hassio", "snapshot_partial")
assert hass.services.has_service("hassio", "restore_full")
assert hass.services.has_service("hassio", "restore_partial")
async def test_service_calls(hassio_env, hass, aioclient_mock):
"""Call service and check the API calls behind that."""
assert await async_setup_component(hass, "hassio", {})
aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"})
aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"})
aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"})
aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"})
aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"})
aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"})
aioclient_mock.post("http://127.0.0.1/snapshots/new/full", json={"result": "ok"})
aioclient_mock.post("http://127.0.0.1/snapshots/new/partial", json={"result": "ok"})
aioclient_mock.post(
"http://127.0.0.1/snapshots/test/restore/full", json={"result": "ok"}
)
aioclient_mock.post(
"http://127.0.0.1/snapshots/test/restore/partial", json={"result": "ok"}
)
await hass.services.async_call("hassio", "addon_start", {"addon": "test"})
await hass.services.async_call("hassio", "addon_stop", {"addon": "test"})
await hass.services.async_call("hassio", "addon_restart", {"addon": "test"})
await hass.services.async_call(
"hassio", "addon_stdin", {"addon": "test", "input": "test"}
)
await hass.async_block_till_done()
assert aioclient_mock.call_count == 7
assert aioclient_mock.mock_calls[-1][2] == "test"
await hass.services.async_call("hassio", "host_shutdown", {})
await hass.services.async_call("hassio", "host_reboot", {})
await hass.async_block_till_done()
assert aioclient_mock.call_count == 9
await hass.services.async_call("hassio", "snapshot_full", {})
await hass.services.async_call(
"hassio",
"snapshot_partial",
{"addons": ["test"], "folders": ["ssl"], "password": "123456"},
)
await hass.async_block_till_done()
assert aioclient_mock.call_count == 11
assert aioclient_mock.mock_calls[-1][2] == {
"addons": ["test"],
"folders": ["ssl"],
"password": "123456",
}
await hass.services.async_call("hassio", "restore_full", {"snapshot": "test"})
await hass.services.async_call(
"hassio",
"restore_partial",
{
"snapshot": "test",
"homeassistant": False,
"addons": ["test"],
"folders": ["ssl"],
"password": "123456",
},
)
await hass.async_block_till_done()
assert aioclient_mock.call_count == 13
assert aioclient_mock.mock_calls[-1][2] == {
"addons": ["test"],
"folders": ["ssl"],
"homeassistant": False,
"password": "123456",
}
async def test_service_calls_core(hassio_env, hass, aioclient_mock):
"""Call core service and check the API calls behind that."""
assert await async_setup_component(hass, "hassio", {})
aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": "ok"})
aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"})
await hass.services.async_call("homeassistant", "stop")
await hass.async_block_till_done()
assert aioclient_mock.call_count == 4
await hass.services.async_call("homeassistant", "check_config")
await hass.async_block_till_done()
assert aioclient_mock.call_count == 4
with patch(
"homeassistant.config.async_check_ha_config_file", return_value=None
) as mock_check_config:
await hass.services.async_call("homeassistant", "restart")
await hass.async_block_till_done()
assert mock_check_config.called
assert aioclient_mock.call_count == 5
|
import unittest
from absl import flags
import mock
from perfkitbenchmarker import configs
from perfkitbenchmarker import errors
from perfkitbenchmarker import linux_benchmarks
from perfkitbenchmarker import pkb # pylint: disable=unused-import
from perfkitbenchmarker import windows_benchmarks
import yaml
FLAGS = flags.FLAGS
FLAGS.mark_as_parsed()
CONFIG_NAME = 'a'
INVALID_NAME = 'b'
INVALID_YAML_CONFIG = """
a:
vm_groups:
default:
:
"""
VALID_CONFIG = """
a:
vm_groups:
default:
vm_spec: null
"""
CONFIG_A = """
a:
flags:
flag1: old_value
flag2: not_overwritten
vm_groups: {}
"""
CONFIG_B = """
a:
flags:
flag1: new_value
flag3: new_flag
"""
REF_CONFIG = """
a:
vm_groups:
default:
vm_spec: *default_single_core
"""
BAD_REF_CONFIG = """
a:
vm_groups:
default:
vm_spec: *anchor_does_not_exist
"""
class ConfigsTestCase(unittest.TestCase):
def testLoadAllDefaultConfigs(self):
all_benchmarks = (linux_benchmarks.BENCHMARKS +
windows_benchmarks.BENCHMARKS)
for benchmark_module in all_benchmarks:
self.assertIsInstance(benchmark_module.GetConfig({}), dict)
def testLoadValidConfig(self):
self.assertIsInstance(
configs.LoadMinimalConfig(VALID_CONFIG, CONFIG_NAME), dict)
def testWrongName(self):
with self.assertRaises(KeyError):
configs.LoadMinimalConfig(VALID_CONFIG, INVALID_NAME)
def testLoadInvalidYaml(self):
with self.assertRaises(errors.Config.ParseError):
configs.LoadMinimalConfig(INVALID_YAML_CONFIG, CONFIG_NAME)
def testMergeBasicConfigs(self):
old_config = yaml.safe_load(CONFIG_A)
new_config = yaml.safe_load(CONFIG_B)
config = configs.MergeConfigs(old_config, new_config)
# Key is present in both configs.
self.assertEqual(config['a']['flags']['flag1'], 'new_value')
# Key is only present in default config.
self.assertEqual(config['a']['flags']['flag2'], 'not_overwritten')
# Key is only present in the override config.
self.assertEqual(config['a']['flags']['flag3'], 'new_flag')
def testLoadConfigDoesMerge(self):
default = yaml.safe_load(CONFIG_A)
overrides = yaml.safe_load(CONFIG_B)
merged_config = configs.MergeConfigs(default, overrides)
config = configs.LoadConfig(CONFIG_A, overrides['a'], CONFIG_NAME)
self.assertEqual(merged_config['a'], config)
def testMergeConfigWithNoOverrides(self):
old_config = yaml.safe_load(CONFIG_A)
config = configs.MergeConfigs(old_config, None)
self.assertEqual(config, old_config)
def testLoadConfigWithExternalReference(self):
self.assertIsInstance(
configs.LoadMinimalConfig(REF_CONFIG, CONFIG_NAME), dict)
def testLoadConfigWithBadReference(self):
with self.assertRaises(errors.Config.ParseError):
configs.LoadMinimalConfig(BAD_REF_CONFIG, CONFIG_NAME)
def testConfigOverrideFlag(self):
p = mock.patch(configs.__name__ + '.FLAGS')
self.addCleanup(p.stop)
mock_flags = p.start()
config_override = [
'a.vm_groups.default.vm_count=5',
'a.flags.flag=value']
mock_flags.configure_mock(config_override=config_override,
benchmark_config_file=None)
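# Each override is a dotted path into the config dict that GetUserConfig
# applies on top of the benchmark configuration.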
config = configs.GetUserConfig()
self.assertEqual(config['a']['vm_groups']['default']['vm_count'], 5)
self.assertEqual(config['a']['flags']['flag'], 'value')
def testConfigImport(self):
p = mock.patch(configs.__name__ + '.FLAGS')
self.addCleanup(p.stop)
mock_flags = p.start()
mock_flags.configure_mock(benchmark_config_file='test_import.yml')
config = configs.GetUserConfig()
self.assertEqual(config['flags']['num_vms'], 3)
if __name__ == '__main__':
unittest.main()
|
import asyncio
import pytest
from homeassistant.components.auth import indieauth
from tests.async_mock import patch
from tests.test_util.aiohttp import AiohttpClientMocker
@pytest.fixture
def mock_session():
"""Mock aiohttp.ClientSession."""
mocker = AiohttpClientMocker()
with patch(
"aiohttp.ClientSession",
side_effect=lambda *args, **kwargs: mocker.create_session(
asyncio.get_event_loop()
),
):
yield mocker
def test_client_id_scheme():
"""Test we enforce valid scheme."""
assert indieauth._parse_client_id("http://ex.com/")
assert indieauth._parse_client_id("https://ex.com/")
with pytest.raises(ValueError):
indieauth._parse_client_id("ftp://ex.com")
def test_client_id_path():
"""Test we enforce valid path."""
assert indieauth._parse_client_id("http://ex.com").path == "/"
assert indieauth._parse_client_id("http://ex.com/hello").path == "/hello"
assert (
indieauth._parse_client_id("http://ex.com/hello/.world").path == "/hello/.world"
)
assert (
indieauth._parse_client_id("http://ex.com/hello./.world").path
== "/hello./.world"
)
with pytest.raises(ValueError):
indieauth._parse_client_id("http://ex.com/.")
with pytest.raises(ValueError):
indieauth._parse_client_id("http://ex.com/hello/./yo")
with pytest.raises(ValueError):
indieauth._parse_client_id("http://ex.com/hello/../yo")
def test_client_id_fragment():
"""Test we enforce valid fragment."""
with pytest.raises(ValueError):
indieauth._parse_client_id("http://ex.com/#yoo")
def test_client_id_user_pass():
"""Test we enforce valid username/password."""
with pytest.raises(ValueError):
indieauth._parse_client_id("http://[email protected]/")
with pytest.raises(ValueError):
indieauth._parse_client_id("http://user:[email protected]/")
def test_client_id_hostname():
"""Test we enforce valid hostname."""
assert indieauth._parse_client_id("http://www.home-assistant.io/")
assert indieauth._parse_client_id("http://[::1]")
assert indieauth._parse_client_id("http://127.0.0.1")
assert indieauth._parse_client_id("http://10.0.0.0")
assert indieauth._parse_client_id("http://10.255.255.255")
assert indieauth._parse_client_id("http://172.16.0.0")
assert indieauth._parse_client_id("http://172.31.255.255")
assert indieauth._parse_client_id("http://192.168.0.0")
assert indieauth._parse_client_id("http://192.168.255.255")
with pytest.raises(ValueError):
assert indieauth._parse_client_id("http://255.255.255.255/")
with pytest.raises(ValueError):
assert indieauth._parse_client_id("http://11.0.0.0/")
with pytest.raises(ValueError):
assert indieauth._parse_client_id("http://172.32.0.0/")
with pytest.raises(ValueError):
assert indieauth._parse_client_id("http://192.167.0.0/")
def test_parse_url_lowercase_host():
"""Test we update empty paths."""
assert indieauth._parse_url("http://ex.com/hello").path == "/hello"
assert indieauth._parse_url("http://EX.COM/hello").hostname == "ex.com"
parts = indieauth._parse_url("http://EX.COM:123/HELLO")
assert parts.netloc == "ex.com:123"
assert parts.path == "/HELLO"
def test_parse_url_path():
"""Test we update empty paths."""
assert indieauth._parse_url("http://ex.com").path == "/"
async def test_verify_redirect_uri():
"""Test that we verify redirect uri correctly."""
assert await indieauth.verify_redirect_uri(
None, "http://ex.com", "http://ex.com/callback"
)
with patch.object(indieauth, "fetch_redirect_uris", return_value=[]):
# Different domain
assert not await indieauth.verify_redirect_uri(
None, "http://ex.com", "http://different.com/callback"
)
# Different scheme
assert not await indieauth.verify_redirect_uri(
None, "http://ex.com", "https://ex.com/callback"
)
# Different subdomain
assert not await indieauth.verify_redirect_uri(
None, "https://sub1.ex.com", "https://sub2.ex.com/callback"
)
async def test_find_link_tag(hass, mock_session):
"""Test finding link tag."""
mock_session.get(
"http://127.0.0.1:8000",
text="""
<!doctype html>
<html>
<head>
<link rel="redirect_uri" href="hass://oauth2_redirect">
<link rel="other_value" href="hass://oauth2_redirect">
<link rel="redirect_uri" href="/beer">
</head>
...
</html>
""",
)
redirect_uris = await indieauth.fetch_redirect_uris(hass, "http://127.0.0.1:8000")
assert redirect_uris == ["hass://oauth2_redirect", "http://127.0.0.1:8000/beer"]
async def test_find_link_tag_max_size(hass, mock_session):
"""Test finding link tag."""
text = "".join(
[
'<link rel="redirect_uri" href="/wine">',
("0" * 1024 * 10),
'<link rel="redirect_uri" href="/beer">',
]
)
mock_session.get("http://127.0.0.1:8000", text=text)
redirect_uris = await indieauth.fetch_redirect_uris(hass, "http://127.0.0.1:8000")
assert redirect_uris == ["http://127.0.0.1:8000/wine"]
@pytest.mark.parametrize(
"client_id",
["https://home-assistant.io/android", "https://home-assistant.io/iOS"],
)
async def test_verify_redirect_uri_android_ios(client_id):
"""Test that we verify redirect uri correctly for Android/iOS."""
with patch.object(indieauth, "fetch_redirect_uris", return_value=[]):
assert await indieauth.verify_redirect_uri(
None, client_id, "homeassistant://auth-callback"
)
assert not await indieauth.verify_redirect_uri(
None, client_id, "homeassistant://something-else"
)
assert not await indieauth.verify_redirect_uri(
None, "https://incorrect.com", "homeassistant://auth-callback"
)
|
from io import BytesIO
from openpyxl import load_workbook
from weblate.formats.external import XlsxFormat
from weblate.formats.tests.test_formats import AutoFormatTest
from weblate.trans.tests.utils import get_test_file
XLSX_FILE = get_test_file("cs-mono.xlsx")
class XlsxFormatTest(AutoFormatTest):
FORMAT = XlsxFormat
FILE = XLSX_FILE
MIME = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
EXT = "xlsx"
COUNT = 4
MASK = "*/translations.xlsx"
EXPECTED_PATH = "cs_CZ/translations.xlsx"
FIND = "HELLO"
FIND_MATCH = "Hello, world!\r\n"
MATCH = b"PK"
NEW_UNIT_MATCH = b"PK"
BASE = XLSX_FILE
EXPECTED_FLAGS = ""
def assert_same(self, newdata, testdata):
newworkbook = load_workbook(BytesIO(newdata))
testworkbook = load_workbook(BytesIO(testdata))
self.assertEqual(len(newworkbook.worksheets), len(testworkbook.worksheets))
self.assertEqual(
list(newworkbook.active.values), list(testworkbook.active.values)
)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import unittest
import six
class TextTestResult(unittest.TextTestResult):
if six.PY2:
def addSubTest(self, test, subtest, err): # pylint: disable=invalid-name
if err is not None:
if getattr(self, 'failfast', False):
self.stop()
subtest_error_details = (subtest, self._exc_info_to_string(err, test))
if issubclass(err[0], test.failureException):
self.failures.append(subtest_error_details)
else:
self.errors.append(subtest_error_details)
self._mirrorOutput = True
|
__docformat__ = "restructuredtext en"
from logilab.common.tree import VNode
from six import string_types
class BaseComponent(VNode):
"""base report component
attributes
* id : the component's optional id
* klass : the component's optional klass
"""
def __init__(self, id=None, klass=None):
VNode.__init__(self, id)
self.klass = klass
class BaseLayout(BaseComponent):
"""base container node
attributes
* BaseComponent attributes
* children : the components contained in this layout
"""
def __init__(self, children=(), **kwargs):
super(BaseLayout, self).__init__(**kwargs)
for child in children:
if isinstance(child, BaseComponent):
self.append(child)
else:
self.add_text(child)
def append(self, child):
"""overridden to detect problems easily"""
assert child not in self.parents()
VNode.append(self, child)
def parents(self):
"""return the ancestor nodes"""
assert self.parent is not self
if self.parent is None:
return []
return [self.parent] + self.parent.parents()
def add_text(self, text):
"""shortcut to add text data"""
self.children.append(Text(text))
# non container nodes #########################################################
class Text(BaseComponent):
    """a text portion

    attributes :
    * BaseComponent attributes
    * data : the text value as an encoded or unicode string
    """
    def __init__(self, data, escaped=True, **kwargs):
        super(Text, self).__init__(**kwargs)
        #if isinstance(data, unicode):
        #    data = data.encode('ascii')
        assert isinstance(data, string_types), data.__class__
        self.escaped = escaped
        self.data = data


class VerbatimText(Text):
    """a verbatim text, display the raw data

    attributes :
    * BaseComponent attributes
    * data : the text value as an encoded or unicode string
    """


class Link(BaseComponent):
    """a labelled link

    attributes :
    * BaseComponent attributes
    * url : the link's target (REQUIRED)
    * label : the link's label as a string (use the url by default)
    """
    def __init__(self, url, label=None, **kwargs):
        super(Link, self).__init__(**kwargs)
        assert url
        self.url = url
        self.label = label or url


class Image(BaseComponent):
    """an embedded or a single image

    attributes :
    * BaseComponent attributes
    * filename : the image's filename (REQUIRED)
    * stream : the stream object containing the image data (REQUIRED)
    * title : the image's optional title
    """
    def __init__(self, filename, stream, title=None, **kwargs):
        super(Image, self).__init__(**kwargs)
        assert filename
        assert stream
        self.filename = filename
        self.stream = stream
        self.title = title
# container nodes #############################################################
class Section(BaseLayout):
    """a section

    attributes :
    * BaseLayout attributes

    A title may also be given to the constructor; it will be added
    as the first element.
    A description may also be given to the constructor; it will be added
    as the first paragraph.
    """
    def __init__(self, title=None, description=None, **kwargs):
        super(Section, self).__init__(**kwargs)
        if description:
            self.insert(0, Paragraph([Text(description)]))
        if title:
            self.insert(0, Title(children=(title,)))


class Title(BaseLayout):
    """a title

    attributes :
    * BaseLayout attributes

    A title must not contain a section nor a paragraph!
    """


class Span(BaseLayout):
    """a span of inline content

    attributes :
    * BaseLayout attributes

    A span should only contain Text and Link nodes (in-line elements)
    """


class Paragraph(BaseLayout):
    """a simple text paragraph

    attributes :
    * BaseLayout attributes

    A paragraph must not contain a section!
    """


class Table(BaseLayout):
    """some tabular data

    attributes :
    * BaseLayout attributes
    * cols : the number of columns of the table (REQUIRED)
    * rheaders : the first row's elements are table's header
    * cheaders : the first col's elements are table's header
    * title : the table's optional title
    """
    def __init__(self, cols, title=None,
                 rheaders=0, cheaders=0, rrheaders=0, rcheaders=0,
                 **kwargs):
        super(Table, self).__init__(**kwargs)
        assert isinstance(cols, int)
        self.cols = cols
        self.title = title
        self.rheaders = rheaders
        self.cheaders = cheaders
        self.rrheaders = rrheaders
        self.rcheaders = rcheaders


class List(BaseLayout):
    """some list data

    attributes :
    * BaseLayout attributes
    """
|
import os
import os.path
import sys
import glob
import shutil
import fnmatch
sys.path.insert(0, os.path.join(os.path.dirname(__file__), os.pardir,
                                os.pardir))

from scripts import utils

recursive_lint = ('__pycache__', '*.pyc')
lint = ('build', 'dist', 'pkg/pkg', 'pkg/qutebrowser-*.pkg.tar.xz', 'pkg/src',
        'pkg/qutebrowser', 'qutebrowser.egg-info', 'setuptools-*.egg',
        'setuptools-*.zip', 'doc/qutebrowser.asciidoc', 'doc/*.html',
        'doc/qutebrowser.1', 'README.html', 'qutebrowser/html/doc')


def remove(path):
    """Remove either a file or directory unless --dry-run is given."""
    if os.path.isdir(path):
        print("rm -r '{}'".format(path))
        if '--dry-run' not in sys.argv:
            shutil.rmtree(path)
    else:
        print("rm '{}'".format(path))
        if '--dry-run' not in sys.argv:
            os.remove(path)


def main():
    """Clean up lint in the current dir."""
    utils.change_cwd()
    for elem in lint:
        for f in glob.glob(elem):
            remove(f)
    for root, _dirs, _files in os.walk(os.getcwd()):
        path = os.path.basename(root)
        if any(fnmatch.fnmatch(path, e) for e in recursive_lint):
            remove(root)


if __name__ == '__main__':
    main()
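
# Typical invocation (illustrative; the script's location in the repository is
# not shown here): run it directly, optionally with --dry-run so it only
# prints the rm commands instead of deleting anything.
#
#     python cleanup.py
#     python cleanup.py --dry-run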
|
from aiohttp import ClientSession
from xbox.webapi.authentication.manager import AuthenticationManager
from xbox.webapi.authentication.models import OAuth2TokenResponse
from homeassistant.helpers import config_entry_oauth2_flow
from homeassistant.util.dt import utc_from_timestamp
class AsyncConfigEntryAuth(AuthenticationManager):
    """Provide xbox authentication tied to an OAuth2 based config entry."""

    def __init__(
        self,
        websession: ClientSession,
        oauth_session: config_entry_oauth2_flow.OAuth2Session,
    ):
        """Initialize xbox auth."""
        # Leaving out client credentials as they are handled by Home Assistant
        super().__init__(websession, "", "", "")
        self._oauth_session = oauth_session
        self.oauth = self._get_oauth_token()

    async def refresh_tokens(self) -> None:
        """Return a valid access token."""
        if not self._oauth_session.valid_token:
            await self._oauth_session.async_ensure_token_valid()
            self.oauth = self._get_oauth_token()

        # This will skip the OAuth refresh and only refresh User and XSTS tokens
        await super().refresh_tokens()

    def _get_oauth_token(self) -> OAuth2TokenResponse:
        tokens = {**self._oauth_session.token}
        issued = tokens["expires_at"] - tokens["expires_in"]
        del tokens["expires_at"]
        token_response = OAuth2TokenResponse.parse_obj(tokens)
        token_response.issued = utc_from_timestamp(issued)
        return token_response
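
# Illustration with made-up numbers (the dict shape follows what the config
# entry's OAuth2Session.token provides): _get_oauth_token strips the absolute
# "expires_at" before parsing the remaining fields into OAuth2TokenResponse,
# and recovers the issue time as expires_at - expires_in.
#
#     token = {"access_token": "...", "expires_in": 3600,
#              "expires_at": 1_600_003_600.0, ...}
#     issued = 1_600_003_600.0 - 3600  # -> 1_600_000_000.0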
|
import pickle
from io import StringIO, BytesIO
from kombu.utils.div import emergency_dump_state
class MyStringIO(StringIO):

    def close(self):
        pass


class MyBytesIO(BytesIO):

    def close(self):
        pass


class test_emergency_dump_state:

    def test_dump(self, stdouts):
        fh = MyBytesIO()
        stderr = StringIO()
        emergency_dump_state(
            {'foo': 'bar'}, open_file=lambda n, m: fh, stderr=stderr)
        assert pickle.loads(fh.getvalue()) == {'foo': 'bar'}
        assert stderr.getvalue()
        assert not stdouts.stdout.getvalue()

    def test_dump_second_strategy(self, stdouts):
        fh = MyStringIO()
        stderr = StringIO()

        def raise_something(*args, **kwargs):
            raise KeyError('foo')

        emergency_dump_state(
            {'foo': 'bar'},
            open_file=lambda n, m: fh,
            dump=raise_something,
            stderr=stderr,
        )
        assert 'foo' in fh.getvalue()
        assert 'bar' in fh.getvalue()
        assert stderr.getvalue()
        assert not stdouts.stdout.getvalue()
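
# A minimal usage sketch mirroring test_dump above (the helper name is local to
# this module): the caller decides where the dump lands via ``open_file`` and
# where the diagnostic message goes via ``stderr``; if the primary dump raises,
# emergency_dump_state falls back to writing a plain-text representation, as
# exercised by test_dump_second_strategy.
def _example_dump(state):
    buf = MyBytesIO()
    err = StringIO()
    emergency_dump_state(state, open_file=lambda name, mode: buf, stderr=err)
    return buf.getvalue(), err.getvalue()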
|