import asyncio
from asyncio import TimeoutError as AsyncIOTimeoutError
from aiohttp import ClientError, ClientTimeout
from bond_api import Bond
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_HOST
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.entity import SLOW_UPDATE_WARNING
from .const import DOMAIN
from .utils import BondHub
PLATFORMS = ["cover", "fan", "light", "switch"]
_API_TIMEOUT = SLOW_UPDATE_WARNING - 1
async def async_setup(hass: HomeAssistant, config: dict):
"""Set up the Bond component."""
hass.data.setdefault(DOMAIN, {})
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Set up Bond from a config entry."""
host = entry.data[CONF_HOST]
token = entry.data[CONF_ACCESS_TOKEN]
bond = Bond(host=host, token=token, timeout=ClientTimeout(total=_API_TIMEOUT))
hub = BondHub(bond)
try:
await hub.setup()
except (ClientError, AsyncIOTimeoutError, OSError) as error:
raise ConfigEntryNotReady from error
hass.data[DOMAIN][entry.entry_id] = hub
if not entry.unique_id:
hass.config_entries.async_update_entry(entry, unique_id=hub.bond_id)
device_registry = await dr.async_get_registry(hass)
device_registry.async_get_or_create(
config_entry_id=entry.entry_id,
identifiers={(DOMAIN, hub.bond_id)},
manufacturer="Olibra",
name=hub.bond_id,
model=hub.target,
sw_version=hub.fw_ver,
)
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
|
import logging
import sys
from kalliope.core import FileManager
from kalliope.core.TTS.TTSModule import TTSModule, MissingTTSParameter
logging.basicConfig()
logger = logging.getLogger("kalliope")
# TODO : voicerss lib dependencies are not working as expected in python3
# CF: git : https://github.com/kalliope-project/kalliope/pull/397
# TODO : remove this check, when fixed :
# https://bitbucket.org/daycoder/cachingutil/pull-requests/1/fix-python3-packages-paths/diff
if sys.version_info[0] == 3:
logger.error("[Voicerss] WARNING : VOICERSS is not working for python3 yet !")
else:
from voicerss_tts.voicerss_tts import TextToSpeech
TTS_URL = "http://www.voicerss.org/controls/speech.ashx"
TTS_CONTENT_TYPE = "audio/mpeg"
TTS_TIMEOUT_SEC = 30
class Voicerss(TTSModule):
def __init__(self, **kwargs):
super(Voicerss, self).__init__(**kwargs)
self.key = kwargs.get('key', None)
self.rate = kwargs.get('rate', 0)
self.codec = kwargs.get('codec', 'MP3')
self.audio_format = kwargs.get('audio_format', '44khz_16bit_stereo')
self.ssml = kwargs.get('ssml', False)
self.base64 = kwargs.get('base64', False)
self.ssl = kwargs.get('ssl', False)
self._check_parameters()
def say(self, words):
"""
:param words: The sentence to say
"""
self.generate_and_play(words, self._generate_audio_file)
def _check_parameters(self):
"""
Check that parameters are set, raise MissingTTSParameter otherwise.
:return: True if parameters are set, raise an exception otherwise
.. raises:: MissingTTSParameter
"""
if self.language == "default" or self.language is None or self.key is None:
raise MissingTTSParameter("[voicerss] Missing mandatory parameters, check documentation !")
return True
def _generate_audio_file(self):
"""
Generic method used as a Callback in TTSModule
- must provide the audio file and write it to disk
.. raises:: FailToLoadSoundFile
"""
voicerss = TextToSpeech(
api_key=self.key,
text=self.words,
language=self.language,
rate=self.rate,
codec=self.codec,
audio_format=self.audio_format,
ssml=self.ssml,
base64=self.base64,
ssl=self.ssl)
# TODO : voicerss lib dependencies are not working as expected in python3
# CF: git : https://github.com/kalliope-project/kalliope/pull/397
# TODO : remove this check, when fixed :
# https://bitbucket.org/daycoder/cachingutil/pull-requests/1/fix-python3-packages-paths/diff
if sys.version_info[0] < 3:
# OK we get the audio we can write the sound file
FileManager.write_in_file(self.file_path, voicerss.speech)
|
import asyncio
from collections import deque
from datetime import timedelta
import logging
import aiohttp
import async_timeout
import voluptuous as vol
from homeassistant.components.camera import (
PLATFORM_SCHEMA,
STATE_IDLE,
STATE_RECORDING,
Camera,
)
from homeassistant.components.camera.const import DOMAIN
from homeassistant.const import CONF_NAME, CONF_TIMEOUT, CONF_WEBHOOK_ID
from homeassistant.core import callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.event import async_track_point_in_utc_time
import homeassistant.util.dt as dt_util
_LOGGER = logging.getLogger(__name__)
CONF_BUFFER_SIZE = "buffer"
CONF_IMAGE_FIELD = "field"
DEFAULT_NAME = "Push Camera"
ATTR_FILENAME = "filename"
ATTR_LAST_TRIP = "last_trip"
PUSH_CAMERA_DATA = "push_camera"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_BUFFER_SIZE, default=1): cv.positive_int,
vol.Optional(CONF_TIMEOUT, default=timedelta(seconds=5)): vol.All(
cv.time_period, cv.positive_timedelta
),
vol.Optional(CONF_IMAGE_FIELD, default="image"): cv.string,
vol.Required(CONF_WEBHOOK_ID): cv.string,
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Push Camera platform."""
if PUSH_CAMERA_DATA not in hass.data:
hass.data[PUSH_CAMERA_DATA] = {}
webhook_id = config.get(CONF_WEBHOOK_ID)
cameras = [
PushCamera(
hass,
config[CONF_NAME],
config[CONF_BUFFER_SIZE],
config[CONF_TIMEOUT],
config[CONF_IMAGE_FIELD],
webhook_id,
)
]
async_add_entities(cameras)
async def handle_webhook(hass, webhook_id, request):
"""Handle incoming webhook POST with image files."""
try:
with async_timeout.timeout(5):
data = dict(await request.post())
except (asyncio.TimeoutError, aiohttp.web.HTTPException) as error:
_LOGGER.error("Could not get information from POST <%s>", error)
return
camera = hass.data[PUSH_CAMERA_DATA][webhook_id]
if camera.image_field not in data:
_LOGGER.warning("Webhook call without POST parameter <%s>", camera.image_field)
return
await camera.update_image(
data[camera.image_field].file.read(), data[camera.image_field].filename
)
class PushCamera(Camera):
"""The representation of a Push camera."""
def __init__(self, hass, name, buffer_size, timeout, image_field, webhook_id):
"""Initialize push camera component."""
super().__init__()
self._name = name
self._last_trip = None
self._filename = None
self._expired_listener = None
self._state = STATE_IDLE
self._timeout = timeout
self.queue = deque([], buffer_size)
self._current_image = None
self._image_field = image_field
self.webhook_id = webhook_id
self.webhook_url = hass.components.webhook.async_generate_url(webhook_id)
async def async_added_to_hass(self):
"""Call when entity is added to hass."""
self.hass.data[PUSH_CAMERA_DATA][self.webhook_id] = self
try:
self.hass.components.webhook.async_register(
DOMAIN, self.name, self.webhook_id, handle_webhook
)
except ValueError:
_LOGGER.error(
"In <%s>, webhook_id <%s> already used", self.name, self.webhook_id
)
@property
def image_field(self):
"""HTTP field containing the image file."""
return self._image_field
@property
def state(self):
"""Return current state of the camera."""
return self._state
async def update_image(self, image, filename):
"""Update the camera image."""
if self._state == STATE_IDLE:
self._state = STATE_RECORDING
self._last_trip = dt_util.utcnow()
self.queue.clear()
self._filename = filename
self.queue.appendleft(image)
@callback
def reset_state(now):
"""Set state to idle after no new images for a period of time."""
self._state = STATE_IDLE
self._expired_listener = None
_LOGGER.debug("Reset state")
self.async_write_ha_state()
if self._expired_listener:
self._expired_listener()
self._expired_listener = async_track_point_in_utc_time(
self.hass, reset_state, dt_util.utcnow() + self._timeout
)
self.async_write_ha_state()
async def async_camera_image(self):
"""Return a still image response."""
if self.queue:
if self._state == STATE_IDLE:
self.queue.rotate(1)
self._current_image = self.queue[0]
return self._current_image
@property
def name(self):
"""Return the name of this camera."""
return self._name
@property
def motion_detection_enabled(self):
"""Camera Motion Detection Status."""
return False
@property
def device_state_attributes(self):
"""Return the state attributes."""
return {
name: value
for name, value in (
(ATTR_LAST_TRIP, self._last_trip),
(ATTR_FILENAME, self._filename),
)
if value is not None
}
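# A minimal sketch (hypothetical values; the platform key is assumed to be this
# integration's domain) of a configuration mapping that PLATFORM_SCHEMA above
# would accept; the timeout falls back to its 5 second default when omitted.
_EXAMPLE_PUSH_CAMERA_CONFIG = {
    "platform": "push",
    "name": "Driveway Push Camera",
    "buffer": 3,
    "field": "image",
    "webhook_id": "driveway_push_camera",
}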
|
import numpy as np
import pandas as pd
import xarray as xr
from . import parameterized, randn, requires_dask
nx = 3000
long_nx = 30000000
ny = 2000
nt = 1000
window = 20
randn_xy = randn((nx, ny), frac_nan=0.1)
randn_xt = randn((nx, nt))
randn_t = randn((nt,))
randn_long = randn((long_nx,), frac_nan=0.1)
class Rolling:
def setup(self, *args, **kwargs):
self.ds = xr.Dataset(
{
"var1": (("x", "y"), randn_xy),
"var2": (("x", "t"), randn_xt),
"var3": (("t",), randn_t),
},
coords={
"x": np.arange(nx),
"y": np.linspace(0, 1, ny),
"t": pd.date_range("1970-01-01", periods=nt, freq="D"),
"x_coords": ("x", np.linspace(1.1, 2.1, nx)),
},
)
self.da_long = xr.DataArray(
randn_long, dims="x", coords={"x": np.arange(long_nx) * 0.1}
)
@parameterized(["func", "center"], (["mean", "count"], [True, False]))
def time_rolling(self, func, center):
getattr(self.ds.rolling(x=window, center=center), func)().load()
@parameterized(["func", "pandas"], (["mean", "count"], [True, False]))
def time_rolling_long(self, func, pandas):
if pandas:
se = self.da_long.to_series()
getattr(se.rolling(window=window), func)()
else:
getattr(self.da_long.rolling(x=window), func)().load()
@parameterized(["window_", "min_periods"], ([20, 40], [5, None]))
def time_rolling_np(self, window_, min_periods):
self.ds.rolling(x=window_, center=False, min_periods=min_periods).reduce(
getattr(np, "nanmean")
).load()
@parameterized(["center", "stride"], ([True, False], [1, 200]))
def time_rolling_construct(self, center, stride):
self.ds.rolling(x=window, center=center).construct(
"window_dim", stride=stride
).mean(dim="window_dim").load()
class RollingDask(Rolling):
def setup(self, *args, **kwargs):
requires_dask()
super().setup(**kwargs)
self.ds = self.ds.chunk({"x": 100, "y": 50, "t": 50})
self.da_long = self.da_long.chunk({"x": 10000})
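# A standalone sketch (not part of the asv benchmark suite) of the operation
# time_rolling above measures: a centered rolling mean over a window of
# `window` elements along the "x" dimension, computed eagerly.
def _rolling_mean_example():
    """Return a centered rolling mean over a small random DataArray."""
    da = xr.DataArray(randn((100,)), dims="x")
    return da.rolling(x=window, center=True).mean()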
|
from __future__ import print_function
import pytest
from plumbum import colors
from plumbum.colorlib.styles import ANSIStyle as Style, ColorNotFound
from plumbum.colorlib import htmlcolors
import sys
class TestImportColors:
def testDifferentImports(self):
import plumbum.colors
from plumbum.colors import bold
from plumbum.colors.fg import red
assert str(red) == str(colors.red)
assert str(bold) == str(colors.bold)
class TestANSIColor:
def setup_method(self, method):
colors.use_color = True
def testColorSlice(self):
vals = colors[:8]
assert len(vals) == 8
assert vals[1] == colors.red
vals = colors[40:50]
assert len(vals) == 10
assert vals[1] == colors.full(41)
def testLoadNumericalColor(self):
assert colors.full(2) == colors[2]
assert colors.simple(2) == colors(2)
assert colors(54) == colors[54]
assert colors(1,30,77) == colors.rgb(1,30,77)
assert colors[1,30,77] == colors.rgb(1,30,77)
def testColorStrings(self):
assert '\033[0m' == colors.reset
assert '\033[1m' == colors.bold
assert '\033[39m' == colors.fg.reset
def testNegateIsReset(self):
assert colors.reset == ~colors
assert colors.fg.reset == ~colors.fg
assert colors.bg.reset == ~colors.bg
def testFromPreviousColor(self):
assert colors(colors.red) == colors.red
assert colors(colors.bg.red) == colors.bg.red
assert colors(colors.bold) == colors.bold
def testFromCode(self):
assert colors('\033[31m') == colors.red
def testEmptyStyle(self):
assert str(colors()) == ''
assert str(colors('')) == ''
assert str(colors(None)) == ''
def testLoadColorByName(self):
assert colors['LightBlue'] == colors.fg['LightBlue']
assert colors.bg['light_green'] == colors.bg['LightGreen']
assert colors['DeepSkyBlue1'] == colors['#00afff']
assert colors['DeepSkyBlue1'] == colors.hex('#00afff')
assert colors['DeepSkyBlue1'] == colors[39]
assert colors.DeepSkyBlue1 == colors[39]
assert colors.deepskyblue1 == colors[39]
assert colors.Deep_Sky_Blue1 == colors[39]
assert colors.RED == colors.red
with pytest.raises(AttributeError):
colors.Notacolorsatall
def testMultiColor(self):
sumcolors = colors.bold & colors.blue
assert colors.bold.reset & colors.fg.reset == ~sumcolors
def testSums(self):
# Sums are not commutative; the last color wins
assert colors.red == colors.blue & colors.red
assert colors.bg.green == colors.bg.red & colors.bg.green
def testRepresentations(self):
colors1 = colors.full(87)
assert colors1 == colors.DarkSlateGray2
assert colors1.basic == colors.DarkSlateGray2
assert str(colors1.basic) == str(colors.LightGray)
colors2 = colors.rgb(1,45,214)
assert str(colors2.full) == str(colors.Blue3A)
def testFromAnsi(self):
for c in colors[1:7]:
assert c == colors.from_ansi(str(c))
for c in colors.bg[1:7]:
assert c == colors.from_ansi(str(c))
for c in colors:
assert c == colors.from_ansi(str(c))
for c in colors.bg:
assert c == colors.from_ansi(str(c))
for c in colors[:16]:
assert c == colors.from_ansi(str(c))
for c in colors.bg[:16]:
assert c == colors.from_ansi(str(c))
for c in (colors.bold, colors.underline, colors.italics):
assert c == colors.from_ansi(str(c))
col = colors.bold & colors.fg.green & colors.bg.blue & colors.underline
assert col == colors.from_ansi(str(col))
col = colors.reset
assert col == colors.from_ansi(str(col))
def testWrappedColor(self):
string = 'This is a string'
wrapped = '\033[31mThis is a string\033[39m'
assert colors.red.wrap(string) == wrapped
assert colors.red | string == wrapped
assert colors.red[string] == wrapped
newcolors = colors.blue & colors.underline
assert newcolors[string] == string | newcolors
assert newcolors.wrap(string) == string | colors.blue & colors.underline
def testUndoColor(self):
assert '\033[39m' == ~colors.fg
assert '\033[49m' == ~colors.bg
assert '\033[22m' == ~colors.bold
assert '\033[22m' == ~colors.dim
for i in range(7):
assert '\033[39m' == ~colors(i)
assert '\033[49m' == ~colors.bg(i)
assert '\033[39m' == ~colors.fg(i)
assert '\033[49m' == ~colors.bg(i)
for i in range(256):
assert '\033[39m' == ~colors.fg[i]
assert '\033[49m' == ~colors.bg[i]
assert '\033[0m' == ~colors.reset
assert colors.do_nothing == ~colors.do_nothing
assert colors.bold.reset == ~colors.bold
def testLackOfColor(self):
Style.use_color = False
assert '' == colors.fg.red
assert '' == ~colors.fg
assert '' == colors.fg['LightBlue']
def testFromHex(self):
with pytest.raises(ColorNotFound):
colors.hex('asdf')
with pytest.raises(ColorNotFound):
colors.hex('#1234Z2')
with pytest.raises(ColorNotFound):
colors.hex(12)
def testDirectCall(self, capsys):
colors.blue()
assert capsys.readouterr()[0] == str(colors.blue)
def testPrint(self, capsys):
colors.yellow.print('This is printed to stdout', end='')
assert capsys.readouterr()[0] == str(colors.yellow.wrap('This is printed to stdout'))
class TestHTMLColor:
def test_html(self):
red_tagged = '<font color="#C00000">This is tagged</font>'
assert htmlcolors.red["This is tagged"] == red_tagged
assert "This is tagged" | htmlcolors.red == red_tagged
twin_tagged = '<font color="#C00000"><em>This is tagged</em></font>'
assert "This is tagged" | htmlcolors.red & htmlcolors.em == twin_tagged
assert "This is tagged" | htmlcolors.em & htmlcolors.red == twin_tagged
assert htmlcolors.em & htmlcolors.red | "This is tagged" == twin_tagged
|
import os.path as op
import numpy as np
from numpy.testing import (assert_array_almost_equal, assert_allclose,
assert_equal)
import pytest
import matplotlib.pyplot as plt
from mne import find_events, Epochs, pick_types
from mne.io import read_raw_fif
from mne.io.array import RawArray
from mne.io.tests.test_raw import _test_raw_reader
from mne.io.meas_info import create_info
from mne.io.pick import get_channel_type_constants
from mne.utils import run_tests_if_main
from mne.channels import make_dig_montage
base_dir = op.join(op.dirname(__file__), '..', '..', 'tests', 'data')
fif_fname = op.join(base_dir, 'test_raw.fif')
def test_long_names():
"""Test long name support."""
info = create_info(['a' * 15 + 'b', 'a' * 16], 1000., verbose='error')
data = np.empty((2, 1000))
raw = RawArray(data, info)
assert raw.ch_names == ['a' * 13 + '-0', 'a' * 13 + '-1']
info = create_info(['a' * 16] * 11, 1000., verbose='error')
data = np.empty((11, 1000))
raw = RawArray(data, info)
assert raw.ch_names == ['a' * 12 + '-%s' % ii for ii in range(11)]
def test_array_copy():
"""Test copying during construction."""
info = create_info(1, 1000.)
data = np.empty((1, 1000))
# 'auto' (default)
raw = RawArray(data, info)
assert raw._data is data
assert raw.info is not info
raw = RawArray(data.astype(np.float32), info)
assert raw._data is not data
assert raw.info is not info
# 'info' (more restrictive)
raw = RawArray(data, info, copy='info')
assert raw._data is data
assert raw.info is not info
with pytest.raises(ValueError, match="data copying was not .* copy='info"):
RawArray(data.astype(np.float32), info, copy='info')
# 'data'
raw = RawArray(data, info, copy='data')
assert raw._data is not data
assert raw.info is info
# 'both'
raw = RawArray(data, info, copy='both')
assert raw._data is not data
assert raw.info is not info
raw = RawArray(data.astype(np.float32), info, copy='both')
assert raw._data is not data
assert raw.info is not info
# None
raw = RawArray(data, info, copy=None)
assert raw._data is data
assert raw.info is info
with pytest.raises(ValueError, match='data copying was not .* copy=None'):
RawArray(data.astype(np.float32), info, copy=None)
@pytest.mark.slowtest
def test_array_raw():
"""Test creating raw from array."""
# creating
raw = read_raw_fif(fif_fname).crop(2, 5)
data, times = raw[:, :]
sfreq = raw.info['sfreq']
ch_names = [(ch[4:] if 'STI' not in ch else ch)
for ch in raw.info['ch_names']] # change them, why not
types = list()
for ci in range(101):
types.extend(('grad', 'grad', 'mag'))
types.extend(['ecog', 'seeg', 'hbo']) # really 3 meg channels
types.extend(['stim'] * 9)
types.extend(['eeg'] * 60)
picks = np.concatenate([pick_types(raw.info, meg=True)[::20],
pick_types(raw.info, meg=False, stim=True),
pick_types(raw.info, meg=False, eeg=True)[::20]])
del raw
data = data[picks]
ch_names = np.array(ch_names)[picks].tolist()
types = np.array(types)[picks].tolist()
types.pop(-1)
# wrong length
pytest.raises(ValueError, create_info, ch_names, sfreq, types)
# bad entry
types.append('foo')
pytest.raises(KeyError, create_info, ch_names, sfreq, types)
types[-1] = 'eog'
# default type
info = create_info(ch_names, sfreq)
assert_equal(info['chs'][0]['kind'],
get_channel_type_constants()['misc']['kind'])
# use real types
info = create_info(ch_names, sfreq, types)
raw2 = _test_raw_reader(RawArray, test_preloading=False,
data=data, info=info, first_samp=2 * data.shape[1])
data2, times2 = raw2[:, :]
assert_allclose(data, data2)
assert_allclose(times, times2)
assert ('RawArray' in repr(raw2))
pytest.raises(TypeError, RawArray, info, data)
# filtering
picks = pick_types(raw2.info, meg=True, misc=True, exclude='bads')[:4]
assert_equal(len(picks), 4)
raw_lp = raw2.copy()
kwargs = dict(fir_design='firwin', picks=picks)
raw_lp.filter(None, 4.0, h_trans_bandwidth=4., **kwargs)
raw_hp = raw2.copy()
raw_hp.filter(16.0, None, l_trans_bandwidth=4., **kwargs)
raw_bp = raw2.copy()
raw_bp.filter(8.0, 12.0, l_trans_bandwidth=4., h_trans_bandwidth=4.,
**kwargs)
raw_bs = raw2.copy()
raw_bs.filter(16.0, 4.0, l_trans_bandwidth=4., h_trans_bandwidth=4.,
**kwargs)
data, _ = raw2[picks, :]
lp_data, _ = raw_lp[picks, :]
hp_data, _ = raw_hp[picks, :]
bp_data, _ = raw_bp[picks, :]
bs_data, _ = raw_bs[picks, :]
sig_dec = 15
assert_array_almost_equal(data, lp_data + bp_data + hp_data, sig_dec)
assert_array_almost_equal(data, bp_data + bs_data, sig_dec)
# plotting
raw2.plot()
raw2.plot_psd(tmax=2., average=True, n_fft=1024, spatial_colors=False)
plt.close('all')
# epoching
events = find_events(raw2, stim_channel='STI 014')
events[:, 2] = 1
assert len(events) > 2
epochs = Epochs(raw2, events, 1, -0.2, 0.4, preload=True)
evoked = epochs.average()
assert_equal(evoked.nave, len(events) - 1)
# complex data
rng = np.random.RandomState(0)
data = rng.randn(1, 100) + 1j * rng.randn(1, 100)
raw = RawArray(data, create_info(1, 1000., 'eeg'))
assert_allclose(raw._data, data)
# Using digital montage to give MNI electrode coordinates
n_elec = 10
ts_size = 10000
Fs = 512.
ch_names = [str(i) for i in range(n_elec)]
ch_pos_loc = np.random.randint(60, size=(n_elec, 3)).tolist()
data = np.random.rand(n_elec, ts_size)
montage = make_dig_montage(
ch_pos=dict(zip(ch_names, ch_pos_loc)),
coord_frame='head'
)
info = create_info(ch_names, Fs, 'ecog')
raw = RawArray(data, info)
raw.set_montage(montage)
raw.plot_psd(average=False) # looking for nonexistent layout
raw.plot_psd_topo()
run_tests_if_main()
|
from twisted.internet.task import react
from twisted.internet.defer import inlineCallbacks as coroutine
from autobahn.twisted.wamp import ApplicationSession, Connection
# A single session joins a first realm, leaves and joins another realm
# all over the same, still running transport
@coroutine
def main(transport):
session = ApplicationSession()
# join a first realm and do something
yield session.join(transport, 'myrealm1')
result = yield session.call('com.myapp.add2', 2, 3)
print("Result: {}".format(result))
# leave the realm. the transport will NOT be closed!
yield session.leave()
# join a different realm and do something
yield session.join(transport, 'myrealm2')
result = yield session.call('com.foobar.mul2', 2, 3)
print("Result: {}".format(result))
# leave the realm. the transport will NOT be closed!
yield session.leave()
# now close the transport. after this, the transport cannot
# be reused!
yield transport.close()
if __name__ == '__main__':
connection = Connection(main)
react(connection.start)
|
import logging
from homeassistant.components.fan import (
SPEED_HIGH,
SPEED_LOW,
SPEED_MEDIUM,
SPEED_OFF,
SUPPORT_SET_SPEED,
FanEntity,
)
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from . import DOMAIN, SIGNAL_UPDATE_SMARTY
_LOGGER = logging.getLogger(__name__)
SPEED_LIST = [SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH]
SPEED_MAPPING = {1: SPEED_LOW, 2: SPEED_MEDIUM, 3: SPEED_HIGH}
SPEED_TO_MODE = {v: k for k, v in SPEED_MAPPING.items()}
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Smarty Fan Platform."""
smarty = hass.data[DOMAIN]["api"]
name = hass.data[DOMAIN]["name"]
async_add_entities([SmartyFan(name, smarty)], True)
class SmartyFan(FanEntity):
"""Representation of a Smarty Fan."""
def __init__(self, name, smarty):
"""Initialize the entity."""
self._name = name
self._speed = SPEED_OFF
self._state = None
self._smarty = smarty
@property
def should_poll(self):
"""Do not poll."""
return False
@property
def name(self):
"""Return the name of the fan."""
return self._name
@property
def icon(self):
"""Return the icon to use in the frontend."""
return "mdi:air-conditioner"
@property
def supported_features(self):
"""Return the list of supported features."""
return SUPPORT_SET_SPEED
@property
def speed_list(self):
"""List of available fan modes."""
return SPEED_LIST
@property
def is_on(self):
"""Return state of the fan."""
return self._state
@property
def speed(self) -> str:
"""Return speed of the fan."""
return self._speed
def set_speed(self, speed: str) -> None:
"""Set the speed of the fan."""
_LOGGER.debug("Set the fan speed to %s", speed)
if speed == SPEED_OFF:
self.turn_off()
else:
self._smarty.set_fan_speed(SPEED_TO_MODE.get(speed))
self._speed = speed
self._state = True
def turn_on(self, speed=None, **kwargs):
"""Turn on the fan."""
_LOGGER.debug("Turning on fan. Speed is %s", speed)
if speed is None:
if self._smarty.turn_on(SPEED_TO_MODE.get(self._speed)):
self._state = True
self._speed = SPEED_MEDIUM
else:
if self._smarty.set_fan_speed(SPEED_TO_MODE.get(speed)):
self._speed = speed
self._state = True
self.schedule_update_ha_state()
def turn_off(self, **kwargs):
"""Turn off the fan."""
_LOGGER.debug("Turning off fan")
if self._smarty.turn_off():
self._state = False
self.schedule_update_ha_state()
async def async_added_to_hass(self):
"""Call to update fan."""
async_dispatcher_connect(self.hass, SIGNAL_UPDATE_SMARTY, self._update_callback)
@callback
def _update_callback(self):
"""Call update method."""
self.async_schedule_update_ha_state(True)
def update(self):
"""Update state."""
_LOGGER.debug("Updating state")
result = self._smarty.fan_speed
if result:
self._speed = SPEED_MAPPING[result]
_LOGGER.debug("Speed is %s, Mode is %s", self._speed, result)
self._state = True
else:
self._state = False
|
from kalliope.core.Utils.FileManager import FileManager
from requests.auth import HTTPBasicAuth
from kalliope.core.TTS.TTSModule import TTSModule, MissingTTSParameter
import logging
import requests
logging.basicConfig()
logger = logging.getLogger("kalliope")
API_VERSION = "v1"
TTS_URL = "https://stream.watsonplatform.net/text-to-speech/api/"
TTS_CONTENT_TYPE = "audio/wav"
TTS_PITCH = "default"
TTS_RATE = "default"
class Watson(TTSModule):
def __init__(self, **kwargs):
super(Watson, self).__init__(**kwargs)
# set parameter from what we receive from the settings
self.apikey = kwargs.get('apikey', None)
self.location = kwargs.get('location', TTS_URL)
self.voice = kwargs.get('voice', None)
self.pitch = kwargs.get('pitch', TTS_PITCH)
self.rate = kwargs.get('rate', TTS_RATE)
self._check_parameters()
def _check_parameters(self):
"""
Check that parameters are set, raise MissingTTSParameter otherwise.
:return: True if parameters are set, raise an exception otherwise
.. raises:: MissingTTSParameter
"""
if self.apikey is None or self.voice is None:
raise MissingTTSParameter("[Watson] Missing parameters, check documentation !")
return True
def say(self, words):
"""
"""
self.generate_and_play(words, self._generate_audio_file)
def _generate_audio_file(self):
"""
Generic method used as a Callback in TTSModule
"""
# Prepare payload
payload = self.get_payload()
headers = {
"Content-Type": "application/json",
"Accept": "audio/wav"
}
endpoint_location = self.location if self.location.endswith('/') else self.location+"/"
url = "%s/synthesize?voice=%s" % (endpoint_location + API_VERSION, self.voice)
response = requests.post(url,
auth=HTTPBasicAuth("apikey", self.apikey),
headers=headers,
json=payload)
logger.debug("[Watson TTS] status code: %s" % response.status_code)
if response.status_code == 200:
# OK we get the audio we can write the sound file
FileManager.write_in_file(self.file_path, response.content)
else:
logger.debug("[Watson TTS] Fail to get audio. Header: %s" % response.headers)
def get_payload(self):
return {
"text": "<s><prosody pitch='"+self.pitch+"' rate='"+self.rate+"'>" + self.words + "</prosody></s>"
}
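# A minimal sketch (illustrative only) of the payload get_payload() builds for
# the words "Hello world" with the default pitch and rate, i.e. the JSON body
# posted to the Watson synthesize endpoint.
_EXAMPLE_PAYLOAD = {
    "text": "<s><prosody pitch='default' rate='default'>Hello world</prosody></s>"
}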
|
import logging
from pyqvrpro import Client
from pyqvrpro.client import AuthenticationError, InsufficientPermissionsError
from requests.exceptions import ConnectionError as RequestsConnectionError
import voluptuous as vol
from homeassistant.components.camera import DOMAIN as CAMERA_DOMAIN
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNAME
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.discovery import load_platform
from .const import (
CONF_EXCLUDE_CHANNELS,
DOMAIN,
SERVICE_START_RECORD,
SERVICE_STOP_RECORD,
)
DEFAULT_PORT = 8080
SERVICE_CHANNEL_GUID = "guid"
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_EXCLUDE_CHANNELS, default=[]): vol.All(
cv.ensure_list_csv, [cv.positive_int]
),
}
)
},
extra=vol.ALLOW_EXTRA,
)
SERVICE_CHANNEL_RECORD_SCHEMA = vol.Schema(
{vol.Required(SERVICE_CHANNEL_GUID): cv.string}
)
def setup(hass, config):
"""Set up the QVR Pro component."""
conf = config[DOMAIN]
user = conf[CONF_USERNAME]
password = conf[CONF_PASSWORD]
host = conf[CONF_HOST]
port = conf[CONF_PORT]
excluded_channels = conf[CONF_EXCLUDE_CHANNELS]
try:
qvrpro = Client(user, password, host, port=port)
channel_resp = qvrpro.get_channel_list()
except InsufficientPermissionsError:
_LOGGER.error("User must have Surveillance Management permission")
return False
except AuthenticationError:
_LOGGER.error("Authentication failed")
return False
except RequestsConnectionError:
_LOGGER.error("Error connecting to QVR server")
return False
channels = []
for channel in channel_resp["channels"]:
if channel["channel_index"] + 1 in excluded_channels:
continue
channels.append(channel)
hass.data[DOMAIN] = {"channels": channels, "client": qvrpro}
load_platform(hass, CAMERA_DOMAIN, DOMAIN, {}, config)
# Register services
def handle_start_record(call):
guid = call.data[SERVICE_CHANNEL_GUID]
qvrpro.start_recording(guid)
def handle_stop_record(call):
guid = call.data[SERVICE_CHANNEL_GUID]
qvrpro.stop_recording(guid)
hass.services.register(
DOMAIN,
SERVICE_START_RECORD,
handle_start_record,
schema=SERVICE_CHANNEL_RECORD_SCHEMA,
)
hass.services.register(
DOMAIN,
SERVICE_STOP_RECORD,
handle_stop_record,
schema=SERVICE_CHANNEL_RECORD_SCHEMA,
)
return True
|
from homeassistant.setup import async_setup_component
from tests.components.light import common
from tests.components.switch import common as switch_common
async def test_default_state(hass):
"""Test light switch default state."""
await async_setup_component(
hass,
"light",
{
"light": {
"platform": "switch",
"entity_id": "switch.test",
"name": "Christmas Tree Lights",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("light.christmas_tree_lights")
assert state is not None
assert state.state == "unavailable"
assert state.attributes["supported_features"] == 0
assert state.attributes.get("brightness") is None
assert state.attributes.get("hs_color") is None
assert state.attributes.get("color_temp") is None
assert state.attributes.get("white_value") is None
assert state.attributes.get("effect_list") is None
assert state.attributes.get("effect") is None
async def test_light_service_calls(hass):
"""Test service calls to light."""
await async_setup_component(hass, "switch", {"switch": [{"platform": "demo"}]})
await async_setup_component(
hass,
"light",
{"light": [{"platform": "switch", "entity_id": "switch.decorative_lights"}]},
)
await hass.async_block_till_done()
assert hass.states.get("light.light_switch").state == "on"
await common.async_toggle(hass, "light.light_switch")
assert hass.states.get("switch.decorative_lights").state == "off"
assert hass.states.get("light.light_switch").state == "off"
await common.async_turn_on(hass, "light.light_switch")
assert hass.states.get("switch.decorative_lights").state == "on"
assert hass.states.get("light.light_switch").state == "on"
await common.async_turn_off(hass, "light.light_switch")
await hass.async_block_till_done()
assert hass.states.get("switch.decorative_lights").state == "off"
assert hass.states.get("light.light_switch").state == "off"
async def test_switch_service_calls(hass):
"""Test service calls to switch."""
await async_setup_component(hass, "switch", {"switch": [{"platform": "demo"}]})
await async_setup_component(
hass,
"light",
{"light": [{"platform": "switch", "entity_id": "switch.decorative_lights"}]},
)
await hass.async_block_till_done()
assert hass.states.get("light.light_switch").state == "on"
await switch_common.async_turn_off(hass, "switch.decorative_lights")
await hass.async_block_till_done()
assert hass.states.get("switch.decorative_lights").state == "off"
assert hass.states.get("light.light_switch").state == "off"
await switch_common.async_turn_on(hass, "switch.decorative_lights")
await hass.async_block_till_done()
assert hass.states.get("switch.decorative_lights").state == "on"
assert hass.states.get("light.light_switch").state == "on"
|
from __future__ import print_function
from __future__ import with_statement
import os
import sys
import unittest
import rospkg
from . import pmon
from .baretest import BareTestCase
from .baretest import print_runner_summary
from . core import create_xml_runner
from . core import xml_results_file
from .junitxml import Result
from .junitxml import print_summary # noqa: F401
_NAME = 'rosunit'
def rosunitmain():
from optparse import OptionParser
parser = OptionParser(usage='usage: %prog [options] <file> [test args...]', prog=_NAME)
parser.add_option('-t', '--text',
action='store_true', dest='text_mode', default=False,
help='Run with stdout output instead of XML output')
parser.add_option('--time-limit', metavar='TIME_LIMIT',
dest='time_limit', default=60,
help='Set time limit for test')
parser.add_option('--name', metavar='TEST_NAME',
dest='test_name', default=None,
help='Test name')
parser.add_option('--package', metavar='PACKAGE_NAME',
dest='pkg', default=None,
help='Package name (optional)')
(options, args) = parser.parse_args()
if len(args) < 1:
parser.error('You must supply a test file.')
test_file = args[0]
if options.test_name:
test_name = options.test_name
else:
test_name = os.path.basename(test_file)
if '.' in test_name:
test_name = test_name[:test_name.rfind('.')]
time_limit = float(options.time_limit) if options.time_limit else None
# If the caller didn't tell us the package name, we'll try to infer it.
# compute some common names we'll be using to generate test names and files
pkg = options.pkg
if not pkg:
pkg = rospkg.get_package_name(test_file)
if not pkg:
print("Error: failed to determine package name for file '%s'; maybe you should supply the --package argument to rosunit?" % (test_file))
sys.exit(1)
try:
runner_result = None
results = Result('rosunit', 0, 0, 0)
test_case = BareTestCase(test_file, args[1:],
retry=0, time_limit=time_limit,
test_name=test_name, text_mode=options.text_mode, package_name=pkg)
suite = unittest.TestSuite()
suite.addTest(test_case)
if options.text_mode:
result = unittest.TextTestRunner(stream=sys.stdout, verbosity=2).run(suite)
else:
results_file = xml_results_file(pkg, test_name, True)
# the is_rostest really just means "wrapper"
xml_runner = create_xml_runner(pkg, test_name,
results_file=results_file,
is_rostest=True)
runner_result = xml_runner.run(suite)
finally:
pmon.pmon_shutdown()
# the summary is not meaningful in text mode as there are no .xml results to scrape
results = test_case.results
if not options.text_mode:
print_runner_summary(runner_result, results)
else:
print('WARNING: overall test result is not accurate when --text is enabled')
if runner_result is not None and not runner_result.wasSuccessful():
sys.exit(1)
elif results.num_errors or results.num_failures:
sys.exit(2)
if __name__ == '__main__':
rosunitmain()
|
from kalliope.core.Models.settings.SettingsEntry import SettingsEntry
class Tts(SettingsEntry):
"""
This Class is representing a Text To Speech (TTS) with its name and parameters
.. note:: must be defined in the settings.yml
"""
def __init__(self, name=None, parameters=None):
super(Tts, self).__init__(name=name)
self.parameters = parameters
def __str__(self):
return str(self.serialize())
def serialize(self):
return {
'name': self.name,
'parameters': self.parameters
}
def __eq__(self, other):
"""
This is used to compare 2 objects
:param other: the Tts to compare
:return: True if both ttss are similar, False otherwise
"""
return self.__dict__ == other.__dict__
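# A minimal usage sketch (illustrative values, not a real settings entry): a Tts
# entry serializes to a plain dict, and two entries with the same name and
# parameters compare equal via __eq__ above.
def _tts_example():
    tts = Tts(name="example_tts", parameters={"language": "en-US"})
    return tts.serialize()  # {'name': 'example_tts', 'parameters': {'language': 'en-US'}}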
|
from typing import Callable
from homeassistant.components.light import SUPPORT_BRIGHTNESS, LightEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from .dynalitebase import DynaliteBase, async_setup_entry_base
async def async_setup_entry(
hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities: Callable
) -> None:
"""Record the async_add_entities function to add them later when received from Dynalite."""
async_setup_entry_base(
hass, config_entry, async_add_entities, "light", DynaliteLight
)
class DynaliteLight(DynaliteBase, LightEntity):
"""Representation of a Dynalite Channel as a Home Assistant Light."""
@property
def brightness(self) -> int:
"""Return the brightness of this light between 0..255."""
return self._device.brightness
@property
def is_on(self) -> bool:
"""Return true if device is on."""
return self._device.is_on
async def async_turn_on(self, **kwargs) -> None:
"""Turn the light on."""
await self._device.async_turn_on(**kwargs)
async def async_turn_off(self, **kwargs) -> None:
"""Turn the light off."""
await self._device.async_turn_off(**kwargs)
@property
def supported_features(self) -> int:
"""Flag supported features."""
return SUPPORT_BRIGHTNESS
|
import boto3
import os
import subprocess
import sys
import time
from random import shuffle
from src.config import cache_paths, create_cache_directories, NAIP_DATA_DIR, LABELS_DATA_DIR
class NAIPDownloader:
"""Downloads NAIP images from S3, by state/year."""
def __init__(self, number_of_naips, should_randomize, state, year):
"""Download some arbitrary NAIP images from the aws-naip S3 bucket."""
self.number_of_naips = number_of_naips
self.should_randomize = should_randomize
self.state = state
self.year = year
self.resolution = '1m'
self.spectrum = 'rgbir'
self.bucket_url = 's3://aws-naip/'
self.url_base = '{}{}/{}/{}/{}/'.format(self.bucket_url, self.state, self.year,
self.resolution, self.spectrum)
self.make_directory(NAIP_DATA_DIR, full_path=True)
def make_directory(self, new_dir, full_path=False):
"""Make a new directory tree if it doesn't already exist."""
if full_path:
path = ''
for token in new_dir.split('/'):
path += token + '/'
try:
os.mkdir(path)
except OSError:  # directory may already exist
pass
return path
try:
os.mkdir(new_dir)
except OSError:  # directory may already exist
pass
return new_dir
def download_naips(self):
"""Download self.number_of_naips of the naips for a given state."""
create_cache_directories()
self.configure_s3cmd()
naip_filenames = self.list_naips()
if self.should_randomize:
shuffle(naip_filenames)
naip_local_paths = self.download_from_s3(naip_filenames)
cache_paths(naip_local_paths)
return naip_local_paths
def configure_s3cmd(self):
"""Configure s3cmd with AWS credentials."""
file_path = os.environ.get("HOME") + '/.s3cfg'
f = open(file_path, 'r')
filedata = f.read()
f.close()
access = os.environ.get("AWS_ACCESS_KEY_ID")
secret = os.environ.get("AWS_SECRET_ACCESS_KEY")
newdata = filedata.replace("AWS_ACCESS_KEY", access)
newdata = newdata.replace("AWS_SECRET_KEY", secret)
f = open(file_path, 'w')
f.write(newdata)
f.close()
def list_naips(self):
"""Make a list of NAIPs based on the init parameters for the class."""
# list the contents of the bucket directory
bash_command = "s3cmd ls --recursive --skip-existing {} --requester-pays".format(
self.url_base)
process = subprocess.Popen(bash_command.split(" "), stdout=subprocess.PIPE)
output = process.communicate()[0].decode("utf-8")  # decode bytes so the listing can be split into lines
naip_filenames = []
print(output)
for line in output.split('\n'):
parts = line.split(self.url_base)
print(parts)
# there may be subdirectories for each state, where directories need to be made
if len(parts) == 2:
naip_path = parts[1]
naip_filenames.append(naip_path)
naip_subpath = os.path.join(NAIP_DATA_DIR, naip_path.split('/')[0])
if not os.path.exists(naip_subpath):
os.mkdir(naip_subpath)
labels_subpath = os.path.join(LABELS_DATA_DIR, naip_path.split('/')[0])
if not os.path.exists(labels_subpath):
os.mkdir(labels_subpath)
else:
pass
# skip non filename lines from response
return naip_filenames
def download_from_s3(self, naip_filenames):
"""Download the NAIPs and return a list of the file paths."""
s3_client = boto3.client('s3')
naip_local_paths = []
max_range = self.number_of_naips
if max_range == -1:
max_range = len(naip_filenames)
t0 = time.time()
has_printed = False
for filename in naip_filenames[0:max_range]:
# for filename in ['m_3807708_ne_18_1_20130924.tif']:
full_path = os.path.join(NAIP_DATA_DIR, filename)
if os.path.exists(full_path):
print("NAIP {} already downloaded".format(full_path))
else:
if not has_printed:
print("DOWNLOADING {} NAIPs...".format(max_range))
has_printed = True
url_without_prefix = self.url_base.split(self.bucket_url)[1]
s3_url = '{}{}'.format(url_without_prefix, filename)
s3_client.download_file('aws-naip', s3_url, full_path, {'RequestPayer': 'requester'
})
naip_local_paths.append(full_path)
if time.time() - t0 > 0.01:
print("downloads took {0:.1f}s".format(time.time() - t0))
return naip_local_paths
if __name__ == '__main__':
parameters_message = "parameters are: download"
if len(sys.argv) == 1:
print(parameters_message)
elif sys.argv[1] == 'download':
naiper = NAIPDownloader()
naiper.download_naips()
else:
print(parameters_message)
|
from __future__ import absolute_import, unicode_literals
from ._compat import implements_to_string
class TmuxpException(Exception):
"""Base Exception for Tmuxp Errors."""
class ConfigError(TmuxpException):
"""Error parsing tmuxp configuration dict."""
pass
class EmptyConfigException(ConfigError):
"""Configuration is empty."""
pass
class BeforeLoadScriptNotExists(OSError):
def __init__(self, *args, **kwargs):
super(BeforeLoadScriptNotExists, self).__init__(*args, **kwargs)
self.strerror = "before_script file '%s' doesn't exist." % self.strerror
@implements_to_string
class BeforeLoadScriptError(Exception):
"""Exception replacing :py:class:`subprocess.CalledProcessError` for
:meth:`tmuxp.util.run_before_script`.
"""
def __init__(self, returncode, cmd, output=None):
self.returncode = returncode
self.cmd = cmd
self.output = output
self.message = (
'before_script failed with returncode {returncode}.\n'
'command: {cmd}\n'
'Error output:\n'
'{output}'
).format(returncode=self.returncode, cmd=self.cmd, output=self.output)
def __str__(self):
return self.message
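# A minimal sketch (illustrative values) of how BeforeLoadScriptError reads when
# printed: the message combines the return code, the failing command and its
# captured output.
def _before_load_script_error_example():
    err = BeforeLoadScriptError(2, "./bootstrap.sh", output="script output here")
    return str(err)  # "before_script failed with returncode 2. ..."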
|
import asyncio
from datetime import timedelta
import logging
import aiohttp
import async_timeout
import voluptuous as vol
from homeassistant.components.sensor import ENTITY_ID_FORMAT, PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION,
ATTR_ID,
ATTR_LATITUDE,
ATTR_LOCATION,
ATTR_LONGITUDE,
ATTR_NAME,
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_NAME,
CONF_RADIUS,
LENGTH_FEET,
LENGTH_METERS,
)
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity, async_generate_entity_id
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.util import distance, location
_LOGGER = logging.getLogger(__name__)
ATTR_EMPTY_SLOTS = "empty_slots"
ATTR_EXTRA = "extra"
ATTR_FREE_BIKES = "free_bikes"
ATTR_NETWORK = "network"
ATTR_NETWORKS_LIST = "networks"
ATTR_STATIONS_LIST = "stations"
ATTR_TIMESTAMP = "timestamp"
ATTR_UID = "uid"
CONF_NETWORK = "network"
CONF_STATIONS_LIST = "stations"
DEFAULT_ENDPOINT = "https://api.citybik.es/{uri}"
PLATFORM = "citybikes"
MONITORED_NETWORKS = "monitored-networks"
NETWORKS_URI = "v2/networks"
REQUEST_TIMEOUT = 5 # In seconds; argument to asyncio.timeout
SCAN_INTERVAL = timedelta(minutes=5) # Timely, and doesn't suffocate the API
STATIONS_URI = "v2/networks/{uid}?fields=network.stations"
CITYBIKES_ATTRIBUTION = (
"Information provided by the CityBikes Project (https://citybik.es/#about)"
)
CITYBIKES_NETWORKS = "citybikes_networks"
PLATFORM_SCHEMA = vol.All(
cv.has_at_least_one_key(CONF_RADIUS, CONF_STATIONS_LIST),
PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=""): cv.string,
vol.Optional(CONF_NETWORK): cv.string,
vol.Inclusive(CONF_LATITUDE, "coordinates"): cv.latitude,
vol.Inclusive(CONF_LONGITUDE, "coordinates"): cv.longitude,
vol.Optional(CONF_RADIUS, "station_filter"): cv.positive_int,
vol.Optional(CONF_STATIONS_LIST, "station_filter"): vol.All(
cv.ensure_list, vol.Length(min=1), [cv.string]
),
}
),
)
NETWORK_SCHEMA = vol.Schema(
{
vol.Required(ATTR_ID): cv.string,
vol.Required(ATTR_NAME): cv.string,
vol.Required(ATTR_LOCATION): vol.Schema(
{
vol.Required(ATTR_LATITUDE): cv.latitude,
vol.Required(ATTR_LONGITUDE): cv.longitude,
},
extra=vol.REMOVE_EXTRA,
),
},
extra=vol.REMOVE_EXTRA,
)
NETWORKS_RESPONSE_SCHEMA = vol.Schema(
{vol.Required(ATTR_NETWORKS_LIST): [NETWORK_SCHEMA]}
)
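# A minimal sketch (illustrative sample data, not a live API response) of a
# payload NETWORKS_RESPONSE_SCHEMA accepts: a "networks" list whose entries
# carry an id, a name and a location with latitude/longitude; extra keys are
# dropped by REMOVE_EXTRA.
_EXAMPLE_NETWORKS_RESPONSE = {
    "networks": [
        {
            "id": "example-network",
            "name": "Example Bike Share",
            "location": {"latitude": 40.4168, "longitude": -3.7038},
        }
    ]
}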
STATION_SCHEMA = vol.Schema(
{
vol.Required(ATTR_FREE_BIKES): cv.positive_int,
vol.Required(ATTR_EMPTY_SLOTS): vol.Any(cv.positive_int, None),
vol.Required(ATTR_LATITUDE): cv.latitude,
vol.Required(ATTR_LONGITUDE): cv.longitude,
vol.Required(ATTR_ID): cv.string,
vol.Required(ATTR_NAME): cv.string,
vol.Required(ATTR_TIMESTAMP): cv.string,
vol.Optional(ATTR_EXTRA): vol.Schema(
{vol.Optional(ATTR_UID): cv.string}, extra=vol.REMOVE_EXTRA
),
},
extra=vol.REMOVE_EXTRA,
)
STATIONS_RESPONSE_SCHEMA = vol.Schema(
{
vol.Required(ATTR_NETWORK): vol.Schema(
{vol.Required(ATTR_STATIONS_LIST): [STATION_SCHEMA]}, extra=vol.REMOVE_EXTRA
)
}
)
class CityBikesRequestError(Exception):
"""Error to indicate a CityBikes API request has failed."""
async def async_citybikes_request(hass, uri, schema):
"""Perform a request to CityBikes API endpoint, and parse the response."""
try:
session = async_get_clientsession(hass)
with async_timeout.timeout(REQUEST_TIMEOUT):
req = await session.get(DEFAULT_ENDPOINT.format(uri=uri))
json_response = await req.json()
return schema(json_response)
except (asyncio.TimeoutError, aiohttp.ClientError):
_LOGGER.error("Could not connect to CityBikes API endpoint")
except ValueError:
_LOGGER.error("Received non-JSON data from CityBikes API endpoint")
except vol.Invalid as err:
_LOGGER.error("Received unexpected JSON from CityBikes API endpoint: %s", err)
raise CityBikesRequestError
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the CityBikes platform."""
if PLATFORM not in hass.data:
hass.data[PLATFORM] = {MONITORED_NETWORKS: {}}
latitude = config.get(CONF_LATITUDE, hass.config.latitude)
longitude = config.get(CONF_LONGITUDE, hass.config.longitude)
network_id = config.get(CONF_NETWORK)
stations_list = set(config.get(CONF_STATIONS_LIST, []))
radius = config.get(CONF_RADIUS, 0)
name = config[CONF_NAME]
if not hass.config.units.is_metric:
radius = distance.convert(radius, LENGTH_FEET, LENGTH_METERS)
# Create a single instance of CityBikesNetworks.
networks = hass.data.setdefault(CITYBIKES_NETWORKS, CityBikesNetworks(hass))
if not network_id:
network_id = await networks.get_closest_network_id(latitude, longitude)
if network_id not in hass.data[PLATFORM][MONITORED_NETWORKS]:
network = CityBikesNetwork(hass, network_id)
hass.data[PLATFORM][MONITORED_NETWORKS][network_id] = network
hass.async_create_task(network.async_refresh())
async_track_time_interval(hass, network.async_refresh, SCAN_INTERVAL)
else:
network = hass.data[PLATFORM][MONITORED_NETWORKS][network_id]
await network.ready.wait()
devices = []
for station in network.stations:
dist = location.distance(
latitude, longitude, station[ATTR_LATITUDE], station[ATTR_LONGITUDE]
)
station_id = station[ATTR_ID]
station_uid = str(station.get(ATTR_EXTRA, {}).get(ATTR_UID, ""))
if radius > dist or stations_list.intersection((station_id, station_uid)):
if name:
uid = "_".join([network.network_id, name, station_id])
else:
uid = "_".join([network.network_id, station_id])
entity_id = async_generate_entity_id(ENTITY_ID_FORMAT, uid, hass=hass)
devices.append(CityBikesStation(network, station_id, entity_id))
async_add_entities(devices, True)
class CityBikesNetworks:
"""Represent all CityBikes networks."""
def __init__(self, hass):
"""Initialize the networks instance."""
self.hass = hass
self.networks = None
self.networks_loading = asyncio.Condition()
async def get_closest_network_id(self, latitude, longitude):
"""Return the id of the network closest to provided location."""
try:
await self.networks_loading.acquire()
if self.networks is None:
networks = await async_citybikes_request(
self.hass, NETWORKS_URI, NETWORKS_RESPONSE_SCHEMA
)
self.networks = networks[ATTR_NETWORKS_LIST]
result = None
minimum_dist = None
for network in self.networks:
network_latitude = network[ATTR_LOCATION][ATTR_LATITUDE]
network_longitude = network[ATTR_LOCATION][ATTR_LONGITUDE]
dist = location.distance(
latitude, longitude, network_latitude, network_longitude
)
if minimum_dist is None or dist < minimum_dist:
minimum_dist = dist
result = network[ATTR_ID]
return result
except CityBikesRequestError as err:
raise PlatformNotReady from err
finally:
self.networks_loading.release()
class CityBikesNetwork:
"""Thin wrapper around a CityBikes network object."""
def __init__(self, hass, network_id):
"""Initialize the network object."""
self.hass = hass
self.network_id = network_id
self.stations = []
self.ready = asyncio.Event()
async def async_refresh(self, now=None):
"""Refresh the state of the network."""
try:
network = await async_citybikes_request(
self.hass,
STATIONS_URI.format(uid=self.network_id),
STATIONS_RESPONSE_SCHEMA,
)
self.stations = network[ATTR_NETWORK][ATTR_STATIONS_LIST]
self.ready.set()
except CityBikesRequestError as err:
if now is not None:
self.ready.clear()
else:
raise PlatformNotReady from err
class CityBikesStation(Entity):
"""CityBikes API Sensor."""
def __init__(self, network, station_id, entity_id):
"""Initialize the sensor."""
self._network = network
self._station_id = station_id
self._station_data = {}
self.entity_id = entity_id
@property
def state(self):
"""Return the state of the sensor."""
return self._station_data.get(ATTR_FREE_BIKES)
@property
def name(self):
"""Return the name of the sensor."""
return self._station_data.get(ATTR_NAME)
async def async_update(self):
"""Update station state."""
for station in self._network.stations:
if station[ATTR_ID] == self._station_id:
self._station_data = station
break
@property
def device_state_attributes(self):
"""Return the state attributes."""
if self._station_data:
return {
ATTR_ATTRIBUTION: CITYBIKES_ATTRIBUTION,
ATTR_UID: self._station_data.get(ATTR_EXTRA, {}).get(ATTR_UID),
ATTR_LATITUDE: self._station_data[ATTR_LATITUDE],
ATTR_LONGITUDE: self._station_data[ATTR_LONGITUDE],
ATTR_EMPTY_SLOTS: self._station_data[ATTR_EMPTY_SLOTS],
ATTR_TIMESTAMP: self._station_data[ATTR_TIMESTAMP],
}
return {ATTR_ATTRIBUTION: CITYBIKES_ATTRIBUTION}
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return "bikes"
@property
def icon(self):
"""Return the icon."""
return "mdi:bike"
|
import contextlib
import os
import random
import string
import tempfile
import pytest
from molecule import config
from molecule import logger
from molecule import util
LOG = logger.get_logger(__name__)
pytest_plugins = ['helpers_namespace']
@pytest.helpers.register
def run_command(cmd, env=os.environ, log=True):
if log:
cmd = _rebake_command(cmd, env)
return util.run_command(cmd)
def _rebake_command(cmd, env, out=LOG.out, err=LOG.error):
return cmd.bake(_env=env, _out=out, _err=err)
@pytest.fixture
def random_string(l=5):
return ''.join(random.choice(string.ascii_uppercase) for _ in range(l))
@contextlib.contextmanager
def change_dir_to(dir_name):
cwd = os.getcwd()
os.chdir(dir_name)
yield
os.chdir(cwd)
@pytest.fixture
def temp_dir(tmpdir, random_string, request):
directory = tmpdir.mkdir(random_string)
with change_dir_to(directory.strpath):
yield directory
@pytest.helpers.register
def molecule_project_directory():
return os.getcwd()
@pytest.helpers.register
def molecule_directory():
return config.molecule_directory(molecule_project_directory())
@pytest.helpers.register
def molecule_scenario_directory():
return os.path.join(molecule_directory(), 'default')
@pytest.helpers.register
def molecule_file():
return get_molecule_file(molecule_scenario_directory())
@pytest.helpers.register
def get_molecule_file(path):
return config.molecule_file(path)
@pytest.helpers.register
def molecule_ephemeral_directory():
project_directory = 'test-project'
scenario_name = 'test-instance'
return os.path.join(tempfile.gettempdir(), 'molecule', project_directory,
scenario_name)
def pytest_addoption(parser):
parser.addoption(
'--delegated', action='store_true', help='Run delegated driver tests.')
def pytest_collection_modifyitems(items):
marker = pytest.config.getoption('-m')
is_sharded = False
shard_id = 0
shards_num = 0
if not marker.startswith('shard_'):
return
shard_id, _, shards_num = marker[6:].partition('_of_')
if shards_num:
shard_id = int(shard_id)
shards_num = int(shards_num)
is_sharded = True
else:
raise ValueError('shard_{}_of_{} marker is invalid'.format(shard_id, shards_num))
if not is_sharded:
return
if not (0 < shard_id <= shards_num):
raise ValueError(
'shard_id must be greater than 0 and not bigger than shards_num')
for test_counter, item in enumerate(items):
cur_shard_id = test_counter % shards_num + 1
marker = getattr(pytest.mark, 'shard_{}_of_{}'.format(
cur_shard_id,
shards_num,
))
item.add_marker(marker)
del marker
print('Running sharded test group #{} out of {}'.format(
shard_id, shards_num))
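# A minimal sketch (illustrative, not used by the plugin) of the shard
# assignment implemented above: with a "-m shard_2_of_4" marker the test at
# 0-based collection index i is tagged shard_{i % 4 + 1}_of_4, so only every
# 4th collected test (indices 1, 5, 9, ...) matches shard 2.
def _shard_for(test_index, shards_num):
    """Return the 1-based shard id assigned to the test at test_index."""
    return test_index % shards_num + 1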
@pytest.fixture(autouse=True)
def reset_pytest_vars(monkeypatch):
"""Make PYTEST_* env vars inaccessible to subprocesses."""
for var_name in tuple(os.environ):
if var_name.startswith('PYTEST_'):
monkeypatch.delenv(var_name, raising=False)
|
from .__about__ import * # noqa: F401,F403 isort: skip
import sys
import traceback
import warnings
import pkg_resources
from pygal import maps
from pygal.config import Config
from pygal.graph.bar import Bar
from pygal.graph.box import Box
from pygal.graph.dot import Dot
from pygal.graph.funnel import Funnel
from pygal.graph.gauge import Gauge
from pygal.graph.graph import Graph
from pygal.graph.histogram import Histogram
from pygal.graph.horizontalbar import HorizontalBar
from pygal.graph.horizontalline import HorizontalLine
from pygal.graph.horizontalstackedbar import HorizontalStackedBar
from pygal.graph.horizontalstackedline import HorizontalStackedLine
from pygal.graph.line import Line
from pygal.graph.pie import Pie
from pygal.graph.pyramid import Pyramid, VerticalPyramid
from pygal.graph.radar import Radar
from pygal.graph.solidgauge import SolidGauge
from pygal.graph.stackedbar import StackedBar
from pygal.graph.stackedline import StackedLine
from pygal.graph.time import DateLine, DateTimeLine, TimeDeltaLine, TimeLine
from pygal.graph.treemap import Treemap
from pygal.graph.xy import XY
CHARTS_BY_NAME = dict([
(k, v) for k, v in locals().items()
if isinstance(v, type) and issubclass(v, Graph) and v != Graph
])
from pygal.graph.map import BaseMap
for entry in pkg_resources.iter_entry_points('pygal.maps'):
try:
module = entry.load()
except Exception:
warnings.warn(
'Unable to load %s pygal plugin \n\n%s' %
(entry, traceback.format_exc()), Warning
)
continue
setattr(maps, entry.name, module)
for k, v in module.__dict__.items():
if isinstance(v, type) and issubclass(v, BaseMap) and v != BaseMap:
CHARTS_BY_NAME[entry.name.capitalize() + k + 'Map'] = v
CHARTS_NAMES = list(CHARTS_BY_NAME.keys())
CHARTS = list(CHARTS_BY_NAME.values())
class PluginImportFixer(object):
"""
Allow external map plugins to be imported from pygal.maps package.
It is a ``sys.meta_path`` loader.
"""
def find_module(self, fullname, path=None):
"""
Tell if the module to load can be loaded by
the load_module function, ie: if it is a ``pygal.maps.*``
module.
"""
if fullname.startswith('pygal.maps.') and hasattr(
maps, fullname.split('.')[2]):
return self
return None
def load_module(self, name):
"""
Load the ``pygal.maps.name`` module from the previously
loaded plugin
"""
if name not in sys.modules:
sys.modules[name] = getattr(maps, name.split('.')[2])
return sys.modules[name]
sys.meta_path += [PluginImportFixer()]
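# A minimal usage sketch (illustrative, separate from the registry above): look
# up a chart class by name in CHARTS_BY_NAME and render a small series to SVG.
def _render_chart_by_name(chart_name, values, filename="chart.svg"):
    """Instantiate a chart class registered above and write it to an SVG file."""
    chart_cls = CHARTS_BY_NAME[chart_name]  # e.g. "Bar", "Line", "Pie"
    chart = chart_cls()
    chart.add("series", values)
    chart.render_to_file(filename)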
|
import concurrent
import contextlib
import datetime
import logging
import random
import time
from pathlib import Path
from types import SimpleNamespace
from typing import TYPE_CHECKING, Callable, List, MutableMapping, Optional, Tuple, Union
from redbot.core import Config
from redbot.core.bot import Red
from redbot.core.commands import Cog
from redbot.core.i18n import Translator
from redbot.core.utils import AsyncIter
from redbot.core.utils.dbtools import APSWConnectionWrapper
from ..audio_logging import debug_exc_log
from ..sql_statements import (
LAVALINK_CREATE_INDEX,
LAVALINK_CREATE_TABLE,
LAVALINK_DELETE_OLD_ENTRIES,
LAVALINK_FETCH_ALL_ENTRIES_GLOBAL,
LAVALINK_QUERY,
LAVALINK_QUERY_ALL,
LAVALINK_QUERY_LAST_FETCHED_RANDOM,
LAVALINK_UPDATE,
LAVALINK_UPSERT,
SPOTIFY_CREATE_INDEX,
SPOTIFY_CREATE_TABLE,
SPOTIFY_DELETE_OLD_ENTRIES,
SPOTIFY_QUERY,
SPOTIFY_QUERY_ALL,
SPOTIFY_QUERY_LAST_FETCHED_RANDOM,
SPOTIFY_UPDATE,
SPOTIFY_UPSERT,
YOUTUBE_CREATE_INDEX,
YOUTUBE_CREATE_TABLE,
YOUTUBE_DELETE_OLD_ENTRIES,
YOUTUBE_QUERY,
YOUTUBE_QUERY_ALL,
YOUTUBE_QUERY_LAST_FETCHED_RANDOM,
YOUTUBE_UPDATE,
YOUTUBE_UPSERT,
PRAGMA_FETCH_user_version,
PRAGMA_SET_journal_mode,
PRAGMA_SET_read_uncommitted,
PRAGMA_SET_temp_store,
PRAGMA_SET_user_version,
)
from .api_utils import (
LavalinkCacheFetchForGlobalResult,
LavalinkCacheFetchResult,
SpotifyCacheFetchResult,
YouTubeCacheFetchResult,
)
if TYPE_CHECKING:
from .. import Audio
log = logging.getLogger("red.cogs.Audio.api.LocalDB")
_ = Translator("Audio", Path(__file__))
_SCHEMA_VERSION = 3
class BaseWrapper:
def __init__(
self, bot: Red, config: Config, conn: APSWConnectionWrapper, cog: Union["Audio", Cog]
):
self.bot = bot
self.config = config
self.database = conn
self.statement = SimpleNamespace()
self.statement.pragma_temp_store = PRAGMA_SET_temp_store
self.statement.pragma_journal_mode = PRAGMA_SET_journal_mode
self.statement.pragma_read_uncommitted = PRAGMA_SET_read_uncommitted
self.statement.set_user_version = PRAGMA_SET_user_version
self.statement.get_user_version = PRAGMA_FETCH_user_version
self.fetch_result: Optional[Callable] = None
self.cog = cog
async def init(self) -> None:
"""Initialize the local cache"""
with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
executor.submit(self.database.cursor().execute, self.statement.pragma_temp_store)
executor.submit(self.database.cursor().execute, self.statement.pragma_journal_mode)
executor.submit(self.database.cursor().execute, self.statement.pragma_read_uncommitted)
executor.submit(self.maybe_migrate)
executor.submit(self.database.cursor().execute, LAVALINK_CREATE_TABLE)
executor.submit(self.database.cursor().execute, LAVALINK_CREATE_INDEX)
executor.submit(self.database.cursor().execute, YOUTUBE_CREATE_TABLE)
executor.submit(self.database.cursor().execute, YOUTUBE_CREATE_INDEX)
executor.submit(self.database.cursor().execute, SPOTIFY_CREATE_TABLE)
executor.submit(self.database.cursor().execute, SPOTIFY_CREATE_INDEX)
await self.clean_up_old_entries()
def close(self) -> None:
"""Close the connection with the local cache"""
with contextlib.suppress(Exception):
self.database.close()
async def clean_up_old_entries(self) -> None:
"""Delete entries older than x in the local cache tables"""
max_age = await self.config.cache_age()
maxage = datetime.datetime.now(tz=datetime.timezone.utc) - datetime.timedelta(days=max_age)
maxage_int = int(time.mktime(maxage.timetuple()))
values = {"maxage": maxage_int}
with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
executor.submit(self.database.cursor().execute, LAVALINK_DELETE_OLD_ENTRIES, values)
executor.submit(self.database.cursor().execute, YOUTUBE_DELETE_OLD_ENTRIES, values)
executor.submit(self.database.cursor().execute, SPOTIFY_DELETE_OLD_ENTRIES, values)
def maybe_migrate(self) -> None:
"""Maybe migrate Database schema for the local cache"""
current_version = 0
with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
for future in concurrent.futures.as_completed(
[executor.submit(self.database.cursor().execute, self.statement.get_user_version)]
):
try:
row_result = future.result()
current_version = row_result.fetchone()
break
except Exception as exc:
                    debug_exc_log(log, exc, "Failed to complete fetch from database")
if isinstance(current_version, tuple):
current_version = current_version[0]
if current_version == _SCHEMA_VERSION:
return
executor.submit(
self.database.cursor().execute,
self.statement.set_user_version,
{"version": _SCHEMA_VERSION},
)
async def insert(self, values: List[MutableMapping]) -> None:
"""Insert an entry into the local cache"""
try:
with self.database.transaction() as transaction:
transaction.executemany(self.statement.upsert, values)
except Exception as exc:
debug_exc_log(log, exc, "Error during table insert")
async def update(self, values: MutableMapping) -> None:
"""Update an entry of the local cache"""
try:
time_now = int(datetime.datetime.now(datetime.timezone.utc).timestamp())
values["last_fetched"] = time_now
with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
executor.submit(self.database.cursor().execute, self.statement.update, values)
except Exception as exc:
debug_exc_log(log, exc, "Error during table update")
async def _fetch_one(
self, values: MutableMapping
) -> Optional[
Union[LavalinkCacheFetchResult, SpotifyCacheFetchResult, YouTubeCacheFetchResult]
]:
"""Get an entry from the local cache"""
max_age = await self.config.cache_age()
maxage = datetime.datetime.now(tz=datetime.timezone.utc) - datetime.timedelta(days=max_age)
maxage_int = int(time.mktime(maxage.timetuple()))
values.update({"maxage": maxage_int})
row = None
with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
for future in concurrent.futures.as_completed(
[executor.submit(self.database.cursor().execute, self.statement.get_one, values)]
):
try:
row_result = future.result()
row = row_result.fetchone()
except Exception as exc:
                    debug_exc_log(log, exc, "Failed to complete fetch from database")
if not row:
return None
if self.fetch_result is None:
return None
return self.fetch_result(*row)
async def _fetch_all(
self, values: MutableMapping
) -> List[Union[LavalinkCacheFetchResult, SpotifyCacheFetchResult, YouTubeCacheFetchResult]]:
"""Get all entries from the local cache"""
output = []
row_result = []
if self.fetch_result is None:
return []
with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
for future in concurrent.futures.as_completed(
[executor.submit(self.database.cursor().execute, self.statement.get_all, values)]
):
try:
row_result = future.result()
except Exception as exc:
                    debug_exc_log(log, exc, "Failed to complete fetch from database")
async for row in AsyncIter(row_result):
output.append(self.fetch_result(*row))
return output
async def _fetch_random(
self, values: MutableMapping
) -> Optional[
Union[LavalinkCacheFetchResult, SpotifyCacheFetchResult, YouTubeCacheFetchResult]
]:
"""Get a random entry from the local cache"""
row = None
with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
for future in concurrent.futures.as_completed(
[
executor.submit(
self.database.cursor().execute, self.statement.get_random, values
)
]
):
try:
row_result = future.result()
rows = row_result.fetchall()
if rows:
row = random.choice(rows)
else:
row = None
except Exception as exc:
                    debug_exc_log(log, exc, "Failed to complete random fetch from database")
if not row:
return None
if self.fetch_result is None:
return None
return self.fetch_result(*row)
class YouTubeTableWrapper(BaseWrapper):
def __init__(
self, bot: Red, config: Config, conn: APSWConnectionWrapper, cog: Union["Audio", Cog]
):
super().__init__(bot, config, conn, cog)
self.statement.upsert = YOUTUBE_UPSERT
self.statement.update = YOUTUBE_UPDATE
self.statement.get_one = YOUTUBE_QUERY
self.statement.get_all = YOUTUBE_QUERY_ALL
self.statement.get_random = YOUTUBE_QUERY_LAST_FETCHED_RANDOM
self.fetch_result = YouTubeCacheFetchResult
async def fetch_one(
self, values: MutableMapping
) -> Tuple[Optional[str], Optional[datetime.datetime]]:
"""Get an entry from the Youtube table"""
result = await self._fetch_one(values)
if not result or not isinstance(result.query, str):
return None, None
return result.query, result.updated_on
async def fetch_all(self, values: MutableMapping) -> List[YouTubeCacheFetchResult]:
"""Get all entries from the Youtube table"""
result = await self._fetch_all(values)
if result and isinstance(result[0], YouTubeCacheFetchResult):
return result
return []
async def fetch_random(self, values: MutableMapping) -> Optional[str]:
"""Get a random entry from the Youtube table"""
result = await self._fetch_random(values)
if not result or not isinstance(result.query, str):
return None
return result.query
class SpotifyTableWrapper(BaseWrapper):
def __init__(
self, bot: Red, config: Config, conn: APSWConnectionWrapper, cog: Union["Audio", Cog]
):
super().__init__(bot, config, conn, cog)
self.statement.upsert = SPOTIFY_UPSERT
self.statement.update = SPOTIFY_UPDATE
self.statement.get_one = SPOTIFY_QUERY
self.statement.get_all = SPOTIFY_QUERY_ALL
self.statement.get_random = SPOTIFY_QUERY_LAST_FETCHED_RANDOM
self.fetch_result = SpotifyCacheFetchResult
async def fetch_one(
self, values: MutableMapping
) -> Tuple[Optional[str], Optional[datetime.datetime]]:
"""Get an entry from the Spotify table"""
result = await self._fetch_one(values)
if not result or not isinstance(result.query, str):
return None, None
return result.query, result.updated_on
async def fetch_all(self, values: MutableMapping) -> List[SpotifyCacheFetchResult]:
"""Get all entries from the Spotify table"""
result = await self._fetch_all(values)
if result and isinstance(result[0], SpotifyCacheFetchResult):
return result
return []
async def fetch_random(self, values: MutableMapping) -> Optional[str]:
"""Get a random entry from the Spotify table"""
result = await self._fetch_random(values)
if not result or not isinstance(result.query, str):
return None
return result.query
class LavalinkTableWrapper(BaseWrapper):
def __init__(
self, bot: Red, config: Config, conn: APSWConnectionWrapper, cog: Union["Audio", Cog]
):
super().__init__(bot, config, conn, cog)
self.statement.upsert = LAVALINK_UPSERT
self.statement.update = LAVALINK_UPDATE
self.statement.get_one = LAVALINK_QUERY
self.statement.get_all = LAVALINK_QUERY_ALL
self.statement.get_random = LAVALINK_QUERY_LAST_FETCHED_RANDOM
self.statement.get_all_global = LAVALINK_FETCH_ALL_ENTRIES_GLOBAL
self.fetch_result = LavalinkCacheFetchResult
self.fetch_for_global: Optional[Callable] = LavalinkCacheFetchForGlobalResult
async def fetch_one(
self, values: MutableMapping
) -> Tuple[Optional[MutableMapping], Optional[datetime.datetime]]:
"""Get an entry from the Lavalink table"""
result = await self._fetch_one(values)
if not result or not isinstance(result.query, dict):
return None, None
return result.query, result.updated_on
async def fetch_all(self, values: MutableMapping) -> List[LavalinkCacheFetchResult]:
"""Get all entries from the Lavalink table"""
result = await self._fetch_all(values)
if result and isinstance(result[0], LavalinkCacheFetchResult):
return result
return []
async def fetch_random(self, values: MutableMapping) -> Optional[MutableMapping]:
"""Get a random entry from the Lavalink table"""
result = await self._fetch_random(values)
if not result or not isinstance(result.query, dict):
return None
return result.query
async def fetch_all_for_global(self) -> List[LavalinkCacheFetchForGlobalResult]:
"""Get all entries from the Lavalink table"""
output: List[LavalinkCacheFetchForGlobalResult] = []
row_result = []
if self.fetch_for_global is None:
return []
with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
for future in concurrent.futures.as_completed(
[executor.submit(self.database.cursor().execute, self.statement.get_all_global)]
):
try:
row_result = future.result()
except Exception as exc:
                    debug_exc_log(log, exc, "Failed to complete fetch from database")
async for row in AsyncIter(row_result):
output.append(self.fetch_for_global(*row))
return output
class LocalCacheWrapper:
"""Wraps all table apis into 1 object representing the local cache"""
def __init__(
self, bot: Red, config: Config, conn: APSWConnectionWrapper, cog: Union["Audio", Cog]
):
self.bot = bot
self.config = config
self.database = conn
self.cog = cog
self.lavalink: LavalinkTableWrapper = LavalinkTableWrapper(bot, config, conn, self.cog)
self.spotify: SpotifyTableWrapper = SpotifyTableWrapper(bot, config, conn, self.cog)
self.youtube: YouTubeTableWrapper = YouTubeTableWrapper(bot, config, conn, self.cog)
|
import typing
import pandas as pd
import numpy as np
import matchzoo
def pack(df: pd.DataFrame) -> 'matchzoo.DataPack':
"""
Pack a :class:`DataPack` using `df`.
The `df` must have `text_left` and `text_right` columns. Optionally,
the `df` can have `id_left`, `id_right` to index `text_left` and
`text_right` respectively. `id_left`, `id_right` will be automatically
generated if not specified.
:param df: Input :class:`pandas.DataFrame` to use.
Examples::
>>> import matchzoo as mz
>>> import pandas as pd
>>> df = pd.DataFrame(data={'text_left': list('AABC'),
... 'text_right': list('abbc'),
... 'label': [0, 1, 1, 0]})
>>> mz.pack(df).frame()
id_left text_left id_right text_right label
0 L-0 A R-0 a 0
1 L-0 A R-1 b 1
2 L-1 B R-1 b 1
3 L-2 C R-2 c 0
"""
if 'text_left' not in df or 'text_right' not in df:
raise ValueError(
'Input data frame must have `text_left` and `text_right`.')
# Gather IDs
if 'id_left' not in df:
id_left = _gen_ids(df, 'text_left', 'L-')
else:
id_left = df['id_left']
if 'id_right' not in df:
id_right = _gen_ids(df, 'text_right', 'R-')
else:
id_right = df['id_right']
# Build Relation
relation = pd.DataFrame(data={'id_left': id_left, 'id_right': id_right})
for col in df:
if col not in ['id_left', 'id_right', 'text_left', 'text_right']:
relation[col] = df[col]
# Build Left and Right
left = _merge(df, id_left, 'text_left', 'id_left')
right = _merge(df, id_right, 'text_right', 'id_right')
return matchzoo.DataPack(relation, left, right)
def _merge(data: pd.DataFrame, ids: typing.Union[list, np.ndarray],
text_label: str, id_label: str):
left = pd.DataFrame(data={
text_label: data[text_label], id_label: ids
})
left.drop_duplicates(id_label, inplace=True)
left.set_index(id_label, inplace=True)
return left
def _gen_ids(data: pd.DataFrame, col: str, prefix: str):
lookup = {}
for text in data[col].unique():
lookup[text] = prefix + str(len(lookup))
return data[col].map(lookup)
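# Illustrative example (added comment, not original code): for a column
# ['A', 'A', 'B'] with prefix 'L-', _gen_ids returns ['L-0', 'L-0', 'L-1'],
# i.e. identical texts share the same generated id.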
|
from typing import Any, Dict
from homeassistant.components.device_tracker import SOURCE_TYPE_GPS
from homeassistant.components.device_tracker.config_entry import TrackerEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.typing import HomeAssistantType
from .account import IcloudAccount, IcloudDevice
from .const import (
DEVICE_LOCATION_HORIZONTAL_ACCURACY,
DEVICE_LOCATION_LATITUDE,
DEVICE_LOCATION_LONGITUDE,
DOMAIN,
)
async def async_setup_scanner(
hass: HomeAssistantType, config, see, discovery_info=None
):
"""Old way of setting up the iCloud tracker."""
async def async_setup_entry(
hass: HomeAssistantType, entry: ConfigEntry, async_add_entities
) -> None:
"""Set up device tracker for iCloud component."""
account = hass.data[DOMAIN][entry.unique_id]
tracked = set()
@callback
def update_account():
"""Update the values of the account."""
add_entities(account, async_add_entities, tracked)
account.listeners.append(
async_dispatcher_connect(hass, account.signal_device_new, update_account)
)
update_account()
@callback
def add_entities(account, async_add_entities, tracked):
"""Add new tracker entities from the account."""
new_tracked = []
for dev_id, device in account.devices.items():
if dev_id in tracked or device.location is None:
continue
new_tracked.append(IcloudTrackerEntity(account, device))
tracked.add(dev_id)
if new_tracked:
async_add_entities(new_tracked, True)
class IcloudTrackerEntity(TrackerEntity):
"""Represent a tracked device."""
def __init__(self, account: IcloudAccount, device: IcloudDevice):
"""Set up the iCloud tracker entity."""
self._account = account
self._device = device
self._unsub_dispatcher = None
@property
def unique_id(self) -> str:
"""Return a unique ID."""
return self._device.unique_id
@property
def name(self) -> str:
"""Return the name of the device."""
return self._device.name
@property
def location_accuracy(self):
"""Return the location accuracy of the device."""
return self._device.location[DEVICE_LOCATION_HORIZONTAL_ACCURACY]
@property
def latitude(self):
"""Return latitude value of the device."""
return self._device.location[DEVICE_LOCATION_LATITUDE]
@property
def longitude(self):
"""Return longitude value of the device."""
return self._device.location[DEVICE_LOCATION_LONGITUDE]
@property
def battery_level(self) -> int:
"""Return the battery level of the device."""
return self._device.battery_level
@property
def source_type(self) -> str:
"""Return the source type, eg gps or router, of the device."""
return SOURCE_TYPE_GPS
@property
def icon(self) -> str:
"""Return the icon."""
return icon_for_icloud_device(self._device)
@property
    def device_state_attributes(self) -> Dict[str, Any]:
"""Return the device state attributes."""
return self._device.state_attributes
@property
    def device_info(self) -> Dict[str, Any]:
"""Return the device information."""
return {
"identifiers": {(DOMAIN, self._device.unique_id)},
"name": self._device.name,
"manufacturer": "Apple",
"model": self._device.device_model,
}
async def async_added_to_hass(self):
"""Register state update callback."""
self._unsub_dispatcher = async_dispatcher_connect(
self.hass, self._account.signal_device_update, self.async_write_ha_state
)
async def async_will_remove_from_hass(self):
"""Clean up after entity before removal."""
self._unsub_dispatcher()
def icon_for_icloud_device(icloud_device: IcloudDevice) -> str:
"""Return a battery icon valid identifier."""
switcher = {
"iPad": "mdi:tablet-ipad",
"iPhone": "mdi:cellphone-iphone",
"iPod": "mdi:ipod",
"iMac": "mdi:desktop-mac",
"MacBookPro": "mdi:laptop-mac",
}
return switcher.get(icloud_device.device_class, "mdi:cellphone-link")
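# Illustrative example (added comment): a device whose device_class is "iPhone"
# maps to "mdi:cellphone-iphone", while any class missing from the switcher
# dictionary falls back to the generic "mdi:cellphone-link" icon.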
|
import unittest
import os
import sys
from kalliope.core.Utils.FileManager import FileManager
class TestFileManager(unittest.TestCase):
"""
Class to test FileManager
"""
def setUp(self):
pass
def create_file_manager(self):
file_manager = FileManager()
        self.assertIsInstance(file_manager, FileManager)
def test_create_directory(self):
"""
Test to create a new directory.
"""
# set up
cache_path = "/tmp/kalliope/tests/testDirectory"
if os.path.exists(cache_path):
os.removedirs(cache_path)
# Test FileManager.create_directory
FileManager.create_directory(cache_path)
self.assertTrue(os.path.exists(cache_path),
"Fail creating a directory to the path ")
# Remove the directory
os.removedirs(cache_path)
def test_write_in_file(self):
"""
Test to write in file.
"""
# set up the context
dir_path = "/tmp/kalliope/tests/"
file_name = "test_FileManager_writeInFile"
        file_path = os.path.join(dir_path, file_name)
in_file_text = "[Kalliope] Testing the write_in_file method from Utils.FileManager"
if os.path.exists(file_path):
os.remove(file_path)
if not os.path.exists(dir_path):
os.makedirs(dir_path)
# Test FileManager.write_in_file
FileManager.write_in_file(file_path=file_path, content=in_file_text)
with open(file_path, 'r') as content_file:
content = content_file.read()
self.assertEqual(content, in_file_text,
"Fail writing in the file ")
if sys.version_info[0] > 2:
# Test writing of bytes object for python3
FileManager.write_in_file(file_path=file_path, content=bytes(in_file_text, 'utf-8'))
with open(file_path, 'r') as content_file:
content = content_file.read()
self.assertEqual(content, in_file_text,
"Fail writing in the file ")
# Clean up
if os.path.exists(file_path):
os.remove(file_path)
# run into IOError by trying to write something in root
dir_path = "/root/"
file_name = "test_FileManager_writeInFile"
file_path = os.path.join(dir_path, file_name)
self.assertFalse(FileManager.write_in_file(file_path=file_path, content=in_file_text))
def test_file_is_empty(self):
"""
Test that the file is empty
"""
# set up the context
dir_path = "/tmp/kalliope/tests/"
file_name = "test_FileManager_fileIsEmpty"
file_path = os.path.join(dir_path, file_name)
if os.path.exists(file_path):
os.remove(file_path)
if not os.path.exists(dir_path):
os.makedirs(dir_path)
# Test FileManager.file_is_empty
with open(file_path, "wb") as file_open:
file_open.write(b"")
file_open.close()
self.assertTrue(FileManager.file_is_empty(file_path=file_path),
"Fail matching to verify that file is empty ")
# Clean up
if os.path.exists(file_path):
os.remove(file_path)
def test_remove_file(self):
"""
Test to remove a file
"""
# set up the context
dir_path = "/tmp/kalliope/tests/"
file_name = "test_FileManager_fileRemove"
file_path = os.path.join(dir_path, file_name)
if os.path.exists(file_path):
os.remove(file_path)
if not os.path.exists(dir_path):
os.makedirs(dir_path)
# Test to remove the file
# FileManager.remove_file
with open(file_path, "wb") as file_open:
file_open.write(b"")
file_open.close()
FileManager.remove_file(file_path=file_path)
self.assertFalse(os.path.exists(file_path),
"Fail removing the file")
def test_is_path_creatable(self):
"""
        Test if the path is creatable for the user.
        Does the user have permission to use this path?
"""
# set up the context
dir_path = "/tmp/kalliope/tests/"
file_name = "test_FileManager_filePathCreatable"
file_path = os.path.join(dir_path, file_name)
if os.path.exists(file_path):
os.remove(file_path)
if not os.path.exists(dir_path):
os.makedirs(dir_path)
# test not allowed : return False
not_allowed_root_path = "/root/"
not_allowed_path = os.path.join(not_allowed_root_path, file_name)
self.assertFalse(FileManager.is_path_creatable(not_allowed_path),
"Fail to assert not accessing this path ")
# test allowed : return True
self.assertTrue(FileManager.is_path_creatable(file_path))
def test_is_path_exists_or_creatable(self):
"""
Test the _is_path_exists_or_creatable
        4 scenarios:
        - the file exists and is creatable: return True
        - the file does not exist but is creatable: return True
        - the file exists but is not allowed: return True --> needs a review!
        - the file does not exist and is not allowed: return False
"""
# set up the context
dir_path = "/tmp/kalliope/tests/"
file_name = "test_FileManager_fileIsPathExistsOrCreatable"
file_path = os.path.join(dir_path, file_name)
if os.path.exists(file_path):
os.remove(file_path)
if not os.path.exists(dir_path):
os.makedirs(dir_path)
# Test the file exist and creatable : return True
with open(file_path, "wb") as file_open:
file_open.write(b"[Kalliope] Test Running the test_is_path_exists_or_creatable method")
file_open.close()
self.assertTrue(FileManager.is_path_exists_or_creatable(file_path),
"Fail to assert the file exist ")
# test the file not exist but creatable : return True
os.remove(file_path)
self.assertTrue(FileManager.is_path_exists_or_creatable(file_path),
"Fail asserting the file does not exist ")
# test the file exist but not creatable : return True
# file_exist_not_allowed = "/root/.ssh/known_hosts"
# self.assertTrue(FileManager.is_path_creatable(file_exist_not_allowed))
# test the file not exist and not allowed : return False
not_allowed_root_path = "/root/"
not_allowed_path = os.path.join(not_allowed_root_path, file_name)
self.assertFalse(FileManager.is_path_creatable(not_allowed_path),
"Fail to assert not accessing this path ")
if __name__ == '__main__':
unittest.main()
|
import logging
from datetime import datetime
from itertools import islice
from .cursors import mod_cursor
import dateutil.parser as iso_date
logger = logging.getLogger(__name__)
# prevent sqlite3 from throwing too many arguments errors (#145)
def gen_slice(list_, length=100):
it = iter(list_)
while True:
slice_ = [_ for _ in islice(it, length)]
if not slice_:
return
yield slice_
def placeholders(args):
return '(%s)' % ','.join('?' * len(args))
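# Illustrative example (added comment): placeholders(['a', 'b', 'c']) yields
# '(?,?,?)', so the DELETE statements below expand to e.g.
# 'DELETE FROM nodes WHERE id IN (?,?,?)' with the sliced ids bound as parameters.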
class SyncMixin(object):
"""Sync mixin to the :class:`NodeCache <acdcli.cache.db.NodeCache>`"""
def remove_purged(self, purged: list):
"""Removes purged nodes from database
:param purged: list of purged node IDs"""
if not purged:
return
for slice_ in gen_slice(purged):
with mod_cursor(self._conn) as c:
c.execute('DELETE FROM nodes WHERE id IN %s' % placeholders(slice_), slice_)
c.execute('DELETE FROM files WHERE id IN %s' % placeholders(slice_), slice_)
c.execute('DELETE FROM parentage WHERE parent IN %s' % placeholders(slice_), slice_)
c.execute('DELETE FROM parentage WHERE child IN %s' % placeholders(slice_), slice_)
c.execute('DELETE FROM labels WHERE id IN %s' % placeholders(slice_), slice_)
logger.info('Purged %i node(s).' % len(purged))
def insert_nodes(self, nodes: list, partial=True):
"""Inserts mixed list of files and folders into cache."""
files = []
folders = []
for node in nodes:
if node['status'] == 'PENDING':
continue
kind = node['kind']
if kind == 'FILE':
                if 'name' not in node or not node['name']:
logger.warning('Skipping file %s because its name is empty.' % node['id'])
continue
files.append(node)
elif kind == 'FOLDER':
                if ('name' not in node or not node['name']) \
                        and ('isRoot' not in node or not node['isRoot']):
logger.warning('Skipping non-root folder %s because its name is empty.'
% node['id'])
continue
folders.append(node)
elif kind != 'ASSET':
logger.warning('Cannot insert unknown node type "%s".' % kind)
self.insert_folders(folders)
self.insert_files(files)
self.insert_parentage(files + folders, partial)
def insert_node(self, node: dict):
"""Inserts single file or folder into cache."""
if not node:
return
self.insert_nodes([node])
def insert_folders(self, folders: list):
""" Inserts list of folders into cache. Sets 'update' column to current date.
:param folders: list of raw dict-type folders"""
if not folders:
return
with mod_cursor(self._conn) as c:
for f in folders:
c.execute(
'INSERT OR REPLACE INTO nodes '
'(id, type, name, description, created, modified, updated, status) '
'VALUES (?, "folder", ?, ?, ?, ?, ?, ?)',
[f['id'], f.get('name'), f.get('description'),
iso_date.parse(f['createdDate']), iso_date.parse(f['modifiedDate']),
datetime.utcnow(),
f['status']
]
)
logger.info('Inserted/updated %d folder(s).' % len(folders))
def insert_files(self, files: list):
if not files:
return
with mod_cursor(self._conn) as c:
for f in files:
c.execute('INSERT OR REPLACE INTO nodes '
'(id, type, name, description, created, modified, updated, status)'
'VALUES (?, "file", ?, ?, ?, ?, ?, ?)',
[f['id'], f.get('name'), f.get('description'),
iso_date.parse(f['createdDate']), iso_date.parse(f['modifiedDate']),
datetime.utcnow(),
f['status']
]
)
c.execute('INSERT OR REPLACE INTO files (id, md5, size) VALUES (?, ?, ?)',
[f['id'],
f.get('contentProperties', {}).get('md5',
'd41d8cd98f00b204e9800998ecf8427e'),
f.get('contentProperties', {}).get('size', 0)
]
)
logger.info('Inserted/updated %d file(s).' % len(files))
def insert_parentage(self, nodes: list, partial=True):
if not nodes:
return
if partial:
with mod_cursor(self._conn) as c:
for slice_ in gen_slice(nodes):
c.execute('DELETE FROM parentage WHERE child IN %s' % placeholders(slice_),
[n['id'] for n in slice_])
with mod_cursor(self._conn) as c:
for n in nodes:
for p in n['parents']:
c.execute('INSERT OR IGNORE INTO parentage VALUES (?, ?)', [p, n['id']])
logger.info('Parented %d node(s).' % len(nodes))
|
import pytest
from samsungctl.exceptions import AccessDenied, UnhandledResponse
from samsungtvws.exceptions import ConnectionFailure
from websocket import WebSocketProtocolException
from homeassistant.components.samsungtv.const import (
CONF_MANUFACTURER,
CONF_MODEL,
DOMAIN,
)
from homeassistant.components.ssdp import (
ATTR_SSDP_LOCATION,
ATTR_UPNP_FRIENDLY_NAME,
ATTR_UPNP_MANUFACTURER,
ATTR_UPNP_MODEL_NAME,
ATTR_UPNP_UDN,
)
from homeassistant.const import CONF_HOST, CONF_ID, CONF_METHOD, CONF_NAME, CONF_TOKEN
from tests.async_mock import DEFAULT as DEFAULT_MOCK, Mock, PropertyMock, call, patch
MOCK_USER_DATA = {CONF_HOST: "fake_host", CONF_NAME: "fake_name"}
MOCK_SSDP_DATA = {
ATTR_SSDP_LOCATION: "https://fake_host:12345/test",
ATTR_UPNP_FRIENDLY_NAME: "[TV]fake_name",
ATTR_UPNP_MANUFACTURER: "fake_manufacturer",
ATTR_UPNP_MODEL_NAME: "fake_model",
ATTR_UPNP_UDN: "uuid:fake_uuid",
}
MOCK_SSDP_DATA_NOPREFIX = {
ATTR_SSDP_LOCATION: "http://fake2_host:12345/test",
ATTR_UPNP_FRIENDLY_NAME: "fake2_name",
ATTR_UPNP_MANUFACTURER: "fake2_manufacturer",
ATTR_UPNP_MODEL_NAME: "fake2_model",
ATTR_UPNP_UDN: "fake2_uuid",
}
AUTODETECT_LEGACY = {
"name": "HomeAssistant",
"description": "HomeAssistant",
"id": "ha.component.samsung",
"method": "legacy",
"port": None,
"host": "fake_host",
"timeout": 31,
}
AUTODETECT_WEBSOCKET_PLAIN = {
"host": "fake_host",
"name": "HomeAssistant",
"port": 8001,
"timeout": 31,
"token": None,
}
AUTODETECT_WEBSOCKET_SSL = {
"host": "fake_host",
"name": "HomeAssistant",
"port": 8002,
"timeout": 31,
"token": None,
}
@pytest.fixture(name="remote")
def remote_fixture():
"""Patch the samsungctl Remote."""
with patch(
"homeassistant.components.samsungtv.bridge.Remote"
) as remote_class, patch(
"homeassistant.components.samsungtv.config_flow.socket"
) as socket_class:
remote = Mock()
remote.__enter__ = Mock()
remote.__exit__ = Mock()
remote_class.return_value = remote
socket = Mock()
socket_class.return_value = socket
socket_class.gethostbyname.return_value = "FAKE_IP_ADDRESS"
yield remote
@pytest.fixture(name="remotews")
def remotews_fixture():
"""Patch the samsungtvws SamsungTVWS."""
with patch(
"homeassistant.components.samsungtv.bridge.SamsungTVWS"
) as remotews_class, patch(
"homeassistant.components.samsungtv.config_flow.socket"
) as socket_class:
remotews = Mock()
remotews.__enter__ = Mock()
remotews.__exit__ = Mock()
remotews_class.return_value = remotews
remotews_class().__enter__().token = "FAKE_TOKEN"
socket = Mock()
socket_class.return_value = socket
socket_class.gethostbyname.return_value = "FAKE_IP_ADDRESS"
yield remotews
async def test_user_legacy(hass, remote):
"""Test starting a flow by user."""
# show form
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
# entry was added
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input=MOCK_USER_DATA
)
# legacy tv entry created
assert result["type"] == "create_entry"
assert result["title"] == "fake_name"
assert result["data"][CONF_HOST] == "fake_host"
assert result["data"][CONF_NAME] == "fake_name"
assert result["data"][CONF_METHOD] == "legacy"
assert result["data"][CONF_MANUFACTURER] is None
assert result["data"][CONF_MODEL] is None
assert result["data"][CONF_ID] is None
async def test_user_websocket(hass, remotews):
"""Test starting a flow by user."""
with patch(
"homeassistant.components.samsungtv.bridge.Remote", side_effect=OSError("Boom")
):
# show form
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
# entry was added
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input=MOCK_USER_DATA
)
        # websocket tv entry created
assert result["type"] == "create_entry"
assert result["title"] == "fake_name"
assert result["data"][CONF_HOST] == "fake_host"
assert result["data"][CONF_NAME] == "fake_name"
assert result["data"][CONF_METHOD] == "websocket"
assert result["data"][CONF_MANUFACTURER] is None
assert result["data"][CONF_MODEL] is None
assert result["data"][CONF_ID] is None
async def test_user_legacy_missing_auth(hass):
"""Test starting a flow by user with authentication."""
with patch(
"homeassistant.components.samsungtv.bridge.Remote",
side_effect=AccessDenied("Boom"),
), patch("homeassistant.components.samsungtv.config_flow.socket"):
# legacy device missing authentication
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=MOCK_USER_DATA
)
assert result["type"] == "abort"
assert result["reason"] == "auth_missing"
async def test_user_legacy_not_supported(hass):
"""Test starting a flow by user for not supported device."""
with patch(
"homeassistant.components.samsungtv.bridge.Remote",
side_effect=UnhandledResponse("Boom"),
), patch("homeassistant.components.samsungtv.config_flow.socket"):
# legacy device not supported
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=MOCK_USER_DATA
)
assert result["type"] == "abort"
assert result["reason"] == "not_supported"
async def test_user_websocket_not_supported(hass):
"""Test starting a flow by user for not supported device."""
with patch(
"homeassistant.components.samsungtv.bridge.Remote",
side_effect=OSError("Boom"),
), patch(
"homeassistant.components.samsungtv.bridge.SamsungTVWS",
side_effect=WebSocketProtocolException("Boom"),
), patch(
"homeassistant.components.samsungtv.config_flow.socket"
):
# websocket device not supported
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=MOCK_USER_DATA
)
assert result["type"] == "abort"
assert result["reason"] == "not_supported"
async def test_user_not_successful(hass):
"""Test starting a flow by user but no connection found."""
with patch(
"homeassistant.components.samsungtv.bridge.Remote",
side_effect=OSError("Boom"),
), patch(
"homeassistant.components.samsungtv.bridge.SamsungTVWS",
side_effect=OSError("Boom"),
), patch(
"homeassistant.components.samsungtv.config_flow.socket"
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=MOCK_USER_DATA
)
assert result["type"] == "abort"
assert result["reason"] == "cannot_connect"
async def test_user_not_successful_2(hass):
"""Test starting a flow by user but no connection found."""
with patch(
"homeassistant.components.samsungtv.bridge.Remote",
side_effect=OSError("Boom"),
), patch(
"homeassistant.components.samsungtv.bridge.SamsungTVWS",
side_effect=ConnectionFailure("Boom"),
), patch(
"homeassistant.components.samsungtv.config_flow.socket"
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=MOCK_USER_DATA
)
assert result["type"] == "abort"
assert result["reason"] == "cannot_connect"
async def test_user_already_configured(hass, remote):
"""Test starting a flow by user when already configured."""
# entry was added
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=MOCK_USER_DATA
)
assert result["type"] == "create_entry"
# failed as already configured
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=MOCK_USER_DATA
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
async def test_ssdp(hass, remote):
"""Test starting a flow from discovery."""
# confirm to add the entry
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "ssdp"}, data=MOCK_SSDP_DATA
)
assert result["type"] == "form"
assert result["step_id"] == "confirm"
# entry was added
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input="whatever"
)
assert result["type"] == "create_entry"
assert result["title"] == "fake_model"
assert result["data"][CONF_HOST] == "fake_host"
assert result["data"][CONF_NAME] == "Samsung fake_model"
assert result["data"][CONF_MANUFACTURER] == "fake_manufacturer"
assert result["data"][CONF_MODEL] == "fake_model"
assert result["data"][CONF_ID] == "fake_uuid"
async def test_ssdp_noprefix(hass, remote):
"""Test starting a flow from discovery without prefixes."""
# confirm to add the entry
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "ssdp"}, data=MOCK_SSDP_DATA_NOPREFIX
)
assert result["type"] == "form"
assert result["step_id"] == "confirm"
# entry was added
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input="whatever"
)
assert result["type"] == "create_entry"
assert result["title"] == "fake2_model"
assert result["data"][CONF_HOST] == "fake2_host"
assert result["data"][CONF_NAME] == "Samsung fake2_model"
assert result["data"][CONF_MANUFACTURER] == "fake2_manufacturer"
assert result["data"][CONF_MODEL] == "fake2_model"
assert result["data"][CONF_ID] == "fake2_uuid"
async def test_ssdp_legacy_missing_auth(hass):
"""Test starting a flow from discovery with authentication."""
with patch(
"homeassistant.components.samsungtv.bridge.Remote",
side_effect=AccessDenied("Boom"),
), patch("homeassistant.components.samsungtv.config_flow.socket"):
# confirm to add the entry
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "ssdp"}, data=MOCK_SSDP_DATA
)
assert result["type"] == "form"
assert result["step_id"] == "confirm"
# missing authentication
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input="whatever"
)
assert result["type"] == "abort"
assert result["reason"] == "auth_missing"
async def test_ssdp_legacy_not_supported(hass):
"""Test starting a flow from discovery for not supported device."""
with patch(
"homeassistant.components.samsungtv.bridge.Remote",
side_effect=UnhandledResponse("Boom"),
), patch("homeassistant.components.samsungtv.config_flow.socket"):
# confirm to add the entry
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "ssdp"}, data=MOCK_SSDP_DATA
)
assert result["type"] == "form"
assert result["step_id"] == "confirm"
# device not supported
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input="whatever"
)
assert result["type"] == "abort"
assert result["reason"] == "not_supported"
async def test_ssdp_websocket_not_supported(hass):
"""Test starting a flow from discovery for not supported device."""
with patch(
"homeassistant.components.samsungtv.bridge.Remote",
side_effect=OSError("Boom"),
), patch(
"homeassistant.components.samsungtv.bridge.SamsungTVWS",
side_effect=WebSocketProtocolException("Boom"),
), patch(
"homeassistant.components.samsungtv.config_flow.socket"
):
# confirm to add the entry
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "ssdp"}, data=MOCK_SSDP_DATA
)
assert result["type"] == "form"
assert result["step_id"] == "confirm"
# device not supported
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input="whatever"
)
assert result["type"] == "abort"
assert result["reason"] == "not_supported"
async def test_ssdp_not_successful(hass):
"""Test starting a flow from discovery but no device found."""
with patch(
"homeassistant.components.samsungtv.bridge.Remote",
side_effect=OSError("Boom"),
), patch(
"homeassistant.components.samsungtv.bridge.SamsungTVWS",
side_effect=OSError("Boom"),
), patch(
"homeassistant.components.samsungtv.config_flow.socket"
):
# confirm to add the entry
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "ssdp"}, data=MOCK_SSDP_DATA
)
assert result["type"] == "form"
assert result["step_id"] == "confirm"
# device not found
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input="whatever"
)
assert result["type"] == "abort"
assert result["reason"] == "cannot_connect"
async def test_ssdp_not_successful_2(hass):
"""Test starting a flow from discovery but no device found."""
with patch(
"homeassistant.components.samsungtv.bridge.Remote",
side_effect=OSError("Boom"),
), patch(
"homeassistant.components.samsungtv.bridge.SamsungTVWS",
side_effect=ConnectionFailure("Boom"),
), patch(
"homeassistant.components.samsungtv.config_flow.socket"
):
# confirm to add the entry
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "ssdp"}, data=MOCK_SSDP_DATA
)
assert result["type"] == "form"
assert result["step_id"] == "confirm"
# device not found
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input="whatever"
)
assert result["type"] == "abort"
assert result["reason"] == "cannot_connect"
async def test_ssdp_already_in_progress(hass, remote):
"""Test starting a flow from discovery twice."""
# confirm to add the entry
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "ssdp"}, data=MOCK_SSDP_DATA
)
assert result["type"] == "form"
assert result["step_id"] == "confirm"
# failed as already in progress
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "ssdp"}, data=MOCK_SSDP_DATA
)
assert result["type"] == "abort"
assert result["reason"] == "already_in_progress"
async def test_ssdp_already_configured(hass, remote):
"""Test starting a flow from discovery when already configured."""
# entry was added
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=MOCK_USER_DATA
)
assert result["type"] == "create_entry"
entry = result["result"]
assert entry.data[CONF_MANUFACTURER] is None
assert entry.data[CONF_MODEL] is None
assert entry.data[CONF_ID] is None
# failed as already configured
result2 = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "ssdp"}, data=MOCK_SSDP_DATA
)
assert result2["type"] == "abort"
assert result2["reason"] == "already_configured"
# check updated device info
assert entry.data[CONF_MANUFACTURER] == "fake_manufacturer"
assert entry.data[CONF_MODEL] == "fake_model"
assert entry.data[CONF_ID] == "fake_uuid"
async def test_autodetect_websocket(hass, remote, remotews):
"""Test for send key with autodetection of protocol."""
with patch(
"homeassistant.components.samsungtv.bridge.Remote",
side_effect=OSError("Boom"),
), patch("homeassistant.components.samsungtv.bridge.SamsungTVWS") as remotews:
enter = Mock()
type(enter).token = PropertyMock(return_value="123456789")
remote = Mock()
remote.__enter__ = Mock(return_value=enter)
remote.__exit__ = Mock(return_value=False)
remotews.return_value = remote
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=MOCK_USER_DATA
)
assert result["type"] == "create_entry"
assert result["data"][CONF_METHOD] == "websocket"
assert result["data"][CONF_TOKEN] == "123456789"
assert remotews.call_count == 1
assert remotews.call_args_list == [call(**AUTODETECT_WEBSOCKET_PLAIN)]
async def test_autodetect_websocket_ssl(hass, remote, remotews):
"""Test for send key with autodetection of protocol."""
with patch(
"homeassistant.components.samsungtv.bridge.Remote",
side_effect=OSError("Boom"),
), patch(
"homeassistant.components.samsungtv.bridge.SamsungTVWS",
side_effect=[WebSocketProtocolException("Boom"), DEFAULT_MOCK],
) as remotews:
enter = Mock()
type(enter).token = PropertyMock(return_value="123456789")
remote = Mock()
remote.__enter__ = Mock(return_value=enter)
remote.__exit__ = Mock(return_value=False)
remotews.return_value = remote
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=MOCK_USER_DATA
)
assert result["type"] == "create_entry"
assert result["data"][CONF_METHOD] == "websocket"
assert result["data"][CONF_TOKEN] == "123456789"
assert remotews.call_count == 2
assert remotews.call_args_list == [
call(**AUTODETECT_WEBSOCKET_PLAIN),
call(**AUTODETECT_WEBSOCKET_SSL),
]
async def test_autodetect_auth_missing(hass, remote):
"""Test for send key with autodetection of protocol."""
with patch(
"homeassistant.components.samsungtv.bridge.Remote",
side_effect=[AccessDenied("Boom")],
) as remote:
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=MOCK_USER_DATA
)
assert result["type"] == "abort"
assert result["reason"] == "auth_missing"
assert remote.call_count == 1
assert remote.call_args_list == [call(AUTODETECT_LEGACY)]
async def test_autodetect_not_supported(hass, remote):
"""Test for send key with autodetection of protocol."""
with patch(
"homeassistant.components.samsungtv.bridge.Remote",
side_effect=[UnhandledResponse("Boom")],
) as remote:
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=MOCK_USER_DATA
)
assert result["type"] == "abort"
assert result["reason"] == "not_supported"
assert remote.call_count == 1
assert remote.call_args_list == [call(AUTODETECT_LEGACY)]
async def test_autodetect_legacy(hass, remote):
"""Test for send key with autodetection of protocol."""
with patch("homeassistant.components.samsungtv.bridge.Remote") as remote:
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=MOCK_USER_DATA
)
assert result["type"] == "create_entry"
assert result["data"][CONF_METHOD] == "legacy"
assert remote.call_count == 1
assert remote.call_args_list == [call(AUTODETECT_LEGACY)]
async def test_autodetect_none(hass, remote, remotews):
"""Test for send key with autodetection of protocol."""
with patch(
"homeassistant.components.samsungtv.bridge.Remote",
side_effect=OSError("Boom"),
) as remote, patch(
"homeassistant.components.samsungtv.bridge.SamsungTVWS",
side_effect=OSError("Boom"),
) as remotews:
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=MOCK_USER_DATA
)
assert result["type"] == "abort"
assert result["reason"] == "cannot_connect"
assert remote.call_count == 1
assert remote.call_args_list == [
call(AUTODETECT_LEGACY),
]
assert remotews.call_count == 2
assert remotews.call_args_list == [
call(**AUTODETECT_WEBSOCKET_PLAIN),
call(**AUTODETECT_WEBSOCKET_SSL),
]
|
import pytest
from tests.async_mock import patch
@pytest.fixture
def patch_load_json():
"""Prevent load JSON being used."""
with patch("homeassistant.components.ps4.load_json", return_value={}) as mock_load:
yield mock_load
@pytest.fixture
def patch_save_json():
"""Prevent save JSON being used."""
with patch("homeassistant.components.ps4.save_json") as mock_save:
yield mock_save
@pytest.fixture(autouse=True)
def patch_io(patch_load_json, patch_save_json):
"""Prevent PS4 doing I/O."""
|
from homeassistant.const import ATTR_ENTITY_ID, ATTR_SERVICE
from homeassistant.core import callback
from .const import ATTR_DISPLAY_NAME, ATTR_VALUE, DOMAIN, EVENT_HOMEKIT_CHANGED
@callback
def async_describe_events(hass, async_describe_event):
"""Describe logbook events."""
@callback
def async_describe_logbook_event(event):
"""Describe a logbook event."""
data = event.data
entity_id = data.get(ATTR_ENTITY_ID)
value = data.get(ATTR_VALUE)
value_msg = f" to {value}" if value else ""
message = f"send command {data[ATTR_SERVICE]}{value_msg} for {data[ATTR_DISPLAY_NAME]}"
return {
"name": "HomeKit",
"message": message,
"entity_id": entity_id,
}
async_describe_event(DOMAIN, EVENT_HOMEKIT_CHANGED, async_describe_logbook_event)
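# Illustrative example (hypothetical event data, not part of this module): with
# data = {ATTR_SERVICE: "turn_on", ATTR_DISPLAY_NAME: "Kitchen Light"} and no
# ATTR_VALUE, the message reads "send command turn_on for Kitchen Light"; with
# ATTR_VALUE = 75 it reads "send command turn_on to 75 for Kitchen Light".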
|
from abc import ABCMeta, abstractmethod
from qstrader.signals.buffer import AssetPriceBuffers
class Signal(object):
"""
Abstract class to provide historical price range-based
rolling signals utilising deque-based 'buffers'.
Parameters
----------
start_dt : `pd.Timestamp`
The starting datetime (UTC) of the signal.
universe : `Universe`
The universe of assets to calculate the signals for.
lookbacks : `list[int]`
The number of lookback periods to store prices for.
"""
__metaclass__ = ABCMeta
def __init__(self, start_dt, universe, lookbacks):
self.start_dt = start_dt
self.universe = universe
self.lookbacks = lookbacks
self.assets = self.universe.get_assets(start_dt)
self.buffers = self._create_asset_price_buffers()
def _create_asset_price_buffers(self):
"""
Create an AssetPriceBuffers instance.
Returns
-------
`AssetPriceBuffers`
Stores the asset price buffers for the signal.
"""
return AssetPriceBuffers(
self.assets, lookbacks=self.lookbacks
)
def append(self, asset, price):
"""
Append a new price onto the price buffer for
the specific asset provided.
Parameters
----------
asset : `str`
The asset symbol name.
price : `float`
The new price of the asset.
"""
self.buffers.append(asset, price)
def update_assets(self, dt):
"""
Ensure that any new additions to the universe also receive
a price buffer at the point at which they enter.
Parameters
----------
dt : `pd.Timestamp`
The update timestamp for the signal.
"""
universe_assets = self.universe.get_assets(dt)
# TODO: Assume universe never decreases for now
        extra_assets = list(set(universe_assets) - set(self.assets))
for extra_asset in extra_assets:
self.assets.append(extra_asset)
@abstractmethod
def __call__(self, asset, lookback):
raise NotImplementedError(
"Should implement __call__()"
)
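# Minimal sketch of a concrete subclass (an illustration under assumptions; the
# exact way prices are read back out of AssetPriceBuffers is not shown in this
# module, so the buffer access below is hypothetical and kept commented out):
#
# class LastPriceSignal(Signal):
#     """Hypothetical signal returning the most recently buffered price."""
#     def __call__(self, asset, lookback):
#         prices = self.buffers.prices['%s_%s' % (asset, lookback)]  # assumed key format
#         return prices[-1] if prices else None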
|
from homeassistant.core import callback
from homeassistant.helpers.storage import Store
from homeassistant.loader import bind_hass
from . import views
from .const import DOMAIN, STEP_CORE_CONFIG, STEP_INTEGRATION, STEP_USER, STEPS
STORAGE_KEY = DOMAIN
STORAGE_VERSION = 3
class OnboardingStorage(Store):
"""Store onboarding data."""
async def _async_migrate_func(self, old_version, old_data):
"""Migrate to the new version."""
# From version 1 -> 2, we automatically mark the integration step done
if old_version < 2:
old_data["done"].append(STEP_INTEGRATION)
if old_version < 3:
old_data["done"].append(STEP_CORE_CONFIG)
return old_data
@bind_hass
@callback
def async_is_onboarded(hass):
"""Return if Home Assistant has been onboarded."""
data = hass.data.get(DOMAIN)
return data is None or data is True
@bind_hass
@callback
def async_is_user_onboarded(hass):
"""Return if a user has been created as part of onboarding."""
return async_is_onboarded(hass) or STEP_USER in hass.data[DOMAIN]["done"]
async def async_setup(hass, config):
"""Set up the onboarding component."""
    store = OnboardingStorage(hass, STORAGE_VERSION, STORAGE_KEY, private=True)
data = await store.async_load()
if data is None:
data = {"done": []}
if STEP_USER not in data["done"]:
# Users can already have created an owner account via the command line
# If so, mark the user step as done.
has_owner = False
for user in await hass.auth.async_get_users():
if user.is_owner:
has_owner = True
break
if has_owner:
data["done"].append(STEP_USER)
await store.async_save(data)
if set(data["done"]) == set(STEPS):
return True
hass.data[DOMAIN] = data
await views.async_setup(hass, data, store)
return True
|
from __future__ import absolute_import
import contextlib
import sys
import unittest
import warnings
import six
# pylint: disable=invalid-name
if six.PY2:
_subtest_msg_sentinel = object()
class _ShouldStop(Exception):
"""The test should stop."""
class _Outcome(object):
def __init__(self, result=None):
self.expecting_failure = False
self.result = result
self.result_supports_subtests = hasattr(result, 'addSubTest')
self.success = True
self.skipped = []
self.expectedFailure = None
self.errors = []
self.errors_setup_and_teardown = []
@contextlib.contextmanager
def testPartExecutor(self, test_case, is_setup_or_teardown=False):
old_success = self.success
self.success = True
try:
yield
except KeyboardInterrupt:
raise
except unittest.SkipTest as e:
self.success = False
self.skipped.append((test_case, str(e)))
except _ShouldStop:
pass
except unittest.case._ExpectedFailure as e:
self.success = False
self.expecting_failure = True
self.expectedFailure = e.exc_info
except unittest.case._UnexpectedSuccess:
self.expecting_failure = True
# We need to catch everything here, including SystemExit.
# KeyboardInterrupt was passed through above.
except: # pylint: disable=bare-except
self.success = False
if is_setup_or_teardown:
self.errors_setup_and_teardown.append((test_case, sys.exc_info()))
else:
self.errors.append((test_case, sys.exc_info()))
else:
if self.result_supports_subtests and self.success:
self.errors.append((test_case, None))
finally:
self.success = self.success and old_success
class TestCase(unittest.TestCase):
if six.PY2:
def __init__(self, methodName='runTest'):
super(TestCase, self).__init__(methodName)
self._subtest = None
self._outcome = None
def _addSkip(self, result, reason, test_case=None):
addSkip = getattr(result, 'addSkip', None)
if addSkip is not None:
if test_case:
addSkip(test_case, reason)
else:
addSkip(self, reason)
else:
warnings.warn('TestResult has no addSkip method, skips not reported',
RuntimeWarning, 2)
if test_case:
result.addSuccess(test_case)
else:
result.addSuccess(self)
def _feedErrorsToResult(self, result, errors, setup_or_teardown=False):
if setup_or_teardown:
        # Both failures and errors that happen in the setup or teardown phase
        # are regarded as errors in Python 2.
for test, exc_info in errors:
result.addError(test, exc_info)
else:
for test, exc_info in errors:
if isinstance(test, _SubTest):
result.addSubTest(test.test_case, test, exc_info)
elif exc_info is not None:
if issubclass(exc_info[0], self.failureException):
result.addFailure(test, exc_info)
else:
result.addError(test, exc_info)
def _addExpectedFailure(self, result, exc_info):
try:
addExpectedFailure = result.addExpectedFailure
except AttributeError:
warnings.warn(('TestResult has no addExpectedFailure method, '
'reporting as passes'), RuntimeWarning)
result.addSuccess(self)
else:
addExpectedFailure(self, exc_info)
def _addUnexpectedSuccess(self, result):
try:
addUnexpectedSuccess = result.addUnexpectedSuccess
except AttributeError:
warnings.warn(('TestResult has no addUnexpectedSuccess method, '
'reporting as failure'), RuntimeWarning)
# We need to pass an actual exception and traceback to addFailure,
# otherwise the legacy result can choke.
try:
raise unittest.case._UnexpectedSuccess
except unittest.case._UnexpectedSuccess:
result.addFailure(self, sys.exc_info())
else:
addUnexpectedSuccess(self)
def run(self, result=None):
orig_result = result
if result is None:
result = self.defaultTestResult()
startTestRun = getattr(result, 'startTestRun', None)
if startTestRun is not None:
startTestRun()
self._resultForDoCleanups = result
result.startTest(self)
testMethod = getattr(self, self._testMethodName)
if (getattr(self.__class__, '__unittest_skip__', False) or
getattr(testMethod, '__unittest_skip__', False)):
# If the class or method was skipped.
try:
skip_why = (getattr(self.__class__, '__unittest_skip_why__', '')
or getattr(testMethod, '__unittest_skip_why__', ''))
self._addSkip(result, skip_why, self)
finally:
result.stopTest(self)
return
outcome = _Outcome(result)
expecting_failure = False
try:
self._outcome = outcome
with outcome.testPartExecutor(self, is_setup_or_teardown=True):
self.setUp()
if outcome.success:
with outcome.testPartExecutor(self):
testMethod()
expecting_failure = outcome.expecting_failure
outcome.expecting_failure = False
# The logic here is a little different from the implementation in
# Python3.
# In Python3, if a testcase is expecting failure, even if it
# fails, outcome.success is True. This implementation does not work
# for Python2. In Python2, if a subtest fails, it does not know
# whether its parent test is expecting failure, and will set
# outcome.success to False. Now the logic is that no matter whether a
# testcase is expecting failure, if it fails, outcome.success is False
if expecting_failure:
if outcome.success:
self._addUnexpectedSuccess(result)
else:
self._addExpectedFailure(result, outcome.expectedFailure)
with outcome.testPartExecutor(self, is_setup_or_teardown=True):
self.tearDown()
for test, reason in outcome.skipped:
self._addSkip(result, reason, test)
self._feedErrorsToResult(result, outcome.errors_setup_and_teardown,
setup_or_teardown=True)
self._feedErrorsToResult(result, outcome.errors)
self.doCleanups()
if not expecting_failure and outcome.success:
result.addSuccess(self)
return result
finally:
result.stopTest(self)
if orig_result is None:
stopTestRun = getattr(result, 'stopTestRun', None)
if stopTestRun is not None:
stopTestRun() # pylint: disable=not-callable
# explicitly break reference cycles:
# outcome.errors -> frame -> outcome -> outcome.errors
# outcome.expectedFailure -> frame -> outcome -> outcome.expectedFailure
outcome.errors = []
outcome.expectedFailure = None
# clear the outcome, no more needed
self._outcome = None
@contextlib.contextmanager
def subTest(self, msg=_subtest_msg_sentinel, **params):
"""Return a context manager that will run the enclosed subtest."""
if not self._outcome.result_supports_subtests:
yield
return
parent = self._subtest
# use a list to simulate the behavior of a ChainMap
if parent is None:
params_map = [params]
else:
params_map = list(parent.params)
params_map.append(params)
self._subtest = _SubTest(self, msg, params_map)
try:
with self._outcome.testPartExecutor(self._subtest):
yield
if not self._outcome.success:
result = self._outcome.result
if result is not None and result.failfast:
raise _ShouldStop
elif self._outcome.expectedFailure:
# If the test is expecting a failure, we really want to
# stop now and register the expected failure.
raise _ShouldStop
finally:
self._subtest = parent
if six.PY2:
class _SubTest(TestCase):
def __init__(self, test_case, message, params):
super(_SubTest, self).__init__()
self._message = message
self.test_case = test_case
self.params = params
self.failureException = test_case.failureException
def runTest(self):
raise NotImplementedError('subtests cannot be run directly')
def _subDescription(self):
parts = []
if self._message is not _subtest_msg_sentinel:
parts.append('[{}]'.format(self._message))
if self.params:
params_merged = {}
for dictionary in self.params:
params_merged.update(dictionary)
params_desc = ', '.join(
'{}={!r}'.format(k, v)
for (k, v) in sorted(params_merged.items()))
parts.append('({})'.format(params_desc))
return ' '.join(parts) or '(<subtest>)'
def id(self):
return '{} {}'.format(self.test_case.id(), self._subDescription())
def shortDescription(self):
"""Returns a one-line description of the subtest."""
return self.test_case.shortDescription()
def __str__(self):
return '{} {}'.format(self.test_case, self._subDescription())
|
import unittest
import numpy as np
import numpy.testing as npt
from pgmpy.utils import optimize, pinverse
from pgmpy.global_vars import device, dtype
try: # pragma: no cover
import torch
except ImportError: # pragma: no cover
torch = None
class TestOptimize(unittest.TestCase):
"""
self = TestOptimize()
self.setUp()
"""
def setUp(self):
self.A = torch.randn(5, 5, device=device, dtype=dtype, requires_grad=True)
self.B = torch.ones(5, 5, device=device, dtype=dtype, requires_grad=False)
def loss_fn(self, params, loss_params):
A = params["A"]
B = loss_params["B"]
return (A - B).pow(2).sum()
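    # Note (descriptive comment): the loss is the squared Frobenius distance
    # ||A - B||^2, so a successful optimizer run should drive A towards B,
    # which is a matrix of ones.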
@unittest.skipIf(torch is None, "torch is not installed")
def test_optimize(self):
# TODO: Add tests for other optimizers
for opt in ["adadelta", "adam", "adamax", "asgd", "lbfgs", "rmsprop", "rprop"]:
A = torch.randn(5, 5, device=device, dtype=dtype, requires_grad=True)
B = torch.ones(5, 5, device=device, dtype=dtype, requires_grad=False)
params = optimize(
self.loss_fn,
params={"A": A},
loss_args={"B": B},
opt=opt,
max_iter=int(1e6),
)
npt.assert_almost_equal(
B.data.cpu().numpy(),
params["A"].detach().cpu().numpy().round(),
decimal=1,
)
class Testpinverse(unittest.TestCase):
@unittest.skipIf(torch is None, "torch is not installed")
def test_pinverse(self):
mat = np.random.randn(5, 5)
np_inv = np.linalg.pinv(mat)
inv = pinverse(torch.tensor(mat))
npt.assert_array_almost_equal(np_inv, inv.numpy())
@unittest.skipIf(torch is None, "torch is not installed")
def test_pinverse_zeros(self):
mat = np.zeros((5, 5))
np_inv = np.linalg.pinv(mat)
inv = pinverse(torch.tensor(mat))
npt.assert_array_almost_equal(np_inv, inv)
|
from homeassistant.components.alarm_control_panel import (
DOMAIN as ALARM_CONTROL_PANEL_DOMAIN,
)
from homeassistant.components.homematicip_cloud import DOMAIN as HMIPC_DOMAIN
from homeassistant.const import (
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_DISARMED,
STATE_ALARM_TRIGGERED,
)
from homeassistant.setup import async_setup_component
from .helper import get_and_check_entity_basics
async def _async_manipulate_security_zones(
hass, home, internal_active=False, external_active=False, alarm_triggered=False
):
"""Set new values on hmip security zones."""
json = home._rawJSONData # pylint: disable=protected-access
json["functionalHomes"]["SECURITY_AND_ALARM"]["alarmActive"] = alarm_triggered
external_zone_id = json["functionalHomes"]["SECURITY_AND_ALARM"]["securityZones"][
"EXTERNAL"
]
internal_zone_id = json["functionalHomes"]["SECURITY_AND_ALARM"]["securityZones"][
"INTERNAL"
]
external_zone = home.search_group_by_id(external_zone_id)
external_zone.active = external_active
internal_zone = home.search_group_by_id(internal_zone_id)
internal_zone.active = internal_active
home.update_home_only(json)
home.fire_update_event(json)
await hass.async_block_till_done()
async def test_manually_configured_platform(hass):
"""Test that we do not set up an access point."""
assert await async_setup_component(
hass,
ALARM_CONTROL_PANEL_DOMAIN,
{ALARM_CONTROL_PANEL_DOMAIN: {"platform": HMIPC_DOMAIN}},
)
assert not hass.data.get(HMIPC_DOMAIN)
async def test_hmip_alarm_control_panel(hass, default_mock_hap_factory):
"""Test HomematicipAlarmControlPanel."""
entity_id = "alarm_control_panel.hmip_alarm_control_panel"
entity_name = "HmIP Alarm Control Panel"
device_model = None
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_groups=["EXTERNAL", "INTERNAL"]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == "disarmed"
assert not hmip_device
home = mock_hap.home
await hass.services.async_call(
"alarm_control_panel", "alarm_arm_away", {"entity_id": entity_id}, blocking=True
)
assert home.mock_calls[-1][0] == "set_security_zones_activation"
assert home.mock_calls[-1][1] == (True, True)
await _async_manipulate_security_zones(
hass, home, internal_active=True, external_active=True
)
    assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY
await hass.services.async_call(
"alarm_control_panel", "alarm_arm_home", {"entity_id": entity_id}, blocking=True
)
assert home.mock_calls[-1][0] == "set_security_zones_activation"
assert home.mock_calls[-1][1] == (False, True)
await _async_manipulate_security_zones(hass, home, external_active=True)
    assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_HOME
await hass.services.async_call(
"alarm_control_panel", "alarm_disarm", {"entity_id": entity_id}, blocking=True
)
assert home.mock_calls[-1][0] == "set_security_zones_activation"
assert home.mock_calls[-1][1] == (False, False)
await _async_manipulate_security_zones(hass, home)
    assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED
await hass.services.async_call(
"alarm_control_panel", "alarm_arm_away", {"entity_id": entity_id}, blocking=True
)
assert home.mock_calls[-1][0] == "set_security_zones_activation"
assert home.mock_calls[-1][1] == (True, True)
await _async_manipulate_security_zones(
hass, home, internal_active=True, external_active=True, alarm_triggered=True
)
    assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED
await hass.services.async_call(
"alarm_control_panel", "alarm_arm_home", {"entity_id": entity_id}, blocking=True
)
assert home.mock_calls[-1][0] == "set_security_zones_activation"
assert home.mock_calls[-1][1] == (False, True)
await _async_manipulate_security_zones(
hass, home, external_active=True, alarm_triggered=True
)
    assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED
|
from __future__ import division
import numpy as np
from chainer.backends import cuda
from chainercv.utils.bbox._nms_gpu_post import _nms_gpu_post
if cuda.available:
import cupy as cp
def non_maximum_suppression(bbox, thresh, score=None,
limit=None):
"""Suppress bounding boxes according to their IoUs.
    This method checks each bounding box sequentially and selects it if the
    Intersection over Union (IoU) between the box and every previously
    selected bounding box is less than :obj:`thresh`. This method is mainly
    used as a postprocessing step in object detection.
The bounding boxes are selected from ones with higher scores.
If :obj:`score` is not provided as an argument, the bounding box
is ordered by its index in ascending order.
The bounding boxes are expected to be packed into a two dimensional
tensor of shape :math:`(R, 4)`, where :math:`R` is the number of
bounding boxes in the image. The second axis represents attributes of
the bounding box. They are :math:`(y_{min}, x_{min}, y_{max}, x_{max})`,
where the four attributes are coordinates of the top left and the
bottom right vertices.
:obj:`score` is a float array of shape :math:`(R,)`. Each score indicates
confidence of prediction.
This function accepts both :obj:`numpy.ndarray` and :obj:`cupy.ndarray` as
an input. Please note that both :obj:`bbox` and :obj:`score` need to be
the same type.
The type of the output is the same as the input.
Args:
bbox (array): Bounding boxes to be transformed. The shape is
:math:`(R, 4)`. :math:`R` is the number of bounding boxes.
thresh (float): Threshold of IoUs.
score (array): An array of confidences whose shape is :math:`(R,)`.
limit (int): The upper bound of the number of the output bounding
boxes. If it is not specified, this method selects as many
bounding boxes as possible.
Returns:
array:
An array with indices of bounding boxes that are selected. \
They are sorted by the scores of bounding boxes in descending \
order. \
The shape of this array is :math:`(K,)` and its dtype is\
:obj:`numpy.int32`. Note that :math:`K \\leq R`.
"""
xp = cuda.get_array_module(bbox)
if xp == np:
return _non_maximum_suppression_cpu(bbox, thresh, score, limit)
else:
return _non_maximum_suppression_gpu(bbox, thresh, score, limit)
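# Illustrative usage (a sketch, not part of the library API; assumes NumPy
# inputs as documented above):
#
#     bbox = np.array([[0, 0, 10, 10], [1, 1, 11, 11], [20, 20, 30, 30]],
#                     dtype=np.float32)
#     score = np.array([0.9, 0.8, 0.7], dtype=np.float32)
#     keep = non_maximum_suppression(bbox, thresh=0.5, score=score)
#     # -> array([0, 2], dtype=int32); the second box overlaps the first with
#     # IoU ~0.68 and is suppressed, the third box does not overlap and stays.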
def _non_maximum_suppression_cpu(bbox, thresh, score=None, limit=None):
if len(bbox) == 0:
return np.zeros((0,), dtype=np.int32)
if score is not None:
order = score.argsort()[::-1]
bbox = bbox[order]
bbox_area = np.prod(bbox[:, 2:] - bbox[:, :2], axis=1)
selec = np.zeros(bbox.shape[0], dtype=bool)
for i, b in enumerate(bbox):
tl = np.maximum(b[:2], bbox[selec, :2])
br = np.minimum(b[2:], bbox[selec, 2:])
area = np.prod(br - tl, axis=1) * (tl < br).all(axis=1)
iou = area / (bbox_area[i] + bbox_area[selec] - area)
if (iou >= thresh).any():
continue
selec[i] = True
if limit is not None and np.count_nonzero(selec) >= limit:
break
selec = np.where(selec)[0]
if score is not None:
selec = order[selec]
return selec.astype(np.int32)
def _non_maximum_suppression_gpu(bbox, thresh, score=None, limit=None):
if len(bbox) == 0:
return cp.zeros((0,), dtype=np.int32)
n_bbox = bbox.shape[0]
if score is not None:
order = score.argsort()[::-1].astype(np.int32)
else:
order = cp.arange(n_bbox, dtype=np.int32)
sorted_bbox = bbox[order, :]
selec, n_selec = _call_nms_kernel(
sorted_bbox, thresh)
selec = selec[:n_selec]
selec = order[selec]
if limit is not None:
selec = selec[:limit]
return selec
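# The CUDA kernel below implements the standard bitmask NMS scheme: boxes are
# processed in tiles of ``threadsPerBlock`` (64, the bit width of an unsigned
# long long). Each thread takes one "row" box, compares it against a tile of
# "column" boxes cached in shared memory, and marks every column box with
# IoU >= thresh by setting the corresponding bit of a 64-bit mask. The
# host-side post-processing in _nms_gpu_post then walks these masks to pick
# the surviving indices.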
_nms_gpu_code = '''
#define DIVUP(m,n) ((m) / (n) + ((m) % (n) > 0))
int const threadsPerBlock = sizeof(unsigned long long) * 8;
__device__
inline float devIoU(float const *const bbox_a, float const *const bbox_b) {
float top = max(bbox_a[0], bbox_b[0]);
float bottom = min(bbox_a[2], bbox_b[2]);
float left = max(bbox_a[1], bbox_b[1]);
float right = min(bbox_a[3], bbox_b[3]);
float height = max(bottom - top, 0.f);
float width = max(right - left, 0.f);
float area_i = height * width;
float area_a = (bbox_a[2] - bbox_a[0]) * (bbox_a[3] - bbox_a[1]);
float area_b = (bbox_b[2] - bbox_b[0]) * (bbox_b[3] - bbox_b[1]);
return area_i / (area_a + area_b - area_i);
}
extern "C"
__global__
void nms_kernel(const int n_bbox, const float thresh,
const float *dev_bbox,
unsigned long long *dev_mask) {
const int row_start = blockIdx.y;
const int col_start = blockIdx.x;
const int row_size =
min(n_bbox - row_start * threadsPerBlock, threadsPerBlock);
const int col_size =
min(n_bbox - col_start * threadsPerBlock, threadsPerBlock);
__shared__ float block_bbox[threadsPerBlock * 4];
if (threadIdx.x < col_size) {
block_bbox[threadIdx.x * 4 + 0] =
dev_bbox[(threadsPerBlock * col_start + threadIdx.x) * 4 + 0];
block_bbox[threadIdx.x * 4 + 1] =
dev_bbox[(threadsPerBlock * col_start + threadIdx.x) * 4 + 1];
block_bbox[threadIdx.x * 4 + 2] =
dev_bbox[(threadsPerBlock * col_start + threadIdx.x) * 4 + 2];
block_bbox[threadIdx.x * 4 + 3] =
dev_bbox[(threadsPerBlock * col_start + threadIdx.x) * 4 + 3];
}
__syncthreads();
if (threadIdx.x < row_size) {
const int cur_box_idx = threadsPerBlock * row_start + threadIdx.x;
const float *cur_box = dev_bbox + cur_box_idx * 4;
int i = 0;
unsigned long long t = 0;
int start = 0;
if (row_start == col_start) {
start = threadIdx.x + 1;
}
for (i = start; i < col_size; i++) {
if (devIoU(cur_box, block_bbox + i * 4) >= thresh) {
t |= 1ULL << i;
}
}
const int col_blocks = DIVUP(n_bbox, threadsPerBlock);
dev_mask[cur_box_idx * col_blocks + col_start] = t;
}
}
'''
def _call_nms_kernel(bbox, thresh):
n_bbox = bbox.shape[0]
threads_per_block = 64
col_blocks = np.ceil(n_bbox / threads_per_block).astype(np.int32)
blocks = (col_blocks, col_blocks, 1)
threads = (threads_per_block, 1, 1)
mask_dev = cp.zeros((n_bbox * col_blocks,), dtype=np.uint64)
bbox = cp.ascontiguousarray(bbox, dtype=np.float32)
kern = cp.RawKernel(_nms_gpu_code, 'nms_kernel')
kern(blocks, threads, args=(cp.int32(n_bbox), cp.float32(thresh),
bbox, mask_dev))
mask_host = mask_dev.get()
selection, n_selec = _nms_gpu_post(
mask_host, n_bbox, threads_per_block, col_blocks)
return selection, n_selec
|
import unittest
from uiautomator import Selector
class TestSelector(unittest.TestCase):
fields = {
"text": (0x01, None), # MASK_TEXT,
"textContains": (0x02, None), # MASK_TEXTCONTAINS,
"textMatches": (0x04, None), # MASK_TEXTMATCHES,
"textStartsWith": (0x08, None), # MASK_TEXTSTARTSWITH,
"className": (0x10, None), # MASK_CLASSNAME
"classNameMatches": (0x20, None), # MASK_CLASSNAMEMATCHES
"description": (0x40, None), # MASK_DESCRIPTION
"descriptionContains": (0x80, None), # MASK_DESCRIPTIONCONTAINS
"descriptionMatches": (0x0100, None), # MASK_DESCRIPTIONMATCHES
"descriptionStartsWith": (0x0200, None), # MASK_DESCRIPTIONSTARTSWITH
"checkable": (0x0400, False), # MASK_CHECKABLE
"checked": (0x0800, False), # MASK_CHECKED
"clickable": (0x1000, False), # MASK_CLICKABLE
"longClickable": (0x2000, False), # MASK_LONGCLICKABLE,
"scrollable": (0x4000, False), # MASK_SCROLLABLE,
"enabled": (0x8000, False), # MASK_ENABLED,
"focusable": (0x010000, False), # MASK_FOCUSABLE,
"focused": (0x020000, False), # MASK_FOCUSED,
"selected": (0x040000, False), # MASK_SELECTED,
"packageName": (0x080000, None), # MASK_PACKAGENAME,
"packageNameMatches": (0x100000, None), # MASK_PACKAGENAMEMATCHES,
"resourceId": (0x200000, None), # MASK_RESOURCEID,
"resourceIdMatches": (0x400000, None), # MASK_RESOURCEIDMATCHES,
"index": (0x800000, 0), # MASK_INDEX,
"instance": (0x01000000, 0) # MASK_INSTANCE,
}
mask = "mask"
def test_init(self):
sel = Selector()
self.assertEqual(sel[self.mask], 0)
self.assertEqual(sel["childOrSibling"], [])
self.assertEqual(sel["childOrSiblingSelector"], [])
def test_add(self):
for k, v in self.fields.items():
kwargs = {k: v[1]}
sel = Selector(**kwargs)
self.assertEqual(sel[self.mask], v[0])
for k1, v1 in self.fields.items():
for k2, v2 in self.fields.items():
if k1 != k2:
kwargs = {k1: v1[1], k2: v2[1]}
sel = Selector(**kwargs)
self.assertEqual(sel[self.mask], v1[0] | v2[0])
def test_delete(self):
for k, v in self.fields.items():
kwargs = {k: v[1]}
sel = Selector(**kwargs)
del sel[k]
self.assertEqual(sel[self.mask], 0)
for k1, v1 in self.fields.items():
for k2, v2 in self.fields.items():
if k1 != k2:
kwargs = {k1: v1[1], k2: v2[1]}
sel = Selector(**kwargs)
del sel[k1]
self.assertEqual(sel[self.mask], v2[0])
del sel[k2]
self.assertEqual(sel[self.mask], 0)
def test_error(self):
with self.assertRaises(ReferenceError):
Selector(text1="")
def test_child_and_sibling(self):
sel = Selector()
sel.child(text="...")
self.assertEqual(sel["childOrSibling"], ["child"])
self.assertEqual(sel["childOrSiblingSelector"], [Selector(text="...")])
sel.sibling(text="---")
self.assertEqual(sel["childOrSibling"], ["child", "sibling"])
self.assertEqual(sel["childOrSiblingSelector"], [Selector(text="..."), Selector(text="---")])
def test_clone(self):
kwargs = {
"text": "1234",
"description": "desc...",
"clickable": True,
"focusable": False,
"packageName": "android"
}
sel = Selector(**kwargs)
sel.child(text="1")
sel.sibling(text="1")
sel.child(text="1")
clone = sel.clone()
for k in kwargs:
self.assertEqual(sel[k], clone[k])
self.assertEqual(sel["childOrSibling"], clone["childOrSibling"])
self.assertEqual(sel["childOrSiblingSelector"], clone["childOrSiblingSelector"])
|
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Div, Field, Layout
from django import forms
from django.core.exceptions import ValidationError
from django.utils.translation import gettext_lazy as _
from django.utils.translation import pgettext_lazy
from weblate.glossary.models import Glossary, Term
from weblate.trans.defines import GLOSSARY_LENGTH
from weblate.utils.forms import ColorWidget
from weblate.utils.validators import validate_file_extension
class CommaSeparatedIntegerField(forms.Field):
def to_python(self, value):
if not value:
return []
try:
return [int(item.strip()) for item in value.split(",") if item.strip()]
except (ValueError, TypeError):
raise ValidationError(_("Invalid integer list!"))
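# Illustrative behaviour of CommaSeparatedIntegerField.to_python():
#   ""        -> []
#   "1, 2,3"  -> [1, 2, 3]
#   "1,a"     -> raises ValidationError("Invalid integer list!")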
class OneTermForm(forms.Form):
"""Simple one-term form."""
term = forms.CharField(
label=_("Search"), max_length=GLOSSARY_LENGTH, required=False
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.helper = FormHelper(self)
self.helper.form_tag = False
self.helper.disable_csrf = True
self.helper.layout = Layout(
Div(
Field("term", template="snippets/user-query-field.html"),
css_class="btn-toolbar",
role="toolbar",
),
)
class GlossaryForm(forms.ModelForm):
class Meta:
model = Glossary
fields = ["name", "color", "source_language", "links"]
widgets = {"color": ColorWidget}
def __init__(self, user, project, data=None, instance=None, **kwargs):
super().__init__(data=data, instance=instance, **kwargs)
self.helper = FormHelper(self)
self.helper.form_tag = False
self.fields["links"].queryset = user.owned_projects.exclude(pk=project.id)
class TermForm(forms.ModelForm):
"""Form for adding term to a glossary."""
terms = CommaSeparatedIntegerField(widget=forms.HiddenInput, required=False)
class Meta:
model = Term
fields = ["source", "target", "glossary"]
def __init__(self, project, data=None, instance=None, initial=None, **kwargs):
glossaries = Glossary.objects.for_project(project).order_by("name").distinct()
if not instance and not initial:
initial = {}
if initial is not None and "glossary" not in initial and len(glossaries) == 1:
initial["glossary"] = glossaries[0]
super().__init__(data=data, instance=instance, initial=initial, **kwargs)
self.fields["glossary"].queryset = glossaries
class GlossaryUploadForm(forms.Form):
"""Uploading file to a glossary."""
file = forms.FileField(
label=_("File"),
validators=[validate_file_extension],
help_text=_(
"You can upload any format understood by "
"Translate Toolkit (including TBX, CSV or gettext PO files)."
),
)
method = forms.ChoiceField(
label=_("Merge method"),
choices=(
("", _("Keep current")),
("overwrite", _("Overwrite existing")),
("add", _("Add as other translation")),
),
required=False,
)
glossary = forms.ModelChoiceField(
label=_("Glossary"), queryset=Glossary.objects.none()
)
def __init__(self, project, data=None, initial=None, **kwargs):
glossaries = Glossary.objects.for_project(project)
initial = initial or {}
if initial is not None and "glossary" not in initial and len(glossaries) == 1:
initial["glossary"] = glossaries[0]
super().__init__(data=data, initial=initial, **kwargs)
self.fields["glossary"].queryset = glossaries
class LetterForm(forms.Form):
"""Form for choosing starting letter in a glossary."""
LETTER_CHOICES = [(chr(97 + x), chr(65 + x)) for x in range(26)]
any_letter = pgettext_lazy("Choose starting letter in glossary", "Any")
letter = forms.ChoiceField(
label=_("Starting letter"),
choices=[("", any_letter)] + LETTER_CHOICES,
required=False,
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.helper = FormHelper(self)
self.helper.disable_csrf = True
self.helper.form_class = "form-inline"
self.helper.field_template = "bootstrap3/layout/inline_field.html"
|
from collections import Counter
from unittest import TestCase
from scattertext.features.FeatsFromTopicModel import FeatsFromTopicModel
class TestFeatsFromTopicModel(TestCase):
def test_get_doc_get_feats(self):
expected = Counter({'a b': 2, 'c e f': 1, 'b': 1})
actual = FeatsFromTopicModel(
topic_model={'Topic A': ['A b', 'b', 'C e F'],
'Topic B': ['B', 'C e F']},
).get_feats('A b A b C e F B')
self.assertEqual(expected, actual)
def test_get_doc_metadata(self):
expected = Counter({'Topic A': 3, 'Topic B': 2})
actual = FeatsFromTopicModel(
topic_model={'Topic A': ['A b', 'b', 'C e F'],
'Topic B': ['B', 'C e F']},
keyword_processor_args={'case_sensitive': True}
).get_doc_metadata('A b A b C e F B')
self.assertEqual(expected, actual)
|
from absl import flags
from perfkitbenchmarker import network
from perfkitbenchmarker import providers
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.providers.cloudstack import util
FLAGS = flags.FLAGS
class CloudStackNetwork(network.BaseNetwork):
"""Object representing a CloudStack Network."""
CLOUD = providers.CLOUDSTACK
def __init__(self, spec):
super(CloudStackNetwork, self).__init__(spec)
self.cs = util.CsClient(FLAGS.CS_API_URL,
FLAGS.CS_API_KEY,
FLAGS.CS_API_SECRET)
self.project_id = None
self.network_id = None
def _AcquireNetworkDetails(self):
if FLAGS.project:
project = self.cs.get_project(FLAGS.project)
if project:
self.project_id = project['id']
self.zone_id = None
zone = self.cs.get_zone(self.zone)
if zone:
self.zone_id = zone['id']
assert self.zone_id, "Zone required to create a network"
self.network_name = None
nw_off = self.cs.get_network_offering(FLAGS.cs_network_offering,
self.project_id)
assert nw_off, "Network offering not found"
self.network_offering_id = nw_off['id']
self.network_name = 'perfkit-network-%s' % FLAGS.run_uri
self.is_vpc = FLAGS.cs_use_vpc
self.vpc_id = None
if FLAGS.cs_use_vpc:
assert FLAGS.cs_vpc_offering, "VPC flag should specify the VPC offering"
vpc_off = self.cs.get_vpc_offering(FLAGS.cs_vpc_offering)
assert vpc_off, "Use VPC specified but VPC offering not found"
self.vpc_offering_id = vpc_off['id']
self.vpc_name = 'perfkit-vpc-%s' % FLAGS.run_uri
@vm_util.Retry(max_retries=3)
def Create(self):
"""Creates the actual network."""
gateway = None
netmask = None
self._AcquireNetworkDetails()
if self.is_vpc:
# Create a VPC first
cidr = '10.0.0.0/16'
vpc = self.cs.create_vpc(self.vpc_name,
self.zone_id,
cidr,
self.vpc_offering_id,
self.project_id)
self.vpc_id = vpc['id']
gateway = '10.0.0.1'
netmask = '255.255.255.0'
acl = self.cs.get_network_acl('default_allow', self.project_id)
assert acl, "Default allow ACL not found"
# Create the network
network = self.cs.create_network(self.network_name,
self.network_offering_id,
self.zone_id,
self.project_id,
self.vpc_id,
gateway,
netmask,
acl['id'])
assert network, "No network could be created"
self.network_id = network['id']
self.id = self.network_id
def Delete(self):
"""Deletes the actual network."""
if self.network_id:
self.cs.delete_network(self.network_id)
if self.is_vpc and self.vpc_id:
self.cs.delete_vpc(self.vpc_id)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from compare_gan import datasets
from compare_gan.metrics import eval_task
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt # pylint: disable=g-import-not-at-top
import numpy as np
from pstar import plist
import scipy.misc
import six.moves.cPickle as pickle
import tensorflow as tf
import tensorflow_probability as tfp
layers = tf.layers
ds = tfp.distributions
class GILBOTask(eval_task.EvalTask):
"""Compute GILBO metric and related consistency metrics."""
def __init__(self, outdir, task_workdir, dataset_name):
self.outdir = outdir
self.task_workdir = task_workdir
self.dataset = dataset_name
def metric_list(self):
return frozenset([
"gilbo",
"gilbo_train_consistency",
"gilbo_eval_consistency",
"gilbo_self_consistency",
])
def run_in_session(self, options, sess, gan, eval_data_real):
del eval_data_real
result_dict = {}
if options.get("compute_gilbo", False):
(gilbo, gilbo_train_consistency,
gilbo_eval_consistency, gilbo_self_consistency) = train_gilbo(
gan, sess, self.outdir, self.task_workdir, self.dataset, options)
result_dict["gilbo"] = gilbo
result_dict["gilbo_train_consistency"] = gilbo_train_consistency
result_dict["gilbo_eval_consistency"] = gilbo_eval_consistency
result_dict["gilbo_self_consistency"] = gilbo_self_consistency
return result_dict
def _build_regressor(x, z_dim=64):
"""Make the GILBO regressor, which is based off of the GAN discriminator."""
net = tf.cast(x, tf.float32)
net = layers.conv2d(net, 64, 4, 2, activation=tf.nn.leaky_relu)
net = layers.conv2d(net, 128, 4, 2, activation=tf.nn.leaky_relu)
net = layers.flatten(net)
net = layers.dense(net, 1024, activation=tf.nn.leaky_relu)
net = layers.dense(net, 2 * z_dim)
  # a and b correspond to the alpha and beta parameters of the Beta
  # distribution.
a, b = net[..., :z_dim], net[..., z_dim:2 * z_dim]
a = 1 + tf.nn.softplus(a - 5)
b = 1 + tf.nn.softplus(b - 5)
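  # 1 + softplus(x - 5) keeps both concentration parameters strictly above 1
  # (a unimodal Beta) and starts them close to 1 when the network output is
  # near zero.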
dist = ds.Independent(ds.Beta(a, b), 1)
bijector = ds.bijectors.Affine(-1.0, 2.0)
tdist = ds.TransformedDistribution(distribution=dist, bijector=bijector)
return tdist
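# GILBO (Generative Information Lower BOund) fits a regressor q(z|x) on images
# generated by the GAN and uses it to estimate a variational lower bound on
# the mutual information between latent codes and generated images,
# E_{z ~ p(z)}[log q(z | G(z)) - log p(z)]. The helpers below train that
# regressor, evaluate the bound, and compute several consistency diagnostics.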
def train_gilbo(gan, sess, outdir, checkpoint_path, dataset, options):
"""Build and train GILBO model.
Args:
gan: GAN object.
sess: tf.Session.
outdir: Output directory. A pickle file will be written there.
    checkpoint_path: Path where gan's checkpoints are written. Only used to
                     ensure that GILBO files are written to a unique
                     subdirectory of outdir.
dataset: Name of dataset used to train the GAN.
options: Options dictionary.
Returns:
mean_eval_info: Mean GILBO computed over a large number of images generated
by the trained GAN
mean_train_consistency: Mean consistency of the trained GILBO model with
data from the training set.
mean_eval_consistency: Same consistency measure for the trained model with
data from the validation set.
mean_self_consistency: Same consistency measure for the trained model with
data generated by the trained model itself.
See the GILBO paper for an explanation of these metrics.
Raises:
ValueError: If the GAN has uninitialized variables.
"""
uninitialized = sess.run(tf.report_uninitialized_variables())
if uninitialized:
raise ValueError("Model has uninitialized variables!\n%r" % uninitialized)
outdir = os.path.join(outdir, checkpoint_path.replace("/", "_"))
tf.gfile.MakeDirs(outdir)
with tf.variable_scope("gilbo"):
ones = tf.ones((gan.batch_size, gan.z_dim))
# Get a distribution for the prior.
z_dist = ds.Independent(ds.Uniform(-ones, ones), 1)
z_sample = z_dist.sample()
epsneg = np.finfo("float32").epsneg
# Clip samples from the GAN uniform prior because the Beta distribution
# doesn"t include the top endpoint and has issues with the bottom endpoint.
ganz_clip = tf.clip_by_value(gan.z, -(1 - epsneg), 1 - epsneg)
# Get generated images from the model.
fake_images = gan.fake_images
# Build the regressor distribution that encodes images back to predicted
# samples from the prior.
with tf.variable_scope("regressor"):
z_pred_dist = _build_regressor(fake_images, gan.z_dim)
# Capture the parameters of the distributions for later analysis.
dist_p1 = z_pred_dist.distribution.distribution.concentration0
dist_p2 = z_pred_dist.distribution.distribution.concentration1
# info and avg_info compute the GILBO.
info = z_pred_dist.log_prob(ganz_clip) - z_dist.log_prob(ganz_clip)
avg_info = tf.reduce_mean(info)
# Set up training of the GILBO model.
lr = options.get("gilbo_learning_rate", 4e-4)
learning_rate = tf.get_variable(
"learning_rate", initializer=lr, trainable=False)
gilbo_step = tf.get_variable("gilbo_step", dtype=tf.int32, initializer=0,
trainable=False)
opt = tf.train.AdamOptimizer(learning_rate)
regressor_vars = tf.contrib.framework.get_variables("gilbo/regressor")
train_op = opt.minimize(-info, var_list=regressor_vars)
# Initialize the variables we just created.
uninitialized = plist(tf.report_uninitialized_variables().eval())
uninitialized_vars = uninitialized.apply(
tf.contrib.framework.get_variables_by_name)._[0]
tf.variables_initializer(uninitialized_vars).run()
saver = tf.train.Saver(uninitialized_vars, max_to_keep=1)
try:
checkpoint_path = tf.train.latest_checkpoint(outdir)
saver.restore(sess, checkpoint_path)
except ValueError:
    # Failing to restore just indicates that we don't have a valid checkpoint,
# so we will just start training a fresh GILBO model.
pass
_train_gilbo(sess, gan, saver, learning_rate, gilbo_step, z_sample, avg_info,
z_pred_dist, train_op, outdir, options)
mean_eval_info = _eval_gilbo(sess, gan, z_sample, avg_info,
dist_p1, dist_p2, fake_images, outdir, options)
# Collect encoded distributions on the training and eval set in order to do
# kl-nearest-neighbors on generated samples and measure consistency.
dataset = datasets.get_dataset(dataset)
x_train = dataset.load_dataset(split_name="train", num_threads=1)
x_train = x_train.batch(gan.batch_size, drop_remainder=True)
x_train = x_train.make_one_shot_iterator().get_next()[0]
x_train = tf.reshape(x_train, fake_images.shape)
x_eval = dataset.load_dataset(split_name="test", num_threads=1)
x_eval = x_eval.batch(gan.batch_size, drop_remainder=True)
x_eval = x_eval.make_one_shot_iterator().get_next()[0]
x_eval = tf.reshape(x_eval, fake_images.shape)
mean_train_consistency = _run_gilbo_consistency(
x_train, "train", extract_input_images=0,
save_consistency_images=20, num_batches=5, **locals())
mean_eval_consistency = _run_gilbo_consistency(
x_eval, "eval", extract_input_images=0,
save_consistency_images=20, num_batches=5, **locals())
mean_self_consistency = _run_gilbo_consistency(
fake_images, "self", extract_input_images=20,
save_consistency_images=20, num_batches=5, **locals())
return (mean_eval_info, mean_train_consistency, mean_eval_consistency,
mean_self_consistency)
def _train_gilbo(sess, gan, saver, learning_rate, gilbo_step, z_sample,
avg_info, z_pred_dist, train_op, outdir, options):
"""Run the training process."""
lr_scale = options.get("gilbo_lr_scale", 0.5)
min_lr = options.get("gilbo_min_lr", 1e-8)
min_ai_step_scale = options.get("gilbo_min_ai_step_scale", 0.75)
min_ai_step_value = options.get("gilbo_min_ai_step_value", 0.5)
max_train_cycles = options.get("gilbo_max_train_cycles", 50)
train_steps_per_cycle = options.get("gilbo_train_steps_per_cycle", 10000)
ais = [0.0] # average gilbos (i is for info)
min_ai = -2.0
lr, i = sess.run([learning_rate, gilbo_step])
for i in range(i, max_train_cycles):
if lr < min_lr:
break
_save_gilbo(saver, sess, learning_rate, gilbo_step, i, lr, outdir)
ai = 0.0
for j in range(train_steps_per_cycle):
if j % (train_steps_per_cycle // 10) == 0:
tf.logging.info("step:%d, gilbo:%.3f" % (j, ai))
samp = sess.run(z_sample)
_, z_info = sess.run(
[train_op, avg_info],
feed_dict={gan.z: samp, learning_rate: lr})
ai += (z_info - ai) / (j + 1)
tf.logging.info("cycle:%d gilbo:%.3f min next gilbo:%.3f learning rate:%.3f"
% (i, ai, min_ai, lr))
if ai < min_ai:
lr *= lr_scale
if lr < min_lr:
break
if np.isnan(ai):
tf.logging.info("NaN GILBO at cycle %d, stopping training early." % i)
break
ais.append(ai)
# min_ai is the minimum next GILBO for the training algorithm to consider
# that progress is being made. GILBO is a lower bound that we are maximizing
# so we want it to increase during each training cycle.
min_ai = max(min_ai,
ai + max(0.0,
min(min_ai_step_value,
(ai - ais[-2]) * min_ai_step_scale)
)
)
_save_gilbo(saver, sess, learning_rate, gilbo_step, i, lr, outdir)
_save_z_histograms(gan, z_sample, z_pred_dist, outdir, i)
def _eval_gilbo(sess, gan, z_sample, avg_info, dist_p1, dist_p2, fake_images,
outdir, options):
"""Evaluate GILBO on new data from the generative model.
Args:
sess: tf.Session.
gan: GAN object.
z_sample: Tensor sampling from the prior.
avg_info: Tensor that computes the per-batch GILBO.
dist_p1: Tensor for the first parameter of the distribution
(e.g., concentration1 for a Beta distribution).
dist_p2: Tensor for the second parameter of the distribution
(e.g., concentration2 for a Beta distribution).
fake_images: Tensor of images sampled from the GAN.
outdir: Output directory. A pickle file will be written there.
options: Options dictionary.
Returns:
The mean GILBO on the evaluation set. Also writes a pickle file saving
distribution parameters and generated images for later analysis.
"""
eval_steps = options.get("gilbo_eval_steps", 10000)
z_infos = np.zeros(eval_steps, np.float32)
z_dist_p1s, z_dist_p2s, z_fake_images = [], [], []
mean_eval_info = 0
for i in range(eval_steps):
samp = sess.run(z_sample)
if i * gan.batch_size < 1000:
# Save the first 1000 distribution parameters and generated images for
# separate data processing.
z_infos[i], z_dist_p1, z_dist_p2, images = sess.run(
[avg_info, dist_p1, dist_p2, fake_images], feed_dict={gan.z: samp})
z_dist_p1s.append(z_dist_p1)
z_dist_p2s.append(z_dist_p2)
z_fake_images.append(images)
else:
z_infos[i] = sess.run(avg_info, feed_dict={gan.z: samp})
if i % (eval_steps // 10) == 0:
tf.logging.info("eval step:%d gilbo:%3.1f" % (i, z_infos[i]))
if eval_steps:
mean_eval_info = np.mean(np.nan_to_num(z_infos))
eval_dists = dict(
dist_p1=np.array(z_dist_p1s).reshape([-1, 64]),
dist_p2=np.array(z_dist_p2s).reshape([-1, 64]),
images=np.array(z_fake_images).reshape(
[-1] + list(z_fake_images[0].shape[1:])))
with tf.gfile.Open(os.path.join(outdir, "eval_dists.p"), "w") as f:
pickle.dump(eval_dists, f)
tf.logging.info("eval gilbo:%3.1f" % mean_eval_info)
return mean_eval_info
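# Consistency check, in outline: encode input images with the trained
# regressor, push the predicted latents back through the GAN to get
# reconstructions, re-encode the reconstructions, and compare the two latent
# Beta distributions with a symmetrised KL divergence,
# (KL(p||q) + KL(q||p)) / 2.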
def _run_gilbo_consistency(
input_images, mode, dist_p1, dist_p2, z_pred_dist,
z_sample, gan, sess, outdir, dataset, extract_input_images=0,
save_consistency_images=0, num_batches=3000, **unused_kw):
"""Measure consistency of the gilbo estimator with the GAN or VAE.
Arguments without documentation are variables from the calling function needed
here. Pass them with **locals().
Args:
input_images: Tensor. Dataset images, or images generated by the GAN or VAE.
mode: "train", "eval", or "self". Which consistency measure to compute.
dist_p1:
dist_p2:
z_pred_dist:
z_sample:
gan:
sess:
outdir:
dataset:
extract_input_images: Number of batches to extract, -1 for all. Default: 0.
save_consistency_images: Num batches to save, -1 for all. Default: 0.
num_batches: Number of batches to run. Default: 3000.
**unused_kw: Unused extra keyword args.
Returns:
Symmetric consistency KL. Additionally saves distribution parameters as a
pickle as well as any requested images as pngs to outdir.
"""
with tf.variable_scope("gilbo"):
with tf.variable_scope("regressor", reuse=True):
z_pred_dist_train = _build_regressor(input_images, gan.z_dim)
z_sample_train = z_pred_dist_train.sample()
dist_p1_ph = tf.placeholder(tf.float32, dist_p1.shape)
dist_p2_ph = tf.placeholder(tf.float32, dist_p2.shape)
consist_dist_p1_ph = tf.placeholder(tf.float32, dist_p1.shape)
consist_dist_p2_ph = tf.placeholder(tf.float32, dist_p2.shape)
dist_p1 = z_pred_dist_train.distribution.distribution.concentration0
dist_p2 = z_pred_dist_train.distribution.distribution.concentration1
consist_z_dist_p1 = z_pred_dist.distribution.distribution.concentration0
consist_z_dist_p2 = z_pred_dist.distribution.distribution.concentration1
base_dist = ds.Beta
kl_dist_p = ds.Independent(base_dist(dist_p1_ph, dist_p2_ph), 1)
kl_dist_q = ds.Independent(
base_dist(consist_dist_p1_ph, consist_dist_p2_ph), 1)
consistency_kl = kl_dist_p.kl_divergence(kl_dist_q)
consistency_rkl = kl_dist_q.kl_divergence(kl_dist_p)
z_dist_p1s, z_dist_p2s = [], []
consist_z_dist_p1s, consist_z_dist_p2s = [], []
consistency_kls, consistency_rkls, consistency_skls = [], [], []
i = 0
while i < num_batches:
try:
samp = sess.run(z_sample)
z_dist_p1, z_dist_p2, images, train_samp = sess.run(
[dist_p1, dist_p2, input_images, z_sample_train],
feed_dict={gan.z: samp})
z_dist_p1s.append(z_dist_p1)
z_dist_p2s.append(z_dist_p2)
(consist_z_dist_p1_out, consist_z_dist_p2_out,
consistency_images) = sess.run(
[consist_z_dist_p1, consist_z_dist_p2, gan.fake_images],
feed_dict={gan.z: train_samp})
consist_z_dist_p1s.append(consist_z_dist_p1_out)
consist_z_dist_p2s.append(consist_z_dist_p2_out)
consist_kls, consist_rkls = sess.run(
[consistency_kl, consistency_rkl],
feed_dict={
dist_p1_ph: z_dist_p1,
dist_p2_ph: z_dist_p2,
consist_dist_p1_ph: consist_z_dist_p1_out,
consist_dist_p2_ph: consist_z_dist_p2_out,
})
consistency_kls.append(consist_kls)
consistency_rkls.append(consist_rkls)
consistency_skls.append((consist_kls + consist_rkls) / 2.0)
if save_consistency_images:
save_consistency_images -= 1
filename = os.path.join(
outdir,
"consistency_image_%s_%06d_%06d.png"
% (mode, i * gan.batch_size, (i + 1) * gan.batch_size - 1))
img = consistency_images.reshape(
[gan.batch_size * consistency_images.shape[1],
consistency_images.shape[2],
-1])
_save_image(img, filename)
if extract_input_images:
extract_input_images -= 1
if mode == "self":
filename = os.path.join(
outdir,
"%s_image_%06d_%06d.png"
% (mode, i * gan.batch_size, (i + 1) * gan.batch_size - 1))
img = images.reshape(
[gan.batch_size * consistency_images.shape[1],
consistency_images.shape[2],
-1])
_save_image(img, filename)
else:
for j in range(gan.batch_size):
filename = os.path.join(
outdir, "..", dataset,
"%s_image_%06d.png" % (mode, i * gan.batch_size + j))
_save_image(images[j], filename)
if i % 100 == 0:
tf.logging.info(
"%s: step:%d consistency KL:%3.1f" %
(mode, i, np.mean(consistency_skls)))
i += 1
except tf.errors.OutOfRangeError:
break
out_dists = dict(
dist_p1=np.reshape(z_dist_p1s, [-1, gan.batch_size]),
dist_p2=np.reshape(z_dist_p2s, [-1, gan.batch_size]),
consist_dist_p1=np.reshape(consist_z_dist_p1s, [-1, gan.batch_size]),
consist_dist_p2=np.reshape(consist_z_dist_p2s, [-1, gan.batch_size]),
consistency_kl=np.reshape(consistency_kls, [-1, gan.batch_size]),
consistency_rkl=np.reshape(consistency_rkls, [-1, gan.batch_size]),
consistency_skl=np.reshape(consistency_skls, [-1, gan.batch_size]),
)
with tf.gfile.Open(
os.path.join(outdir, "%s_consistency_dists.p" % mode), "w") as f:
pickle.dump(out_dists, f)
return np.mean(consistency_skls)
def _save_image(img, filename):
  # If img is [H W] or [H W 1], stack into [H W 3] for scipy's api.
if len(img.shape) == 2 or img.shape[-1] == 1:
img = np.stack((img.squeeze(),) * 3, -1)
with tf.gfile.Open(filename, "w") as f:
scipy.misc.toimage(img, cmin=0.0, cmax=1.0).save(f)
def _save_z_histograms(gan, z_sample, z_pred_dist, outdir, step):
"""Save a histogram for each z dimension as an png in outdir."""
fig, axs = plt.subplots(8, 8, figsize=(15, 10))
pk = 0
bins = np.linspace(-1, 1, 70)
samp = z_sample.eval()
z_pred_samp = z_pred_dist.sample(10000).eval({gan.z: samp})
try:
for j in range(64):
axs.flat[j].hist(z_pred_samp[:, pk, j], bins, histtype="stepfilled",
normed=True)
axs.flat[j].vlines(samp[pk, j], 0, 1.0, linestyle="dashed")
plt.tight_layout()
filename = os.path.join(outdir, "z_hist_%03d.png" % step)
tf.logging.info("Saving z histogram: %s" % filename)
with tf.gfile.Open(filename, "w") as f:
fig.savefig(f, dpi="figure")
except Exception as e: # pylint: disable=broad-except
tf.logging.info("Caught %r while rendering chart. Ignoring.\n%s\n"
% (type(e), str(e)))
def _save_gilbo(saver, sess, learning_rate, gilbo_step, step, lr, outdir):
"""Save GILBO model checkpoints, including the current step and lr.
Args:
saver: tf.train.Saver.
sess: tf.Session.
learning_rate: tf.Variable for the learning rate.
gilbo_step: tf.Variable for the current training step.
step: integer for the current step, to be saved in the checkpoint.
lr: float for the current learning rate, to be saved in the checkpoint.
outdir: output directory.
"""
# Save the current learning rate and gilbo training step with the checkpoint.
learning_rate.assign(lr).eval()
gilbo_step.assign(step).eval()
filename = os.path.join(outdir, "gilbo_model")
saver.save(sess, filename, global_step=step)
|
import logging
from homeassistant.components.switch import SwitchEntity
from . import DOMAIN
from .const import (
KEY_CONSUMER,
KEY_IDENTIFIER,
KEY_MEASUREMENT,
KEY_PARENT_MAC,
KEY_PARENT_NAME,
KEY_UNIT,
PERIPHERAL_STATE_OFF,
PERIPHERAL_STATE_ON,
)
_LOGGER = logging.getLogger(__name__)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up actuator platform."""
if discovery_info is None:
return None
consumer = hass.data[DOMAIN][KEY_CONSUMER]
actuator_list = []
for entity_info in discovery_info:
peripheral = hass.data[DOMAIN][entity_info[KEY_PARENT_MAC]][
entity_info[KEY_IDENTIFIER]
]
parent_name = entity_info[KEY_PARENT_NAME]
unit = entity_info[KEY_UNIT]
measurement = entity_info[KEY_MEASUREMENT]
actuator_list.append(
VActuator(peripheral, parent_name, unit, measurement, consumer)
)
async_add_entities(actuator_list)
class VActuator(SwitchEntity):
"""Representation of an Actuator."""
def __init__(self, peripheral, parent_name, unit, measurement, consumer):
"""Initialize the sensor."""
self._is_on = False
self._available = True
self._name = f"{parent_name} {measurement}"
self._parent_mac = peripheral.parentMac
self._identifier = peripheral.identifier
self._unit = unit
self._measurement = measurement
self.consumer = consumer
@property
def unique_id(self):
"""Return the unique id of the actuator."""
return f"{self._parent_mac}/{self._identifier}/{self._measurement}"
@property
def name(self):
"""Return the name of the actuator."""
return self._name
@property
def is_on(self):
"""Return the state of the actuator."""
return self._is_on
@property
def available(self):
"""Return if the actuator is available."""
return self._available
async def async_turn_off(self, **kwargs):
"""Turn off the actuator."""
await self.update_state(0)
async def async_turn_on(self, **kwargs):
"""Turn on the actuator."""
await self.update_state(1)
async def update_state(self, state):
"""Update the state of the actuator."""
payload = {"id": "state-num", "value": state}
await self.consumer.actuatePeripheral(
None, self._identifier, self._parent_mac, payload
)
async def async_update(self):
"""Fetch state data from the actuator."""
samples = await self.consumer.fetchPeripheralSample(
None, self._identifier, self._parent_mac
)
if samples is not None:
for sample in samples:
if sample.measurement == self._measurement:
self._available = True
if sample.value == PERIPHERAL_STATE_OFF:
self._is_on = False
elif sample.value == PERIPHERAL_STATE_ON:
self._is_on = True
break
else:
_LOGGER.error("Sample unavailable")
self._available = False
self._is_on = None
|
from homeassistant import config_entries, setup
from homeassistant.components.NEW_DOMAIN.config_flow import CannotConnect, InvalidAuth
from homeassistant.components.NEW_DOMAIN.const import DOMAIN
from tests.async_mock import patch
async def test_form(hass):
"""Test we get the form."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
with patch(
"homeassistant.components.NEW_DOMAIN.config_flow.PlaceholderHub.authenticate",
return_value=True,
), patch(
"homeassistant.components.NEW_DOMAIN.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.NEW_DOMAIN.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"host": "1.1.1.1",
"username": "test-username",
"password": "test-password",
},
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == "Name of the device"
assert result2["data"] == {
"host": "1.1.1.1",
"username": "test-username",
"password": "test-password",
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_invalid_auth(hass):
"""Test we handle invalid auth."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"homeassistant.components.NEW_DOMAIN.config_flow.PlaceholderHub.authenticate",
side_effect=InvalidAuth,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"host": "1.1.1.1",
"username": "test-username",
"password": "test-password",
},
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "invalid_auth"}
async def test_form_cannot_connect(hass):
"""Test we handle cannot connect error."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"homeassistant.components.NEW_DOMAIN.config_flow.PlaceholderHub.authenticate",
side_effect=CannotConnect,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"host": "1.1.1.1",
"username": "test-username",
"password": "test-password",
},
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "cannot_connect"}
|
from datetime import timedelta
import logging
from ProgettiHWSW.input import Input
import async_timeout
from homeassistant.components.binary_sensor import BinarySensorEntity
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
)
from . import setup_input
from .const import DEFAULT_POLLING_INTERVAL_SEC, DOMAIN
_LOGGER = logging.getLogger(DOMAIN)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the binary sensors from a config entry."""
board_api = hass.data[DOMAIN][config_entry.entry_id]
input_count = config_entry.data["input_count"]
binary_sensors = []
async def async_update_data():
"""Fetch data from API endpoint of board."""
async with async_timeout.timeout(5):
return await board_api.get_inputs()
coordinator = DataUpdateCoordinator(
hass,
_LOGGER,
name="binary_sensor",
update_method=async_update_data,
update_interval=timedelta(seconds=DEFAULT_POLLING_INTERVAL_SEC),
)
await coordinator.async_refresh()
for i in range(1, int(input_count) + 1):
binary_sensors.append(
ProgettihwswBinarySensor(
coordinator,
f"Input #{i}",
setup_input(board_api, i),
)
)
async_add_entities(binary_sensors)
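# All sensors created above share one DataUpdateCoordinator: it polls the
# board every DEFAULT_POLLING_INTERVAL_SEC seconds via async_update_data, and
# each entity simply reads its input's state out of ``coordinator.data``.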
class ProgettihwswBinarySensor(CoordinatorEntity, BinarySensorEntity):
"""Represent a binary sensor."""
def __init__(self, coordinator, name, sensor: Input):
"""Set initializing values."""
super().__init__(coordinator)
self._name = name
self._sensor = sensor
@property
def name(self):
"""Return the sensor name."""
return self._name
@property
def is_on(self):
"""Get sensor state."""
return self.coordinator.data[self._sensor.id]
|
import typing
import functools
from .units.unit import Unit
def chain_transform(units: typing.List[Unit]) -> typing.Callable:
"""
Compose unit transformations into a single function.
:param units: List of :class:`matchzoo.StatelessUnit`.
"""
@functools.wraps(chain_transform)
def wrapper(arg):
"""Wrapper function of transformations composition."""
for unit in units:
arg = unit.transform(arg)
return arg
unit_names = ' => '.join(unit.__class__.__name__ for unit in units)
wrapper.__name__ += ' of ' + unit_names
return wrapper
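# Illustrative usage (a sketch; ``Lowercase`` and ``Tokenize`` stand in for
# whatever concrete unit classes with a ``transform`` method are available):
#
#     pipeline = chain_transform([Lowercase(), Tokenize()])
#     pipeline("Hello World")   # each unit's output feeds the next unit
#
# The wrapper's __name__ becomes e.g. "chain_transform of Lowercase =>
# Tokenize", which makes composed pipelines easier to identify when debugging.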
|
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_MOTION,
BinarySensorEntity,
)
from homeassistant.helpers import entity_platform
from .const import DATA_COORDINATOR, DOMAIN
from .entity import RiscoEntity
SERVICE_BYPASS_ZONE = "bypass_zone"
SERVICE_UNBYPASS_ZONE = "unbypass_zone"
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Risco alarm control panel."""
platform = entity_platform.current_platform.get()
platform.async_register_entity_service(SERVICE_BYPASS_ZONE, {}, "async_bypass_zone")
platform.async_register_entity_service(
SERVICE_UNBYPASS_ZONE, {}, "async_unbypass_zone"
)
coordinator = hass.data[DOMAIN][config_entry.entry_id][DATA_COORDINATOR]
entities = [
RiscoBinarySensor(coordinator, zone_id, zone)
for zone_id, zone in coordinator.data.zones.items()
]
async_add_entities(entities, False)
class RiscoBinarySensor(BinarySensorEntity, RiscoEntity):
"""Representation of a Risco zone as a binary sensor."""
def __init__(self, coordinator, zone_id, zone):
"""Init the zone."""
super().__init__(coordinator)
self._zone_id = zone_id
self._zone = zone
def _get_data_from_coordinator(self):
self._zone = self.coordinator.data.zones[self._zone_id]
@property
def device_info(self):
"""Return device info for this device."""
return {
"identifiers": {(DOMAIN, self.unique_id)},
"name": self.name,
"manufacturer": "Risco",
}
@property
def name(self):
"""Return the name of the zone."""
return self._zone.name
@property
def unique_id(self):
"""Return a unique id for this zone."""
return f"{self._risco.site_uuid}_zone_{self._zone_id}"
@property
def device_state_attributes(self):
"""Return the state attributes."""
return {"bypassed": self._zone.bypassed}
@property
def is_on(self):
"""Return true if sensor is on."""
return self._zone.triggered
@property
def device_class(self):
"""Return the class of this sensor, from DEVICE_CLASSES."""
return DEVICE_CLASS_MOTION
async def _bypass(self, bypass):
alarm = await self._risco.bypass_zone(self._zone_id, bypass)
self._zone = alarm.zones[self._zone_id]
self.async_write_ha_state()
async def async_bypass_zone(self):
"""Bypass this zone."""
await self._bypass(True)
async def async_unbypass_zone(self):
"""Unbypass this zone."""
await self._bypass(False)
|
import os
from mayavi.core.ui.mayavi_scene import MayaviScene
from mayavi.tools.mlab_scene_model import MlabSceneModel
import numpy as np
from pyface.api import confirm, error, FileDialog, OK, YES
from traits.api import (HasTraits, HasPrivateTraits, on_trait_change,
cached_property, DelegatesTo, Event, Instance,
Property, Array, Bool, Button, Enum)
from traitsui.api import HGroup, Item, VGroup, View, Handler, ArrayEditor
from traitsui.menu import NoButtons
from tvtk.pyface.scene_editor import SceneEditor
from ..coreg import (fid_fname, _find_fiducials_files, _find_head_bem,
get_mni_fiducials)
from ..defaults import DEFAULTS
from ..io import write_fiducials
from ..io.constants import FIFF
from ..surface import complete_surface_info, decimate_surface
from ..utils import get_subjects_dir, logger, warn
from ..viz.backends._pysurfer_mayavi import _toggle_mlab_render
from ._file_traits import (SurfaceSource, fid_wildcard, FiducialsSource,
MRISubjectSource, SubjectSelectorPanel,
Surf)
from ._viewer import (HeadViewController, PointObject, SurfaceObject,
headview_borders, _BUTTON_WIDTH,
_MRI_FIDUCIALS_WIDTH, _MM_WIDTH,
_RESET_LABEL, _RESET_WIDTH, _mm_fmt)
defaults = DEFAULTS['coreg']
class MRIHeadWithFiducialsModel(HasPrivateTraits):
"""Represent an MRI head shape (high and low res) with fiducials.
Attributes
----------
points : array (n_points, 3)
MRI head surface points.
tris : array (n_tris, 3)
Triangles based on points.
lpa : array (1, 3)
Left peri-auricular point coordinates.
nasion : array (1, 3)
Nasion coordinates.
rpa : array (1, 3)
Right peri-auricular point coordinates.
"""
subject_source = Instance(MRISubjectSource, ())
bem_low_res = Instance(SurfaceSource, ())
bem_high_res = Instance(SurfaceSource, ())
fid = Instance(FiducialsSource, ())
fid_file = DelegatesTo('fid', 'file')
fid_fname = DelegatesTo('fid', 'fname')
fid_points = DelegatesTo('fid', 'points')
subjects_dir = DelegatesTo('subject_source')
subject = DelegatesTo('subject_source')
subject_has_bem = DelegatesTo('subject_source')
lpa = Array(float, (1, 3))
nasion = Array(float, (1, 3))
rpa = Array(float, (1, 3))
reset = Event(desc="Reset fiducials to the file.")
# info
can_save = Property(depends_on=['file', 'can_save_as'])
can_save_as = Property(depends_on=['lpa', 'nasion', 'rpa'])
can_reset = Property(depends_on=['file', 'fid.points', 'lpa', 'nasion',
'rpa'])
fid_ok = Property(depends_on=['lpa', 'nasion', 'rpa'], desc="All points "
"are set")
default_fid_fname = Property(depends_on=['subjects_dir', 'subject'],
desc="the default file name for the "
"fiducials fif file")
# switch for the GUI (has no effect in the model)
    lock_fiducials = Bool(False, desc="Used by GUI, has no effect in the "
                                      "model.")
@on_trait_change('fid_points')
def reset_fiducials(self): # noqa: D102
if self.fid_points is not None:
self.lpa = self.fid_points[0:1]
self.nasion = self.fid_points[1:2]
self.rpa = self.fid_points[2:3]
def save(self, fname=None):
"""Save the current fiducials to a file.
Parameters
----------
fname : str
Destination file path. If None, will use the current fid filename
if available, or else use the default pattern.
"""
if fname is None:
fname = self.fid_file
if not fname:
fname = self.default_fid_fname
dig = [{'kind': FIFF.FIFFV_POINT_CARDINAL,
'ident': FIFF.FIFFV_POINT_LPA,
'r': np.array(self.lpa[0])},
{'kind': FIFF.FIFFV_POINT_CARDINAL,
'ident': FIFF.FIFFV_POINT_NASION,
'r': np.array(self.nasion[0])},
{'kind': FIFF.FIFFV_POINT_CARDINAL,
'ident': FIFF.FIFFV_POINT_RPA,
'r': np.array(self.rpa[0])}]
write_fiducials(fname, dig, FIFF.FIFFV_COORD_MRI)
self.fid_file = fname
@cached_property
def _get_can_reset(self):
if not self.fid_file:
return False
elif np.any(self.lpa != self.fid.points[0:1]):
return True
elif np.any(self.nasion != self.fid.points[1:2]):
return True
elif np.any(self.rpa != self.fid.points[2:3]):
return True
return False
@cached_property
def _get_can_save_as(self):
can = not (np.all(self.nasion == self.lpa) or
np.all(self.nasion == self.rpa) or
np.all(self.lpa == self.rpa))
return can
@cached_property
def _get_can_save(self):
if not self.can_save_as:
return False
elif self.fid_file:
return True
elif self.subjects_dir and self.subject:
return True
else:
return False
@cached_property
def _get_default_fid_fname(self):
fname = fid_fname.format(subjects_dir=self.subjects_dir,
subject=self.subject)
return fname
@cached_property
def _get_fid_ok(self):
return all(np.any(pt) for pt in (self.nasion, self.lpa, self.rpa))
def _reset_fired(self):
self.reset_fiducials()
# if subject changed because of a change of subjects_dir this was not
# triggered
@on_trait_change('subjects_dir,subject')
def _subject_changed(self):
subject = self.subject
subjects_dir = self.subjects_dir
if not subjects_dir or not subject:
return
# find high-res head model (if possible)
high_res_path = _find_head_bem(subject, subjects_dir, high_res=True)
low_res_path = _find_head_bem(subject, subjects_dir, high_res=False)
if high_res_path is None and low_res_path is None:
msg = 'No standard head model was found for subject %s' % subject
error(None, msg, "No head surfaces found")
raise RuntimeError(msg)
if high_res_path is not None:
self.bem_high_res.file = high_res_path
else:
self.bem_high_res.file = low_res_path
if low_res_path is None:
# This should be very rare!
warn('No low-resolution head found, decimating high resolution '
'mesh (%d vertices): %s' % (len(self.bem_high_res.surf.rr),
high_res_path,))
# Create one from the high res one, which we know we have
rr, tris = decimate_surface(self.bem_high_res.surf.rr,
self.bem_high_res.surf.tris,
n_triangles=5120)
surf = complete_surface_info(dict(rr=rr, tris=tris),
copy=False, verbose=False)
# directly set the attributes of bem_low_res
self.bem_low_res.surf = Surf(tris=surf['tris'], rr=surf['rr'],
nn=surf['nn'])
else:
self.bem_low_res.file = low_res_path
# Set MNI points
try:
fids = get_mni_fiducials(subject, subjects_dir)
except Exception: # some problem, leave at origin
self.fid.mni_points = None
else:
self.fid.mni_points = np.array([f['r'] for f in fids], float)
# find fiducials file
fid_files = _find_fiducials_files(subject, subjects_dir)
if len(fid_files) == 0:
self.fid.reset_traits(['file'])
self.lock_fiducials = False
else:
self.fid_file = fid_files[0].format(subjects_dir=subjects_dir,
subject=subject)
self.lock_fiducials = True
# does not seem to happen by itself ... so hard code it:
self.reset_fiducials()
class SetHandler(Handler):
"""Handler to change style when setting MRI fiducials."""
def object_set_changed(self, info): # noqa: D102
return self.object_locked_changed(info)
def object_locked_changed(self, info): # noqa: D102
if info.object.locked:
ss = ''
else:
ss = 'border-style: solid; border-color: red; border-width: 2px;'
# This will only work for Qt, but hopefully that's most users!
try:
_color_children(info.ui.info.ui.control, ss)
except AttributeError: # safeguard for wxpython
pass
def _color_children(obj, ss):
"""Qt helper."""
for child in obj.children():
if 'QRadioButton' in repr(child):
child.setStyleSheet(ss if child.isChecked() else '')
elif 'QLineEdit' in repr(child):
child.setStyleSheet(ss)
elif 'QWidget' in repr(child): # on Linux it's nested
_color_children(child, ss)
_SET_TOOLTIP = ('Click on the MRI image to set the position, '
'or enter values below')
class FiducialsPanel(HasPrivateTraits):
"""Set fiducials on an MRI surface."""
model = Instance(MRIHeadWithFiducialsModel)
fid_file = DelegatesTo('model')
fid_fname = DelegatesTo('model')
lpa = DelegatesTo('model')
nasion = DelegatesTo('model')
rpa = DelegatesTo('model')
can_save = DelegatesTo('model')
can_save_as = DelegatesTo('model')
can_reset = DelegatesTo('model')
fid_ok = DelegatesTo('model')
locked = DelegatesTo('model', 'lock_fiducials')
set = Enum('LPA', 'Nasion', 'RPA')
current_pos_mm = Array(float, (1, 3))
save_as = Button(label='Save as...')
save = Button(label='Save')
reset_fid = Button(label=_RESET_LABEL)
headview = Instance(HeadViewController)
hsp_obj = Instance(SurfaceObject)
picker = Instance(object)
# the layout of the dialog created
view = View(VGroup(
HGroup(Item('fid_file', width=_MRI_FIDUCIALS_WIDTH,
tooltip='MRI fiducials file'), show_labels=False),
HGroup(Item('set', width=_MRI_FIDUCIALS_WIDTH,
format_func=lambda x: x, style='custom',
tooltip=_SET_TOOLTIP), show_labels=False),
HGroup(Item('current_pos_mm',
editor=ArrayEditor(width=_MM_WIDTH, format_func=_mm_fmt),
tooltip='MRI fiducial position (mm)'), show_labels=False),
HGroup(Item('save', enabled_when='can_save',
tooltip="If a filename is currently specified, save to "
"that file, otherwise save to the default file name",
width=_BUTTON_WIDTH),
Item('save_as', enabled_when='can_save_as',
width=_BUTTON_WIDTH),
Item('reset_fid', enabled_when='can_reset', width=_RESET_WIDTH,
tooltip='Reset to file values (if available)'),
show_labels=False),
enabled_when="locked==False", show_labels=False), handler=SetHandler())
def __init__(self, *args, **kwargs): # noqa: D102
super(FiducialsPanel, self).__init__(*args, **kwargs)
@on_trait_change('current_pos_mm')
def _update_pos(self):
attr = self.set.lower()
if not np.allclose(getattr(self, attr), self.current_pos_mm * 1e-3):
setattr(self, attr, self.current_pos_mm * 1e-3)
@on_trait_change('model:lpa')
def _update_lpa(self, name):
if self.set == 'LPA':
self.current_pos_mm = self.lpa * 1000
@on_trait_change('model:nasion')
def _update_nasion(self, name):
        if self.set == 'Nasion':
self.current_pos_mm = self.nasion * 1000
@on_trait_change('model:rpa')
def _update_rpa(self, name):
        if self.set == 'RPA':
self.current_pos_mm = self.rpa * 1000
def _reset_fid_fired(self):
self.model.reset = True
def _save_fired(self):
self.model.save()
def _save_as_fired(self):
if self.fid_file:
default_path = self.fid_file
else:
default_path = self.model.default_fid_fname
dlg = FileDialog(action="save as", wildcard=fid_wildcard,
default_path=default_path)
dlg.open()
if dlg.return_code != OK:
return
path = dlg.path
if not path.endswith('.fif'):
path = path + '.fif'
if os.path.exists(path):
            answer = confirm(None, "The file %r already exists. Should it "
                             "be replaced?" % path, "Overwrite File?")
if answer != YES:
return
self.model.save(path)
def _on_pick(self, picker):
if self.locked:
return
self.picker = picker
n_pos = len(picker.picked_positions)
if n_pos == 0:
logger.debug("GUI: picked empty location")
return
if picker.actor is self.hsp_obj.surf.actor.actor:
idxs = []
idx = None
pt = [picker.pick_position]
elif self.hsp_obj.surf.actor.actor in picker.actors:
idxs = [i for i in range(n_pos) if picker.actors[i] is
self.hsp_obj.surf.actor.actor]
idx = idxs[-1]
pt = [picker.picked_positions[idx]]
        else:
            logger.debug("GUI: picked object other than MRI")
            return
def round_(x):
return round(x, 3)
        poss = [tuple(map(round_, pos)) for pos in picker.picked_positions]
        pos = tuple(map(round_, picker.pick_position))
msg = ["Pick Event: %i picked_positions:" % n_pos]
line = str(pos)
if idx is None:
line += " <-pick_position"
msg.append(line)
for i, pos in enumerate(poss):
line = str(pos)
if i == idx:
line += " <- MRI mesh"
elif i in idxs:
line += " (<- also MRI mesh)"
msg.append(line)
logger.debug('\n'.join(msg))
if self.set == 'Nasion':
self.nasion = pt
elif self.set == 'LPA':
self.lpa = pt
elif self.set == 'RPA':
self.rpa = pt
else:
raise ValueError("set = %r" % self.set)
@on_trait_change('set')
def _on_set_change(self, obj, name, old, new):
if new == 'Nasion':
self.current_pos_mm = self.nasion * 1000
self.headview.front = True
elif new == 'LPA':
self.current_pos_mm = self.lpa * 1000
self.headview.left = True
elif new == 'RPA':
self.current_pos_mm = self.rpa * 1000
self.headview.right = True
# FiducialsPanel view that allows manipulating all coordinates numerically
view2 = View(VGroup(Item('fid_file', label='Fiducials File'),
Item('fid_fname', show_label=False, style='readonly'),
Item('set', style='custom'), 'lpa', 'nasion', 'rpa',
HGroup(Item('save', enabled_when='can_save'),
Item('save_as', enabled_when='can_save_as'),
Item('reset_fid', enabled_when='can_reset'),
show_labels=False),
enabled_when="locked==False"))
class FiducialsFrame(HasTraits):
"""GUI for interpolating between two KIT marker files.
Parameters
----------
subject : None | str
Set the subject which is initially selected.
subjects_dir : None | str
Override the SUBJECTS_DIR environment variable.
"""
model = Instance(MRIHeadWithFiducialsModel, ())
scene = Instance(MlabSceneModel, ())
headview = Instance(HeadViewController)
spanel = Instance(SubjectSelectorPanel)
panel = Instance(FiducialsPanel)
mri_obj = Instance(SurfaceObject)
point_scale = float(defaults['mri_fid_scale'])
lpa_obj = Instance(PointObject)
nasion_obj = Instance(PointObject)
rpa_obj = Instance(PointObject)
def _headview_default(self):
return HeadViewController(scene=self.scene, system='RAS')
def _panel_default(self):
panel = FiducialsPanel(model=self.model, headview=self.headview)
panel.trait_view('view', view2)
return panel
def _spanel_default(self):
return SubjectSelectorPanel(model=self.model.subject_source)
view = View(HGroup(Item('scene',
editor=SceneEditor(scene_class=MayaviScene),
dock='vertical'),
VGroup(headview_borders,
VGroup(Item('spanel', style='custom'),
label="Subject", show_border=True,
show_labels=False),
VGroup(Item('panel', style="custom"),
label="Fiducials", show_border=True,
show_labels=False),
show_labels=False),
show_labels=False),
resizable=True,
buttons=NoButtons)
def __init__(self, subject=None, subjects_dir=None,
**kwargs): # noqa: D102
super(FiducialsFrame, self).__init__(**kwargs)
subjects_dir = get_subjects_dir(subjects_dir)
if subjects_dir is not None:
self.spanel.subjects_dir = subjects_dir
if subject is not None:
if subject in self.spanel.subjects:
self.spanel.subject = subject
@on_trait_change('scene.activated')
def _init_plot(self):
_toggle_mlab_render(self, False)
lpa_color = defaults['lpa_color']
nasion_color = defaults['nasion_color']
rpa_color = defaults['rpa_color']
# bem
color = defaults['mri_color']
self.mri_obj = SurfaceObject(points=self.model.points, color=color,
tri=self.model.tris, scene=self.scene)
self.model.on_trait_change(self._on_mri_src_change, 'tris')
self.panel.hsp_obj = self.mri_obj
# fiducials
self.lpa_obj = PointObject(scene=self.scene, color=lpa_color,
has_norm=True,
point_scale=self.point_scale)
self.panel.sync_trait('lpa', self.lpa_obj, 'points', mutual=False)
self.sync_trait('point_scale', self.lpa_obj, mutual=False)
self.nasion_obj = PointObject(scene=self.scene, color=nasion_color,
has_norm=True,
point_scale=self.point_scale)
self.panel.sync_trait('nasion', self.nasion_obj, 'points',
mutual=False)
self.sync_trait('point_scale', self.nasion_obj, mutual=False)
self.rpa_obj = PointObject(scene=self.scene, color=rpa_color,
has_norm=True,
point_scale=self.point_scale)
self.panel.sync_trait('rpa', self.rpa_obj, 'points', mutual=False)
self.sync_trait('point_scale', self.rpa_obj, mutual=False)
self.headview.left = True
_toggle_mlab_render(self, True)
# picker
self.scene.mayavi_scene.on_mouse_pick(self.panel._on_pick, type='cell')
def _on_mri_src_change(self):
if (not np.any(self.model.points)) or (not np.any(self.model.tris)):
self.mri_obj.clear()
return
self.mri_obj.points = self.model.points
self.mri_obj.tri = self.model.tris
self.mri_obj.plot()
|
from unittest import mock
from vcr import mode
from vcr.stubs import VCRHTTPSConnection
from vcr.cassette import Cassette
class TestVCRConnection:
    def test_setting_of_attributes_get_propagated_to_real_connection(self):
vcr_connection = VCRHTTPSConnection("www.examplehost.com")
vcr_connection.ssl_version = "example_ssl_version"
assert vcr_connection.real_connection.ssl_version == "example_ssl_version"
@mock.patch("vcr.cassette.Cassette.can_play_response_for", return_value=False)
def testing_connect(*args):
vcr_connection = VCRHTTPSConnection("www.google.com")
vcr_connection.cassette = Cassette("test", record_mode=mode.ALL)
vcr_connection.real_connection.connect()
assert vcr_connection.real_connection.sock is not None
|
from datetime import timedelta
import pytest
from homeassistant.components.cover import (
ATTR_CURRENT_POSITION,
ATTR_CURRENT_TILT_POSITION,
ATTR_POSITION,
ATTR_TILT_POSITION,
DOMAIN,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_SUPPORTED_FEATURES,
SERVICE_CLOSE_COVER,
SERVICE_CLOSE_COVER_TILT,
SERVICE_OPEN_COVER,
SERVICE_OPEN_COVER_TILT,
SERVICE_SET_COVER_POSITION,
SERVICE_SET_COVER_TILT_POSITION,
SERVICE_STOP_COVER,
SERVICE_STOP_COVER_TILT,
SERVICE_TOGGLE,
SERVICE_TOGGLE_COVER_TILT,
STATE_CLOSED,
STATE_CLOSING,
STATE_OPEN,
STATE_OPENING,
)
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.common import assert_setup_component, async_fire_time_changed
CONFIG = {"cover": {"platform": "demo"}}
ENTITY_COVER = "cover.living_room_window"
@pytest.fixture
async def setup_comp(hass):
"""Set up demo cover component."""
with assert_setup_component(1, DOMAIN):
await async_setup_component(hass, DOMAIN, CONFIG)
await hass.async_block_till_done()
async def test_supported_features(hass, setup_comp):
"""Test cover supported features."""
state = hass.states.get("cover.garage_door")
assert state.attributes[ATTR_SUPPORTED_FEATURES] == 3
state = hass.states.get("cover.kitchen_window")
assert state.attributes[ATTR_SUPPORTED_FEATURES] == 11
state = hass.states.get("cover.hall_window")
assert state.attributes[ATTR_SUPPORTED_FEATURES] == 15
state = hass.states.get("cover.living_room_window")
assert state.attributes[ATTR_SUPPORTED_FEATURES] == 255
async def test_close_cover(hass, setup_comp):
"""Test closing the cover."""
state = hass.states.get(ENTITY_COVER)
assert state.state == STATE_OPEN
assert state.attributes[ATTR_CURRENT_POSITION] == 70
await hass.services.async_call(
DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True
)
state = hass.states.get(ENTITY_COVER)
assert state.state == STATE_CLOSING
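    # the demo cover moves in 10% steps once per simulated second, so seven
    # 1-second ticks take it from 70% down to fully closed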
for _ in range(7):
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_COVER)
assert state.state == STATE_CLOSED
assert state.attributes[ATTR_CURRENT_POSITION] == 0
async def test_open_cover(hass, setup_comp):
"""Test opening the cover."""
state = hass.states.get(ENTITY_COVER)
assert state.state == STATE_OPEN
assert state.attributes[ATTR_CURRENT_POSITION] == 70
await hass.services.async_call(
DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True
)
state = hass.states.get(ENTITY_COVER)
assert state.state == STATE_OPENING
for _ in range(7):
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_COVER)
assert state.state == STATE_OPEN
assert state.attributes[ATTR_CURRENT_POSITION] == 100
async def test_toggle_cover(hass, setup_comp):
"""Test toggling the cover."""
# Start open
await hass.services.async_call(
DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True
)
for _ in range(7):
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_COVER)
assert state.state == STATE_OPEN
assert state.attributes["current_position"] == 100
# Toggle closed
await hass.services.async_call(
DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True
)
for _ in range(10):
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_COVER)
assert state.state == STATE_CLOSED
assert state.attributes[ATTR_CURRENT_POSITION] == 0
# Toggle open
await hass.services.async_call(
DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True
)
for _ in range(10):
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_COVER)
assert state.state == STATE_OPEN
assert state.attributes[ATTR_CURRENT_POSITION] == 100
async def test_set_cover_position(hass, setup_comp):
"""Test moving the cover to a specific position."""
state = hass.states.get(ENTITY_COVER)
assert state.attributes[ATTR_CURRENT_POSITION] == 70
await hass.services.async_call(
DOMAIN,
SERVICE_SET_COVER_POSITION,
{ATTR_ENTITY_ID: ENTITY_COVER, ATTR_POSITION: 10},
blocking=True,
)
for _ in range(6):
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_COVER)
assert state.attributes[ATTR_CURRENT_POSITION] == 10
async def test_stop_cover(hass, setup_comp):
"""Test stopping the cover."""
state = hass.states.get(ENTITY_COVER)
assert state.attributes[ATTR_CURRENT_POSITION] == 70
await hass.services.async_call(
DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True
)
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
await hass.services.async_call(
DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True
)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_COVER)
assert state.attributes[ATTR_CURRENT_POSITION] == 80
async def test_close_cover_tilt(hass, setup_comp):
"""Test closing the cover tilt."""
state = hass.states.get(ENTITY_COVER)
assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 50
await hass.services.async_call(
DOMAIN, SERVICE_CLOSE_COVER_TILT, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True
)
for _ in range(7):
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_COVER)
assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 0
async def test_open_cover_tilt(hass, setup_comp):
"""Test opening the cover tilt."""
state = hass.states.get(ENTITY_COVER)
assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 50
await hass.services.async_call(
DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True
)
for _ in range(7):
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_COVER)
assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 100
async def test_toggle_cover_tilt(hass, setup_comp):
"""Test toggling the cover tilt."""
# Start open
await hass.services.async_call(
DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True
)
for _ in range(7):
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_COVER)
assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 100
# Toggle closed
await hass.services.async_call(
DOMAIN, SERVICE_TOGGLE_COVER_TILT, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True
)
for _ in range(10):
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_COVER)
assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 0
# Toggle Open
await hass.services.async_call(
DOMAIN, SERVICE_TOGGLE_COVER_TILT, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True
)
for _ in range(10):
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_COVER)
assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 100
async def test_set_cover_tilt_position(hass, setup_comp):
"""Test moving the cover til to a specific position."""
state = hass.states.get(ENTITY_COVER)
assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 50
await hass.services.async_call(
DOMAIN,
SERVICE_SET_COVER_TILT_POSITION,
{ATTR_ENTITY_ID: ENTITY_COVER, ATTR_TILT_POSITION: 90},
blocking=True,
)
for _ in range(7):
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_COVER)
assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 90
async def test_stop_cover_tilt(hass, setup_comp):
"""Test stopping the cover tilt."""
state = hass.states.get(ENTITY_COVER)
assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 50
await hass.services.async_call(
DOMAIN, SERVICE_CLOSE_COVER_TILT, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True
)
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
await hass.services.async_call(
DOMAIN, SERVICE_STOP_COVER_TILT, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True
)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_COVER)
assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 40
|
from flexx import flx
class Relay(flx.Component):
""" Global object to relay messages to all participants.
"""
@flx.emitter
def create_message(self, name, message):
return dict(name=name, message=message)
@flx.emitter
def new_name(self):
return {}
# Create global relay
relay = Relay()
class MessageBox(flx.Label):
CSS = """
.flx-MessageBox {
overflow-y:scroll;
background: #e8e8e8;
border: 1px solid #444;
margin: 3px;
}
"""
def init(self):
super().init()
global window
self._se = window.document.createElement('div')
def sanitize(self, text):
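        # escape user-provided text by assigning it as textContent and reading
        # back the browser-escaped innerHTML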
self._se.textContent = text
text = self._se.innerHTML
self._se.textContent = ''
return text
@flx.action
def add_message(self, name, msg):
line = '<i>' + self.sanitize(name) + '</i>: ' + self.sanitize(msg)
self.set_html(self.html + line + '<br />')
class ChatRoom(flx.PyComponent):
""" This represents one connection to the chat room.
"""
def init(self):
with flx.HBox(title='Flexx chatroom demo'):
flx.Widget(flex=1)
with flx.VBox():
self.name_edit = flx.LineEdit(placeholder_text='your name')
self.people_label = flx.Label(flex=1, minsize=250)
with flx.VBox(minsize=450):
self.messages = MessageBox(flex=1)
with flx.HBox():
self.msg_edit = flx.LineEdit(flex=1,
placeholder_text='enter message')
self.ok = flx.Button(text='Send')
flx.Widget(flex=1)
self._update_participants()
@flx.reaction('ok.pointer_down', 'msg_edit.submit')
def _send_message(self, *events):
text = self.msg_edit.text
if text:
name = self.name_edit.text or 'anonymous'
relay.create_message(name, text)
self.msg_edit.set_text('')
@relay.reaction('create_message') # note that we connect to relay
def _push_info(self, *events):
for ev in events:
self.messages.add_message(ev.name, ev.message)
@flx.reaction('name_edit.user_done') # tell everyone we changed our name
def _push_name(self, *events):
relay.new_name()
@relay.reaction('new_name') # check for updated names
def _new_name(self, *events):
        self._update_participants()
@flx.manager.reaction('connections_changed')
def _update_participants(self, *event):
if self.session.status:
# Query the app manager to see who's in the room
sessions = flx.manager.get_connections(self.session.app_name)
names = [s.app.name_edit.text for s in sessions]
del sessions
text = '<br />%i persons in this chat:<br /><br />' % len(names)
text += '<br />'.join([name or 'anonymous' for name in sorted(names)])
self.people_label.set_html(text)
if __name__ == '__main__':
a = flx.App(ChatRoom)
a.serve()
# m = a.launch('firefox') # for use during development
flx.start()
|
import logging
import requests
import voluptuous as vol
from homeassistant.components.device_tracker import (
DOMAIN,
PLATFORM_SCHEMA,
DeviceScanner,
)
from homeassistant.const import CONF_HOST, HTTP_OK
import homeassistant.helpers.config_validation as cv
DEFAULT_TIMEOUT = 10
_LOGGER = logging.getLogger(__name__)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({vol.Required(CONF_HOST): cv.string})
def get_scanner(hass, config):
"""Validate the configuration and return a Linksys AP scanner."""
try:
return LinksysSmartWifiDeviceScanner(config[DOMAIN])
except ConnectionError:
return None
class LinksysSmartWifiDeviceScanner(DeviceScanner):
"""This class queries a Linksys Access Point."""
def __init__(self, config):
"""Initialize the scanner."""
self.host = config[CONF_HOST]
self.last_results = {}
# Check if the access point is accessible
response = self._make_request()
        if response.status_code != HTTP_OK:
raise ConnectionError("Cannot connect to Linksys Access Point")
def scan_devices(self):
"""Scan for new devices and return a list with device IDs (MACs)."""
self._update_info()
return self.last_results.keys()
def get_device_name(self, device):
"""Return the name (if known) of the device."""
return self.last_results.get(device)
def _update_info(self):
"""Check for connected devices."""
_LOGGER.info("Checking Linksys Smart Wifi")
self.last_results = {}
response = self._make_request()
if response.status_code != HTTP_OK:
_LOGGER.error(
"Got HTTP status code %d when getting device list", response.status_code
)
return False
try:
data = response.json()
result = data["responses"][0]
devices = result["output"]["devices"]
for device in devices:
macs = device["knownMACAddresses"]
if not macs:
_LOGGER.warning("Skipping device without known MAC address")
continue
mac = macs[-1]
connections = device["connections"]
if not connections:
_LOGGER.debug("Device %s is not connected", mac)
continue
name = None
for prop in device["properties"]:
if prop["name"] == "userDeviceName":
name = prop["value"]
if not name:
name = device.get("friendlyName", device["deviceID"])
_LOGGER.debug("Device %s is connected", mac)
self.last_results[mac] = name
except (KeyError, IndexError):
_LOGGER.exception("Router returned unexpected response")
return False
return True
def _make_request(self):
# Weirdly enough, this doesn't seem to require authentication
data = [
{
"request": {"sinceRevision": 0},
"action": "http://linksys.com/jnap/devicelist/GetDevices",
}
]
headers = {"X-JNAP-Action": "http://linksys.com/jnap/core/Transaction"}
return requests.post(
f"http://{self.host}/JNAP/",
timeout=DEFAULT_TIMEOUT,
headers=headers,
json=data,
)
|
from rest_framework import serializers
from shop.models.product import ProductModel
class ProductSelectSerializer(serializers.ModelSerializer):
"""
    A simple serializer that renders the product's name and primary key as the
    `Select2 Widget`_'s content while looking up a certain product.
    This serializer returns a list of 2-tuples, whose first entry is the
    primary key of the product and whose second entry is the rendered name.
.. _Select2 Widget: https://github.com/applegrew/django-select2
"""
text = serializers.SerializerMethodField()
class Meta:
model = ProductModel
fields = ['id', 'text']
def get_text(self, instance):
return instance.product_name
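# Example usage (a sketch; the queryset below is illustrative and not part of
# this module):
#
#     serializer = ProductSelectSerializer(ProductModel.objects.all(), many=True)
#     serializer.data  # -> e.g. [{'id': 4, 'text': 'Some product name'}, ...]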
|
import io
import json
import logging
from zipfile import ZipFile
from babelfish import Language
from guessit import guessit
from requests import Session
from six.moves import urllib
from . import Provider
from ..cache import EPISODE_EXPIRATION_TIME, region
from ..exceptions import ProviderError
from ..matches import guess_matches
from ..subtitle import Subtitle, fix_line_ending
from ..video import Episode
logger = logging.getLogger(__name__)
class ArgenteamSubtitle(Subtitle):
provider_name = 'argenteam'
def __init__(self, language, download_link, series, season, episode, release, version):
super(ArgenteamSubtitle, self).__init__(language, download_link)
self.download_link = download_link
self.series = series
self.season = season
self.episode = episode
self.release = release
self.version = version
@property
def id(self):
return self.download_link
@property
def info(self):
return urllib.parse.unquote(self.download_link.rsplit('/')[-1])
def get_matches(self, video):
matches = guess_matches(video, {
'title': self.series,
'season': self.season,
'episode': self.episode,
'release_group': self.version
})
# resolution
if video.resolution and self.version and video.resolution in self.version.lower():
matches.add('resolution')
matches |= guess_matches(video, guessit(self.version, {'type': 'episode'}), partial=True)
return matches
class ArgenteamProvider(Provider):
provider_name = 'argenteam'
language = Language.fromalpha2('es')
languages = {language}
video_types = (Episode,)
server_url = "http://argenteam.net/api/v1/"
subtitle_class = ArgenteamSubtitle
def __init__(self):
self.session = None
def initialize(self):
self.session = Session()
self.session.headers['User-Agent'] = self.user_agent
def terminate(self):
self.session.close()
@region.cache_on_arguments(expiration_time=EPISODE_EXPIRATION_TIME, should_cache_fn=lambda value: value)
def search_episode_id(self, series, season, episode):
"""Search the episode id from the `series`, `season` and `episode`.
:param str series: series of the episode.
:param int season: season of the episode.
:param int episode: episode number.
:return: the episode id, if any.
:rtype: int or None
"""
# make the search
query = '%s S%#02dE%#02d' % (series, season, episode)
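        # e.g. series='Lost', season=2, episode=4 -> query 'Lost S02E04'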
logger.info('Searching episode id for %r', query)
r = self.session.get(self.server_url + 'search', params={'q': query}, timeout=10)
r.raise_for_status()
results = json.loads(r.text)
if results['total'] == 1:
return results['results'][0]['id']
logger.error('No episode id found for %r', series)
def query(self, series, season, episode):
episode_id = self.search_episode_id(series, season, episode)
if episode_id is None:
return []
response = self.session.get(self.server_url + 'episode', params={'id': episode_id}, timeout=10)
response.raise_for_status()
content = json.loads(response.text)
subtitles = []
for r in content['releases']:
for s in r['subtitles']:
subtitle = self.subtitle_class(self.language, s['uri'], series, season, episode, r['team'], r['tags'])
logger.debug('Found subtitle %r', subtitle)
subtitles.append(subtitle)
return subtitles
def list_subtitles(self, video, languages):
titles = [video.series] + video.alternative_series
for title in titles:
subs = self.query(title, video.season, video.episode)
if subs:
return subs
return []
def download_subtitle(self, subtitle):
# download as a zip
logger.info('Downloading subtitle %r', subtitle)
r = self.session.get(subtitle.download_link, timeout=10)
r.raise_for_status()
# open the zip
with ZipFile(io.BytesIO(r.content)) as zf:
if len(zf.namelist()) > 1:
raise ProviderError('More than one file to unzip')
subtitle.content = fix_line_ending(zf.read(zf.namelist()[0]))
|
from __future__ import print_function
import os
import sys
import argparse
import time
import fnmatch
from functools import partial
class FilePredicate(object):
def __init__(self):
self.funclist = []
def add_filter(self, func):
self.funclist.append(func)
def run(self, paths):
names = []
for pth in paths:
for root, dirs, files in os.walk(os.path.normpath(pth)):
for func in self.funclist:
root, dirs, files = func(root, dirs, files, pth)
if root is None:
break
if root is not None:
names.extend(os.path.join(root, f) for f in files)
names.extend(os.path.join(root, d + os.path.sep) for d in dirs)
return names
def filter_depth_and_type(mindepth, maxdepth, ftype, root, dirs, files, pth):
root_rel = os.path.relpath(root, pth)
if root_rel == '.':
level = 0
else:
level = len(root_rel.split(os.path.sep))
if level > maxdepth:
return None, None, None
elif not (mindepth <= level <= maxdepth):
files = []
if ftype == 'f':
if not dirs and not files:
return None, None, None
else:
if level == maxdepth:
dirs = []
else:
return root, dirs, files
elif ftype == 'd':
files = []
return root, dirs, files
def filter_name(pattern, root, dirs, files, pth):
files = fnmatch.filter(files, pattern)
dirs = fnmatch.filter(dirs, pattern)
if not files and not dirs and not fnmatch.fnmatch(root, pattern):
return None, None, None
return root, dirs, files
def filter_mtime(oldest_time, newest_time, root, dirs, files, pth):
fnames = []
for f in files:
st_mtime = os.stat(os.path.join(root, f)).st_mtime
if newest_time > st_mtime > oldest_time:
fnames.append(f)
dnames = []
for d in dirs:
st_mtime = os.stat(os.path.join(root, d)).st_mtime
if newest_time > st_mtime > oldest_time:
dnames.append(d)
if not fnames and not dnames:
st_mtime = os.stat(root).st_mtime
if not (newest_time > st_mtime > oldest_time):
return None, None, None
return root, dnames, fnames
def main(args):
ap = argparse.ArgumentParser()
ap.add_argument('paths', nargs='+', help='specify a file hierarchy for find to traverse')
ap.add_argument('-n', '-name', '--name', dest='pattern', nargs='?', default='*', help='pattern to match file names')
ap.add_argument(
'-t',
'-type',
'--type',
nargs='?',
default='f',
choices=('a',
'f',
'd'),
help='specify the file type to match'
)
ap.add_argument('-d', '-mtime', '--mtime', metavar='n', nargs='?', help='specify modification time range')
ap.add_argument(
'-mindepth',
'--mindepth',
metavar='n',
nargs='?',
default=0,
type=int,
help='descend at most n directory levels below command line arguments'
)
ap.add_argument(
'-maxdepth',
'--maxdepth',
metavar='n',
nargs='?',
default=sys.maxsize,
type=int,
help='descend at most n directory levels below command line arguments'
)
ns = ap.parse_args(args)
file_predicate = FilePredicate()
file_predicate.add_filter(partial(filter_depth_and_type, ns.mindepth, ns.maxdepth, ns.type))
file_predicate.add_filter(partial(filter_name, ns.pattern))
if ns.mtime:
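        # --mtime follows find(1): '-n' = modified within the last n days,
        # '+n' = modified more than n days ago, bare 'n' = modified between
        # n and n+1 days ago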
oldest_time = 0
tnow = newest_time = time.time()
if ns.mtime.startswith('-'):
ndays = int(ns.mtime[1:])
oldest_time = tnow - ndays * 86400.0
elif ns.mtime.startswith('+'):
ndays = int(ns.mtime[1:])
newest_time = tnow - (ndays + 1) * 86400.0
else:
ndays = int(ns.mtime)
oldest_time = tnow - (ndays + 1) * 86400.0
newest_time = tnow - ndays * 86400.0
file_predicate.add_filter(partial(filter_mtime, oldest_time, newest_time))
names = file_predicate.run(ns.paths)
print('\n'.join(names))
if __name__ == "__main__":
main(sys.argv[1:])
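# Example invocations (a sketch; assumes this script is saved as find.py):
#   python find.py . -n '*.py' -t f             # Python files below the cwd
#   python find.py src tests -t d -maxdepth 2   # directories at most two levels deep
#   python find.py . -d +7                      # entries modified more than 7 days ago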
|
from homeassistant.components.notify import ATTR_DATA, BaseNotificationService
from . import DOMAIN
async def async_get_service(hass, config, discovery_info=None):
"""Return the notify service."""
client = hass.data[DOMAIN]
return KebaNotificationService(client)
class KebaNotificationService(BaseNotificationService):
"""Notification service for KEBA EV Chargers."""
def __init__(self, client):
"""Initialize the service."""
self._client = client
async def async_send_message(self, message="", **kwargs):
"""Send the message."""
text = message.replace(" ", "$") # Will be translated back by the display
data = kwargs[ATTR_DATA] or {}
min_time = float(data.get("min_time", 2))
max_time = float(data.get("max_time", 10))
await self._client.set_text(text, min_time, max_time)
|
import logging
from pyaehw4a1.aehw4a1 import AehW4a1
import pyaehw4a1.exceptions
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
FAN_AUTO,
FAN_HIGH,
FAN_LOW,
FAN_MEDIUM,
HVAC_MODE_COOL,
HVAC_MODE_DRY,
HVAC_MODE_FAN_ONLY,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
PRESET_BOOST,
PRESET_ECO,
PRESET_NONE,
PRESET_SLEEP,
SUPPORT_FAN_MODE,
SUPPORT_PRESET_MODE,
SUPPORT_SWING_MODE,
SUPPORT_TARGET_TEMPERATURE,
SWING_BOTH,
SWING_HORIZONTAL,
SWING_OFF,
SWING_VERTICAL,
)
from homeassistant.const import (
ATTR_TEMPERATURE,
PRECISION_WHOLE,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
)
from . import CONF_IP_ADDRESS, DOMAIN
SUPPORT_FLAGS = (
SUPPORT_TARGET_TEMPERATURE
| SUPPORT_FAN_MODE
| SUPPORT_SWING_MODE
| SUPPORT_PRESET_MODE
)
MIN_TEMP_C = 16
MAX_TEMP_C = 32
MIN_TEMP_F = 61
MAX_TEMP_F = 90
HVAC_MODES = [
HVAC_MODE_OFF,
HVAC_MODE_HEAT,
HVAC_MODE_COOL,
HVAC_MODE_DRY,
HVAC_MODE_FAN_ONLY,
]
FAN_MODES = [
"mute",
FAN_LOW,
FAN_MEDIUM,
FAN_HIGH,
FAN_AUTO,
]
SWING_MODES = [
SWING_OFF,
SWING_VERTICAL,
SWING_HORIZONTAL,
SWING_BOTH,
]
PRESET_MODES = [
PRESET_NONE,
PRESET_ECO,
PRESET_BOOST,
PRESET_SLEEP,
"sleep_2",
"sleep_3",
"sleep_4",
]
AC_TO_HA_STATE = {
"0001": HVAC_MODE_HEAT,
"0010": HVAC_MODE_COOL,
"0011": HVAC_MODE_DRY,
"0000": HVAC_MODE_FAN_ONLY,
}
HA_STATE_TO_AC = {
HVAC_MODE_OFF: "off",
HVAC_MODE_HEAT: "mode_heat",
HVAC_MODE_COOL: "mode_cool",
HVAC_MODE_DRY: "mode_dry",
HVAC_MODE_FAN_ONLY: "mode_fan",
}
AC_TO_HA_FAN_MODES = {
"00000000": FAN_AUTO, # fan value for heat mode
"00000001": FAN_AUTO,
"00000010": "mute",
"00000100": FAN_LOW,
"00000110": FAN_MEDIUM,
"00001000": FAN_HIGH,
}
HA_FAN_MODES_TO_AC = {
"mute": "speed_mute",
FAN_LOW: "speed_low",
FAN_MEDIUM: "speed_med",
FAN_HIGH: "speed_max",
FAN_AUTO: "speed_auto",
}
AC_TO_HA_SWING = {
"00": SWING_OFF,
"10": SWING_VERTICAL,
"01": SWING_HORIZONTAL,
"11": SWING_BOTH,
}
_LOGGER = logging.getLogger(__name__)
def _build_entity(device):
_LOGGER.debug("Found device at %s", device)
return ClimateAehW4a1(device)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the AEH-W4A1 climate platform."""
# Priority 1: manual config
if hass.data[DOMAIN].get(CONF_IP_ADDRESS):
devices = hass.data[DOMAIN][CONF_IP_ADDRESS]
else:
# Priority 2: scanned interfaces
devices = await AehW4a1().discovery()
entities = [_build_entity(device) for device in devices]
async_add_entities(entities, True)
class ClimateAehW4a1(ClimateEntity):
"""Representation of a Hisense AEH-W4A1 module for climate device."""
def __init__(self, device):
"""Initialize the climate device."""
self._unique_id = device
self._device = AehW4a1(device)
self._hvac_modes = HVAC_MODES
self._fan_modes = FAN_MODES
self._swing_modes = SWING_MODES
self._preset_modes = PRESET_MODES
self._available = None
self._on = None
self._temperature_unit = None
self._current_temperature = None
self._target_temperature = None
self._hvac_mode = None
self._fan_mode = None
self._swing_mode = None
self._preset_mode = None
self._previous_state = None
async def async_update(self):
"""Pull state from AEH-W4A1."""
try:
status = await self._device.command("status_102_0")
except pyaehw4a1.exceptions.ConnectionError as library_error:
_LOGGER.warning(
"Unexpected error of %s: %s", self._unique_id, library_error
)
self._available = False
return
self._available = True
self._on = status["run_status"]
if status["temperature_Fahrenheit"] == "0":
self._temperature_unit = TEMP_CELSIUS
else:
self._temperature_unit = TEMP_FAHRENHEIT
self._current_temperature = int(status["indoor_temperature_status"], 2)
if self._on == "1":
device_mode = status["mode_status"]
self._hvac_mode = AC_TO_HA_STATE[device_mode]
fan_mode = status["wind_status"]
self._fan_mode = AC_TO_HA_FAN_MODES[fan_mode]
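            # up_down and left_right are single-character flags; concatenated they
            # index AC_TO_HA_SWING ("00" off, "10" vertical, "01" horizontal, "11" both)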
swing_mode = f'{status["up_down"]}{status["left_right"]}'
self._swing_mode = AC_TO_HA_SWING[swing_mode]
if self._hvac_mode in (HVAC_MODE_COOL, HVAC_MODE_HEAT):
self._target_temperature = int(status["indoor_temperature_setting"], 2)
else:
self._target_temperature = None
if status["efficient"] == "1":
self._preset_mode = PRESET_BOOST
elif status["low_electricity"] == "1":
self._preset_mode = PRESET_ECO
elif status["sleep_status"] == "0000001":
self._preset_mode = PRESET_SLEEP
elif status["sleep_status"] == "0000010":
self._preset_mode = "sleep_2"
elif status["sleep_status"] == "0000011":
self._preset_mode = "sleep_3"
elif status["sleep_status"] == "0000100":
self._preset_mode = "sleep_4"
else:
self._preset_mode = PRESET_NONE
else:
self._hvac_mode = HVAC_MODE_OFF
self._fan_mode = None
self._swing_mode = None
self._target_temperature = None
self._preset_mode = None
@property
def available(self):
"""Return True if entity is available."""
return self._available
@property
def name(self):
"""Return the name of the climate device."""
return self._unique_id
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return self._temperature_unit
@property
def current_temperature(self):
"""Return the current temperature."""
return self._current_temperature
@property
def target_temperature(self):
"""Return the temperature we are trying to reach."""
return self._target_temperature
@property
def hvac_mode(self):
"""Return hvac target hvac state."""
return self._hvac_mode
@property
def hvac_modes(self):
"""Return the list of available operation modes."""
return self._hvac_modes
@property
def fan_mode(self):
"""Return the fan setting."""
return self._fan_mode
@property
def fan_modes(self):
"""Return the list of available fan modes."""
return self._fan_modes
@property
def preset_mode(self):
"""Return the preset mode if on."""
return self._preset_mode
@property
def preset_modes(self):
"""Return the list of available preset modes."""
return self._preset_modes
@property
def swing_mode(self):
"""Return swing operation."""
return self._swing_mode
@property
def swing_modes(self):
"""Return the list of available fan modes."""
return self._swing_modes
@property
def min_temp(self):
"""Return the minimum temperature."""
if self._temperature_unit == TEMP_CELSIUS:
return MIN_TEMP_C
return MIN_TEMP_F
@property
def max_temp(self):
"""Return the maximum temperature."""
if self._temperature_unit == TEMP_CELSIUS:
return MAX_TEMP_C
return MAX_TEMP_F
@property
def precision(self):
"""Return the precision of the system."""
return PRECISION_WHOLE
@property
def target_temperature_step(self):
"""Return the supported step of target temperature."""
return 1
@property
def supported_features(self):
"""Return the list of supported features."""
return SUPPORT_FLAGS
async def async_set_temperature(self, **kwargs):
"""Set new target temperatures."""
if self._on != "1":
_LOGGER.warning(
"AC at %s is off, could not set temperature", self._unique_id
)
return
temp = kwargs.get(ATTR_TEMPERATURE)
if temp is not None:
_LOGGER.debug("Setting temp of %s to %s", self._unique_id, temp)
if self._preset_mode != PRESET_NONE:
await self.async_set_preset_mode(PRESET_NONE)
if self._temperature_unit == TEMP_CELSIUS:
await self._device.command(f"temp_{int(temp)}_C")
else:
await self._device.command(f"temp_{int(temp)}_F")
async def async_set_fan_mode(self, fan_mode):
"""Set new fan mode."""
if self._on != "1":
_LOGGER.warning("AC at %s is off, could not set fan mode", self._unique_id)
return
if self._hvac_mode in (HVAC_MODE_COOL, HVAC_MODE_FAN_ONLY) and (
self._hvac_mode != HVAC_MODE_FAN_ONLY or fan_mode != FAN_AUTO
):
_LOGGER.debug("Setting fan mode of %s to %s", self._unique_id, fan_mode)
await self._device.command(HA_FAN_MODES_TO_AC[fan_mode])
async def async_set_swing_mode(self, swing_mode):
"""Set new target swing operation."""
if self._on != "1":
_LOGGER.warning(
"AC at %s is off, could not set swing mode", self._unique_id
)
return
_LOGGER.debug("Setting swing mode of %s to %s", self._unique_id, swing_mode)
swing_act = self._swing_mode
if swing_mode == SWING_OFF and swing_act != SWING_OFF:
if swing_act in (SWING_HORIZONTAL, SWING_BOTH):
await self._device.command("hor_dir")
if swing_act in (SWING_VERTICAL, SWING_BOTH):
await self._device.command("vert_dir")
if swing_mode == SWING_BOTH and swing_act != SWING_BOTH:
if swing_act in (SWING_OFF, SWING_HORIZONTAL):
await self._device.command("vert_swing")
if swing_act in (SWING_OFF, SWING_VERTICAL):
await self._device.command("hor_swing")
if swing_mode == SWING_VERTICAL and swing_act != SWING_VERTICAL:
if swing_act in (SWING_OFF, SWING_HORIZONTAL):
await self._device.command("vert_swing")
if swing_act in (SWING_BOTH, SWING_HORIZONTAL):
await self._device.command("hor_dir")
if swing_mode == SWING_HORIZONTAL and swing_act != SWING_HORIZONTAL:
if swing_act in (SWING_BOTH, SWING_VERTICAL):
await self._device.command("vert_dir")
if swing_act in (SWING_OFF, SWING_VERTICAL):
await self._device.command("hor_swing")
async def async_set_preset_mode(self, preset_mode):
"""Set new preset mode."""
if self._on != "1":
if preset_mode == PRESET_NONE:
return
await self.async_turn_on()
_LOGGER.debug("Setting preset mode of %s to %s", self._unique_id, preset_mode)
if preset_mode == PRESET_ECO:
await self._device.command("energysave_on")
self._previous_state = preset_mode
elif preset_mode == PRESET_BOOST:
await self._device.command("turbo_on")
self._previous_state = preset_mode
elif preset_mode == PRESET_SLEEP:
await self._device.command("sleep_1")
self._previous_state = self._hvac_mode
elif preset_mode == "sleep_2":
await self._device.command("sleep_2")
self._previous_state = self._hvac_mode
elif preset_mode == "sleep_3":
await self._device.command("sleep_3")
self._previous_state = self._hvac_mode
elif preset_mode == "sleep_4":
await self._device.command("sleep_4")
self._previous_state = self._hvac_mode
elif self._previous_state is not None:
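            # PRESET_NONE was requested: undo the previous preset, or restore the
            # HVAC mode that was active before a sleep preset was engaged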
if self._previous_state == PRESET_ECO:
await self._device.command("energysave_off")
elif self._previous_state == PRESET_BOOST:
await self._device.command("turbo_off")
elif self._previous_state in HA_STATE_TO_AC:
await self._device.command(HA_STATE_TO_AC[self._previous_state])
self._previous_state = None
async def async_set_hvac_mode(self, hvac_mode):
"""Set new operation mode."""
_LOGGER.debug("Setting operation mode of %s to %s", self._unique_id, hvac_mode)
if hvac_mode == HVAC_MODE_OFF:
await self.async_turn_off()
else:
await self._device.command(HA_STATE_TO_AC[hvac_mode])
if self._on != "1":
await self.async_turn_on()
async def async_turn_on(self):
"""Turn on."""
_LOGGER.debug("Turning %s on", self._unique_id)
await self._device.command("on")
async def async_turn_off(self):
"""Turn off."""
_LOGGER.debug("Turning %s off", self._unique_id)
await self._device.command("off")
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
from absl.testing import absltest
class ClassA(absltest.TestCase):
"""Helper test case A for absltest_fail_fast_test."""
def testA(self):
sys.stderr.write('\nclass A test A\n')
def testB(self):
sys.stderr.write('\nclass A test B\n')
def testC(self):
sys.stderr.write('\nclass A test C\n')
self.fail('Force failure')
def testD(self):
sys.stderr.write('\nclass A test D\n')
def testE(self):
sys.stderr.write('\nclass A test E\n')
if __name__ == '__main__':
absltest.main()
|
import logging
from homeassistant.components.switch import SwitchEntity
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from . import (
CONF_DEVICE_CODE,
CONF_SWITCHABLE_OUTPUTS,
CONF_ZONE_NAME,
DATA_SATEL,
SIGNAL_OUTPUTS_UPDATED,
)
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ["satel_integra"]
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Satel Integra switch devices."""
if not discovery_info:
return
configured_zones = discovery_info[CONF_SWITCHABLE_OUTPUTS]
controller = hass.data[DATA_SATEL]
devices = []
for zone_num, device_config_data in configured_zones.items():
zone_name = device_config_data[CONF_ZONE_NAME]
device = SatelIntegraSwitch(
controller, zone_num, zone_name, discovery_info[CONF_DEVICE_CODE]
)
devices.append(device)
async_add_entities(devices)
class SatelIntegraSwitch(SwitchEntity):
"""Representation of an Satel switch."""
def __init__(self, controller, device_number, device_name, code):
"""Initialize the binary_sensor."""
self._device_number = device_number
self._name = device_name
self._state = False
self._code = code
self._satel = controller
async def async_added_to_hass(self):
"""Register callbacks."""
async_dispatcher_connect(
self.hass, SIGNAL_OUTPUTS_UPDATED, self._devices_updated
)
@callback
def _devices_updated(self, zones):
"""Update switch state, if needed."""
_LOGGER.debug("Update switch name: %s zones: %s", self._name, zones)
if self._device_number in zones:
new_state = self._read_state()
_LOGGER.debug("New state: %s", new_state)
if new_state != self._state:
self._state = new_state
self.async_write_ha_state()
async def async_turn_on(self, **kwargs):
"""Turn the device on."""
_LOGGER.debug("Switch: %s status: %s, turning on", self._name, self._state)
await self._satel.set_output(self._code, self._device_number, True)
self.async_write_ha_state()
async def async_turn_off(self, **kwargs):
"""Turn the device off."""
_LOGGER.debug(
"Switch name: %s status: %s, turning off", self._name, self._state
)
await self._satel.set_output(self._code, self._device_number, False)
self.async_write_ha_state()
@property
def is_on(self):
"""Return true if device is on."""
self._state = self._read_state()
return self._state
def _read_state(self):
"""Read state of the device."""
return self._device_number in self._satel.violated_outputs
@property
def name(self):
"""Return the name of the switch."""
return self._name
@property
def should_poll(self):
"""Don't poll."""
return False
|
import logging
from homeassistant.const import (
ATTR_BATTERY_LEVEL,
DEVICE_CLASS_BATTERY,
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_ILLUMINANCE,
DEVICE_CLASS_POWER,
DEVICE_CLASS_PRESSURE,
DEVICE_CLASS_TEMPERATURE,
LIGHT_LUX,
PERCENTAGE,
POWER_WATT,
PRESSURE_HPA,
TEMP_CELSIUS,
)
from . import XiaomiDevice
from .const import BATTERY_MODELS, DOMAIN, GATEWAYS_KEY, POWER_MODELS
_LOGGER = logging.getLogger(__name__)
SENSOR_TYPES = {
"temperature": [TEMP_CELSIUS, None, DEVICE_CLASS_TEMPERATURE],
"humidity": [PERCENTAGE, None, DEVICE_CLASS_HUMIDITY],
"illumination": ["lm", None, DEVICE_CLASS_ILLUMINANCE],
"lux": [LIGHT_LUX, None, DEVICE_CLASS_ILLUMINANCE],
"pressure": [PRESSURE_HPA, None, DEVICE_CLASS_PRESSURE],
"bed_activity": ["μm", None, None],
"load_power": [POWER_WATT, None, DEVICE_CLASS_POWER],
}
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Perform the setup for Xiaomi devices."""
entities = []
gateway = hass.data[DOMAIN][GATEWAYS_KEY][config_entry.entry_id]
for device in gateway.devices["sensor"]:
if device["model"] == "sensor_ht":
entities.append(
XiaomiSensor(
device, "Temperature", "temperature", gateway, config_entry
)
)
entities.append(
XiaomiSensor(device, "Humidity", "humidity", gateway, config_entry)
)
elif device["model"] in ["weather", "weather.v1"]:
entities.append(
XiaomiSensor(
device, "Temperature", "temperature", gateway, config_entry
)
)
entities.append(
XiaomiSensor(device, "Humidity", "humidity", gateway, config_entry)
)
entities.append(
XiaomiSensor(device, "Pressure", "pressure", gateway, config_entry)
)
elif device["model"] == "sensor_motion.aq2":
entities.append(
XiaomiSensor(device, "Illumination", "lux", gateway, config_entry)
)
elif device["model"] in ["gateway", "gateway.v3", "acpartner.v3"]:
entities.append(
XiaomiSensor(
device, "Illumination", "illumination", gateway, config_entry
)
)
elif device["model"] in ["vibration"]:
entities.append(
XiaomiSensor(
device, "Bed Activity", "bed_activity", gateway, config_entry
)
)
entities.append(
XiaomiSensor(
device, "Tilt Angle", "final_tilt_angle", gateway, config_entry
)
)
entities.append(
XiaomiSensor(
device, "Coordination", "coordination", gateway, config_entry
)
)
else:
_LOGGER.warning("Unmapped Device Model")
# Set up battery sensors
seen_sids = set() # Set of device sids that are already seen
for devices in gateway.devices.values():
for device in devices:
if device["sid"] in seen_sids:
continue
seen_sids.add(device["sid"])
if device["model"] in BATTERY_MODELS:
entities.append(
XiaomiBatterySensor(device, "Battery", gateway, config_entry)
)
if device["model"] in POWER_MODELS:
entities.append(
XiaomiSensor(
device, "Load Power", "load_power", gateway, config_entry
)
)
async_add_entities(entities)
class XiaomiSensor(XiaomiDevice):
"""Representation of a XiaomiSensor."""
def __init__(self, device, name, data_key, xiaomi_hub, config_entry):
"""Initialize the XiaomiSensor."""
self._data_key = data_key
super().__init__(device, name, xiaomi_hub, config_entry)
@property
def icon(self):
"""Return the icon to use in the frontend."""
try:
return SENSOR_TYPES.get(self._data_key)[1]
except TypeError:
return None
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
try:
return SENSOR_TYPES.get(self._data_key)[0]
except TypeError:
return None
@property
def device_class(self):
"""Return the device class of this entity."""
return (
SENSOR_TYPES.get(self._data_key)[2]
if self._data_key in SENSOR_TYPES
else None
)
@property
def state(self):
"""Return the state of the sensor."""
return self._state
def parse_data(self, data, raw_data):
"""Parse data sent by gateway."""
value = data.get(self._data_key)
if value is None:
return False
if self._data_key in ["coordination", "status"]:
self._state = value
return True
value = float(value)
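        # temperature/humidity/pressure are reported as integer hundredths;
        # illumination carries a constant offset of 300 that is clamped at zero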
if self._data_key in ["temperature", "humidity", "pressure"]:
value /= 100
elif self._data_key in ["illumination"]:
value = max(value - 300, 0)
if self._data_key == "temperature" and (value < -50 or value > 60):
return False
if self._data_key == "humidity" and (value <= 0 or value > 100):
return False
if self._data_key == "pressure" and value == 0:
return False
if self._data_key in ["illumination", "lux"]:
self._state = round(value)
else:
self._state = round(value, 1)
return True
class XiaomiBatterySensor(XiaomiDevice):
"""Representation of a XiaomiSensor."""
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return PERCENTAGE
@property
def device_class(self):
"""Return the device class of this entity."""
return DEVICE_CLASS_BATTERY
@property
def state(self):
"""Return the state of the sensor."""
return self._state
def parse_data(self, data, raw_data):
"""Parse data sent by gateway."""
succeed = super().parse_voltage(data)
if not succeed:
return False
battery_level = int(self._device_state_attributes.pop(ATTR_BATTERY_LEVEL))
if battery_level <= 0 or battery_level > 100:
return False
self._state = battery_level
return True
def parse_voltage(self, data):
"""Parse battery level data sent by gateway."""
return False # Override parse_voltage to do nothing
|
import os
import base64
import numpy as np
import flexx
from flexx.util.icon import Icon
# colors:
# (70, 140, 210) - Python blue
# (240, 80, 80) - a strong red
def create_icon(N=16, COLOR=(240, 80, 80)):
    im = np.zeros((N, N), bool)
row_index = [0, 1, 1, 1, 1, 0, 2, 2, 2, 2, 0, 3, 3, 3, 3, 0]
col_index1 = [0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
col_index2 = [0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0]
col_index3 = [0, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0]
col_index = None, col_index1, col_index2, col_index3
# Create template image
for y in range(N):
for x in range(N):
row16 = int(y * 16 / N)
col16 = int(x * 16 / N)
inrow = row_index[row16]
if inrow:
incol = col_index[inrow][col16]
if incol:
im[y, x] = True
im = np.flipud(im) # images have y up
# Colorize
rgba = np.zeros((N, N, 4), np.uint8)
for y in range(N):
for x in range(N):
if im[y, x]:
rgba[y, x, :3] = COLOR
rgba[y, x, 3] = 255
elif im[max(0, y-1):y+2, max(0, x-1):x+2].any():
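                # soften the glyph edge: neighbouring pixels get a darker,
                # semi-transparent version of the colour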
factor = im[max(0, y-1):y+2, max(0, x-1):x+2].sum()
rgba[y, x, :3] = COLOR
rgba[y, x, :3] //= 2
rgba[y, x, 3] = 64 * (0.66 if factor == 1 else 1)
# else:
# rgba[y, x, :3] = 0, 0, 0
# rgba[y, x, 3] = 128
return rgba
def create_icons():
icon = Icon()
for n in (16, 32, 48, 64, 128, 256):
icon.add(create_icon(n).tobytes())
icon.write(os.path.join(flexx.__path__[0], 'resources', 'flexx.ico'))
def create_silly_icon():
im = np.zeros((16, 16, 4), 'uint8')
im[3:-3, 3:-3] = 200
im[:, :, 3] = 255
icon = Icon()
icon.add(im.tobytes())
bb = icon._to_png(icon._ims[16])
print(base64.encodebytes(bb).decode())
if __name__ == '__main__':
rgba = create_icon(48)
import visvis as vv
vv.figure(1)
vv.clf()
vv.imshow(rgba)
create_icons()
|
import copy
import pytest
from molecule import config
from molecule import scenario
from molecule import scenarios
@pytest.fixture
def _instance(config_instance):
config_instance_1 = copy.deepcopy(config_instance)
config_instance_2 = copy.deepcopy(config_instance)
config_instance_2.config['scenario']['name'] = 'foo'
return scenarios.Scenarios([config_instance_1, config_instance_2])
def test_configs_private_member(_instance):
assert 2 == len(_instance._configs)
assert isinstance(_instance._configs[0], config.Config)
assert isinstance(_instance._configs[1], config.Config)
def test_scenario_name_private_member(_instance):
assert _instance._scenario_name is None
def test_scenarios_private_member(_instance):
assert 2 == len(_instance._scenarios)
assert isinstance(_instance._scenarios[0], scenario.Scenario)
assert isinstance(_instance._scenarios[1], scenario.Scenario)
def test_scenarios_iterator(_instance):
s = [scenario for scenario in _instance]
assert 'default' == s[0].name
assert 'foo' == s[1].name
def test_all_property(_instance):
result = _instance.all
assert 2 == len(result)
assert 'default' == result[0].name
assert 'foo' == result[1].name
def test_all_filters_on_scenario_name_property(_instance):
_instance._scenario_name = 'default'
assert 1 == len(_instance.all)
def test_print_matrix(mocker, patched_logger_info, patched_logger_out,
_instance):
_instance.print_matrix()
msg = 'Test matrix'
patched_logger_info(msg)
matrix_out = u"""
├── default
│ ├── lint
│ ├── dependency
│ ├── cleanup
│ ├── destroy
│ ├── syntax
│ ├── create
│ ├── prepare
│ ├── converge
│ ├── idempotence
│ ├── side_effect
│ ├── verify
│ ├── cleanup
│ └── destroy
└── foo
├── lint
├── dependency
├── cleanup
├── destroy
├── syntax
├── create
├── prepare
├── converge
├── idempotence
├── side_effect
├── verify
├── cleanup
└── destroy
"""
assert matrix_out == patched_logger_out.mock_calls[0][1][0]
assert mocker.call('') == patched_logger_out.mock_calls[1]
def test_verify_does_not_raise_when_found(_instance):
_instance._scenario_name = 'default'
assert _instance._verify() is None
def test_verify_raises_when_scenario_not_found(_instance,
patched_logger_critical):
_instance._scenario_name = 'invalid'
with pytest.raises(SystemExit) as e:
_instance._verify()
assert 1 == e.value.code
msg = "Scenario 'invalid' not found. Exiting."
patched_logger_critical.assert_called_once_with(msg)
def test_filter_for_scenario(_instance):
_instance._scenario_name = 'default'
result = _instance._filter_for_scenario()
assert 1 == len(result)
assert 'default' == result[0].name
_instance._scenario_name = 'invalid'
result = _instance._filter_for_scenario()
assert [] == result
def test_get_matrix(_instance):
matrix = {
'default': {
'lint': ['lint'],
'idempotence': ['idempotence'],
'syntax': ['syntax'],
'converge': [
'dependency',
'create',
'prepare',
'converge',
],
'cleanup': ['cleanup'],
'check': [
'dependency',
'cleanup',
'destroy',
'create',
'prepare',
'converge',
'check',
'cleanup',
'destroy',
],
'verify': ['verify'],
'create': [
'dependency',
'create',
'prepare',
],
'prepare': ['prepare'],
'side_effect': ['side_effect'],
'dependency': ['dependency'],
'test': [
'lint',
'dependency',
'cleanup',
'destroy',
'syntax',
'create',
'prepare',
'converge',
'idempotence',
'side_effect',
'verify',
'cleanup',
'destroy',
],
'destroy': ['dependency', 'cleanup', 'destroy']
},
'foo': {
'lint': ['lint'],
'idempotence': ['idempotence'],
'syntax': ['syntax'],
'converge': [
'dependency',
'create',
'prepare',
'converge',
],
'check': [
'dependency',
'cleanup',
'destroy',
'create',
'prepare',
'converge',
'check',
'cleanup',
'destroy',
],
'cleanup': ['cleanup'],
'create': [
'dependency',
'create',
'prepare',
],
'verify': ['verify'],
'prepare': ['prepare'],
'side_effect': ['side_effect'],
'dependency': ['dependency'],
'test': [
'lint',
'dependency',
'cleanup',
'destroy',
'syntax',
'create',
'prepare',
'converge',
'idempotence',
'side_effect',
'verify',
'cleanup',
'destroy',
],
'destroy': ['dependency', 'cleanup', 'destroy']
}
}
assert matrix == _instance._get_matrix()
|
import errno
import os
import shutil
import socket
import ssl
import subprocess
import sys
import tempfile
import threading
import time
from configparser import RawConfigParser
from urllib import request
from urllib.error import HTTPError, URLError
import pytest
from radicale import config, server
from radicale.tests import BaseTest
from radicale.tests.helpers import configuration_to_dict, get_file_path
class DisabledRedirectHandler(request.HTTPRedirectHandler):
def http_error_302(self, req, fp, code, msg, headers):
raise HTTPError(req.full_url, code, msg, headers, fp)
http_error_301 = http_error_303 = http_error_307 = http_error_302
class TestBaseServerRequests(BaseTest):
"""Test the internal server."""
def setup(self):
self.configuration = config.load()
self.colpath = tempfile.mkdtemp()
self.shutdown_socket, shutdown_socket_out = socket.socketpair()
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
# Find available port
sock.bind(("127.0.0.1", 0))
self.sockname = sock.getsockname()
self.configuration.update({
"storage": {"filesystem_folder": self.colpath,
# Disable syncing to disk for better performance
"_filesystem_fsync": "False"},
"server": {"hosts": "[%s]:%d" % self.sockname},
# Enable debugging for new processes
"logging": {"level": "debug"}},
"test", privileged=True)
self.thread = threading.Thread(target=server.serve, args=(
self.configuration, shutdown_socket_out))
ssl_context = ssl.create_default_context()
ssl_context.check_hostname = False
ssl_context.verify_mode = ssl.CERT_NONE
self.opener = request.build_opener(
request.HTTPSHandler(context=ssl_context),
DisabledRedirectHandler)
def teardown(self):
self.shutdown_socket.close()
try:
self.thread.join()
except RuntimeError: # Thread never started
pass
shutil.rmtree(self.colpath)
def request(self, method, path, data=None, is_alive_fn=None, **headers):
"""Send a request."""
if is_alive_fn is None:
is_alive_fn = self.thread.is_alive
scheme = ("https" if self.configuration.get("server", "ssl") else
"http")
req = request.Request(
"%s://[%s]:%d%s" % (scheme, *self.sockname, path),
data=data, headers=headers, method=method)
while True:
assert is_alive_fn()
try:
with self.opener.open(req) as f:
return f.getcode(), f.info(), f.read().decode()
except HTTPError as e:
return e.code, e.headers, e.read().decode()
except URLError as e:
if not isinstance(e.reason, ConnectionRefusedError):
raise
time.sleep(0.1)
def test_root(self):
self.thread.start()
self.get("/", check=302)
def test_ssl(self):
self.configuration.update({
"server": {"ssl": "True",
"certificate": get_file_path("cert.pem"),
"key": get_file_path("key.pem")}}, "test")
self.thread.start()
self.get("/", check=302)
def test_bind_fail(self):
for address_family, address in [(socket.AF_INET, "::1"),
(socket.AF_INET6, "127.0.0.1")]:
with socket.socket(address_family, socket.SOCK_STREAM) as sock:
if address_family == socket.AF_INET6:
# Only allow IPv6 connections to the IPv6 socket
sock.setsockopt(server.COMPAT_IPPROTO_IPV6,
socket.IPV6_V6ONLY, 1)
with pytest.raises(OSError) as exc_info:
sock.bind((address, 0))
# See ``radicale.server.serve``
assert (isinstance(exc_info.value, socket.gaierror) and
exc_info.value.errno in (
socket.EAI_NONAME, server.COMPAT_EAI_ADDRFAMILY,
server.COMPAT_EAI_NODATA) or
str(exc_info.value) == "address family mismatched" or
exc_info.value.errno in (
errno.EADDRNOTAVAIL, errno.EAFNOSUPPORT,
errno.EPROTONOSUPPORT))
def test_ipv6(self):
try:
with socket.socket(socket.AF_INET6, socket.SOCK_STREAM) as sock:
# Only allow IPv6 connections to the IPv6 socket
sock.setsockopt(
server.COMPAT_IPPROTO_IPV6, socket.IPV6_V6ONLY, 1)
# Find available port
sock.bind(("::1", 0))
self.sockname = sock.getsockname()[:2]
except OSError as e:
if e.errno in (errno.EADDRNOTAVAIL, errno.EAFNOSUPPORT,
errno.EPROTONOSUPPORT):
pytest.skip("IPv6 not supported")
raise
self.configuration.update({
"server": {"hosts": "[%s]:%d" % self.sockname}}, "test")
self.thread.start()
self.get("/", check=302)
def test_command_line_interface(self):
config_args = []
for section, values in config.DEFAULT_CONFIG_SCHEMA.items():
if section.startswith("_"):
continue
for option, data in values.items():
if option.startswith("_"):
continue
long_name = "--%s-%s" % (section, option.replace("_", "-"))
if data["type"] == bool:
if not self.configuration.get(section, option):
long_name = "--no%s" % long_name[1:]
config_args.append(long_name)
else:
config_args.append(long_name)
config_args.append(
self.configuration.get_raw(section, option))
p = subprocess.Popen(
[sys.executable, "-m", "radicale"] + config_args,
env={**os.environ, "PYTHONPATH": os.pathsep.join(sys.path)})
try:
self.get("/", is_alive_fn=lambda: p.poll() is None, check=302)
finally:
p.terminate()
p.wait()
if os.name == "posix":
assert p.returncode == 0
def test_wsgi_server(self):
config_path = os.path.join(self.colpath, "config")
parser = RawConfigParser()
parser.read_dict(configuration_to_dict(self.configuration))
with open(config_path, "w") as f:
parser.write(f)
env = os.environ.copy()
env["PYTHONPATH"] = os.pathsep.join(sys.path)
env["RADICALE_CONFIG"] = config_path
p = subprocess.Popen([
sys.executable, "-m", "waitress",
"--listen", self.configuration.get_raw("server", "hosts"),
"radicale:application"], env=env)
try:
self.get("/", is_alive_fn=lambda: p.poll() is None, check=302)
finally:
p.terminate()
p.wait()
|
from __future__ import absolute_import
import unittest, gc
from .common_imports import etree, HelperTestCase, _bytes, BytesIO
xml_str = _bytes('''\
<root xmlns="myNS" xmlns:other="otherNS">
<c1 a1="A1" a2="A2" other:a3="A3">
<c2 a1="C2">0</c2>
<c2>1</c2>
<other:c2>2</other:c2>
</c1>
</root>''')
class ProxyTestCase(HelperTestCase):
"""Basic tests for element proxy behaviour.
"""
etree = etree
def test_proxy_reuse(self):
root = etree.XML('<a><b><c/></b></a>')
b = root.find('b')
self.assertTrue(b is root[0])
def test_proxy_reuse_after_gc(self):
root = etree.XML('<a><b><c/></b></a>')
b = root.find('b')
self.assertTrue(self.etree.iselement(b))
gc.collect()
self.assertTrue(b is root[0])
def test_proxy_reuse_after_del_root(self):
root = etree.XML('<a><b><c/></b></a>')
b = root.find('b')
self.assertTrue(self.etree.iselement(b))
c = b.find('c')
self.assertTrue(self.etree.iselement(c))
del root
gc.collect()
self.assertTrue(b[0] is c)
def test_proxy_hashing(self):
root = etree.XML('<a><b><c/></b></a>')
old_elements = set(root.iter())
elements = root.iter()
del root
gc.collect()
missing = len(old_elements)
self.assertEqual(3, missing)
for new in elements:
for old in old_elements:
if old == new:
self.assertTrue(old is new)
missing -= 1
break
else:
self.assertTrue(False, "element '%s' is missing" % new.tag)
self.assertEqual(0, missing)
def test_element_base(self):
el = self.etree.ElementBase()
self.assertEqual('ElementBase', el.tag)
root = self.etree.ElementBase()
root.append(el)
self.assertEqual('ElementBase', root[0].tag)
def test_element_base_children(self):
el = self.etree.ElementBase(etree.ElementBase())
self.assertEqual('ElementBase', el.tag)
self.assertEqual(1, len(el))
self.assertEqual('ElementBase', el[0].tag)
root = self.etree.ElementBase()
root.append(el)
self.assertEqual('ElementBase', root[0].tag)
self.assertEqual('ElementBase', root[0][0].tag)
def test_comment_base(self):
el = self.etree.CommentBase('some text')
self.assertEqual(self.etree.Comment, el.tag)
self.assertEqual('some text', el.text)
root = self.etree.Element('root')
root.append(el)
self.assertEqual('some text', root[0].text)
def test_pi_base(self):
el = self.etree.PIBase('the target', 'some text')
self.assertEqual(self.etree.ProcessingInstruction, el.tag)
self.assertEqual('some text', el.text)
root = self.etree.Element('root')
root.append(el)
self.assertEqual('some text', root[0].text)
class ClassLookupTestCase(HelperTestCase):
"""Test cases for different Element class lookup mechanisms.
"""
etree = etree
def tearDown(self):
etree.set_element_class_lookup()
super(ClassLookupTestCase, self).tearDown()
def test_namespace_lookup(self):
class TestElement(etree.ElementBase):
FIND_ME = "namespace class"
lookup = etree.ElementNamespaceClassLookup()
etree.set_element_class_lookup(lookup)
ns = lookup.get_namespace("myNS")
ns[None] = TestElement
root = etree.XML(xml_str)
self.assertEqual(root.FIND_ME,
TestElement.FIND_ME)
self.assertEqual(root[0].FIND_ME,
TestElement.FIND_ME)
self.assertFalse(hasattr(root[0][-1], 'FIND_ME'))
def test_default_class_lookup(self):
class TestElement(etree.ElementBase):
FIND_ME = "default element"
class TestComment(etree.CommentBase):
FIND_ME = "default comment"
class TestPI(etree.PIBase):
FIND_ME = "default pi"
parser = etree.XMLParser()
lookup = etree.ElementDefaultClassLookup(
element=TestElement, comment=TestComment, pi=TestPI)
parser.set_element_class_lookup(lookup)
root = etree.XML(_bytes("""<?xml version='1.0'?>
<root>
<?myPI?>
<!-- hi -->
</root>
"""), parser)
self.assertEqual("default element", root.FIND_ME)
self.assertEqual("default pi", root[0].FIND_ME)
self.assertEqual("default comment", root[1].FIND_ME)
def test_default_class_lookup_pull_parser(self):
class TestElement(etree.ElementBase):
FIND_ME = "default element"
class TestComment(etree.CommentBase):
FIND_ME = "default comment"
class TestPI(etree.PIBase):
FIND_ME = "default pi"
parser = etree.XMLPullParser(events=('start', 'end', 'comment', 'pi'))
lookup = etree.ElementDefaultClassLookup(
element=TestElement, comment=TestComment, pi=TestPI)
parser.set_element_class_lookup(lookup)
events_seen = []
def add_events(events):
for ev, el in events:
events_seen.append((ev, el.FIND_ME))
parser.feed("""<?xml version='1.0'?>
<root>
<?myPI?>
""")
add_events(parser.read_events())
parser.feed("<!-- hi -->")
add_events(parser.read_events())
parser.feed("</root>")
root = parser.close()
add_events(parser.read_events())
self.assertEqual([
('start', "default element"),
('pi', "default pi"),
('comment', "default comment"),
('end', "default element"),
], events_seen)
self.assertEqual("default element", root.FIND_ME)
self.assertEqual("default pi", root[0].FIND_ME)
self.assertEqual("default comment", root[1].FIND_ME)
def test_evil_class_lookup(self):
class MyLookup(etree.CustomElementClassLookup):
def lookup(self, t, d, ns, name):
if name == 'none':
return None
elif name == 'obj':
return object()
else:
return etree.ElementBase
parser = etree.XMLParser()
parser.set_element_class_lookup(MyLookup())
root = etree.XML(_bytes('<none/>'), parser)
self.assertEqual('none', root.tag)
self.assertRaises(
TypeError,
etree.XML, _bytes("<obj />"), parser)
root = etree.XML(_bytes('<root/>'), parser)
self.assertEqual('root', root.tag)
def test_class_lookup_type_mismatch(self):
class MyLookup(etree.CustomElementClassLookup):
def lookup(self, t, d, ns, name):
if t == 'element':
if name == 'root':
return etree.ElementBase
return etree.CommentBase
elif t == 'comment':
return etree.PIBase
elif t == 'PI':
return etree.EntityBase
elif t == 'entity':
return etree.ElementBase
else:
raise ValueError('got type %s' % t)
parser = etree.XMLParser(resolve_entities=False)
parser.set_element_class_lookup(MyLookup())
root = etree.XML(_bytes('<root></root>'), parser)
self.assertEqual('root', root.tag)
self.assertEqual(etree.ElementBase, type(root))
root = etree.XML(_bytes("<root><test/></root>"), parser)
self.assertRaises(TypeError, root.__getitem__, 0)
root = etree.XML(_bytes("<root><!-- test --></root>"), parser)
self.assertRaises(TypeError, root.__getitem__, 0)
root = etree.XML(_bytes("<root><?test?></root>"), parser)
self.assertRaises(TypeError, root.__getitem__, 0)
root = etree.XML(
_bytes('<!DOCTYPE root [<!ENTITY myent "ent">]>'
'<root>&myent;</root>'),
parser)
self.assertRaises(TypeError, root.__getitem__, 0)
root = etree.XML(_bytes('<root><root/></root>'), parser)
self.assertEqual('root', root[0].tag)
def test_attribute_based_lookup(self):
class TestElement(etree.ElementBase):
FIND_ME = "attribute_based"
class_dict = {"A1" : TestElement}
lookup = etree.AttributeBasedElementClassLookup(
"a1", class_dict)
etree.set_element_class_lookup(lookup)
root = etree.XML(xml_str)
self.assertFalse(hasattr(root, 'FIND_ME'))
self.assertEqual(root[0].FIND_ME,
TestElement.FIND_ME)
self.assertFalse(hasattr(root[0][0], 'FIND_ME'))
def test_custom_lookup(self):
class TestElement(etree.ElementBase):
FIND_ME = "custom"
class MyLookup(etree.CustomElementClassLookup):
def lookup(self, t, d, ns, name):
if name == 'c1':
return TestElement
etree.set_element_class_lookup( MyLookup() )
root = etree.XML(xml_str)
self.assertFalse(hasattr(root, 'FIND_ME'))
self.assertEqual(root[0].FIND_ME,
TestElement.FIND_ME)
self.assertFalse(hasattr(root[0][1], 'FIND_ME'))
def test_custom_lookup_ns_fallback(self):
class TestElement1(etree.ElementBase):
FIND_ME = "custom"
class TestElement2(etree.ElementBase):
FIND_ME = "nsclasses"
class MyLookup(etree.CustomElementClassLookup):
def lookup(self, t, d, ns, name):
if name == 'c1':
return TestElement1
lookup = etree.ElementNamespaceClassLookup( MyLookup() )
etree.set_element_class_lookup(lookup)
ns = lookup.get_namespace("otherNS")
ns[None] = TestElement2
root = etree.XML(xml_str)
self.assertFalse(hasattr(root, 'FIND_ME'))
self.assertEqual(root[0].FIND_ME,
TestElement1.FIND_ME)
self.assertFalse(hasattr(root[0][1], 'FIND_ME'))
self.assertEqual(root[0][-1].FIND_ME,
TestElement2.FIND_ME)
def test_parser_based_lookup(self):
class TestElement(etree.ElementBase):
FIND_ME = "parser_based"
lookup = etree.ParserBasedElementClassLookup()
etree.set_element_class_lookup(lookup)
class MyLookup(etree.CustomElementClassLookup):
def lookup(self, t, d, ns, name):
return TestElement
parser = etree.XMLParser()
parser.set_element_class_lookup( MyLookup() )
root = etree.parse(BytesIO(xml_str), parser).getroot()
self.assertEqual(root.FIND_ME,
TestElement.FIND_ME)
self.assertEqual(root[0].FIND_ME,
TestElement.FIND_ME)
root = etree.parse(BytesIO(xml_str)).getroot()
self.assertFalse(hasattr(root, 'FIND_ME'))
self.assertFalse(hasattr(root[0], 'FIND_ME'))
def test_class_lookup_reentry(self):
XML = self.etree.XML
class TestElement(etree.ElementBase):
FIND_ME = "here"
root = None
class MyLookup(etree.CustomElementClassLookup):
el = None
def lookup(self, t, d, ns, name):
if root is not None: # not in the parser
if self.el is None and name == "a":
self.el = []
self.el.append(root.find(name))
return TestElement
parser = self.etree.XMLParser()
parser.set_element_class_lookup(MyLookup())
root = XML(_bytes('<root><a>A</a><b xmlns="test">B</b></root>'),
parser)
a = root[0]
self.assertEqual(a.tag, "a")
self.assertEqual(root[0].tag, "a")
del a
self.assertEqual(root[0].tag, "a")
def test_lookup_without_fallback(self):
class Lookup(etree.CustomElementClassLookup):
def __init__(self):
# no super call here, so no fallback is set
pass
def lookup(self, node_type, document, namespace, name):
return Foo
class Foo(etree.ElementBase):
def custom(self):
return "test"
parser = self.etree.XMLParser()
parser.set_element_class_lookup( Lookup() )
root = etree.XML('<foo/>', parser)
self.assertEqual("test", root.custom())
def test_suite():
suite = unittest.TestSuite()
suite.addTests([unittest.makeSuite(ProxyTestCase)])
suite.addTests([unittest.makeSuite(ClassLookupTestCase)])
return suite
if __name__ == '__main__':
print('to test use test.py %s' % __file__)
|
from marshmallow import fields
from lemur.common.fields import ArrowDateTime
from lemur.common.schema import LemurInputSchema, LemurOutputSchema
class DnsProvidersNestedOutputSchema(LemurOutputSchema):
__envelope__ = False
id = fields.Integer()
name = fields.String()
provider_type = fields.String()
description = fields.String()
credentials = fields.String()
api_endpoint = fields.String()
date_created = ArrowDateTime()
class DnsProvidersNestedInputSchema(LemurInputSchema):
__envelope__ = False
name = fields.String()
description = fields.String()
provider_type = fields.Dict()
dns_provider_output_schema = DnsProvidersNestedOutputSchema()
dns_provider_input_schema = DnsProvidersNestedInputSchema()
|
import os
import pandas as pd
from qstrader.asset.equity import Equity
from qstrader.broker.simulated_broker import SimulatedBroker
from qstrader.broker.fee_model.zero_fee_model import ZeroFeeModel
from qstrader.data.backtest_data_handler import BacktestDataHandler
from qstrader.data.daily_bar_csv import CSVDailyBarDataSource
from qstrader.exchange.simulated_exchange import SimulatedExchange
from qstrader.simulation.daily_bday import DailyBusinessDaySimulationEngine
from qstrader.system.qts import QuantTradingSystem
from qstrader.system.rebalance.buy_and_hold import BuyAndHoldRebalance
from qstrader.system.rebalance.daily import DailyRebalance
from qstrader.system.rebalance.end_of_month import EndOfMonthRebalance
from qstrader.system.rebalance.weekly import WeeklyRebalance
from qstrader.trading.trading_session import TradingSession
from qstrader import settings
DEFAULT_ACCOUNT_NAME = 'Backtest Simulated Broker Account'
DEFAULT_PORTFOLIO_ID = '000001'
DEFAULT_PORTFOLIO_NAME = 'Backtest Simulated Broker Portfolio'
class BacktestTradingSession(TradingSession):
"""
    Encapsulates a full trading simulation backtest with externally
provided instances for each module.
Utilises sensible defaults to allow straightforward backtesting of
less complex trading strategies.
Parameters
----------
start_dt : `pd.Timestamp`
The starting datetime (UTC) of the backtest.
end_dt : `pd.Timestamp`
The ending datetime (UTC) of the backtest.
universe : `Universe`
The Asset Universe to utilise for the backtest.
alpha_model : `AlphaModel`
The signal/forecast alpha model for the quant trading strategy.
risk_model : `RiskModel`
The optional risk model for the quant trading strategy.
signals : `SignalsCollection`, optional
An optional collection of signals used in the trading models.
initial_cash : `float`, optional
The initial account equity (defaults to $1MM)
rebalance : `str`, optional
The rebalance frequency of the backtest, defaulting to 'weekly'.
account_name : `str`, optional
The name of the simulated broker account.
portfolio_id : `str`, optional
The ID of the portfolio being used for the backtest.
portfolio_name : `str`, optional
The name of the portfolio being used for the backtest.
long_only : `Boolean`, optional
Whether to invoke the long only order sizer or allow
long/short leveraged portfolios. Defaults to long/short leveraged.
fee_model : `FeeModel` class instance, optional
The optional FeeModel derived subclass to use for transaction cost estimates.
burn_in_dt : `pd.Timestamp`, optional
The optional date provided to begin tracking strategy statistics,
which is used for strategies requiring a period of data 'burn in'
"""
def __init__(
self,
start_dt,
end_dt,
universe,
alpha_model,
risk_model=None,
signals=None,
initial_cash=1e6,
rebalance='weekly',
account_name=DEFAULT_ACCOUNT_NAME,
portfolio_id=DEFAULT_PORTFOLIO_ID,
portfolio_name=DEFAULT_PORTFOLIO_NAME,
long_only=False,
fee_model=ZeroFeeModel(),
burn_in_dt=None,
data_handler=None,
**kwargs
):
self.start_dt = start_dt
self.end_dt = end_dt
self.universe = universe
self.alpha_model = alpha_model
self.risk_model = risk_model
self.signals = signals
self.initial_cash = initial_cash
self.rebalance = rebalance
self.account_name = account_name
self.portfolio_id = portfolio_id
self.portfolio_name = portfolio_name
self.long_only = long_only
self.fee_model = fee_model
self.burn_in_dt = burn_in_dt
self.exchange = self._create_exchange()
self.data_handler = self._create_data_handler(data_handler)
self.broker = self._create_broker()
self.sim_engine = self._create_simulation_engine()
if rebalance == 'weekly':
if 'rebalance_weekday' in kwargs:
self.rebalance_weekday = kwargs['rebalance_weekday']
else:
raise ValueError(
"Rebalance frequency was set to 'weekly' but no specific "
"weekday was provided. Try adding the 'rebalance_weekday' "
"keyword argument to the instantiation of "
"BacktestTradingSession, e.g. with 'WED'."
)
self.rebalance_schedule = self._create_rebalance_event_times()
self.qts = self._create_quant_trading_system(**kwargs)
self.equity_curve = []
self.target_allocations = []
def _is_rebalance_event(self, dt):
"""
Checks if the provided timestamp is part of the rebalance
schedule of the backtest.
Parameters
----------
dt : `pd.Timestamp`
The timestamp to check the rebalance schedule for.
Returns
-------
`Boolean`
Whether the timestamp is part of the rebalance schedule.
"""
return dt in self.rebalance_schedule
def _create_exchange(self):
"""
Generates a simulated exchange instance used for
market hours and holiday calendar checks.
Returns
-------
        `SimulatedExchange`
The simulated exchange instance.
"""
return SimulatedExchange(self.start_dt)
def _create_data_handler(self, data_handler):
"""
Creates a DataHandler instance to load the asset pricing data
used within the backtest.
TODO: Currently defaults to CSV data sources of daily bar data in
the YahooFinance format.
Parameters
----------
        data_handler : `BacktestDataHandler` or None
The (potential) backtesting data handler instance.
Returns
-------
`BacktestDataHandler`
The backtesting data handler instance.
"""
if data_handler is not None:
return data_handler
try:
os.environ['QSTRADER_CSV_DATA_DIR']
except KeyError:
if settings.PRINT_EVENTS:
print(
"The QSTRADER_CSV_DATA_DIR environment variable has not been set. "
"This means that QSTrader will fall back to finding data within the "
"current directory where the backtest has been executed. However "
"it is strongly recommended that a QSTRADER_CSV_DATA_DIR environment "
"variable is set for future backtests."
)
csv_dir = '.'
else:
csv_dir = os.environ.get('QSTRADER_CSV_DATA_DIR')
# TODO: Only equities are supported by QSTrader for now.
data_source = CSVDailyBarDataSource(csv_dir, Equity)
data_handler = BacktestDataHandler(
self.universe, data_sources=[data_source]
)
return data_handler
def _create_broker(self):
"""
        Create the SimulatedBroker with appropriate default
portfolio identifiers.
Returns
-------
`SimulatedBroker`
The simulated broker instance.
"""
broker = SimulatedBroker(
self.start_dt,
self.exchange,
self.data_handler,
account_id=self.account_name,
initial_funds=self.initial_cash,
fee_model=self.fee_model
)
broker.create_portfolio(self.portfolio_id, self.portfolio_name)
broker.subscribe_funds_to_portfolio(self.portfolio_id, self.initial_cash)
return broker
def _create_simulation_engine(self):
"""
Create a simulation engine instance to generate the events
used for the quant trading algorithm to act upon.
TODO: Currently hardcoded to daily events
Returns
-------
`SimulationEngine`
The simulation engine generating simulation timestamps.
"""
return DailyBusinessDaySimulationEngine(
self.start_dt, self.end_dt, pre_market=False, post_market=False
)
def _create_rebalance_event_times(self):
"""
Creates the list of rebalance timestamps used to determine when
to execute the quant trading strategy throughout the backtest.
Returns
-------
`List[pd.Timestamp]`
The list of rebalance timestamps.
"""
if self.rebalance == 'buy_and_hold':
rebalancer = BuyAndHoldRebalance(self.start_dt)
elif self.rebalance == 'daily':
rebalancer = DailyRebalance(
self.start_dt, self.end_dt
)
elif self.rebalance == 'weekly':
rebalancer = WeeklyRebalance(
self.start_dt, self.end_dt, self.rebalance_weekday
)
elif self.rebalance == 'end_of_month':
rebalancer = EndOfMonthRebalance(self.start_dt, self.end_dt)
else:
raise ValueError(
'Unknown rebalance frequency "%s" provided.' % self.rebalance
)
return rebalancer.rebalances
def _create_quant_trading_system(self, **kwargs):
"""
Creates the quantitative trading system with the provided
alpha model.
TODO: All portfolio construction/optimisation is hardcoded for
sensible defaults.
Returns
-------
`QuantTradingSystem`
The quantitative trading system.
"""
if self.long_only:
if 'cash_buffer_percentage' not in kwargs:
raise ValueError(
'Long only portfolio specified for Quant Trading System '
'but no cash buffer percentage supplied.'
)
cash_buffer_percentage = kwargs['cash_buffer_percentage']
qts = QuantTradingSystem(
self.universe,
self.broker,
self.portfolio_id,
self.data_handler,
self.alpha_model,
self.risk_model,
long_only=self.long_only,
cash_buffer_percentage=cash_buffer_percentage,
submit_orders=True
)
else:
if 'gross_leverage' not in kwargs:
raise ValueError(
'Long/short leveraged portfolio specified for Quant '
'Trading System but no gross leverage percentage supplied.'
)
gross_leverage = kwargs['gross_leverage']
qts = QuantTradingSystem(
self.universe,
self.broker,
self.portfolio_id,
self.data_handler,
self.alpha_model,
self.risk_model,
long_only=self.long_only,
gross_leverage=gross_leverage,
submit_orders=True
)
return qts
def _update_equity_curve(self, dt):
"""
Update the equity curve values.
Parameters
----------
dt : `pd.Timestamp`
The time at which the total account equity is obtained.
"""
self.equity_curve.append(
(dt, self.broker.get_account_total_equity()["master"])
)
def output_holdings(self):
"""
Output the portfolio holdings to the console.
"""
self.broker.portfolios[self.portfolio_id].holdings_to_console()
def get_equity_curve(self):
"""
Returns the equity curve as a Pandas DataFrame.
Returns
-------
`pd.DataFrame`
The datetime-indexed equity curve of the strategy.
"""
equity_df = pd.DataFrame(
self.equity_curve, columns=['Date', 'Equity']
).set_index('Date')
equity_df.index = equity_df.index.date
return equity_df
def get_target_allocations(self):
"""
Returns the target allocations as a Pandas DataFrame
utilising the same index as the equity curve with
forward-filled dates.
Returns
-------
`pd.DataFrame`
The datetime-indexed target allocations of the strategy.
"""
equity_curve = self.get_equity_curve()
alloc_df = pd.DataFrame(self.target_allocations).set_index('Date')
alloc_df.index = alloc_df.index.date
alloc_df = alloc_df.reindex(index=equity_curve.index, method='ffill')
if self.burn_in_dt is not None:
alloc_df = alloc_df[self.burn_in_dt:]
return alloc_df
def run(self, results=False):
"""
Execute the simulation engine by iterating over all
simulation events, rebalancing the quant trading
system at the appropriate schedule.
Parameters
----------
results : `Boolean`, optional
Whether to output the current portfolio holdings
"""
if settings.PRINT_EVENTS:
print("Beginning backtest simulation...")
stats = {'target_allocations': []}
for event in self.sim_engine:
# Output the system event and timestamp
dt = event.ts
if settings.PRINT_EVENTS:
print("(%s) - %s" % (event.ts, event.event_type))
# Update the simulated broker
self.broker.update(dt)
# Update any signals on a daily basis
if self.signals is not None and event.event_type == "market_close":
self.signals.update(dt)
# If we have hit a rebalance time then carry
# out a full run of the quant trading system
if self._is_rebalance_event(dt):
if settings.PRINT_EVENTS:
print("(%s) - trading logic and rebalance" % event.ts)
self.qts(dt, stats=stats)
# Out of market hours we want a daily
# performance update, but only if we
# are past the 'burn in' period
if event.event_type == "market_close":
if self.burn_in_dt is not None:
if dt >= self.burn_in_dt:
self._update_equity_curve(dt)
else:
self._update_equity_curve(dt)
self.target_allocations = stats['target_allocations']
# At the end of the simulation output the
# portfolio holdings if desired
if results:
self.output_holdings()
if settings.PRINT_EVENTS:
print("Ending backtest simulation.")
|
from behave import given
from behave import then
from behave import when
from paasta_tools.cli.utils import x_mark
from paasta_tools.utils import _run
@given('a "{service_type}" service')
@given('an "{service_type}" service')
def given_service(context, service_type):
context.service = "fake_%s_service" % service_type
context.soa_dir = "fake_soa_configs_validate"
@when("we run paasta validate")
def run_paasta_validate(context):
validate_cmd = (
"paasta validate "
"--yelpsoa-config-root %s "
"--service %s " % (context.soa_dir, context.service)
)
context.return_code, context.output = _run(command=validate_cmd)
@then('it should have a return code of "{code:d}"')
def see_expected_return_code(context, code):
print(context.output)
print(context.return_code)
print()
assert context.return_code == code
@then("everything should pass")
def validate_status_all_pass(context):
assert not context.output or x_mark() not in context.output
@then("it should report an error in the output")
def validate_status_something_fail(context):
print(context.output)
assert "Successfully validated schema" not in context.output
|
import os.path as op
import shutil
import os
import datetime as dt
import pytest
from numpy.testing import assert_allclose, assert_array_equal
from mne import pick_types
from mne.datasets.testing import data_path, requires_testing_data
from mne.io import read_raw_nirx
from mne.io.tests.test_raw import _test_raw_reader
from mne.transforms import apply_trans, _get_trans
from mne.utils import run_tests_if_main
from mne.preprocessing.nirs import source_detector_distances,\
short_channels
from mne.io.constants import FIFF
fname_nirx_15_0 = op.join(data_path(download=False),
'NIRx', 'nirscout', 'nirx_15_0_recording')
fname_nirx_15_2 = op.join(data_path(download=False),
'NIRx', 'nirscout', 'nirx_15_2_recording')
fname_nirx_15_2_short = op.join(data_path(download=False),
'NIRx', 'nirscout',
'nirx_15_2_recording_w_short')
fname_nirx_15_3_short = op.join(data_path(download=False),
'NIRx', 'nirscout', 'nirx_15_3_recording')
@requires_testing_data
def test_nirx_hdr_load():
"""Test reading NIRX files using path to header file."""
fname = fname_nirx_15_2_short + "/NIRS-2019-08-23_001.hdr"
raw = read_raw_nirx(fname, preload=True)
# Test data import
assert raw._data.shape == (26, 145)
assert raw.info['sfreq'] == 12.5
@requires_testing_data
def test_nirx_missing_warn():
"""Test reading NIRX files when missing data."""
with pytest.raises(FileNotFoundError, match='The path you'):
read_raw_nirx(fname_nirx_15_2_short + "1", preload=True)
@requires_testing_data
def test_nirx_missing_evt(tmpdir):
"""Test reading NIRX files when missing data."""
shutil.copytree(fname_nirx_15_2_short, str(tmpdir) + "/data/")
os.rename(str(tmpdir) + "/data" + "/NIRS-2019-08-23_001.evt",
str(tmpdir) + "/data" + "/NIRS-2019-08-23_001.xxx")
fname = str(tmpdir) + "/data" + "/NIRS-2019-08-23_001.hdr"
raw = read_raw_nirx(fname, preload=True)
assert raw.annotations.onset.shape == (0, )
@requires_testing_data
def test_nirx_dat_warn(tmpdir):
"""Test reading NIRX files when missing data."""
shutil.copytree(fname_nirx_15_2_short, str(tmpdir) + "/data/")
os.rename(str(tmpdir) + "/data" + "/NIRS-2019-08-23_001.dat",
str(tmpdir) + "/data" + "/NIRS-2019-08-23_001.tmp")
fname = str(tmpdir) + "/data" + "/NIRS-2019-08-23_001.hdr"
with pytest.raises(RuntimeWarning, match='A single dat'):
read_raw_nirx(fname, preload=True)
@requires_testing_data
def test_nirx_15_2_short():
"""Test reading NIRX files."""
raw = read_raw_nirx(fname_nirx_15_2_short, preload=True)
# Test data import
assert raw._data.shape == (26, 145)
assert raw.info['sfreq'] == 12.5
assert raw.info['meas_date'] == dt.datetime(2019, 8, 23, 7, 37, 4, 540000,
tzinfo=dt.timezone.utc)
# Test channel naming
assert raw.info['ch_names'][:4] == ["S1_D1 760", "S1_D1 850",
"S1_D9 760", "S1_D9 850"]
assert raw.info['ch_names'][24:26] == ["S5_D13 760", "S5_D13 850"]
# Test frequency encoding
assert raw.info['chs'][0]['loc'][9] == 760
assert raw.info['chs'][1]['loc'][9] == 850
# Test info import
assert raw.info['subject_info'] == dict(sex=1, first_name="MNE",
middle_name="Test",
last_name="Recording",
birthday=(2014, 8, 23))
# Test distance between optodes matches values from
# nirsite https://github.com/mne-tools/mne-testing-data/pull/51
# step 4 figure 2
allowed_distance_error = 0.0002
distances = source_detector_distances(raw.info)
assert_allclose(distances[::2], [
0.0304, 0.0078, 0.0310, 0.0086, 0.0416,
0.0072, 0.0389, 0.0075, 0.0558, 0.0562,
0.0561, 0.0565, 0.0077], atol=allowed_distance_error)
# Test which channels are short
# These are the ones marked as red at
# https://github.com/mne-tools/mne-testing-data/pull/51 step 4 figure 2
is_short = short_channels(raw.info)
assert_array_equal(is_short[:9:2], [False, True, False, True, False])
is_short = short_channels(raw.info, threshold=0.003)
assert_array_equal(is_short[:3:2], [False, False])
is_short = short_channels(raw.info, threshold=50)
assert_array_equal(is_short[:3:2], [True, True])
# Test trigger events
assert_array_equal(raw.annotations.description, ['3.0', '2.0', '1.0'])
# Test location of detectors
# The locations of detectors can be seen in the first
# figure on this page...
# https://github.com/mne-tools/mne-testing-data/pull/51
# And have been manually copied below
# These values were reported in mm, but according to this page...
# https://mne.tools/stable/auto_tutorials/intro/plot_40_sensor_locations.html
# 3d locations should be specified in meters, so that's what's tested below
# Detector locations are stored in the third three loc values
allowed_dist_error = 0.0002
locs = [ch['loc'][6:9] for ch in raw.info['chs']]
head_mri_t, _ = _get_trans('fsaverage', 'head', 'mri')
mni_locs = apply_trans(head_mri_t, locs)
assert raw.info['ch_names'][0][3:5] == 'D1'
assert_allclose(
mni_locs[0], [-0.0841, -0.0464, -0.0129], atol=allowed_dist_error)
assert raw.info['ch_names'][4][3:5] == 'D3'
assert_allclose(
mni_locs[4], [0.0846, -0.0142, -0.0156], atol=allowed_dist_error)
assert raw.info['ch_names'][8][3:5] == 'D2'
assert_allclose(
mni_locs[8], [0.0207, -0.1062, 0.0484], atol=allowed_dist_error)
assert raw.info['ch_names'][12][3:5] == 'D4'
assert_allclose(
mni_locs[12], [-0.0196, 0.0821, 0.0275], atol=allowed_dist_error)
assert raw.info['ch_names'][16][3:5] == 'D5'
assert_allclose(
mni_locs[16], [-0.0360, 0.0276, 0.0778], atol=allowed_dist_error)
assert raw.info['ch_names'][19][3:5] == 'D6'
assert_allclose(
mni_locs[19], [0.0352, 0.0283, 0.0780], atol=allowed_dist_error)
assert raw.info['ch_names'][21][3:5] == 'D7'
assert_allclose(
mni_locs[21], [0.0388, -0.0477, 0.0932], atol=allowed_dist_error)
@requires_testing_data
def test_nirx_15_3_short():
"""Test reading NIRX files."""
raw = read_raw_nirx(fname_nirx_15_3_short, preload=True)
# Test data import
assert raw._data.shape == (26, 220)
assert raw.info['sfreq'] == 12.5
# Test channel naming
assert raw.info['ch_names'][:4] == ["S1_D2 760", "S1_D2 850",
"S1_D9 760", "S1_D9 850"]
assert raw.info['ch_names'][24:26] == ["S5_D13 760", "S5_D13 850"]
# Test frequency encoding
assert raw.info['chs'][0]['loc'][9] == 760
assert raw.info['chs'][1]['loc'][9] == 850
# Test info import
assert raw.info['subject_info'] == dict(birthday=(2020, 8, 18),
sex=0,
first_name="testMontage\\0A"
"TestMontage")
# Test distance between optodes matches values from
# https://github.com/mne-tools/mne-testing-data/pull/72
allowed_distance_error = 0.001
distances = source_detector_distances(raw.info)
assert_allclose(distances[::2], [
0.0304, 0.0078, 0.0310, 0.0086, 0.0416,
0.0072, 0.0389, 0.0075, 0.0558, 0.0562,
0.0561, 0.0565, 0.0077], atol=allowed_distance_error)
# Test which channels are short
# These are the ones marked as red at
# https://github.com/mne-tools/mne-testing-data/pull/72
is_short = short_channels(raw.info)
assert_array_equal(is_short[:9:2], [False, True, False, True, False])
is_short = short_channels(raw.info, threshold=0.003)
assert_array_equal(is_short[:3:2], [False, False])
is_short = short_channels(raw.info, threshold=50)
assert_array_equal(is_short[:3:2], [True, True])
# Test trigger events
assert_array_equal(raw.annotations.description, ['4.0', '2.0', '1.0'])
# Test location of detectors
# The locations of detectors can be seen in the first
# figure on this page...
# https://github.com/mne-tools/mne-testing-data/pull/72
# And have been manually copied below
allowed_dist_error = 0.0002
locs = [ch['loc'][6:9] for ch in raw.info['chs']]
head_mri_t, _ = _get_trans('fsaverage', 'head', 'mri')
mni_locs = apply_trans(head_mri_t, locs)
assert raw.info['ch_names'][0][3:5] == 'D2'
assert_allclose(
mni_locs[0], [-0.0841, -0.0464, -0.0129], atol=allowed_dist_error)
assert raw.info['ch_names'][4][3:5] == 'D1'
assert_allclose(
mni_locs[4], [0.0846, -0.0142, -0.0156], atol=allowed_dist_error)
assert raw.info['ch_names'][8][3:5] == 'D3'
assert_allclose(
mni_locs[8], [0.0207, -0.1062, 0.0484], atol=allowed_dist_error)
assert raw.info['ch_names'][12][3:5] == 'D4'
assert_allclose(
mni_locs[12], [-0.0196, 0.0821, 0.0275], atol=allowed_dist_error)
assert raw.info['ch_names'][16][3:5] == 'D5'
assert_allclose(
mni_locs[16], [-0.0360, 0.0276, 0.0778], atol=allowed_dist_error)
assert raw.info['ch_names'][19][3:5] == 'D6'
assert_allclose(
mni_locs[19], [0.0388, -0.0477, 0.0932], atol=allowed_dist_error)
assert raw.info['ch_names'][21][3:5] == 'D7'
assert_allclose(
mni_locs[21], [-0.0394, -0.0483, 0.0928], atol=allowed_dist_error)
@requires_testing_data
def test_encoding(tmpdir):
"""Test NIRx encoding."""
fname = str(tmpdir.join('latin'))
shutil.copytree(fname_nirx_15_2, fname)
hdr_fname = op.join(fname, 'NIRS-2019-10-02_003.hdr')
hdr = list()
with open(hdr_fname, 'rb') as fid:
hdr.extend(line for line in fid)
hdr[2] = b'Date="jeu. 13 f\xe9vr. 2020"\r\n'
with open(hdr_fname, 'wb') as fid:
for line in hdr:
fid.write(line)
# smoke test
with pytest.raises(RuntimeWarning, match='Extraction of measurement date'):
read_raw_nirx(fname)
@requires_testing_data
def test_nirx_15_2():
"""Test reading NIRX files."""
raw = read_raw_nirx(fname_nirx_15_2, preload=True)
# Test data import
assert raw._data.shape == (64, 67)
assert raw.info['sfreq'] == 3.90625
assert raw.info['meas_date'] == dt.datetime(2019, 10, 2, 9, 8, 47, 511000,
tzinfo=dt.timezone.utc)
# Test channel naming
assert raw.info['ch_names'][:4] == ["S1_D1 760", "S1_D1 850",
"S1_D10 760", "S1_D10 850"]
# Test info import
assert raw.info['subject_info'] == dict(sex=1, first_name="TestRecording",
birthday=(1989, 10, 2))
# Test trigger events
assert_array_equal(raw.annotations.description, ['4.0', '6.0', '2.0'])
print(raw.annotations.onset)
# Test location of detectors
allowed_dist_error = 0.0002
locs = [ch['loc'][6:9] for ch in raw.info['chs']]
head_mri_t, _ = _get_trans('fsaverage', 'head', 'mri')
mni_locs = apply_trans(head_mri_t, locs)
assert raw.info['ch_names'][0][3:5] == 'D1'
assert_allclose(
mni_locs[0], [-0.0292, 0.0852, -0.0142], atol=allowed_dist_error)
assert raw.info['ch_names'][15][3:5] == 'D4'
assert_allclose(
mni_locs[15], [-0.0739, -0.0756, -0.0075], atol=allowed_dist_error)
# Old name aliases for backward compat
assert 'fnirs_cw_amplitude' in raw
with pytest.raises(ValueError, match='Invalid value'):
'fnirs_raw' in raw
assert 'fnirs_od' not in raw
picks = pick_types(raw.info, fnirs='fnirs_cw_amplitude')
assert len(picks) > 0
@requires_testing_data
def test_nirx_15_0():
"""Test reading NIRX files."""
raw = read_raw_nirx(fname_nirx_15_0, preload=True)
# Test data import
assert raw._data.shape == (20, 92)
assert raw.info['sfreq'] == 6.25
assert raw.info['meas_date'] == dt.datetime(2019, 10, 27, 13, 53, 34,
209000,
tzinfo=dt.timezone.utc)
# Test channel naming
assert raw.info['ch_names'][:12] == ["S1_D1 760", "S1_D1 850",
"S2_D2 760", "S2_D2 850",
"S3_D3 760", "S3_D3 850",
"S4_D4 760", "S4_D4 850",
"S5_D5 760", "S5_D5 850",
"S6_D6 760", "S6_D6 850"]
# Test info import
assert raw.info['subject_info'] == {'birthday': (2004, 10, 27),
'first_name': 'NIRX',
'last_name': 'Test',
'sex': FIFF.FIFFV_SUBJ_SEX_UNKNOWN}
# Test trigger events
assert_array_equal(raw.annotations.description, ['1.0', '2.0', '2.0'])
# Test location of detectors
allowed_dist_error = 0.0002
locs = [ch['loc'][6:9] for ch in raw.info['chs']]
head_mri_t, _ = _get_trans('fsaverage', 'head', 'mri')
mni_locs = apply_trans(head_mri_t, locs)
assert raw.info['ch_names'][0][3:5] == 'D1'
assert_allclose(
mni_locs[0], [0.0287, -0.1143, -0.0332], atol=allowed_dist_error)
assert raw.info['ch_names'][15][3:5] == 'D8'
assert_allclose(
mni_locs[15], [-0.0693, -0.0480, 0.0657], atol=allowed_dist_error)
# Test distance between optodes matches values from
allowed_distance_error = 0.0002
distances = source_detector_distances(raw.info)
assert_allclose(distances[::2], [
0.0301, 0.0315, 0.0343, 0.0368, 0.0408,
0.0399, 0.0393, 0.0367, 0.0336, 0.0447], atol=allowed_distance_error)
@requires_testing_data
@pytest.mark.parametrize('fname, boundary_decimal', (
[fname_nirx_15_2_short, 1],
[fname_nirx_15_2, 0],
[fname_nirx_15_0, 0]
))
def test_nirx_standard(fname, boundary_decimal):
"""Test standard operations."""
_test_raw_reader(read_raw_nirx, fname=fname,
boundary_decimal=boundary_decimal) # low fs
run_tests_if_main()
|
import re
import requests_mock
from homeassistant.components.uk_transport.sensor import (
ATTR_ATCOCODE,
ATTR_CALLING_AT,
ATTR_LOCALITY,
ATTR_NEXT_BUSES,
ATTR_NEXT_TRAINS,
ATTR_STATION_CODE,
ATTR_STOP_NAME,
CONF_API_APP_ID,
CONF_API_APP_KEY,
UkTransportSensor,
)
from homeassistant.setup import async_setup_component
from homeassistant.util.dt import now
from tests.async_mock import patch
from tests.common import load_fixture
BUS_ATCOCODE = "340000368SHE"
BUS_DIRECTION = "Wantage"
TRAIN_STATION_CODE = "WIM"
TRAIN_DESTINATION_NAME = "WAT"
VALID_CONFIG = {
"sensor": {
"platform": "uk_transport",
CONF_API_APP_ID: "foo",
CONF_API_APP_KEY: "ebcd1234",
"queries": [
{"mode": "bus", "origin": BUS_ATCOCODE, "destination": BUS_DIRECTION},
{
"mode": "train",
"origin": TRAIN_STATION_CODE,
"destination": TRAIN_DESTINATION_NAME,
},
],
}
}
async def test_bus(hass):
"""Test for operational uk_transport sensor with proper attributes."""
with requests_mock.Mocker() as mock_req:
uri = re.compile(UkTransportSensor.TRANSPORT_API_URL_BASE + "*")
mock_req.get(uri, text=load_fixture("uk_transport_bus.json"))
assert await async_setup_component(hass, "sensor", VALID_CONFIG)
await hass.async_block_till_done()
bus_state = hass.states.get("sensor.next_bus_to_wantage")
assert None is not bus_state
assert f"Next bus to {BUS_DIRECTION}" == bus_state.name
assert BUS_ATCOCODE == bus_state.attributes[ATTR_ATCOCODE]
assert "Harwell Campus" == bus_state.attributes[ATTR_LOCALITY]
assert "Bus Station" == bus_state.attributes[ATTR_STOP_NAME]
assert 2 == len(bus_state.attributes.get(ATTR_NEXT_BUSES))
direction_re = re.compile(BUS_DIRECTION)
for bus in bus_state.attributes.get(ATTR_NEXT_BUSES):
assert None is not bus
assert None is not direction_re.search(bus["direction"])
async def test_train(hass):
"""Test for operational uk_transport sensor with proper attributes."""
with requests_mock.Mocker() as mock_req, patch(
"homeassistant.util.dt.now", return_value=now().replace(hour=13)
):
uri = re.compile(UkTransportSensor.TRANSPORT_API_URL_BASE + "*")
mock_req.get(uri, text=load_fixture("uk_transport_train.json"))
assert await async_setup_component(hass, "sensor", VALID_CONFIG)
await hass.async_block_till_done()
        train_state = hass.states.get("sensor.next_train_to_wat")
assert None is not train_state
assert f"Next train to {TRAIN_DESTINATION_NAME}" == train_state.name
assert TRAIN_STATION_CODE == train_state.attributes[ATTR_STATION_CODE]
assert TRAIN_DESTINATION_NAME == train_state.attributes[ATTR_CALLING_AT]
assert 25 == len(train_state.attributes.get(ATTR_NEXT_TRAINS))
assert (
"London Waterloo"
== train_state.attributes[ATTR_NEXT_TRAINS][0]["destination_name"]
)
assert "06:13" == train_state.attributes[ATTR_NEXT_TRAINS][0]["estimated"]
|
import homeassistant.scripts as scripts
from tests.async_mock import patch
@patch("homeassistant.scripts.get_default_config_dir", return_value="/default")
def test_extract_config_dir(mock_def):
    """Test extraction of the config dir from command line arguments."""
assert scripts.get_default_config_dir() == "/default"
assert scripts.extract_config_dir() == "/default"
assert scripts.extract_config_dir([""]) == "/default"
assert scripts.extract_config_dir(["-c", "/arg"]) == "/arg"
assert scripts.extract_config_dir(["--config", "/a"]) == "/a"
|
from songpal import SongpalException
from homeassistant.components.songpal.const import CONF_ENDPOINT
from homeassistant.const import CONF_NAME
from tests.async_mock import AsyncMock, MagicMock, patch
FRIENDLY_NAME = "name"
ENTITY_ID = f"media_player.{FRIENDLY_NAME}"
HOST = "0.0.0.0"
ENDPOINT = f"http://{HOST}:10000/sony"
MODEL = "model"
MAC = "mac"
SW_VERSION = "sw_ver"
CONF_DATA = {
CONF_NAME: FRIENDLY_NAME,
CONF_ENDPOINT: ENDPOINT,
}
def _create_mocked_device(throw_exception=False):
mocked_device = MagicMock()
type(mocked_device).get_supported_methods = AsyncMock(
side_effect=SongpalException("Unable to do POST request: ")
if throw_exception
else None
)
interface_info = MagicMock()
interface_info.modelName = MODEL
type(mocked_device).get_interface_information = AsyncMock(
return_value=interface_info
)
sys_info = MagicMock()
sys_info.macAddr = MAC
sys_info.version = SW_VERSION
type(mocked_device).get_system_info = AsyncMock(return_value=sys_info)
volume1 = MagicMock()
volume1.maxVolume = 100
volume1.minVolume = 0
volume1.volume = 50
volume1.is_muted = False
volume1.set_volume = AsyncMock()
volume1.set_mute = AsyncMock()
volume2 = MagicMock()
volume2.maxVolume = 100
volume2.minVolume = 0
volume2.volume = 20
volume2.is_muted = True
mocked_device.volume1 = volume1
type(mocked_device).get_volume_information = AsyncMock(
return_value=[volume1, volume2]
)
power = MagicMock()
power.status = True
type(mocked_device).get_power = AsyncMock(return_value=power)
input1 = MagicMock()
input1.title = "title1"
input1.uri = "uri1"
input1.active = False
input1.activate = AsyncMock()
mocked_device.input1 = input1
input2 = MagicMock()
input2.title = "title2"
input2.uri = "uri2"
input2.active = True
type(mocked_device).get_inputs = AsyncMock(return_value=[input1, input2])
type(mocked_device).set_power = AsyncMock()
type(mocked_device).set_sound_settings = AsyncMock()
type(mocked_device).listen_notifications = AsyncMock()
type(mocked_device).stop_listen_notifications = AsyncMock()
notification_callbacks = {}
mocked_device.notification_callbacks = notification_callbacks
def _on_notification(name, callback):
notification_callbacks[name] = callback
type(mocked_device).on_notification = MagicMock(side_effect=_on_notification)
type(mocked_device).clear_notification_callbacks = MagicMock()
return mocked_device
def _patch_config_flow_device(mocked_device):
return patch(
"homeassistant.components.songpal.config_flow.Device",
return_value=mocked_device,
)
def _patch_media_player_device(mocked_device):
return patch(
"homeassistant.components.songpal.media_player.Device",
return_value=mocked_device,
)
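# Illustrative usage sketch: a config-flow test could combine the helpers
# above roughly like this; the flow-driving code itself is elided and left
# as an assumption:
#
#     mocked_device = _create_mocked_device()
#     with _patch_config_flow_device(mocked_device):
#         ...  # start the songpal config flow with CONF_DATA and assert success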
|
from django.db import migrations
from weblate.gitexport.models import SUPPORTED_VCS, get_export_url
def set_export_url(apps, schema_editor):
Component = apps.get_model("trans", "Component")
db_alias = schema_editor.connection.alias
matching = (
Component.objects.using(db_alias)
.filter(vcs__in=SUPPORTED_VCS)
.exclude(repo__startswith="weblate:/")
)
for component in matching:
new_url = get_export_url(component)
if component.git_export != new_url:
component.git_export = new_url
component.save()
class Migration(migrations.Migration):
initial = True
dependencies = [("trans", "0001_squashed_0143_auto_20180609_1655")]
operations = [
migrations.RunPython(code=set_export_url, reverse_code=set_export_url)
]
|
import os
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
def test_hostname(host):
assert 'instance' == host.check_output('hostname -s')
def test_etc_molecule_directory(host):
f = host.file('/etc/molecule')
assert f.is_directory
assert f.user == 'root'
assert f.group == 'root'
assert f.mode == 0o755
def test_etc_molecule_ansible_hostname_file(host):
f = host.file('/etc/molecule/instance')
assert f.is_file
assert f.user == 'root'
assert f.group == 'root'
assert f.mode == 0o644
def test_buildarg_env_var(host):
cmd_out = host.run("echo $envarg")
assert cmd_out.stdout == 'this_is_a_test'
|
import json
from scattertext.Common import DEFAULT_DIV_ID, DEFAULT_D3_AXIS_VALUE_FORMAT
class InvalidProtocolException(Exception):
pass
class ScatterplotStructure(object):
def __init__(
self,
visualization_data,
width_in_pixels=None,
height_in_pixels=None,
max_snippets=None,
color=None,
grey_zero_scores=False,
sort_by_dist=True,
reverse_sort_scores_for_not_category=True,
use_full_doc=False,
asian_mode=False,
match_full_line=False,
use_non_text_features=False,
show_characteristic=True,
word_vec_use_p_vals=False,
max_p_val=0.1,
save_svg_button=False,
p_value_colors=False,
x_label=None,
y_label=None,
full_data=None,
show_top_terms=True,
show_neutral=False,
get_tooltip_content=None,
x_axis_values=None,
y_axis_values=None,
color_func=None,
show_axes=True,
horizontal_line_y_position=None,
vertical_line_x_position=None,
show_extra=False,
do_censor_points=True,
center_label_over_points=False,
x_axis_labels=None,
y_axis_labels=None,
topic_model_preview_size=10,
vertical_lines=None,
unified_context=False,
show_category_headings=True,
highlight_selected_category=False,
show_cross_axes=True,
div_name=DEFAULT_DIV_ID,
alternative_term_func=None,
include_all_contexts=False,
show_axes_and_cross_hairs=False,
x_axis_values_format=None,
y_axis_values_format=None,
max_overlapping=-1,
show_corpus_stats=True,
sort_doc_labels_by_name=False,
always_jump=True,
show_diagonal=False,
enable_term_category_description=True,
use_global_scale=False,
get_custom_term_html=None,
header_names=None,
header_sorting_algos=None):
'''
Parameters
----------
visualization_data : VizDataAdapter
From ScatterChart or a descendant
width_in_pixels : int, optional
width of viz in pixels, if None, default to 1000
height_in_pixels : int, optional
height of viz in pixels, if None, default to 600
max_snippets : int, optional
            max snippets to show when a term is clicked. Defaults to showing all.
color : str, optional
d3 color scheme
grey_zero_scores : bool, optional
If True, color points with zero-scores a light shade of grey. False by default.
sort_by_dist : bool, optional
sort by distance or score, default True
reverse_sort_scores_for_not_category : bool, optional
If using a custom score, score the not-category class by
            lowest-score-as-most-predictive. Turn this off for word vector
or topic similarity. Default True.
use_full_doc : bool, optional
use full document instead of sentence in snippeting
use_non_text_features : bool, default False
            Show non-bag-of-words features (e.g., Empath) instead of text. False by default.
show_characteristic: bool, default True
Show characteristic terms on the far left-hand side of the visualization
word_vec_use_p_vals: bool, default False
Use category-associated p-values to determine which terms to give word
vec scores.
max_p_val : float, default = 0.1
            Max p-val used to find the set of terms for the similarity calculation, if
word_vec_use_p_vals is True.
save_svg_button : bool, default False
Add a save as SVG button to the page.
p_value_colors : bool, default False
Color points differently if p val is above 1-max_p_val, below max_p_val, or
in between.
x_label : str, default None
If present, use as the x-axis label
y_label : str, default None
If present, use as the y-axis label
full_data : str, default None
Data used to create chart. By default "getDataAndInfo()".
show_top_terms : bool, default True
Show the top terms sidebar
show_neutral : bool, default False
Show a third column for matches in neutral documents
get_tooltip_content : str, default None
Javascript function to control content of tooltip. Function takes a parameter
which is a dictionary entry produced by `ScatterChartExplorer.to_dict` and
returns a string.
x_axis_values : list, default None
Numeric value-labels to show on x-axis which correspond to original x-values.
y_axis_values : list, default None
            Numeric value-labels to show on y-axis which correspond to original y-values.
color_func : str, default None
Javascript function to control color of a point. Function takes a parameter
which is a dictionary entry produced by `ScatterChartExplorer.to_dict` and
returns a string.
show_axes : bool, default True
Show x and y axes
horizontal_line_y_position : float, default None
If x and y axes markers are shown, the position of the horizontal axis marker
vertical_line_x_position : float, default None
If x and y axes markers are shown, the position of the vertical axis marker
show_extra : bool, default False
Show extra fourth column
do_censor_points : bool, default True
Don't label over dots
center_label_over_points : bool, default False
Only put labels centered over point
x_axis_labels: list, default None
List of string value-labels to show at evenly spaced intervals on the x-axis.
Low, medium, high are defaults. This relies on d3's ticks function, which can
            behave unpredictably. Caveat usor.
y_axis_labels : list, default None
List of string value-labels to show at evenly spaced intervals on the y-axis.
Low, medium, high are defaults. This relies on d3's ticks function, which can
            behave unpredictably. Caveat usor.
topic_model_preview_size : int, default 10
If topic models are being visualized, show the first topic_model_preview_size topics
in a preview.
vertical_lines: list, default None
            List of scaled points along the x-axis at which to draw vertical lines
unified_context: bool, default False
Display all context in a single column.
show_category_headings: bool, default True
If unified_context, should we show the category headings?
highlight_selected_category: bool, default False
Highlight selected category in unified view
show_cross_axes: bool, default True
If show_axes is False, do we show cross-axes?
div_name: str, default DEFAULT_DIV_ID
Div which holds scatterplot
alternative_term_func: str, default None
            Javascript function which takes a term JSON object and returns a bool. If the return value is true,
execute standard term click pipeline. Ex.: `'(function(termDict) {return true;})'`.
include_all_contexts: bool, default False
Include all contexts, even non-matching ones, in interface
show_axes_and_cross_hairs: bool, default False
Show both cross-axes and peripheral scales.
x_axis_values_format: str, default None
d3 format string for x-axis values (see https://github.com/d3/d3-format)
y_axis_values_format: str, default None
d3 format string for y-axis values (see https://github.com/d3/d3-format)
max_overlapping: int, default -1
Maximum number of overlapping terms to output. Show all if -1 (default)
show_corpus_stats: bool, default True
Populate corpus stats div
sort_doc_labels_by_name: bool, default False
If unified document labels, sort the labels by name instead of value.
always_jump: bool, default True
Always jump to term contexts if a term is clicked.
show_diagonal: bool, default False
Show a diagonal line from the lower-left to the upper-right of the chart
use_global_scale: bool, default False
Use the same scaling for the x and y axis. Without this show_diagonal may not
make sense.
enable_term_category_description: bool, default True
List term/metadata statistics under category
get_custom_term_html: str, default None
Javascript function which takes term info and outputs custom term HTML
header_names: Dict[str, str], default None
Dictionary giving names of term lists shown to the right of the plot. Valid keys are
upper, lower and right.
header_sorting_algos: Dict[str, str], default None
Dictionary giving javascript sorting algorithms for panes. Valid keys are upper, lower
and right. Value is a JS function which takes the "data" object.
'''
self._visualization_data = visualization_data
self._width_in_pixels = width_in_pixels if width_in_pixels is not None else 1000
self._height_in_pixels = height_in_pixels if height_in_pixels is not None else 600
self._max_snippets = max_snippets
self._color = color
self._sort_by_dist = sort_by_dist
self._use_full_doc = use_full_doc
self._asian_mode = asian_mode
self._match_full_line = match_full_line
self._grey_zero_scores = grey_zero_scores
self._use_non_text_features = use_non_text_features
self._show_characteristic = show_characteristic
self._word_vec_use_p_vals = word_vec_use_p_vals
self._max_p_val = max_p_val
self._save_svg_button = save_svg_button
self._reverse_sort_scores_for_not_category = reverse_sort_scores_for_not_category
self._p_value_colors = p_value_colors
self._x_label = x_label
self._y_label = y_label
self._full_data = full_data
self._show_top_terms = show_top_terms
self._show_neutral = show_neutral
self._get_tooltip_content = get_tooltip_content
self._x_axis_values = x_axis_values
self._y_axis_values = y_axis_values
self._x_axis_labels = x_axis_labels
self._y_axis_labels = y_axis_labels
self._color_func = color_func
self._show_axes = show_axes
self._horizontal_line_y_position = horizontal_line_y_position
self._vertical_line_x_position = vertical_line_x_position
self._show_extra = show_extra
self._do_censor_points = do_censor_points
self._center_label_over_points = center_label_over_points
self._topic_model_preview_size = topic_model_preview_size
self._vertical_lines = vertical_lines
self._unified_context = unified_context
self._show_category_headings = show_category_headings
self._highlight_selected_category = highlight_selected_category
self._show_cross_axes = show_cross_axes
self._div_name = div_name
self._alternative_term_func = alternative_term_func
self._include_all_contexts = include_all_contexts
self._show_axes_and_cross_hairs = show_axes_and_cross_hairs
self._x_axis_values_format = x_axis_values_format
self._y_axis_values_format = y_axis_values_format
self._max_overlapping = max_overlapping
self._show_corpus_stats = show_corpus_stats
self._sort_doc_labels_by_name = sort_doc_labels_by_name
self._always_jump = always_jump
self._show_diagonal = show_diagonal
self._use_global_scale = use_global_scale
self._enable_term_category_description = enable_term_category_description
self._get_custom_term_html = get_custom_term_html
self._header_names = header_names
self._header_sorting_algos = header_sorting_algos
def call_build_visualization_in_javascript(self):
def js_default_value(x):
return 'undefined' if x is None else str(x)
def js_default_string(x, default_string=None):
if x is not None:
return json.dumps(str(x))
if default_string is None:
return 'undefined'
return json.dumps(default_string)
def js_default_value_to_null(x):
return 'null' if x is None else str(x)
def json_or_null(x):
return 'null' if x is None else json.dumps(x)
def js_bool(x):
return 'true' if x else 'false'
def js_float(x):
return str(float(x))
def js_int(x):
return str(int(x))
def js_default_full_data(full_data):
return full_data if full_data is not None else "getDataAndInfo()"
def json_with_jsvalue_or_null(x):
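            # Unlike json_or_null above, values are emitted verbatim as raw
            # JavaScript (e.g. sorting functions); only the keys are quoted.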
if x is None:
return 'null'
to_ret = '{'
first = True
for key, val in sorted(x.items()):
if not first: to_ret += ', '
to_ret += '"%s": %s' % (key, val)
first = False
to_ret += '}'
return to_ret
arguments = [
js_default_value(self._width_in_pixels),
js_default_value(self._height_in_pixels),
js_default_value_to_null(self._max_snippets),
js_default_value_to_null(self._color),
js_bool(self._sort_by_dist),
js_bool(self._use_full_doc),
js_bool(self._grey_zero_scores),
js_bool(self._asian_mode),
js_bool(self._use_non_text_features),
js_bool(self._show_characteristic),
js_bool(self._word_vec_use_p_vals),
js_bool(self._save_svg_button),
js_bool(self._reverse_sort_scores_for_not_category),
js_float(self._max_p_val),
js_bool(self._p_value_colors),
js_default_string(self._x_label),
js_default_string(self._y_label),
js_default_full_data(self._full_data),
js_bool(self._show_top_terms),
js_bool(self._show_neutral),
js_default_value_to_null(self._get_tooltip_content),
js_default_value_to_null(self._x_axis_values),
js_default_value_to_null(self._y_axis_values),
js_default_value_to_null(self._color_func),
js_bool(self._show_axes),
js_bool(self._show_extra),
js_bool(self._do_censor_points),
js_bool(self._center_label_over_points),
json_or_null(self._x_axis_labels),
json_or_null(self._y_axis_labels),
js_default_value(self._topic_model_preview_size),
json_or_null(self._vertical_lines),
js_default_value_to_null(self._horizontal_line_y_position),
js_default_value_to_null(self._vertical_line_x_position),
js_bool(self._unified_context),
js_bool(self._show_category_headings),
js_bool(self._show_cross_axes),
js_default_string(self._div_name),
js_default_value_to_null(self._alternative_term_func),
js_bool(self._include_all_contexts),
js_bool(self._show_axes_and_cross_hairs),
js_default_string(self._x_axis_values_format, DEFAULT_D3_AXIS_VALUE_FORMAT),
js_default_string(self._y_axis_values_format, DEFAULT_D3_AXIS_VALUE_FORMAT),
js_bool(self._match_full_line),
js_int(self._max_overlapping),
js_bool(self._show_corpus_stats),
js_bool(self._sort_doc_labels_by_name),
js_bool(self._always_jump),
js_bool(self._highlight_selected_category),
js_bool(self._show_diagonal),
js_bool(self._use_global_scale),
js_bool(self._enable_term_category_description),
js_default_value_to_null(self._get_custom_term_html),
json_or_null(self._header_names),
json_with_jsvalue_or_null(self._header_sorting_algos)
]
return 'buildViz(' + ',\n'.join(arguments) + ');\n'
def get_js_to_call_build_scatterplot(self, object_name='plotInterface'):
return object_name + ' = ' + self.call_build_visualization_in_javascript()
def get_js_to_call_build_scatterplot_with_a_function(self, object_name='plotInterface', function_name=None):
if function_name is None:
function_name = 'build' + object_name
function_text = ('function ' + function_name + '() { return '
+ self.call_build_visualization_in_javascript() + ';}')
return function_text + '\n\n' + object_name + ' = ' + function_name + '();'
def get_js_reset_function(self, values_to_set, functions_to_reset, reset_function_name='reset'):
        '''
        :param values_to_set: List[str]
        :param functions_to_reset: List[str]
        :param reset_function_name: str, default = 'reset'
        :return: str
        '''
return ('function ' + reset_function_name + '() {'
+ "document.querySelectorAll('.scattertext').forEach(element=>element.innerHTML=null);\n"
+ "document.querySelectorAll('#d3-div-1-corpus-stats').forEach(element=>element.innerHTML=null);\n"
+ ' '.join([value + ' = ' + function_name + '();'
for value, function_name
in zip(values_to_set, functions_to_reset)])
+ '}')
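# Hedged usage sketch (not part of the original module): assuming an already
# constructed instance named `structure`, a typical composition of the methods
# above would be
#
#   js = structure.get_js_to_call_build_scatterplot_with_a_function('plotInterface')
#   js += '\n' + structure.get_js_reset_function(['plotInterface'],
#                                                ['buildplotInterface'])
#
# which defines a buildplotInterface() builder, assigns its result to
# plotInterface, and emits a reset() that clears the .scattertext DOM nodes
# and rebuilds the plot from the same builder.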
|
import urllib2
import diamond.collector
try:
import simplejson as json
except ImportError:
import json
class FlumeCollector(diamond.collector.Collector):
# items to collect
_metrics_collect = {
'CHANNEL': [
'ChannelFillPercentage',
'EventPutAttemptCount',
'EventPutSuccessCount',
'EventTakeAttemptCount',
'EventTakeSuccessCount'
],
'SINK': [
'BatchCompleteCount',
'BatchEmptyCount',
'BatchUnderflowCount',
'ConnectionClosedCount',
'ConnectionCreatedCount',
'ConnectionFailedCount',
'EventDrainAttemptCount',
'EventDrainSuccessCount'
],
'SOURCE': [
'AppendAcceptedCount',
'AppendBatchAcceptedCount',
'AppendBatchReceivedCount',
'AppendReceivedCount',
'EventAcceptedCount',
'EventReceivedCount',
'OpenConnectionCount'
]
}
def get_default_config_help(self):
config_help = super(FlumeCollector, self).get_default_config_help()
config_help.update({
            'req_host': 'Hostname of the Flume agent',
            'req_port': 'Port of the Flume HTTP metrics endpoint',
            'req_path': 'Path of the Flume HTTP metrics endpoint',
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
default_config = super(FlumeCollector, self).get_default_config()
default_config['path'] = 'flume'
default_config['req_host'] = 'localhost'
default_config['req_port'] = 41414
default_config['req_path'] = '/metrics'
return default_config
def collect(self):
url = 'http://{}:{}{}'.format(
self.config['req_host'],
self.config['req_port'],
self.config['req_path']
)
try:
resp = urllib2.urlopen(url)
try:
j = json.loads(resp.read())
resp.close()
except Exception as e:
resp.close()
self.log.error('Cannot load json data: %s', e)
return None
except urllib2.URLError as e:
self.log.error('Failed to open url: %s', e)
return None
except Exception as e:
self.log.error('Unknown error opening url: %s', e)
return None
for comp in j.iteritems():
comp_name = comp[0]
comp_items = comp[1]
comp_type = comp_items['Type']
for item in self._metrics_collect[comp_type]:
if item.endswith('Count'):
metric_name = '{}.{}'.format(comp_name, item[:-5])
metric_value = int(comp_items[item])
self.publish_counter(metric_name, metric_value)
elif item.endswith('Percentage'):
metric_name = '{}.{}'.format(comp_name, item)
metric_value = float(comp_items[item])
self.publish_gauge(metric_name, metric_value)
else:
metric_name = item
metric_value = int(comp_items[item])
self.publish_gauge(metric_name, metric_value)
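# Hedged illustration (not part of the collector): the Flume HTTP metrics
# endpoint returns JSON keyed by component name, for example
#   {"CHANNEL.ch1": {"Type": "CHANNEL",
#                    "ChannelFillPercentage": "0.5",
#                    "EventPutSuccessCount": "42"}}
# With the mapping above, "CHANNEL.ch1.EventPutSuccess" would be published as a
# counter (the trailing "Count" is stripped) and
# "CHANNEL.ch1.ChannelFillPercentage" as a gauge.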
|
from __future__ import print_function, absolute_import
import re
import os
import sys
import logging
if sys.version_info[0] == 2: # pragma: no cover
from ConfigParser import ConfigParser as _ConfigParser
from StringIO import StringIO
class ConfigParser(_ConfigParser):
def read_string(self, string, source):
return self.readfp(StringIO(string), source)
else:
from configparser import ConfigParser
def as_bool(value):
if isinstance(value, bool):
return value
elif isinstance(value, basestring) and value.lower() in BOOLEAN_STATES:
return BOOLEAN_STATES[value.lower()]
else:
raise ValueError('Cannot make a bool of %r' % value)
def get_tuple_validator(subvalidator):
def validator(value):
if isinstance(value, (tuple, list)):
value2 = tuple(value)
elif isinstance(value, basestring):
value2 = tuple([s.strip() for s in value.strip('()[]').split(',')])
else:
raise ValueError('Cannot make a tuple of %r' % value)
return tuple([subvalidator(x) for x in value2])
return validator
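# Hedged illustration (not part of the original module): a validator built by
# get_tuple_validator(int) accepts both sequences and comma separated strings,
# so get_tuple_validator(int)([1, 2, 3]) and get_tuple_validator(int)('(1, 2, 3)')
# both evaluate to (1, 2, 3).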
def stack_sorter(key):
# Implement ordering, files and strings go at spot 1
return dict(default=0, environ=2, argv=3, set=4).get(key[0], 1)
BOOLEAN_STATES = {'1': True, 'yes': True, 'true': True, 'on': True,
'0': False, 'no': False, 'false': False, 'off': False}
TYPEMAP = {float: float, int: int, bool: as_bool}
if sys.version_info[0] == 2: # pragma: no cover
TYPEMAP[basestring] = unicode # noqa
TYPEMAP[str] = unicode # noqa
else:
basestring = str
TYPEMAP[str] = str
INSTANCE_DOCS = """ Configuration object for {name}
The options below can be set from different sources, and are
evaluated in the following order:
* From the default value.
* From .cfg or .ini file, or a string in cfg format.
* From environment variables, e.g. ``{NAME}_FOO=3``.
* From command-line arguments, e.g. ``--{name}-foo=3``.
* From setting the config option directly, e.g. ``config.foo = 3``.
Use ``print(config)`` to get a summary of the current values and
from which sources they were set.
Parameters:
"""
class Config(object):
""" Class for configuration objects.
A Config object has a set of options, which can be str, int, float,
bool, or a tuple of any of the above. Options can be set from
different sources:
* Each option has a default value.
* From .cfg or .ini files.
* From strings in ini format.
* From environment variables.
* From command-line arguments.
* By setting the config option directly.
Parameters:
name (str): the name by which to identify this config. This name
is used as a prefix in environment variables and command
line arguments, and optionally as a section header in .cfg files.
*sources: Sources to initialize the option values with.
These can be strings in ini format, or .ini or .cfg filenames.
If a file is given that does not exist, it is simply ignored.
Special prefixes ``~/`` and ``~appdata/`` are expanded to the
home dir and appdata dir.
**options: The options specification: each option consists of
a 3-element tuple (default, type, docstring).
Example:
.. code-block:: Python
config = Config('myconfig', '~appdata/.myconfig.cfg',
foo=(False, bool, 'Whether to foo'),
bar=(0.0, float, 'The size of the bar'),
spam=('1,2,3', [int], 'A tuple of ints'))
With this, options can be set:
* With an entry ``foo = 3`` in "~appdata/.myconfig.cfg".
* With a string ``"foo = 3"`` passed at initialization.
* With an environment variable named ``MYCONFIG_FOO``.
* With a command line argument ``--myconfig-foo=3``.
* By doing ``config.foo = 3``, or ``config['foo'] = 3`` in Python.
Notes:
* Option names are case insensitive, except for attribute access and
environment variables (the latter must be all uppercase).
* All values can be set as a Python object or a string; they
are automatically converted to the correct type.
* Each instance gets a docstring that lists all options, so it
      can easily be used in e.g. Sphinx docs.
"""
def __init__(self, name, *sources, **options):
# The identifier name for this config
self._name = name
if not is_valid_name(name):
raise ValueError('Config name must be an alphanumeric string, '
'starting with a letter.')
# The option names (unmodified case)
self._options = []
# Where the values are stored, we keep a stack, lowercase keys
self._opt_values = {} # name -> list of (source, value) tuples
# Map of lowercase option names to validator functions
self._opt_validators = {}
# Map of lowercase option names to type names, for better reporting
self._opt_typenames = {}
# Map of lowercase option names to docstrings
self._opt_docs = {}
# Parse options
option_docs = ['']
for name in sorted(options.keys(), key=lambda x: x.lower()):
lname = name.lower()
spec = options[name]
# Checks
if not is_valid_name(name):
                raise ValueError('Option name must be an alphanumeric string, '
'starting with a letter, and not private.')
if not len(spec) == 3:
raise ValueError('Option spec must be (default, type, docs)')
default, typ, doc = spec
istuple = False
if isinstance(typ, (tuple, list)):
if len(typ) != 1:
raise ValueError('Tuple type spec should have one element.')
istuple, typ = True, typ[0]
if not (isinstance(typ, type) and issubclass(typ, tuple(TYPEMAP))):
raise ValueError('Option types can be str, bool, int, float.')
# Parse
typename = typ.__name__ + ('-tuple' if istuple else '')
args = name, typename, doc, default
option_docs.append(' '*8 + '%s (%s): %s (default %r)' % args)
self._options.append(name)
self._opt_typenames[lname] = typename
self._opt_validators[lname] = (get_tuple_validator(TYPEMAP[typ])
if istuple else TYPEMAP[typ])
self._opt_docs[lname] = doc
self._opt_values[lname] = []
# Overwrite docstring
self.__doc__ = INSTANCE_DOCS.format(name=self._name,
NAME=self._name.upper())
self.__doc__ += '\n'.join(option_docs)
# --- init values
# Set defaults
for name, spec in options.items():
self._set('default', name, spec[0])
# Load from sources
for source in sources:
if not isinstance(source, basestring):
raise ValueError('Sources should be strings or filenames.')
if '\n' in source:
self.load_from_string(source)
else:
self.load_from_file(source)
# Load from environ
for name in self._opt_values:
env_name = (self._name + '_' + name).upper()
value = os.getenv(env_name, None) # getenv is case insensitive
if value is not None:
self._set('environ', name, value)
# Load from argv
arg_prefix = '--' + self._name.lower() + '-'
for i in range(1, len(sys.argv)):
arg = sys.argv[i]
if arg.lower().startswith(arg_prefix) and '=' in arg:
name, value = arg[len(arg_prefix):].split('=', 1)
if name.lower() in self._opt_values:
self._set('argv', name, value)
def __repr__(self):
t = '<Config %r with %i options at 0x%x>'
return t % (self._name, len(self._options), id(self))
def __str__(self):
# Return a string representing a summary of the options and
# how they were set from different sources.
lines = []
lines.append('Config %r with %i options.' %
(self._name, len(self._options)))
for name in self._options:
lname = name.lower()
lines.append('\nOption %s (%s) - %s' % (name,
self._opt_typenames[lname],
self._opt_docs[lname]))
for source, val in self._opt_values[lname]:
lines.append(' %r from %s' % (val, source))
lines[-1] = ' -> ' + lines[-1][4:] # Mark current value
return '\n'.join(lines)
def __len__(self):
return len(self._options)
def __iter__(self):
return self._options.__iter__()
def __dir__(self):
return self._options
def __getattr__(self, name):
# Case sensitive get
if not name.startswith('_') and name in self._options:
return self._opt_values[name.lower()][-1][1]
return super(Config, self).__getattribute__(name)
def __getitem__(self, name):
# Case insensitive get
if not isinstance(name, basestring):
raise TypeError('Config only allows subscripting by name strings.')
if name.lower() in self._opt_values:
return self._opt_values[name.lower()][-1][1]
else:
raise IndexError('Config has no option %r' % name)
def __setattr__(self, name, value):
# Case sensitive set
if not name.startswith('_') and name in self._options:
return self._set('set', name, value)
return super(Config, self).__setattr__(name, value)
def __setitem__(self, name, value):
# Case insensitve set
if not isinstance(name, basestring):
raise TypeError('Config only allows subscripting by name strings.')
if name.lower() in self._opt_values:
return self._set('set', name, value)
else:
raise IndexError('Config has no option %r' % name)
def _set(self, source, name, value):
# The actual setter (case insensitive), applies the validator
validator = self._opt_validators[name.lower()]
try:
real_value = validator(value)
except Exception:
args = name, self._opt_typenames[name.lower()], value
raise ValueError('Cannot set option %s (%s) from %r' % args)
stack = self._opt_values[name.lower()]
if stack and stack[-1][0] == source:
stack[-1] = source, real_value
else:
stack.append((source, real_value))
stack.sort(key=stack_sorter)
def load_from_file(self, filename):
""" Load config options from a file, as if it was given as a
source during initialization. This means that options set via
argv, environ or directly will not be influenced.
"""
# Expand special prefix
filename = filename.replace('~appdata/', appdata_dir() + '/')
filename = filename.replace('~appdata\\', appdata_dir() + '\\')
filename = os.path.expanduser(filename)
# Proceed if is an actual file
if os.path.isfile(filename):
text = None
try:
                with open(filename, 'rb') as f:
                    text = f.read().decode()
except Exception as err:
logging.warning('Could not read config from %r:\n%s' %
(filename, str(err)))
return
self.load_from_string(text, filename)
def load_from_string(self, text, filename='<string>'):
""" Load config options from a string, as if it was given as a
source during initialization. This means that options set via
argv, environ or directly will not be influenced.
"""
try:
self._load_from_string(text, filename)
except Exception as err:
logging.warning(str(err))
def _load_from_string(self, s, filename):
# Create default section, so that users can work with sectionless
# files (as is common in an .ini file)
name_section = '[%s]\n' % self._name
if name_section not in s:
s = name_section + s
s += '\n'
parser = ConfigParser()
parser.read_string(s, filename)
if parser.has_section(self._name):
for name in self._options:
if parser.has_option(self._name, name):
value = parser.get(self._name, name)
self._set(filename, name, value)
def is_valid_name(n):
return isidentifier(n) and not n.startswith('_')
def isidentifier(s):
# http://stackoverflow.com/questions/2544972/
if not isinstance(s, basestring): # noqa
return False
return re.match(r'^\w+$', s, re.UNICODE) and re.match(r'^[0-9]', s) is None
# From pyzolib/paths.py (https://bitbucket.org/pyzo/pyzolib/src/tip/paths.py)
def appdata_dir(appname=None, roaming=False):
""" Get the path to the application directory, where applications
are allowed to write user specific files (e.g. configurations).
"""
# Define default user directory
userDir = os.path.expanduser('~')
# Get system app data dir
path = None
if sys.platform.startswith('win'):
path1, path2 = os.getenv('LOCALAPPDATA'), os.getenv('APPDATA')
path = (path2 or path1) if roaming else (path1 or path2)
elif sys.platform.startswith('darwin'):
path = os.path.join(userDir, 'Library', 'Application Support')
# On Linux and as fallback
if not (path and os.path.isdir(path)): # pragma: no cover
path = os.environ.get(
"XDG_CONFIG_HOME",
os.path.expanduser(os.path.join("~", ".config")))
# Maybe we should store things local to the executable (in case of a
# portable distro or a frozen application that wants to be portable)
prefix = sys.prefix
if getattr(sys, 'frozen', None): # See application_dir() function
prefix = os.path.abspath(os.path.dirname(sys.executable))
for reldir in ('settings', '../settings'):
localpath = os.path.abspath(os.path.join(prefix, reldir))
if os.path.isdir(localpath): # pragma: no cover
try:
open(os.path.join(localpath, 'test.write'), 'wb').close()
os.remove(os.path.join(localpath, 'test.write'))
except IOError:
pass # We cannot write in this directory
else:
path = localpath
break
# Get path specific for this app
if appname: # pragma: no cover
if path == userDir:
appname = '.' + appname.lstrip('.') # Make it a hidden directory
path = os.path.join(path, appname)
if not os.path.isdir(path): # pragma: no cover
os.mkdir(path)
# Done
return path
if __name__ == '__main__':
sys.argv.append('--test-foo=8')
c = Config('test',
foo=(3, int, 'foo yeah'),
spam=(2.1, float, 'a float!'))
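    # Hedged demonstration (not in the original): the '--test-foo=8' argument
    # injected above takes precedence over the default of 3, so this prints 8
    # followed by a per-source summary of both options.
    print(c.foo)
    print(c)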
|
from datetime import datetime, timedelta
import logging
from august.activity import ActivityType
from august.lock import LockDoorStatus
from august.util import update_lock_detail_from_activity
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_CONNECTIVITY,
DEVICE_CLASS_DOOR,
DEVICE_CLASS_MOTION,
DEVICE_CLASS_OCCUPANCY,
BinarySensorEntity,
)
from homeassistant.core import callback
from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.util.dt import utcnow
from .const import DATA_AUGUST, DOMAIN
from .entity import AugustEntityMixin
_LOGGER = logging.getLogger(__name__)
TIME_TO_DECLARE_DETECTION = timedelta(seconds=60)
def _retrieve_online_state(data, detail):
"""Get the latest state of the sensor."""
# The doorbell will go into standby mode when there is no motion
# for a short while. It will wake by itself when needed so we need
    # to consider it available or we will not report motion or dings
return detail.is_online or detail.is_standby
def _retrieve_motion_state(data, detail):
return _activity_time_based_state(
data,
detail.device_id,
[ActivityType.DOORBELL_MOTION, ActivityType.DOORBELL_DING],
)
def _retrieve_ding_state(data, detail):
return _activity_time_based_state(
data, detail.device_id, [ActivityType.DOORBELL_DING]
)
def _activity_time_based_state(data, device_id, activity_types):
"""Get the latest state of the sensor."""
latest = data.activity_stream.get_latest_device_activity(device_id, activity_types)
if latest is not None:
start = latest.activity_start_time
end = latest.activity_end_time + TIME_TO_DECLARE_DETECTION
return start <= datetime.now() <= end
return None
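# Hedged note (not part of the original module): the helper above keeps a
# time-based binary sensor "on" from the start of the latest matching activity
# until TIME_TO_DECLARE_DETECTION (60 seconds) after it ended, and returns None
# when no matching activity has been recorded yet.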
SENSOR_NAME = 0
SENSOR_DEVICE_CLASS = 1
SENSOR_STATE_PROVIDER = 2
SENSOR_STATE_IS_TIME_BASED = 3
# sensor_type: [name, device_class, state_provider, is_time_based]
SENSOR_TYPES_DOORBELL = {
"doorbell_ding": ["Ding", DEVICE_CLASS_OCCUPANCY, _retrieve_ding_state, True],
"doorbell_motion": ["Motion", DEVICE_CLASS_MOTION, _retrieve_motion_state, True],
"doorbell_online": [
"Online",
DEVICE_CLASS_CONNECTIVITY,
_retrieve_online_state,
False,
],
}
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the August binary sensors."""
data = hass.data[DOMAIN][config_entry.entry_id][DATA_AUGUST]
devices = []
for door in data.locks:
detail = data.get_device_detail(door.device_id)
if not detail.doorsense:
_LOGGER.debug(
"Not adding sensor class door for lock %s because it does not have doorsense",
door.device_name,
)
continue
_LOGGER.debug("Adding sensor class door for %s", door.device_name)
devices.append(AugustDoorBinarySensor(data, "door_open", door))
for doorbell in data.doorbells:
for sensor_type in SENSOR_TYPES_DOORBELL:
_LOGGER.debug(
"Adding doorbell sensor class %s for %s",
SENSOR_TYPES_DOORBELL[sensor_type][SENSOR_DEVICE_CLASS],
doorbell.device_name,
)
devices.append(AugustDoorbellBinarySensor(data, sensor_type, doorbell))
async_add_entities(devices, True)
class AugustDoorBinarySensor(AugustEntityMixin, BinarySensorEntity):
"""Representation of an August Door binary sensor."""
def __init__(self, data, sensor_type, device):
"""Initialize the sensor."""
super().__init__(data, device)
self._data = data
self._sensor_type = sensor_type
self._device = device
self._update_from_data()
@property
def available(self):
"""Return the availability of this sensor."""
return self._detail.bridge_is_online
@property
def is_on(self):
"""Return true if the binary sensor is on."""
return self._detail.door_state == LockDoorStatus.OPEN
@property
def device_class(self):
"""Return the class of this device."""
return DEVICE_CLASS_DOOR
@property
def name(self):
"""Return the name of the binary sensor."""
return f"{self._device.device_name} Open"
@callback
def _update_from_data(self):
"""Get the latest state of the sensor and update activity."""
door_activity = self._data.activity_stream.get_latest_device_activity(
self._device_id, [ActivityType.DOOR_OPERATION]
)
if door_activity is not None:
update_lock_detail_from_activity(self._detail, door_activity)
@property
def unique_id(self) -> str:
"""Get the unique of the door open binary sensor."""
return f"{self._device_id}_open"
class AugustDoorbellBinarySensor(AugustEntityMixin, BinarySensorEntity):
"""Representation of an August binary sensor."""
def __init__(self, data, sensor_type, device):
"""Initialize the sensor."""
super().__init__(data, device)
self._check_for_off_update_listener = None
self._data = data
self._sensor_type = sensor_type
self._device = device
self._state = None
self._available = False
self._update_from_data()
@property
def available(self):
"""Return the availability of this sensor."""
return self._available
@property
def is_on(self):
"""Return true if the binary sensor is on."""
return self._state
@property
def device_class(self):
"""Return the class of this device, from component DEVICE_CLASSES."""
return SENSOR_TYPES_DOORBELL[self._sensor_type][SENSOR_DEVICE_CLASS]
@property
def name(self):
"""Return the name of the binary sensor."""
return f"{self._device.device_name} {SENSOR_TYPES_DOORBELL[self._sensor_type][SENSOR_NAME]}"
@property
def _state_provider(self):
"""Return the state provider for the binary sensor."""
return SENSOR_TYPES_DOORBELL[self._sensor_type][SENSOR_STATE_PROVIDER]
@property
def _is_time_based(self):
"""Return true of false if the sensor is time based."""
return SENSOR_TYPES_DOORBELL[self._sensor_type][SENSOR_STATE_IS_TIME_BASED]
@callback
def _update_from_data(self):
"""Get the latest state of the sensor."""
self._cancel_any_pending_updates()
self._state = self._state_provider(self._data, self._detail)
if self._is_time_based:
self._available = _retrieve_online_state(self._data, self._detail)
self._schedule_update_to_recheck_turn_off_sensor()
else:
self._available = True
def _schedule_update_to_recheck_turn_off_sensor(self):
"""Schedule an update to recheck the sensor to see if it is ready to turn off."""
# If the sensor is already off there is nothing to do
if not self._state:
return
# self.hass is only available after setup is completed
# and we will recheck in async_added_to_hass
if not self.hass:
return
@callback
def _scheduled_update(now):
"""Timer callback for sensor update."""
self._check_for_off_update_listener = None
self._update_from_data()
self._check_for_off_update_listener = async_track_point_in_utc_time(
self.hass, _scheduled_update, utcnow() + TIME_TO_DECLARE_DETECTION
)
def _cancel_any_pending_updates(self):
"""Cancel any updates to recheck a sensor to see if it is ready to turn off."""
if self._check_for_off_update_listener:
_LOGGER.debug("%s: canceled pending update", self.entity_id)
self._check_for_off_update_listener()
self._check_for_off_update_listener = None
async def async_added_to_hass(self):
"""Call the mixin to subscribe and setup an async_track_point_in_utc_time to turn off the sensor if needed."""
self._schedule_update_to_recheck_turn_off_sensor()
await super().async_added_to_hass()
@property
def unique_id(self) -> str:
"""Get the unique id of the doorbell sensor."""
return (
f"{self._device_id}_"
f"{SENSOR_TYPES_DOORBELL[self._sensor_type][SENSOR_NAME].lower()}"
)
|
import logging
from django.core.management.base import BaseCommand as DjangoBaseCommand
class BaseCommand(DjangoBaseCommand):
requires_system_checks = False
def execute(self, *args, **options):
logger = logging.getLogger("weblate")
if not any(handler.get_name() == "console" for handler in logger.handlers):
console = logging.StreamHandler()
console.set_name("console")
verbosity = int(options["verbosity"])
if verbosity > 1:
console.setLevel(logging.DEBUG)
elif verbosity == 1:
console.setLevel(logging.INFO)
else:
console.setLevel(logging.ERROR)
console.setFormatter(logging.Formatter("%(levelname)s %(message)s"))
logger.addHandler(console)
return super().execute(*args, **options)
def handle(self, *args, **options):
"""The actual logic of the command.
Subclasses must implement this method.
"""
raise NotImplementedError()
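# Hedged usage sketch (not part of the original module): a concrete management
# command only needs to implement handle(); execute() above attaches a single
# "console" logging handler to the "weblate" logger whose level follows the
# standard Django --verbosity option.
#
#     class ExampleCommand(BaseCommand):
#         help = "Hypothetical example command"
#
#         def handle(self, *args, **options):
#             logging.getLogger("weblate").info("running example command")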
|
from copy import deepcopy
import pydeconz
import pytest
from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN
from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN
from homeassistant.components.cover import DOMAIN as COVER_DOMAIN
from homeassistant.components.deconz.config_flow import DECONZ_MANUFACTURERURL
from homeassistant.components.deconz.const import DOMAIN as DECONZ_DOMAIN
from homeassistant.components.deconz.errors import AuthenticationRequired, CannotConnect
from homeassistant.components.deconz.gateway import (
get_gateway,
get_gateway_from_config_entry,
)
from homeassistant.components.fan import DOMAIN as FAN_DOMAIN
from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN
from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN
from homeassistant.components.scene import DOMAIN as SCENE_DOMAIN
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.components.ssdp import (
ATTR_SSDP_LOCATION,
ATTR_UPNP_MANUFACTURER_URL,
ATTR_UPNP_SERIAL,
ATTR_UPNP_UDN,
)
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
from homeassistant.config_entries import CONN_CLASS_LOCAL_PUSH, SOURCE_SSDP
from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_PORT
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from tests.async_mock import Mock, patch
from tests.common import MockConfigEntry
API_KEY = "1234567890ABCDEF"
BRIDGEID = "01234E56789A"
ENTRY_CONFIG = {CONF_API_KEY: API_KEY, CONF_HOST: "1.2.3.4", CONF_PORT: 80}
ENTRY_OPTIONS = {}
DECONZ_CONFIG = {
"bridgeid": BRIDGEID,
"ipaddress": "1.2.3.4",
"mac": "00:11:22:33:44:55",
"modelid": "deCONZ",
"name": "deCONZ mock gateway",
"sw_version": "2.05.69",
"uuid": "1234",
"websocketport": 1234,
}
DECONZ_WEB_REQUEST = {
"config": DECONZ_CONFIG,
"groups": {},
"lights": {},
"sensors": {},
}
async def setup_deconz_integration(
hass,
config=ENTRY_CONFIG,
options=ENTRY_OPTIONS,
get_state_response=DECONZ_WEB_REQUEST,
entry_id="1",
source="user",
):
"""Create the deCONZ gateway."""
config_entry = MockConfigEntry(
domain=DECONZ_DOMAIN,
source=source,
data=deepcopy(config),
connection_class=CONN_CLASS_LOCAL_PUSH,
options=deepcopy(options),
entry_id=entry_id,
)
config_entry.add_to_hass(hass)
with patch(
"pydeconz.DeconzSession.request", return_value=deepcopy(get_state_response)
), patch("pydeconz.DeconzSession.start", return_value=True):
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
return config_entry
async def test_gateway_setup(hass):
"""Successful setup."""
with patch(
"homeassistant.config_entries.ConfigEntries.async_forward_entry_setup",
return_value=True,
) as forward_entry_setup:
config_entry = await setup_deconz_integration(hass)
gateway = get_gateway_from_config_entry(hass, config_entry)
assert gateway.bridgeid == BRIDGEID
assert gateway.master is True
assert gateway.option_allow_clip_sensor is False
assert gateway.option_allow_deconz_groups is True
assert gateway.option_allow_new_devices is True
assert len(gateway.deconz_ids) == 0
assert len(hass.states.async_all()) == 0
assert forward_entry_setup.mock_calls[0][1] == (
config_entry,
BINARY_SENSOR_DOMAIN,
)
assert forward_entry_setup.mock_calls[1][1] == (config_entry, CLIMATE_DOMAIN)
assert forward_entry_setup.mock_calls[2][1] == (config_entry, COVER_DOMAIN)
assert forward_entry_setup.mock_calls[3][1] == (config_entry, FAN_DOMAIN)
assert forward_entry_setup.mock_calls[4][1] == (config_entry, LIGHT_DOMAIN)
assert forward_entry_setup.mock_calls[5][1] == (config_entry, LOCK_DOMAIN)
assert forward_entry_setup.mock_calls[6][1] == (config_entry, SCENE_DOMAIN)
assert forward_entry_setup.mock_calls[7][1] == (config_entry, SENSOR_DOMAIN)
assert forward_entry_setup.mock_calls[8][1] == (config_entry, SWITCH_DOMAIN)
async def test_gateway_retry(hass):
"""Retry setup."""
with patch(
"homeassistant.components.deconz.gateway.get_gateway",
side_effect=CannotConnect,
):
await setup_deconz_integration(hass)
assert not hass.data[DECONZ_DOMAIN]
async def test_gateway_setup_fails(hass):
"""Retry setup."""
with patch(
"homeassistant.components.deconz.gateway.get_gateway", side_effect=Exception
):
await setup_deconz_integration(hass)
assert not hass.data[DECONZ_DOMAIN]
async def test_connection_status_signalling(hass):
"""Make sure that connection status triggers a dispatcher send."""
config_entry = await setup_deconz_integration(hass)
gateway = get_gateway_from_config_entry(hass, config_entry)
event_call = Mock()
unsub = async_dispatcher_connect(hass, gateway.signal_reachable, event_call)
gateway.async_connection_status_callback(False)
await hass.async_block_till_done()
assert gateway.available is False
assert len(event_call.mock_calls) == 1
unsub()
async def test_update_address(hass):
"""Make sure that connection status triggers a dispatcher send."""
config_entry = await setup_deconz_integration(hass)
gateway = get_gateway_from_config_entry(hass, config_entry)
assert gateway.api.host == "1.2.3.4"
with patch(
"homeassistant.components.deconz.async_setup_entry",
return_value=True,
) as mock_setup_entry:
await hass.config_entries.flow.async_init(
DECONZ_DOMAIN,
data={
ATTR_SSDP_LOCATION: "http://2.3.4.5:80/",
ATTR_UPNP_MANUFACTURER_URL: DECONZ_MANUFACTURERURL,
ATTR_UPNP_SERIAL: BRIDGEID,
ATTR_UPNP_UDN: "uuid:456DEF",
},
context={"source": SOURCE_SSDP},
)
await hass.async_block_till_done()
assert gateway.api.host == "2.3.4.5"
assert len(mock_setup_entry.mock_calls) == 1
async def test_reset_after_successful_setup(hass):
"""Make sure that connection status triggers a dispatcher send."""
config_entry = await setup_deconz_integration(hass)
gateway = get_gateway_from_config_entry(hass, config_entry)
result = await gateway.async_reset()
await hass.async_block_till_done()
assert result is True
async def test_get_gateway(hass):
"""Successful call."""
with patch("pydeconz.DeconzSession.initialize", return_value=True):
assert await get_gateway(hass, ENTRY_CONFIG, Mock(), Mock())
async def test_get_gateway_fails_unauthorized(hass):
"""Failed call."""
with patch(
"pydeconz.DeconzSession.initialize",
side_effect=pydeconz.errors.Unauthorized,
), pytest.raises(AuthenticationRequired):
assert await get_gateway(hass, ENTRY_CONFIG, Mock(), Mock()) is False
async def test_get_gateway_fails_cannot_connect(hass):
"""Failed call."""
with patch(
"pydeconz.DeconzSession.initialize",
side_effect=pydeconz.errors.RequestError,
), pytest.raises(CannotConnect):
assert await get_gateway(hass, ENTRY_CONFIG, Mock(), Mock()) is False
|
import os
import unittest
import responses
import smart_open.http
import smart_open.s3
import smart_open.constants
BYTES = b'i tried so hard and got so far but in the end it doesn\'t even matter'
URL = 'http://localhost'
HTTPS_URL = 'https://localhost'
HEADERS = {
'Content-Length': str(len(BYTES)),
'Accept-Ranges': 'bytes',
}
def request_callback(request):
try:
range_string = request.headers['range']
except KeyError:
return (200, HEADERS, BYTES)
start, end = range_string.replace('bytes=', '').split('-', 1)
start = int(start)
if end:
end = int(end)
else:
end = len(BYTES)
return (200, HEADERS, BYTES[start:end])
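# Hedged note (not part of the original tests): request_callback emulates HTTP
# range requests for the fake server, e.g. a request carrying the header
# "Range: bytes=10-" is answered with (200, HEADERS, BYTES[10:len(BYTES)]),
# while a request without a Range header receives the full payload.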
@unittest.skipIf(os.environ.get('TRAVIS'), 'This test does not work on TravisCI for some reason')
class HttpTest(unittest.TestCase):
@responses.activate
def test_read_all(self):
responses.add(responses.GET, URL, body=BYTES, stream=True)
reader = smart_open.http.SeekableBufferedInputBase(URL)
read_bytes = reader.read()
self.assertEqual(BYTES, read_bytes)
@responses.activate
def test_seek_from_start(self):
responses.add_callback(responses.GET, URL, callback=request_callback)
reader = smart_open.http.SeekableBufferedInputBase(URL)
reader.seek(10)
self.assertEqual(reader.tell(), 10)
read_bytes = reader.read(size=10)
self.assertEqual(reader.tell(), 20)
self.assertEqual(BYTES[10:20], read_bytes)
reader.seek(20)
read_bytes = reader.read(size=10)
self.assertEqual(BYTES[20:30], read_bytes)
reader.seek(0)
read_bytes = reader.read(size=10)
self.assertEqual(BYTES[:10], read_bytes)
@responses.activate
def test_seek_from_current(self):
responses.add_callback(responses.GET, URL, callback=request_callback)
reader = smart_open.http.SeekableBufferedInputBase(URL)
reader.seek(10)
read_bytes = reader.read(size=10)
self.assertEqual(BYTES[10:20], read_bytes)
self.assertEqual(reader.tell(), 20)
reader.seek(10, whence=smart_open.constants.WHENCE_CURRENT)
self.assertEqual(reader.tell(), 30)
read_bytes = reader.read(size=10)
self.assertEqual(reader.tell(), 40)
self.assertEqual(BYTES[30:40], read_bytes)
@responses.activate
def test_seek_from_end(self):
responses.add_callback(responses.GET, URL, callback=request_callback)
reader = smart_open.http.SeekableBufferedInputBase(URL)
reader.seek(-10, whence=smart_open.constants.WHENCE_END)
self.assertEqual(reader.tell(), len(BYTES) - 10)
read_bytes = reader.read(size=10)
self.assertEqual(reader.tell(), len(BYTES))
self.assertEqual(BYTES[-10:], read_bytes)
@responses.activate
def test_headers_are_as_assigned(self):
responses.add_callback(responses.GET, URL, callback=request_callback)
# use default _HEADERS
x = smart_open.http.BufferedInputBase(URL)
# set different ones
x.headers['Accept-Encoding'] = 'compress, gzip'
x.headers['Other-Header'] = 'value'
        # use default again; the global defaults shouldn't be overwritten by x
y = smart_open.http.BufferedInputBase(URL)
# should be default headers
self.assertEqual(y.headers, {'Accept-Encoding': 'identity'})
# should be assigned headers
self.assertEqual(x.headers, {'Accept-Encoding': 'compress, gzip', 'Other-Header': 'value'})
@responses.activate
def test_headers(self):
"""Does the top-level http.open function handle headers correctly?"""
responses.add_callback(responses.GET, URL, callback=request_callback)
reader = smart_open.http.open(URL, 'rb', headers={'Foo': 'bar'})
self.assertEqual(reader.headers['Foo'], 'bar')
@responses.activate
def test_https_seek_start(self):
"""Did the seek start over HTTPS work?"""
responses.add_callback(responses.GET, HTTPS_URL, callback=request_callback)
with smart_open.open(HTTPS_URL, "rb") as fin:
read_bytes_1 = fin.read(size=10)
fin.seek(0)
read_bytes_2 = fin.read(size=10)
self.assertEqual(read_bytes_1, read_bytes_2)
@responses.activate
def test_https_seek_forward(self):
"""Did the seek forward over HTTPS work?"""
responses.add_callback(responses.GET, HTTPS_URL, callback=request_callback)
with smart_open.open(HTTPS_URL, "rb") as fin:
fin.seek(10)
read_bytes = fin.read(size=10)
self.assertEqual(BYTES[10:20], read_bytes)
@responses.activate
def test_https_seek_reverse(self):
"""Did the seek in reverse over HTTPS work?"""
responses.add_callback(responses.GET, HTTPS_URL, callback=request_callback)
with smart_open.open(HTTPS_URL, "rb") as fin:
read_bytes_1 = fin.read(size=10)
fin.seek(-10, whence=smart_open.constants.WHENCE_CURRENT)
read_bytes_2 = fin.read(size=10)
self.assertEqual(read_bytes_1, read_bytes_2)
|
import os
import os.path
import pytest
import jinja2
from PyQt5.QtCore import QUrl
import qutebrowser
class JSTester:
"""Common subclass providing basic functionality for all JS testers.
Attributes:
tab: The tab object which is used.
qtbot: The QtBot fixture from pytest-qt.
_jinja_env: The jinja2 environment used to get templates.
"""
def __init__(self, tab, qtbot, config_stub):
self.tab = tab
self.qtbot = qtbot
loader = jinja2.FileSystemLoader(os.path.dirname(__file__))
self._jinja_env = jinja2.Environment(loader=loader, autoescape=True)
# Make sure error logging via JS fails tests
config_stub.val.content.javascript.log = {
'info': 'info',
'error': 'error',
'unknown': 'error',
'warning': 'error'
}
def load(self, path, **kwargs):
"""Load and display the given jinja test data.
Args:
path: The path to the test file, relative to the javascript/
folder.
**kwargs: Passed to jinja's template.render().
"""
template = self._jinja_env.get_template(path)
try:
with self.qtbot.waitSignal(self.tab.load_finished,
timeout=2000) as blocker:
self.tab.set_html(template.render(**kwargs))
except self.qtbot.TimeoutError:
# Sometimes this fails for some odd reason on macOS, let's just try
# again.
print("Trying to load page again...")
with self.qtbot.waitSignal(self.tab.load_finished,
timeout=2000) as blocker:
self.tab.set_html(template.render(**kwargs))
assert blocker.args == [True]
def load_file(self, path: str, force: bool = False):
"""Load a file from disk.
Args:
path: The string path from disk to load (relative to this file)
force: Whether to force loading even if the file is invalid.
"""
self.load_url(QUrl.fromLocalFile(
os.path.join(os.path.dirname(__file__), path)), force)
def load_url(self, url: QUrl, force: bool = False):
"""Load a given QUrl.
Args:
url: The QUrl to load.
force: Whether to force loading even if the file is invalid.
"""
with self.qtbot.waitSignal(self.tab.load_finished,
timeout=2000) as blocker:
self.tab.load_url(url)
if not force:
assert blocker.args == [True]
def run_file(self, path: str, expected=None) -> None:
"""Run a javascript file.
Args:
path: The path to the JS file, relative to the qutebrowser package.
            expected: The value expected to be returned from the javascript execution
"""
base_path = os.path.dirname(os.path.abspath(qutebrowser.__file__))
with open(os.path.join(base_path, path), 'r', encoding='utf-8') as f:
source = f.read()
self.run(source, expected)
def run(self, source: str, expected, world=None) -> None:
"""Run the given javascript source.
Args:
source: The source to run as a string.
            expected: The value expected to be returned from the javascript execution
world: The scope the javascript will run in
"""
with self.qtbot.wait_callback() as callback:
self.tab.run_js_async(source, callback, world=world)
callback.assert_called_with(expected)
@pytest.fixture
def js_tester_webkit(webkit_tab, qtbot, config_stub):
"""Fixture to test javascript snippets in webkit."""
return JSTester(webkit_tab, qtbot, config_stub)
@pytest.fixture
def js_tester_webengine(webengine_tab, qtbot, config_stub):
"""Fixture to test javascript snippets in webengine."""
return JSTester(webengine_tab, qtbot, config_stub)
@pytest.fixture
def js_tester(web_tab, qtbot, config_stub):
"""Fixture to test javascript snippets with both backends."""
return JSTester(web_tab, qtbot, config_stub)
|
from unittest.mock import patch
from homeassistant import config_entries, data_entry_flow
from homeassistant.components import sonos
from homeassistant.setup import async_setup_component
from tests.common import mock_coro
async def test_creating_entry_sets_up_media_player(hass):
"""Test setting up Sonos loads the media player."""
with patch(
"homeassistant.components.sonos.media_player.async_setup_entry",
return_value=mock_coro(True),
) as mock_setup, patch("pysonos.discover", return_value=True):
result = await hass.config_entries.flow.async_init(
sonos.DOMAIN, context={"source": config_entries.SOURCE_USER}
)
# Confirmation form
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
await hass.async_block_till_done()
assert len(mock_setup.mock_calls) == 1
async def test_configuring_sonos_creates_entry(hass):
"""Test that specifying config will create an entry."""
with patch(
"homeassistant.components.sonos.async_setup_entry", return_value=mock_coro(True)
) as mock_setup, patch("pysonos.discover", return_value=True):
await async_setup_component(
hass,
sonos.DOMAIN,
{"sonos": {"media_player": {"interface_addr": "127.0.0.1"}}},
)
await hass.async_block_till_done()
assert len(mock_setup.mock_calls) == 1
async def test_not_configuring_sonos_not_creates_entry(hass):
"""Test that no config will not create an entry."""
with patch(
"homeassistant.components.sonos.async_setup_entry", return_value=mock_coro(True)
) as mock_setup, patch("pysonos.discover", return_value=True):
await async_setup_component(hass, sonos.DOMAIN, {})
await hass.async_block_till_done()
assert len(mock_setup.mock_calls) == 0
|
import logging
from homeassistant.const import (
CONF_MONITORED_CONDITIONS,
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_TEMPERATURE,
PERCENTAGE,
STATE_OFF,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
)
from . import CONF_SENSORS, DATA_NEST, DATA_NEST_CONFIG, NestSensorDevice
SENSOR_TYPES = ["humidity", "operation_mode", "hvac_state"]
TEMP_SENSOR_TYPES = ["temperature", "target"]
PROTECT_SENSOR_TYPES = [
"co_status",
"smoke_status",
"battery_health",
# color_status: "gray", "green", "yellow", "red"
"color_status",
]
STRUCTURE_SENSOR_TYPES = ["eta"]
STATE_HEAT = "heat"
STATE_COOL = "cool"
# security_state is a structure-level sensor, but it is only meaningful when
# a Nest Cam exists
STRUCTURE_CAMERA_SENSOR_TYPES = ["security_state"]
_VALID_SENSOR_TYPES = (
SENSOR_TYPES
+ TEMP_SENSOR_TYPES
+ PROTECT_SENSOR_TYPES
+ STRUCTURE_SENSOR_TYPES
+ STRUCTURE_CAMERA_SENSOR_TYPES
)
SENSOR_UNITS = {"humidity": PERCENTAGE}
SENSOR_DEVICE_CLASSES = {"humidity": DEVICE_CLASS_HUMIDITY}
VARIABLE_NAME_MAPPING = {"eta": "eta_begin", "operation_mode": "mode"}
VALUE_MAPPING = {
"hvac_state": {"heating": STATE_HEAT, "cooling": STATE_COOL, "off": STATE_OFF}
}
SENSOR_TYPES_DEPRECATED = ["last_ip", "local_ip", "last_connection", "battery_level"]
DEPRECATED_WEATHER_VARS = [
"weather_humidity",
"weather_temperature",
"weather_condition",
"wind_speed",
"wind_direction",
]
_SENSOR_TYPES_DEPRECATED = SENSOR_TYPES_DEPRECATED + DEPRECATED_WEATHER_VARS
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Nest Sensor.
No longer used.
"""
async def async_setup_legacy_entry(hass, entry, async_add_entities):
"""Set up a Nest sensor based on a config entry."""
nest = hass.data[DATA_NEST]
discovery_info = hass.data.get(DATA_NEST_CONFIG, {}).get(CONF_SENSORS, {})
# Add all available sensors if no Nest sensor config is set
if discovery_info == {}:
conditions = _VALID_SENSOR_TYPES
else:
conditions = discovery_info.get(CONF_MONITORED_CONDITIONS, {})
for variable in conditions:
if variable in _SENSOR_TYPES_DEPRECATED:
if variable in DEPRECATED_WEATHER_VARS:
wstr = (
"Nest no longer provides weather data like %s. See "
"https://www.home-assistant.io/integrations/#weather "
"for a list of other weather integrations to use." % variable
)
else:
wstr = (
f"{variable} is no a longer supported "
"monitored_conditions. See "
"https://www.home-assistant.io/integrations/"
"binary_sensor.nest/ for valid options."
)
_LOGGER.error(wstr)
def get_sensors():
"""Get the Nest sensors."""
all_sensors = []
for structure in nest.structures():
all_sensors += [
NestBasicSensor(structure, None, variable)
for variable in conditions
if variable in STRUCTURE_SENSOR_TYPES
]
for structure, device in nest.thermostats():
all_sensors += [
NestBasicSensor(structure, device, variable)
for variable in conditions
if variable in SENSOR_TYPES
]
all_sensors += [
NestTempSensor(structure, device, variable)
for variable in conditions
if variable in TEMP_SENSOR_TYPES
]
for structure, device in nest.smoke_co_alarms():
all_sensors += [
NestBasicSensor(structure, device, variable)
for variable in conditions
if variable in PROTECT_SENSOR_TYPES
]
structures_has_camera = {}
for structure, device in nest.cameras():
structures_has_camera[structure] = True
for structure in structures_has_camera:
all_sensors += [
NestBasicSensor(structure, None, variable)
for variable in conditions
if variable in STRUCTURE_CAMERA_SENSOR_TYPES
]
return all_sensors
async_add_entities(await hass.async_add_executor_job(get_sensors), True)
class NestBasicSensor(NestSensorDevice):
"""Representation a basic Nest sensor."""
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def device_class(self):
"""Return the device class of the sensor."""
return SENSOR_DEVICE_CLASSES.get(self.variable)
def update(self):
"""Retrieve latest state."""
self._unit = SENSOR_UNITS.get(self.variable)
if self.variable in VARIABLE_NAME_MAPPING:
self._state = getattr(self.device, VARIABLE_NAME_MAPPING[self.variable])
elif self.variable in VALUE_MAPPING:
state = getattr(self.device, self.variable)
self._state = VALUE_MAPPING[self.variable].get(state, state)
elif self.variable in PROTECT_SENSOR_TYPES and self.variable != "color_status":
# keep backward compatibility
state = getattr(self.device, self.variable)
self._state = state.capitalize() if state is not None else None
else:
self._state = getattr(self.device, self.variable)
class NestTempSensor(NestSensorDevice):
"""Representation of a Nest Temperature sensor."""
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def device_class(self):
"""Return the device class of the sensor."""
return DEVICE_CLASS_TEMPERATURE
def update(self):
"""Retrieve latest state."""
if self.device.temperature_scale == "C":
self._unit = TEMP_CELSIUS
else:
self._unit = TEMP_FAHRENHEIT
temp = getattr(self.device, self.variable)
        if temp is None:
            self._state = None
            return
        if isinstance(temp, tuple):
low, high = temp
self._state = f"{int(low)}-{int(high)}"
else:
self._state = round(temp, 1)
|
import numpy as np
import unittest
from chainer import testing
from chainercv.experimental.links.model.pspnet import convolution_crop
class TestConvolutionCrop(unittest.TestCase):
def test_convolution_crop(self):
size = (8, 6)
stride = (8, 6)
n_channel = 3
img = np.random.uniform(size=(n_channel, 16, 12)).astype(np.float32)
crop_imgs, param = convolution_crop(
img, size, stride, return_param=True)
self.assertEqual(crop_imgs.shape, (4, n_channel) + size)
self.assertEqual(crop_imgs.dtype, np.float32)
for y in range(2):
for x in range(2):
self.assertEqual(param['y_slices'][2 * y + x].start, 8 * y)
self.assertEqual(
param['y_slices'][2 * y + x].stop, 8 * (y + 1))
self.assertEqual(param['x_slices'][2 * y + x].start, 6 * x)
self.assertEqual(
param['x_slices'][2 * y + x].stop, 6 * (x + 1))
for i in range(4):
self.assertEqual(param['crop_y_slices'][i].start, 0)
self.assertEqual(param['crop_y_slices'][i].stop, 8)
self.assertEqual(param['crop_x_slices'][i].start, 0)
self.assertEqual(param['crop_x_slices'][i].stop, 6)
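        # Hedged note (not part of the original test): with a 16x12 input, an
        # 8x6 crop size and an 8x6 stride, convolution_crop tiles the image
        # into a 2x2 grid, which is why 4 crops of shape (n_channel, 8, 6) and
        # the slice bounds above are expected.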
testing.run_module(__name__, __file__)
|
import re
import click
mention_re = re.compile(r'@<(?:(?P<name>\S+?)\s+)?(?P<url>\S+?://.*?)>')
short_mention_re = re.compile(r'@(?P<name>\w+)')
def get_source_by_url(url):
conf = click.get_current_context().obj["conf"]
if url == conf.twturl:
return conf.source
return next((source for source in conf.following if url == source.url), None)
def get_source_by_name(nick):
nick = nick.lower()
conf = click.get_current_context().obj["conf"]
if nick == conf.nick and conf.twturl:
return conf.source
return next((source for source in conf.following if nick == source.nick), None)
def expand_mentions(text, embed_names=True):
"""Searches the given text for mentions and expands them.
For example:
"@source.nick" will be expanded to "@<source.nick source.url>".
"""
if embed_names:
mention_format = "@<{name} {url}>"
else:
mention_format = "@<{url}>"
def handle_mention(match):
source = get_source_by_name(match.group(1))
if source is None:
return "@{0}".format(match.group(1))
return mention_format.format(
name=source.nick,
url=source.url)
return short_mention_re.sub(handle_mention, text)
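# Hedged usage sketch (not part of the original module; the nick and URL are
# illustrative): assuming the current click context resolves the nick "bob" to
# a followed source at "http://example.org/twtxt.txt",
#   expand_mentions("hi @bob")                    -> "hi @<bob http://example.org/twtxt.txt>"
#   expand_mentions("hi @bob", embed_names=False) -> "hi @<http://example.org/twtxt.txt>"
# Unknown nicks are left untouched as plain "@nick".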
def format_mention(name, url):
source = get_source_by_url(url)
if source:
if source.nick == click.get_current_context().obj["conf"].nick:
return click.style("@{0}".format(source.nick), fg="magenta", bold=True)
else:
return click.style("@{0}".format(source.nick), bold=True)
elif name:
return "@{0}".format(name)
else:
return "@<{0}>".format(url)
def format_mentions(text, format_callback=format_mention):
"""Searches the given text for mentions generated by `expand_mention()` and returns a human-readable form.
For example:
"@<bob http://example.org/twtxt.txt>" will result in "@bob"
If you follow a source: source.nick will be bold
If you are the mentioned source: source.nick will be bold and coloured
If nothing from the above is true: nick will be unstyled
If nothing from the above is true and nick is not given: url will be used
"""
def handle_mention(match):
name, url = match.groups()
return format_callback(name, url)
return mention_re.sub(handle_mention, text)
|
from pylutron_caseta.smartbridge import Smartbridge
from homeassistant import config_entries, data_entry_flow
from homeassistant.components.lutron_caseta import DOMAIN
import homeassistant.components.lutron_caseta.config_flow as CasetaConfigFlow
from homeassistant.components.lutron_caseta.const import (
CONF_CA_CERTS,
CONF_CERTFILE,
CONF_KEYFILE,
ERROR_CANNOT_CONNECT,
STEP_IMPORT_FAILED,
)
from homeassistant.const import CONF_HOST
from tests.async_mock import AsyncMock, patch
from tests.common import MockConfigEntry
class MockBridge:
"""Mock Lutron bridge that emulates configured connected status."""
def __init__(self, can_connect=True):
"""Initialize MockBridge instance with configured mock connectivity."""
self.can_connect = can_connect
self.is_currently_connected = False
async def connect(self):
"""Connect the mock bridge."""
if self.can_connect:
self.is_currently_connected = True
def is_connected(self):
"""Return whether the mock bridge is connected."""
return self.is_currently_connected
async def close(self):
"""Close the mock bridge connection."""
self.is_currently_connected = False
async def test_bridge_import_flow(hass):
"""Test a bridge entry gets created and set up during the import flow."""
entry_mock_data = {
CONF_HOST: "1.1.1.1",
CONF_KEYFILE: "",
CONF_CERTFILE: "",
CONF_CA_CERTS: "",
}
with patch(
"homeassistant.components.lutron_caseta.async_setup_entry",
return_value=True,
) as mock_setup_entry, patch(
"homeassistant.components.lutron_caseta.async_setup", return_value=True
), patch.object(
Smartbridge, "create_tls"
) as create_tls:
create_tls.return_value = MockBridge(can_connect=True)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data=entry_mock_data,
)
assert result["type"] == "create_entry"
assert result["title"] == CasetaConfigFlow.ENTRY_DEFAULT_TITLE
assert result["data"] == entry_mock_data
await hass.async_block_till_done()
assert len(mock_setup_entry.mock_calls) == 1
async def test_bridge_cannot_connect(hass):
"""Test checking for connection and cannot_connect error."""
entry_mock_data = {
CONF_HOST: "not.a.valid.host",
CONF_KEYFILE: "",
CONF_CERTFILE: "",
CONF_CA_CERTS: "",
}
with patch.object(Smartbridge, "create_tls") as create_tls:
create_tls.return_value = MockBridge(can_connect=False)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data=entry_mock_data,
)
assert result["type"] == "form"
assert result["step_id"] == STEP_IMPORT_FAILED
assert result["errors"] == {"base": ERROR_CANNOT_CONNECT}
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == CasetaConfigFlow.ABORT_REASON_CANNOT_CONNECT
async def test_bridge_cannot_connect_unknown_error(hass):
"""Test checking for connection and encountering an unknown error."""
entry_mock_data = {
CONF_HOST: "",
CONF_KEYFILE: "",
CONF_CERTFILE: "",
CONF_CA_CERTS: "",
}
with patch.object(Smartbridge, "create_tls") as create_tls:
mock_bridge = MockBridge()
mock_bridge.connect = AsyncMock(side_effect=Exception())
create_tls.return_value = mock_bridge
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data=entry_mock_data,
)
assert result["type"] == "form"
assert result["step_id"] == STEP_IMPORT_FAILED
assert result["errors"] == {"base": ERROR_CANNOT_CONNECT}
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == CasetaConfigFlow.ABORT_REASON_CANNOT_CONNECT
async def test_duplicate_bridge_import(hass):
"""Test that creating a bridge entry with a duplicate host errors."""
entry_mock_data = {
CONF_HOST: "1.1.1.1",
CONF_KEYFILE: "",
CONF_CERTFILE: "",
CONF_CA_CERTS: "",
}
mock_entry = MockConfigEntry(domain=DOMAIN, data=entry_mock_data)
mock_entry.add_to_hass(hass)
with patch(
"homeassistant.components.lutron_caseta.async_setup_entry",
return_value=True,
) as mock_setup_entry:
# Mock entry added, try initializing flow with duplicate host
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data=entry_mock_data,
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == CasetaConfigFlow.ABORT_REASON_ALREADY_CONFIGURED
assert len(mock_setup_entry.mock_calls) == 0
|
from homeassistant.components.directv.const import CONF_RECEIVER_ID, DOMAIN
from homeassistant.components.ssdp import ATTR_SSDP_LOCATION
from homeassistant.const import (
CONF_HOST,
CONTENT_TYPE_JSON,
HTTP_FORBIDDEN,
HTTP_INTERNAL_SERVER_ERROR,
)
from homeassistant.helpers.typing import HomeAssistantType
from tests.common import MockConfigEntry, load_fixture
from tests.test_util.aiohttp import AiohttpClientMocker
HOST = "127.0.0.1"
RECEIVER_ID = "028877455858"
SSDP_LOCATION = "http://127.0.0.1/"
UPNP_SERIAL = "RID-028877455858"
MOCK_CONFIG = {DOMAIN: [{CONF_HOST: HOST}]}
MOCK_SSDP_DISCOVERY_INFO = {ATTR_SSDP_LOCATION: SSDP_LOCATION}
MOCK_USER_INPUT = {CONF_HOST: HOST}
def mock_connection(aioclient_mock: AiohttpClientMocker) -> None:
"""Mock the DirecTV connection for Home Assistant."""
aioclient_mock.get(
f"http://{HOST}:8080/info/getVersion",
text=load_fixture("directv/info-get-version.json"),
headers={"Content-Type": CONTENT_TYPE_JSON},
)
aioclient_mock.get(
f"http://{HOST}:8080/info/getLocations",
text=load_fixture("directv/info-get-locations.json"),
headers={"Content-Type": CONTENT_TYPE_JSON},
)
aioclient_mock.get(
f"http://{HOST}:8080/info/mode",
params={"clientAddr": "B01234567890"},
text=load_fixture("directv/info-mode-standby.json"),
headers={"Content-Type": CONTENT_TYPE_JSON},
)
aioclient_mock.get(
f"http://{HOST}:8080/info/mode",
params={"clientAddr": "9XXXXXXXXXX9"},
status=HTTP_INTERNAL_SERVER_ERROR,
text=load_fixture("directv/info-mode-error.json"),
headers={"Content-Type": CONTENT_TYPE_JSON},
)
aioclient_mock.get(
f"http://{HOST}:8080/info/mode",
text=load_fixture("directv/info-mode.json"),
headers={"Content-Type": CONTENT_TYPE_JSON},
)
aioclient_mock.get(
f"http://{HOST}:8080/remote/processKey",
text=load_fixture("directv/remote-process-key.json"),
headers={"Content-Type": CONTENT_TYPE_JSON},
)
aioclient_mock.get(
f"http://{HOST}:8080/tv/tune",
text=load_fixture("directv/tv-tune.json"),
headers={"Content-Type": CONTENT_TYPE_JSON},
)
aioclient_mock.get(
f"http://{HOST}:8080/tv/getTuned",
params={"clientAddr": "2CA17D1CD30X"},
text=load_fixture("directv/tv-get-tuned.json"),
headers={"Content-Type": CONTENT_TYPE_JSON},
)
aioclient_mock.get(
f"http://{HOST}:8080/tv/getTuned",
params={"clientAddr": "A01234567890"},
text=load_fixture("directv/tv-get-tuned-music.json"),
headers={"Content-Type": CONTENT_TYPE_JSON},
)
aioclient_mock.get(
f"http://{HOST}:8080/tv/getTuned",
params={"clientAddr": "C01234567890"},
status=HTTP_FORBIDDEN,
text=load_fixture("directv/tv-get-tuned-restricted.json"),
headers={"Content-Type": CONTENT_TYPE_JSON},
)
aioclient_mock.get(
f"http://{HOST}:8080/tv/getTuned",
text=load_fixture("directv/tv-get-tuned-movie.json"),
headers={"Content-Type": CONTENT_TYPE_JSON},
)
async def setup_integration(
hass: HomeAssistantType,
aioclient_mock: AiohttpClientMocker,
skip_entry_setup: bool = False,
setup_error: bool = False,
) -> MockConfigEntry:
"""Set up the DirecTV integration in Home Assistant."""
if setup_error:
aioclient_mock.get(
f"http://{HOST}:8080/info/getVersion", status=HTTP_INTERNAL_SERVER_ERROR
)
else:
mock_connection(aioclient_mock)
entry = MockConfigEntry(
domain=DOMAIN,
unique_id=RECEIVER_ID,
data={CONF_HOST: HOST, CONF_RECEIVER_ID: RECEIVER_ID},
)
entry.add_to_hass(hass)
if not skip_entry_setup:
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
return entry
|
import urwid.curses_display
import urwid.raw_display
import urwid.web_display
import urwid
import sys
if urwid.web_display.is_web_request():
Screen = urwid.web_display.Screen
else:
if len(sys.argv)>1 and sys.argv[1][:1] == "r":
Screen = urwid.raw_display.Screen
else:
Screen = urwid.curses_display.Screen
def key_test():
screen = Screen()
header = urwid.Text("Values from get_input(). Q exits.")
header = urwid.AttrWrap(header,'header')
lw = urwid.SimpleListWalker([])
listbox = urwid.ListBox(lw)
listbox = urwid.AttrWrap(listbox, 'listbox')
top = urwid.Frame(listbox, header)
def input_filter(keys, raw):
if 'q' in keys or 'Q' in keys:
raise urwid.ExitMainLoop
t = []
a = []
for k in keys:
if type(k) == tuple:
out = []
for v in k:
if out:
out += [', ']
out += [('key',repr(v))]
t += ["("] + out + [")"]
else:
t += ["'",('key',k),"' "]
rawt = urwid.Text(", ".join(["%d"%r for r in raw]))
if t:
lw.append(
urwid.Columns([
('weight',2,urwid.Text(t)),
rawt])
)
listbox.set_focus(len(lw)-1,'above')
return keys
loop = urwid.MainLoop(top, [
('header', 'black', 'dark cyan', 'standout'),
('key', 'yellow', 'dark blue', 'bold'),
('listbox', 'light gray', 'black' ),
], screen, input_filter=input_filter)
try:
old = screen.tty_signal_keys('undefined','undefined',
'undefined','undefined','undefined')
loop.run()
finally:
screen.tty_signal_keys(*old)
def main():
urwid.web_display.set_preferences('Input Test')
if urwid.web_display.handle_short_request():
return
key_test()
if '__main__'==__name__ or urwid.web_display.is_web_request():
main()
|
import pytest
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
@pytest.fixture
def time_zone():
"""Fixture for time zone."""
return dt_util.get_time_zone("America/New_York")
async def test_time(hass, time_zone):
"""Test the time at a different location."""
config = {"sensor": {"platform": "worldclock", "time_zone": "America/New_York"}}
assert await async_setup_component(
hass,
"sensor",
config,
)
await hass.async_block_till_done()
state = hass.states.get("sensor.worldclock_sensor")
assert state is not None
assert state.state == dt_util.now(time_zone=time_zone).strftime("%H:%M")
async def test_time_format(hass, time_zone):
"""Test time_format setting."""
time_format = "%a, %b %d, %Y %I:%M %p"
config = {
"sensor": {
"platform": "worldclock",
"time_zone": "America/New_York",
"time_format": time_format,
}
}
assert await async_setup_component(
hass,
"sensor",
config,
)
await hass.async_block_till_done()
state = hass.states.get("sensor.worldclock_sensor")
assert state is not None
assert state.state == dt_util.now(time_zone=time_zone).strftime(time_format)
|
import pytest
from django.contrib.auth import get_user_model
from django.contrib.auth.models import AnonymousUser
from django.contrib.sessions.backends.cache import SessionStore
from shop.models.customer import VisitingCustomer
from testshop.models import Customer
@pytest.mark.django_db
def test_visiting_customer(rf, session):
"""
Check that an anonymous user creates a visiting customer.
"""
request = rf.get('/', follow=True)
request.user = AnonymousUser()
request.session = session
customer = Customer.objects.get_from_request(request)
customer.save()
assert isinstance(customer, VisitingCustomer)
assert str(customer) == 'Visitor'
assert customer.is_anonymous is True
assert customer.is_authenticated is False
assert customer.is_recognized is False
assert customer.is_guest is False
assert customer.is_registered is False
assert customer.is_visitor is True
@pytest.mark.django_db
def test_unrecognized_customer(rf, session):
"""
Check that an anonymous user creates an unrecognized customer.
"""
request = rf.get('/', follow=True)
request.user = AnonymousUser()
request.session = session
customer = Customer.objects.get_or_create_from_request(request)
assert isinstance(customer, Customer)
assert customer.is_anonymous is True
assert customer.is_authenticated is False
assert customer.is_recognized is False
assert customer.is_guest is False
assert customer.is_registered is False
assert customer.is_visitor is False
@pytest.mark.django_db
def test_unexpired_customer(rf, session):
"""
Check that an anonymous user creates an unrecognized customer using the current session-key.
"""
request = rf.get('/', follow=True)
request.user = AnonymousUser()
request.session = SessionStore()
customer = Customer.objects.get_or_create_from_request(request)
assert isinstance(customer, Customer)
assert customer.is_anonymous is True
assert customer.is_expired is False
assert Customer.objects.decode_session_key(customer.user.username) == request.session.session_key
customer.delete()
with pytest.raises(Customer.DoesNotExist):
Customer.objects.get(pk=customer.pk)
with pytest.raises(get_user_model().DoesNotExist):
get_user_model().objects.get(pk=customer.pk)
@pytest.mark.django_db
def test_authenticated_purchasing_user(user_factory, rf, session):
"""
Check that an authenticated Django user creates a recognized django-SHOP customer.
"""
user = user_factory()
with pytest.raises(Customer.DoesNotExist):
Customer.objects.get(pk=user.pk)
request = rf.get('/', follow=True)
request.user = user
request.session = session
customer = Customer.objects.get_or_create_from_request(request)
assert isinstance(customer, Customer)
assert customer.is_anonymous is False
assert customer.is_authenticated is True
assert customer.is_recognized is True
assert customer.is_guest is False
assert customer.is_registered is True
assert customer.is_visitor is False
@pytest.mark.django_db
def test_authenticated_visiting_user(user_factory, rf, session):
"""
Check that an authenticated user creates a recognized customer visiting the site.
"""
user = user_factory()
with pytest.raises(Customer.DoesNotExist):
Customer.objects.get(pk=user.pk)
request = rf.get('/', follow=True)
request.user = user
request.session = SessionStore()
customer = Customer.objects.get_from_request(request)
assert isinstance(customer, Customer)
assert customer.is_authenticated is True
assert customer.is_recognized is True
assert customer.is_registered is True
@pytest.mark.django_db
def test_authenticated_visiting_customer(customer_factory, rf, session):
"""
    Check that an existing, authenticated customer is recognized when visiting the site.
"""
request = rf.get('/', follow=True)
request.user = customer_factory().user
request.session = session
customer = Customer.objects.get_from_request(request)
assert isinstance(customer, Customer)
assert customer.pk == request.user.pk
assert customer.is_authenticated is True
assert customer.is_recognized is True
assert customer.is_registered is True
|
import os
import pytest
from molecule import config
from molecule.driver import azure
# NOTE(retr0h): The use of the `patched_config_validate` fixture disables
# config.Config._validate from executing, preventing odd side effects
# throughout the patched.assert_called unit tests.
@pytest.fixture
def _instance(patched_config_validate, config_instance):
return azure.Azure(config_instance)
def test_config_private_member(_instance):
assert isinstance(_instance._config, config.Config)
def test_testinfra_options_property(_instance):
assert {
'connection': 'ansible',
'ansible-inventory': _instance._config.provisioner.inventory_file
} == _instance.testinfra_options
def test_name_property(_instance):
assert 'azure' == _instance.name
def test_options_property(_instance):
x = {'managed': True}
assert x == _instance.options
def test_login_cmd_template_property(_instance):
x = ('ssh {address} -l {user} -p {port} -i {identity_file} '
'-o UserKnownHostsFile=/dev/null '
'-o ControlMaster=auto '
'-o ControlPersist=60s '
'-o IdentitiesOnly=yes '
'-o StrictHostKeyChecking=no')
assert x == _instance.login_cmd_template
def test_safe_files_property(_instance):
x = [
os.path.join(_instance._config.scenario.ephemeral_directory,
'instance_config.yml'),
]
assert x == _instance.safe_files
def test_default_safe_files_property(_instance):
x = [
os.path.join(_instance._config.scenario.ephemeral_directory,
'instance_config.yml'),
]
assert x == _instance.default_safe_files
def test_delegated_property(_instance):
assert not _instance.delegated
def test_managed_property(_instance):
assert _instance.managed
def test_default_ssh_connection_options_property(_instance):
x = [
'-o UserKnownHostsFile=/dev/null',
'-o ControlMaster=auto',
'-o ControlPersist=60s',
'-o IdentitiesOnly=yes',
'-o StrictHostKeyChecking=no',
]
assert x == _instance.default_ssh_connection_options
def test_login_options(mocker, _instance):
m = mocker.patch('molecule.driver.azure.Azure._get_instance_config')
m.return_value = {
'instance': 'foo',
'address': '172.16.0.2',
'user': 'cloud-user',
'port': 22,
'identity_file': '/foo/bar',
}
x = {
'instance': 'foo',
'address': '172.16.0.2',
'user': 'cloud-user',
'port': 22,
'identity_file': '/foo/bar',
}
assert x == _instance.login_options('foo')
def test_ansible_connection_options(mocker, _instance):
m = mocker.patch('molecule.driver.azure.Azure._get_instance_config')
m.return_value = {
'instance': 'foo',
'address': '172.16.0.2',
'user': 'cloud-user',
'port': 22,
'identity_file': '/foo/bar',
}
x = {
'ansible_host':
'172.16.0.2',
'ansible_port':
22,
'ansible_user':
'cloud-user',
'ansible_private_key_file':
'/foo/bar',
'connection':
'ssh',
'ansible_ssh_common_args': ('-o UserKnownHostsFile=/dev/null '
'-o ControlMaster=auto '
'-o ControlPersist=60s '
'-o IdentitiesOnly=yes '
'-o StrictHostKeyChecking=no'),
}
assert x == _instance.ansible_connection_options('foo')
def test_ansible_connection_options_handles_missing_instance_config(
mocker, _instance):
m = mocker.patch('molecule.util.safe_load_file')
m.side_effect = IOError
assert {} == _instance.ansible_connection_options('foo')
def test_ansible_connection_options_handles_missing_results_key(
mocker, _instance):
m = mocker.patch('molecule.util.safe_load_file')
m.side_effect = StopIteration
assert {} == _instance.ansible_connection_options('foo')
def test_instance_config_property(_instance):
x = os.path.join(_instance._config.scenario.ephemeral_directory,
'instance_config.yml')
assert x == _instance.instance_config
def test_ssh_connection_options_property(_instance):
x = [
'-o UserKnownHostsFile=/dev/null',
'-o ControlMaster=auto',
'-o ControlPersist=60s',
'-o IdentitiesOnly=yes',
'-o StrictHostKeyChecking=no',
]
assert x == _instance.ssh_connection_options
def test_status(mocker, _instance):
result = _instance.status()
assert 2 == len(result)
assert result[0].instance_name == 'instance-1'
assert result[0].driver_name == 'azure'
assert result[0].provisioner_name == 'ansible'
assert result[0].scenario_name == 'default'
assert result[0].created == 'false'
assert result[0].converged == 'false'
assert result[1].instance_name == 'instance-2'
assert result[1].driver_name == 'azure'
assert result[1].provisioner_name == 'ansible'
assert result[1].scenario_name == 'default'
assert result[1].created == 'false'
assert result[1].converged == 'false'
def test_get_instance_config(mocker, _instance):
m = mocker.patch('molecule.util.safe_load_file')
m.return_value = [{
'instance': 'foo',
}, {
'instance': 'bar',
}]
x = {
'instance': 'foo',
}
assert x == _instance._get_instance_config('foo')
def test_created(_instance):
assert 'false' == _instance._created()
def test_converged(_instance):
assert 'false' == _instance._converged()
|
from __future__ import unicode_literals
import posixpath
from os import path
from docutils import nodes
from docutils.parsers.rst.directives import flag, images, nonnegative_int
from sphinx.errors import SphinxError
from sphinx.util import ensuredir, logging, relative_uri
try:
from hashlib import sha1 as sha
except ImportError:
from sha import sha
try:
import aafigure
except ImportError:
aafigure = None
logger = logging.getLogger(__name__)
DEFAULT_FORMATS = dict(html='svg', latex='pdf', text=None)
def merge_dict(dst, src):
for (k, v) in src.items():
if k not in dst:
dst[k] = v
return dst
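# Illustrative note (not part of the original extension): merge_dict() only
# copies keys that are missing from ``dst``; existing entries win. Merging
# DEFAULT_FORMATS into {'html': 'png'} would therefore yield
# {'html': 'png', 'latex': 'pdf', 'text': None}.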
def get_basename(text, options, prefix='aafig'):
options = options.copy()
if 'format' in options:
del options['format']
hashkey = text + str(options)
id = sha(hashkey.encode('utf-8')).hexdigest()
return '%s-%s' % (prefix, id)
class AafigError(SphinxError):
category = 'aafig error'
class AafigDirective(images.Image):
"""
Directive to insert an ASCII art figure to be rendered by aafigure.
"""
has_content = True
required_arguments = 0
own_option_spec = dict(
line_width=float,
background=str,
foreground=str,
fill=str,
aspect=nonnegative_int,
textual=flag,
proportional=flag,
)
option_spec = images.Image.option_spec.copy()
option_spec.update(own_option_spec)
def run(self):
aafig_options = dict()
        own_options_keys = list(self.own_option_spec.keys()) + ['scale']
for (k, v) in self.options.items():
if k in own_options_keys:
# convert flags to booleans
if v is None:
v = True
# convert percentage to float
if k == 'scale' or k == 'aspect':
v = float(v) / 100.0
aafig_options[k] = v
del self.options[k]
self.arguments = ['']
(image_node,) = images.Image.run(self)
if isinstance(image_node, nodes.system_message):
return [image_node]
text = '\n'.join(self.content)
image_node.aafig = dict(options=aafig_options, text=text)
return [image_node]
def render_aafig_images(app, doctree):
format_map = app.builder.config.aafig_format
merge_dict(format_map, DEFAULT_FORMATS)
if aafigure is None:
logger.warn(
'aafigure module not installed, ASCII art images '
            'will be rendered as literal text'
)
for img in doctree.traverse(nodes.image):
if not hasattr(img, 'aafig'):
continue
if aafigure is None:
continue
options = img.aafig['options']
text = img.aafig['text']
format = app.builder.format
merge_dict(options, app.builder.config.aafig_default_options)
if format in format_map:
options['format'] = format_map[format]
else:
logger.warn(
'unsupported builder format "%s", please '
'add a custom entry in aafig_format config '
'option for this builder' % format
)
img.replace_self(nodes.literal_block(text, text))
continue
if options['format'] is None:
img.replace_self(nodes.literal_block(text, text))
continue
try:
fname, outfn, id, extra = render_aafigure(app, text, options)
except AafigError as exc:
logger.warn('aafigure error: ' + str(exc))
img.replace_self(nodes.literal_block(text, text))
continue
img['uri'] = fname
# FIXME: find some way to avoid this hack in aafigure
if extra:
(width, height) = [x.split('"')[1] for x in extra.split()]
if 'width' not in img:
img['width'] = width
if 'height' not in img:
img['height'] = height
def render_aafigure(app, text, options):
"""
Render an ASCII art figure into the requested format output file.
"""
if aafigure is None:
raise AafigError('aafigure module not installed')
    fname = '%s.%s' % (get_basename(text, options), options['format'])
if app.builder.format == 'html':
# HTML
imgpath = relative_uri(app.builder.env.docname, '_images')
relfn = posixpath.join(imgpath, fname)
outfn = path.join(app.builder.outdir, '_images', fname)
else:
# Non-HTML
if app.builder.format != 'latex':
logger.warn(
'aafig: the builder format %s is not officially '
                'supported, so aafigure images may not work. '
                'Please report problems and working builders to '
                'avoid this warning in the future' % app.builder.format
)
relfn = fname
outfn = path.join(app.builder.outdir, fname)
metadata_fname = '%s.aafig' % outfn
try:
if path.isfile(outfn):
extra = None
if options['format'].lower() == 'svg':
f = None
try:
try:
f = open(metadata_fname, 'r')
extra = f.read()
except:
raise AafigError()
finally:
if f is not None:
f.close()
return relfn, outfn, id, extra
except AafigError:
pass
ensuredir(path.dirname(outfn))
try:
(visitor, output) = aafigure.render(text, outfn, options)
output.close()
except aafigure.UnsupportedFormatError as e:
raise AafigError(str(e))
extra = None
if options['format'].lower() == 'svg':
extra = visitor.get_size_attrs()
f = open(metadata_fname, 'w')
f.write(extra)
f.close()
return relfn, outfn, id, extra
def setup(app):
app.add_directive('aafig', AafigDirective)
app.connect('doctree-read', render_aafig_images)
app.add_config_value('aafig_format', DEFAULT_FORMATS, 'html')
app.add_config_value('aafig_default_options', dict(), 'html')
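# Typical Sphinx ``conf.py`` usage (an illustrative sketch, not part of this
# module): enable the extension by listing this module's import path in
# ``extensions`` (shown here as 'aafig' for illustration) and, optionally,
# override the per-builder output formats registered in setup() above:
#
#     extensions = ['aafig']
#     aafig_format = dict(html='svg', latex='pdf', text=None)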
# vim: set expandtab shiftwidth=4 softtabstop=4 :
|
import logging
from kalliope.core import Utils
logging.basicConfig()
logger = logging.getLogger("kalliope")
class PlayerLauncher(object):
def __init__(self):
pass
@staticmethod
def get_player(settings):
"""
Instantiate a Player
:param settings: setting object
:type settings: Settings
:return: the Player instance
:rtype: Player
"""
player_instance = None
for player in settings.players:
if player.name == settings.default_player_name:
logger.debug("PlayerLauncher: Start player %s with parameters: %s" % (player.name, player.parameters))
player_instance = Utils.get_dynamic_class_instantiation(package_name="players",
module_name=player.name,
parameters=player.parameters)
break
return player_instance
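# Illustrative call-site sketch (assumed, not part of this module): the loader
# is typically handed the loaded Settings object, e.g.
#     player = PlayerLauncher.get_player(settings=settings)
# where ``settings.players`` holds player objects exposing ``name`` and
# ``parameters`` and ``settings.default_player_name`` names the one to start.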
|
import logging
from threading import Thread
from kalliope.core import SignalModule
logging.basicConfig()
logger = logging.getLogger("kalliope")
class Geolocation(SignalModule, Thread):
def __init__(self, **kwargs):
super(Geolocation, self).__init__(**kwargs)
Thread.__init__(self, name=Geolocation)
def run(self):
logger.debug("[Geolocalisation] Loading ...")
self.list_synapses_with_geolocalion = list(super(Geolocation, self).get_list_synapse())
@staticmethod
def check_parameters(parameters):
"""
        Overridden method.
        Receive a dict of parameters from a geolocation signal and check that they are valid.
:param parameters: dict of parameters
:return: True if parameters are valid
"""
# check mandatory parameters
mandatory_parameters = ["latitude", "longitude", "radius"]
return all(key in parameters for key in mandatory_parameters)
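# Minimal usage sketch (illustrative, not part of Kalliope): check_parameters()
# only verifies that the three mandatory keys are present in the dict.
if __name__ == "__main__":
    assert Geolocation.check_parameters(
        {"latitude": 48.85, "longitude": 2.35, "radius": 100})
    assert not Geolocation.check_parameters({"latitude": 48.85})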
|
from redbot.cogs.permissions.permissions import Permissions, GLOBAL
def test_schema_update():
old = {
str(GLOBAL): {
"owner_models": {
"cogs": {
"Admin": {"allow": [78631113035100160], "deny": [96733288462286848]},
"Audio": {"allow": [133049272517001216], "default": "deny"},
},
"commands": {
"cleanup bot": {"allow": [78631113035100160], "default": "deny"},
"ping": {
"allow": [96733288462286848],
"deny": [96733288462286848],
"default": "allow",
},
},
}
},
"43733288462286848": {
"owner_models": {
"cogs": {
"Admin": {
"allow": [24231113035100160],
"deny": [35533288462286848, 24231113035100160],
},
"General": {"allow": [133049272517001216], "default": "deny"},
},
"commands": {
"cleanup bot": {"allow": [17831113035100160], "default": "allow"},
"set adminrole": {
"allow": [87733288462286848],
"deny": [95433288462286848],
"default": "allow",
},
},
}
},
}
new = Permissions._get_updated_schema(old)
assert new == (
{
"Admin": {
str(GLOBAL): {"78631113035100160": True, "96733288462286848": False},
"43733288462286848": {"24231113035100160": True, "35533288462286848": False},
},
"Audio": {str(GLOBAL): {"133049272517001216": True, "default": False}},
"General": {"43733288462286848": {"133049272517001216": True, "default": False}},
},
{
"cleanup bot": {
str(GLOBAL): {"78631113035100160": True, "default": False},
"43733288462286848": {"17831113035100160": True, "default": True},
},
"ping": {str(GLOBAL): {"96733288462286848": True, "default": True}},
"set adminrole": {
"43733288462286848": {
"87733288462286848": True,
"95433288462286848": False,
"default": True,
}
},
},
)
|
import sys
import mne
from mne.utils import _check_option
def run():
"""Run command."""
from mne.commands.utils import get_optparser, _add_verbose_flag
parser = get_optparser(__file__)
parser.add_option('-s', '--subject',
dest='subject',
help='Subject name (required)',
default=None)
parser.add_option('--src', dest='fname',
help='Output file name. Use a name <dir>/<name>-src.fif',
metavar='FILE', default=None)
parser.add_option('--morph',
dest='subject_to',
help='morph the source space to this subject',
default=None)
parser.add_option('--surf',
dest='surface',
help='The surface to use. (default to white)',
default='white',
type='string')
parser.add_option('--spacing',
dest='spacing',
help='Specifies the approximate grid spacing of the '
'source space in mm. (default to 7mm)',
default=None,
type='int')
parser.add_option('--ico',
dest='ico',
help='use the recursively subdivided icosahedron '
'to create the source space.',
default=None,
type='int')
parser.add_option('--oct',
dest='oct',
help='use the recursively subdivided octahedron '
'to create the source space.',
default=None,
type='int')
parser.add_option('-d', '--subjects-dir',
dest='subjects_dir',
help='Subjects directory',
default=None)
parser.add_option('-n', '--n-jobs',
dest='n_jobs',
help='The number of jobs to run in parallel '
'(default 1). Requires the joblib package. '
'Will use at most 2 jobs'
' (one for each hemisphere).',
default=1,
type='int')
parser.add_option('--add-dist',
dest='add_dist',
help='Add distances. Can be "True", "False", or "patch" '
'to only compute cortical patch statistics (like the '
'--cps option in MNE-C; requires SciPy >= 1.3)',
default='True')
parser.add_option('-o', '--overwrite',
dest='overwrite',
help='to write over existing files',
default=None, action="store_true")
_add_verbose_flag(parser)
options, args = parser.parse_args()
if options.subject is None:
parser.print_help()
sys.exit(1)
subject = options.subject
subject_to = options.subject_to
fname = options.fname
subjects_dir = options.subjects_dir
spacing = options.spacing
ico = options.ico
oct = options.oct
surface = options.surface
n_jobs = options.n_jobs
add_dist = options.add_dist
_check_option('add_dist', add_dist, ('True', 'False', 'patch'))
add_dist = {'True': True, 'False': False, 'patch': 'patch'}[add_dist]
verbose = True if options.verbose is not None else False
overwrite = True if options.overwrite is not None else False
# Parse source spacing option
spacing_options = [ico, oct, spacing]
n_options = len([x for x in spacing_options if x is not None])
if n_options > 1:
raise ValueError('Only one spacing option can be set at the same time')
elif n_options == 0:
# Default to oct6
use_spacing = 'oct6'
elif n_options == 1:
if ico is not None:
use_spacing = "ico" + str(ico)
elif oct is not None:
use_spacing = "oct" + str(oct)
elif spacing is not None:
use_spacing = spacing
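    # For illustration: --ico 5 yields 'ico5', --oct 6 yields 'oct6', and a
    # bare --spacing value is passed through unchanged to setup_source_space().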
# Generate filename
if fname is None:
if subject_to is None:
fname = subject + '-' + str(use_spacing) + '-src.fif'
else:
fname = (subject_to + '-' + subject + '-' +
str(use_spacing) + '-src.fif')
else:
if not (fname.endswith('_src.fif') or fname.endswith('-src.fif')):
fname = fname + "-src.fif"
# Create source space
src = mne.setup_source_space(subject=subject, spacing=use_spacing,
surface=surface, subjects_dir=subjects_dir,
n_jobs=n_jobs, add_dist=add_dist,
verbose=verbose)
# Morph source space if --morph is set
if subject_to is not None:
src = mne.morph_source_spaces(src, subject_to=subject_to,
subjects_dir=subjects_dir,
surf=surface, verbose=verbose)
# Save source space to file
src.save(fname=fname, overwrite=overwrite)
mne.utils.run_command_if_main()
|
import numpy as np
import unittest
import chainer
from chainer import testing
from chainer.testing import attr
from chainercv.links import SSD300
from chainercv.links import SSD512
@testing.parameterize(*testing.product({
'insize': [300, 512],
'n_fg_class': [1, 5, 20],
}))
class TestSSDVGG16(unittest.TestCase):
def setUp(self):
if self.insize == 300:
self.link = SSD300(n_fg_class=self.n_fg_class)
self.n_bbox = 8732
elif self.insize == 512:
self.link = SSD512(n_fg_class=self.n_fg_class)
self.n_bbox = 24564
def _check_call(self):
x = self.link.xp.array(
np.random.uniform(-1, 1, size=(1, 3, self.insize, self.insize)),
dtype=np.float32)
loc, conf = self.link(x)
self.assertIsInstance(loc, chainer.Variable)
self.assertIsInstance(loc.array, self.link.xp.ndarray)
self.assertEqual(loc.shape, (1, self.n_bbox, 4))
self.assertIsInstance(conf, chainer.Variable)
self.assertIsInstance(conf.array, self.link.xp.ndarray)
self.assertEqual(conf.shape, (1, self.n_bbox, self.n_fg_class + 1))
@attr.slow
def test_call_cpu(self):
self._check_call()
@attr.gpu
@attr.slow
def test_call_gpu(self):
self.link.to_gpu()
self._check_call()
@testing.parameterize(*testing.product({
'model': [SSD300, SSD512],
'n_fg_class': [None, 10, 20],
'pretrained_model': ['voc0712', 'imagenet'],
}))
class TestSSDVGG16Pretrained(unittest.TestCase):
@attr.slow
def test_pretrained(self):
kwargs = {
'n_fg_class': self.n_fg_class,
'pretrained_model': self.pretrained_model,
}
if self.pretrained_model == 'voc0712':
valid = self.n_fg_class in {None, 20}
elif self.pretrained_model == 'imagenet':
valid = self.n_fg_class is not None
if valid:
self.model(**kwargs)
else:
with self.assertRaises(ValueError):
self.model(**kwargs)
testing.run_module(__name__, __file__)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import pytest
import numpy as np
import tensornetwork as tn
from tensornetwork.backends import backend_factory
from tensornetwork.matrixproductstates.base_mps import BaseMPS
import tensorflow as tf
from jax.config import config
config.update("jax_enable_x64", True)
tf.compat.v1.enable_v2_behavior()
@pytest.fixture(
name="backend_dtype_values",
params=[('numpy', np.float64), ('numpy', np.complex128),
('tensorflow', np.float64), ('tensorflow', np.complex128),
('pytorch', np.float64), ('jax', np.float64)])
def backend_dtype(request):
return request.param
def get_random_np(shape, dtype, seed=0):
    np.random.seed(seed)  # get the same tensors every time this function is called
if dtype is np.complex64:
return np.random.randn(*shape).astype(
np.float32) + 1j * np.random.randn(*shape).astype(np.float32)
if dtype is np.complex128:
return np.random.randn(*shape).astype(
np.float64) + 1j * np.random.randn(*shape).astype(np.float64)
return np.random.randn(*shape).astype(dtype)
def test_normalization(backend):
D, d, N = 10, 2, 10
tensors = [np.random.randn(1, d, D)] + [
np.random.randn(D, d, D) for _ in range(N - 2)
] + [np.random.randn(D, d, 1)]
mps = BaseMPS(tensors, center_position=0, backend=backend)
mps.position(len(mps) - 1)
Z = mps.position(0, normalize=True)
np.testing.assert_allclose(Z, 1.0)
def test_backend_initialization(backend):
be = backend_factory.get_backend(backend)
D, d, N = 10, 2, 10
tensors = [np.random.randn(1, d, D)] + [
np.random.randn(D, d, D) for _ in range(N - 2)
] + [np.random.randn(D, d, 1)]
mps = BaseMPS(tensors, center_position=0, backend=be)
mps.position(len(mps) - 1)
Z = mps.position(0, normalize=True)
np.testing.assert_allclose(Z, 1.0)
def test_backend_initialization_raises(backend):
be = backend_factory.get_backend(backend)
D, d, N = 10, 2, 10
tensors = [np.random.randn(1, d, D)] + [
np.random.randn(D, d, D) for _ in range(N - 2)
] + [np.random.randn(D, d, 1)]
with pytest.raises(
ValueError,
match="`center_position = 10` is different from `None` and "
"not between 0 <= center_position < 10"):
BaseMPS(tensors, center_position=N, backend=be)
with pytest.raises(
ValueError,
match="`center_position = -1` is different from `None` and "
"not between 0 <= center_position < 10"):
BaseMPS(tensors, center_position=-1, backend=be)
def test_left_orthonormalization(backend_dtype_values):
backend = backend_dtype_values[0]
dtype = backend_dtype_values[1]
D, d, N = 10, 2, 10
tensors = [get_random_np((1, d, D), dtype)] + [
get_random_np((D, d, D), dtype) for _ in range(N - 2)
] + [get_random_np((D, d, 1), dtype)]
mps = BaseMPS(tensors, center_position=N - 1, backend=backend)
mps.position(0)
mps.position(len(mps) - 1)
assert all([
abs(mps.check_orthonormality('left', site)) < 1E-12
for site in range(len(mps))
])
def test_right_orthonormalization(backend_dtype_values):
backend = backend_dtype_values[0]
dtype = backend_dtype_values[1]
D, d, N = 10, 2, 10
tensors = [get_random_np((1, d, D), dtype)] + [
get_random_np((D, d, D), dtype) for _ in range(N - 2)
] + [get_random_np((D, d, 1), dtype)]
mps = BaseMPS(tensors, center_position=0, backend=backend)
mps.position(len(mps) - 1)
mps.position(0)
assert all([
abs(mps.check_orthonormality('right', site)) < 1E-12
for site in range(len(mps))
])
def test_apply_one_site_gate(backend_dtype_values):
backend = backend_dtype_values[0]
dtype = backend_dtype_values[1]
D, d, N = 10, 2, 10
tensors = [get_random_np((1, d, D), dtype)] + [
get_random_np((D, d, D), dtype) for _ in range(N - 2)
] + [get_random_np((D, d, 1), dtype)]
mps = BaseMPS(tensors, center_position=0, backend=backend)
tensor = mps.tensors[5]
gate = get_random_np((2, 2), dtype)
mps.apply_one_site_gate(gate, 5)
actual = np.transpose(np.tensordot(tensor, gate, ([1], [1])), (0, 2, 1))
np.testing.assert_allclose(mps.tensors[5], actual)
def test_apply_two_site_gate(backend_dtype_values):
backend = backend_dtype_values[0]
dtype = backend_dtype_values[1]
D, d, N = 10, 2, 10
tensors = [get_random_np((1, d, D), dtype)] + [
get_random_np((D, d, D), dtype) for _ in range(N - 2)
] + [get_random_np((D, d, 1), dtype)]
mps = BaseMPS(tensors, center_position=0, backend=backend)
gate = get_random_np((2, 2, 2, 2), dtype)
tensor1 = mps.tensors[5]
tensor2 = mps.tensors[6]
mps.apply_two_site_gate(gate, 5, 6)
tmp = np.tensordot(tensor1, tensor2, ([2], [0]))
actual = np.transpose(np.tensordot(tmp, gate, ([1, 2], [2, 3])), (0, 2, 3, 1))
node1 = tn.Node(mps.tensors[5], backend=backend)
node2 = tn.Node(mps.tensors[6], backend=backend)
node1[2] ^ node2[0]
order = [node1[0], node1[1], node2[1], node2[2]]
res = tn.contract_between(node1, node2)
res.reorder_edges(order)
np.testing.assert_allclose(res.tensor, actual)
def test_position_raises_error(backend):
D, d, N = 10, 2, 10
tensors = [np.random.randn(1, d, D)] + [
np.random.randn(D, d, D) for _ in range(N - 2)
] + [np.random.randn(D, d, 1)]
mps = BaseMPS(tensors, center_position=0, backend=backend)
with pytest.raises(
ValueError, match="site = -1 not between values"
" 0 < site < N = 10"):
mps.position(-1)
with pytest.raises(
ValueError, match="site = 11 not between values"
" 0 < site < N = 10"):
mps.position(11)
mps = BaseMPS(tensors, center_position=None, backend=backend)
with pytest.raises(
ValueError,
match="BaseMPS.center_position is"
" `None`, cannot shift `center_position`."
"Reset `center_position` manually or use `canonicalize`"):
mps.position(1)
def test_position_no_normalization(backend):
D, d, N = 4, 2, 6
tensors = [np.ones((1, d, D))] + [np.ones((D, d, D)) for _ in range(N - 2)
] + [np.ones((D, d, 1))]
mps = BaseMPS(tensors, center_position=0, backend=backend)
Z = mps.position(len(mps) - 1, normalize=False)
np.testing.assert_allclose(Z, 8192.0)
def test_position_shift_left(backend):
D, d, N = 4, 2, 6
tensors = [np.ones((1, d, D))] + [np.ones((D, d, D)) for _ in range(N - 2)
] + [np.ones((D, d, 1))]
mps = BaseMPS(tensors, center_position=int(N / 2), backend=backend)
Z = mps.position(0, normalize=True)
np.testing.assert_allclose(Z, 2.828427)
def test_position_shift_right(backend):
D, d, N = 4, 2, 6
tensors = [np.ones((1, d, D))] + [np.ones((D, d, D)) for _ in range(N - 2)
] + [np.ones((D, d, 1))]
mps = BaseMPS(tensors, center_position=int(N / 2), backend=backend)
Z = mps.position(N - 1, normalize=True)
np.testing.assert_allclose(Z, 2.828427)
def test_position_no_shift(backend):
D, d, N = 4, 2, 6
tensors = [np.ones((1, d, D))] + [np.ones((D, d, D)) for _ in range(N - 2)
] + [np.ones((D, d, 1))]
mps = BaseMPS(tensors, center_position=int(N / 2), backend=backend)
Z = mps.position(int(N / 2), normalize=True)
np.testing.assert_allclose(Z, 5.656854)
def test_position_no_shift_no_normalization(backend):
D, d, N = 4, 2, 6
tensors = [np.ones((1, d, D))] + [np.ones((D, d, D)) for _ in range(N - 2)
] + [np.ones((D, d, 1))]
mps = BaseMPS(tensors, center_position=int(N / 2), backend=backend)
Z = mps.position(int(N / 2), normalize=False)
np.testing.assert_allclose(Z, 5.656854)
def test_different_dtypes_raises_error():
D, d = 4, 2
tensors = [
np.ones((1, d, D), dtype=np.float64),
np.ones((D, d, D), dtype=np.complex64)
]
with pytest.raises(TypeError):
BaseMPS(tensors, backend='numpy')
_tensors = [
np.ones((1, d, D), dtype=np.float64),
np.ones((D, d, D), dtype=np.float64)
]
mps = BaseMPS(_tensors, backend='numpy')
mps.tensors = tensors
with pytest.raises(TypeError):
mps.dtype
def test_not_implemented():
D, d = 4, 2
tensors = [np.ones((1, d, D)), np.ones((D, d, D))]
mps = BaseMPS(tensors, backend='numpy')
with pytest.raises(NotImplementedError):
mps.save('tmp')
with pytest.raises(NotImplementedError):
mps.right_envs([0])
with pytest.raises(NotImplementedError):
mps.left_envs([0])
with pytest.raises(NotImplementedError):
mps.canonicalize()
def test_physical_dimensions(backend):
D = 3
tensors = [np.ones((1, 2, D)), np.ones((D, 3, D)), np.ones((D, 4, 1))]
mps = BaseMPS(tensors, backend=backend)
assert mps.physical_dimensions == [2, 3, 4]
def test_apply_transfer_operator_left(backend):
backend = backend_factory.get_backend(backend)
tensor = np.array([[[1., 2., 1.], [1., -2., 1.]],
[[-1., 1., -1.], [-1., 1., -1.]], [[1., 2, 3], [3, 2, 1]]],
dtype=np.float64)
tensors = 6 * [backend.convert_to_tensor(tensor)]
mat = backend.convert_to_tensor(
np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]], dtype=np.float64))
mps = BaseMPS(tensors, backend=backend)
expected = np.array([[74., 58., 38.], [78., 146., 102.], [38., 114., 74.]])
actual = mps.apply_transfer_operator(site=3, direction=1, matrix=mat)
np.testing.assert_allclose(actual, expected)
actual = mps.apply_transfer_operator(site=3, direction="l", matrix=mat)
np.testing.assert_allclose(actual, expected)
actual = mps.apply_transfer_operator(site=3, direction="left", matrix=mat)
np.testing.assert_allclose(actual, expected)
def test_apply_transfer_operator_right(backend):
backend = backend_factory.get_backend(backend)
tensor = np.array([[[1., 2., 1.], [1., -2., 1.]],
[[-1., 1., -1.], [-1., 1., -1.]], [[1., 2, 3], [3, 2, 1]]],
dtype=np.float64)
tensors = 6 * [backend.convert_to_tensor(tensor)]
mat = backend.convert_to_tensor(
np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]], dtype=np.float64))
mps = BaseMPS(tensors, backend=backend)
expected = np.array([[80., -20., 128.], [-20., 10., -60.], [144., -60.,
360.]])
actual = mps.apply_transfer_operator(site=3, direction=-1, matrix=mat)
np.testing.assert_allclose(actual, expected)
actual = mps.apply_transfer_operator(site=3, direction="r", matrix=mat)
np.testing.assert_allclose(actual, expected)
actual = mps.apply_transfer_operator(site=3, direction="right", matrix=mat)
np.testing.assert_allclose(actual, expected)
def test_apply_transfer_operator_invalid_direction_raises_error(backend):
backend = backend_factory.get_backend(backend)
tensor = np.array([[[1., 2., 1.], [1., -2., 1.]],
[[-1., 1., -1.], [-1., 1., -1.]], [[1., 2, 3], [3, 2, 1]]],
dtype=np.float64)
tensors = 6 * [backend.convert_to_tensor(tensor)]
mat = backend.convert_to_tensor(
np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]], dtype=np.float64))
mps = BaseMPS(tensors, backend=backend)
with pytest.raises(ValueError):
mps.apply_transfer_operator(site=3, direction=0, matrix=mat)
with pytest.raises(ValueError):
mps.apply_transfer_operator(site=3, direction="keft", matrix=mat)
def test_measure_local_operator_value_error(backend):
backend = backend_factory.get_backend(backend)
tensor = np.array([[[1., 2., 1.], [1., -2., 1.]],
[[-1., 1., -1.], [-1., 1., -1.]], [[1., 2, 3], [3, 2, 1]]],
dtype=np.float64)
tensors = 6 * [backend.convert_to_tensor(tensor)]
operator = backend.convert_to_tensor(
np.array([[1, -1], [-1, 1]], dtype=np.float64))
mps = BaseMPS(tensors, backend=backend)
with pytest.raises(ValueError):
mps.measure_local_operator(ops=2 * [operator], sites=[1, 2, 3])
def test_measure_two_body_correlator_value_error(backend):
backend = backend_factory.get_backend(backend)
tensor = np.array([[[1., 2., 1.], [1., -2., 1.]],
[[-1., 1., -1.], [-1., 1., -1.]], [[1., 2, 3], [3, 2, 1]]],
dtype=np.float64)
tensors = 6 * [backend.convert_to_tensor(tensor)]
operator = backend.convert_to_tensor(
np.array([[1, -1], [-1, 1]], dtype=np.float64))
mps = BaseMPS(tensors, backend=backend)
with pytest.raises(ValueError):
mps.measure_two_body_correlator(
op1=operator, op2=operator, site1=-1, sites2=[2])
def test_get_tensor(backend):
backend = backend_factory.get_backend(backend)
tensor1 = np.ones((2, 3, 2), dtype=np.float64)
tensor2 = 2 * np.ones((2, 3, 2), dtype=np.float64)
tensors = [tensor1, tensor2]
mps = BaseMPS(tensors, backend=backend)
np.testing.assert_allclose(mps.get_tensor(0), tensor1)
np.testing.assert_allclose(mps.get_tensor(1), tensor2)
def test_get_tensor_connector_matrix(backend):
backend = backend_factory.get_backend(backend)
tensor1 = np.ones((2, 3, 2), dtype=np.float64)
tensor2 = 2 * np.ones((2, 3, 2), dtype=np.float64)
connector = backend.convert_to_tensor(np.ones((2, 2), dtype=np.float64))
tensors = [tensor1, tensor2]
mps = BaseMPS(tensors, backend=backend, connector_matrix=connector)
np.testing.assert_allclose(mps.get_tensor(0), tensor1)
np.testing.assert_allclose(mps.get_tensor(1), 2 * tensor2)
def test_get_tensor_raises_error(backend):
backend = backend_factory.get_backend(backend)
tensor1 = np.ones((2, 3, 2), dtype=np.float64)
tensor2 = 2 * np.ones((2, 3, 2), dtype=np.float64)
tensors = [tensor1, tensor2]
mps = BaseMPS(tensors, backend=backend)
with pytest.raises(ValueError):
mps.get_tensor(site=-1)
with pytest.raises(IndexError):
mps.get_tensor(site=3)
def test_check_canonical(backend):
backend = backend_factory.get_backend(backend)
tensor = np.array([[[1., 2., 1.], [1., -2., 1.]],
[[-1., 1., -1.], [-1., 1., -1.]], [[1., 2, 3], [3, 2, 1]]],
dtype=np.float64)
tensors = 6 * [backend.convert_to_tensor(tensor)]
mps = BaseMPS(tensors, backend=backend, center_position=2)
np.testing.assert_allclose(mps.check_canonical(), 71.714713)
def test_check_normality_raises_value_error(backend):
backend = backend_factory.get_backend(backend)
tensor = np.ones((2, 3, 2), dtype=np.float64)
tensors = [tensor]
mps = BaseMPS(tensors, backend=backend)
with pytest.raises(ValueError):
mps.check_orthonormality(which="keft", site=0)
def test_apply_two_site_gate_2(backend):
backend = backend_factory.get_backend(backend)
tensor = np.array([[[1., 2., 1.], [1., -2., 1.]],
[[-1., 1., -1.], [-1., 1., -1.]], [[1., 2, 3], [3, 2, 1]]],
dtype=np.float64)
tensors = 6 * [backend.convert_to_tensor(tensor)]
mps = BaseMPS(tensors, backend=backend, center_position=2)
gate = backend.convert_to_tensor(
np.array([[[[0., 1.], [0., 0.]], [[1., 0.], [0., 0.]]],
[[[0., 0.], [0., 1.]], [[0., 0.], [1., 0.]]]],
dtype=np.float64))
actual = mps.apply_two_site_gate(
gate=gate, site1=1, site2=2, max_singular_values=1)
np.testing.assert_allclose(actual[0], 9.133530)
expected = np.array([[5.817886], [9.039142]])
np.testing.assert_allclose(np.abs(mps.tensors[1][0]), expected, rtol=1e-04)
expected = np.array([[0.516264, 0.080136, 0.225841],
[0.225841, 0.59876, 0.516264]])
np.testing.assert_allclose(np.abs(mps.tensors[2][0]), expected, rtol=1e-04)
def test_apply_two_site_wrong_gate_raises_error(backend):
backend = backend_factory.get_backend(backend)
tensor = np.array([[[1., 2., 1.], [1., -2., 1.]],
[[-1., 1., -1.], [-1., 1., -1.]], [[1., 2, 3], [3, 2, 1]]],
dtype=np.float64)
tensors = 6 * [backend.convert_to_tensor(tensor)]
mps = BaseMPS(tensors, backend=backend, center_position=2)
gate1 = backend.convert_to_tensor(np.ones((2, 2, 2), dtype=np.float64))
gate2 = backend.convert_to_tensor(np.ones((2, 2, 2, 2, 2), dtype=np.float64))
with pytest.raises(ValueError):
mps.apply_two_site_gate(gate=gate1, site1=1, site2=2)
with pytest.raises(ValueError):
mps.apply_two_site_gate(gate=gate2, site1=1, site2=2)
def test_apply_two_site_wrong_site1_raises_error(backend):
backend = backend_factory.get_backend(backend)
tensor = np.array([[[1., 2., 1.], [1., -2., 1.]],
[[-1., 1., -1.], [-1., 1., -1.]], [[1., 2, 3], [3, 2, 1]]],
dtype=np.float64)
tensors = 6 * [backend.convert_to_tensor(tensor)]
mps = BaseMPS(tensors, backend=backend, center_position=2)
gate = backend.convert_to_tensor(np.ones((2, 2, 2, 2), dtype=np.float64))
with pytest.raises(ValueError):
mps.apply_two_site_gate(gate=gate, site1=-1, site2=2)
with pytest.raises(ValueError):
mps.apply_two_site_gate(gate=gate, site1=6, site2=2)
def test_apply_two_site_wrong_site2_raises_error(backend):
backend = backend_factory.get_backend(backend)
tensor = np.array([[[1., 2., 1.], [1., -2., 1.]],
[[-1., 1., -1.], [-1., 1., -1.]], [[1., 2, 3], [3, 2, 1]]],
dtype=np.float64)
tensors = 6 * [backend.convert_to_tensor(tensor)]
mps = BaseMPS(tensors, backend=backend, center_position=2)
gate = backend.convert_to_tensor(np.ones((2, 2, 2, 2), dtype=np.float64))
with pytest.raises(ValueError):
mps.apply_two_site_gate(gate=gate, site1=0, site2=0)
with pytest.raises(ValueError):
mps.apply_two_site_gate(gate=gate, site1=0, site2=6)
def test_apply_two_site_wrong_site1_site2_raises_error(backend):
backend = backend_factory.get_backend(backend)
tensor = np.array([[[1., 2., 1.], [1., -2., 1.]],
[[-1., 1., -1.], [-1., 1., -1.]], [[1., 2, 3], [3, 2, 1]]],
dtype=np.float64)
tensors = 6 * [backend.convert_to_tensor(tensor)]
mps = BaseMPS(tensors, backend=backend, center_position=2)
gate = backend.convert_to_tensor(np.ones((2, 2, 2, 2), dtype=np.float64))
with pytest.raises(ValueError):
mps.apply_two_site_gate(gate=gate, site1=2, site2=2)
with pytest.raises(ValueError):
mps.apply_two_site_gate(gate=gate, site1=2, site2=4)
def test_apply_two_site_max_singular_value_not_center_raises_error(backend):
backend = backend_factory.get_backend(backend)
tensor = np.array([[[1., 2., 1.], [1., -2., 1.]],
[[-1., 1., -1.], [-1., 1., -1.]], [[1., 2, 3], [3, 2, 1]]],
dtype=np.float64)
tensors = 6 * [backend.convert_to_tensor(tensor)]
mps = BaseMPS(tensors, backend=backend, center_position=2)
gate = backend.convert_to_tensor(np.ones((2, 2, 2, 2), dtype=np.float64))
with pytest.raises(ValueError):
mps.apply_two_site_gate(gate=gate, site1=3, site2=4, max_singular_values=1)
with pytest.raises(ValueError):
mps.apply_two_site_gate(gate=gate, site1=3, site2=4, max_truncation_err=.1)
def test_apply_one_site_gate_2(backend):
backend = backend_factory.get_backend(backend)
tensor = np.array([[[1., 2., 1.], [1., -2., 1.]],
[[-1., 1., -1.], [-1., 1., -1.]], [[1., 2, 3], [3, 2, 1]]],
dtype=np.float64)
tensors = 6 * [backend.convert_to_tensor(tensor)]
mps = BaseMPS(tensors, backend=backend, center_position=2)
gate = backend.convert_to_tensor(np.array([[0, 1], [1, 0]], dtype=np.float64))
mps.apply_one_site_gate(gate=gate, site=1)
expected = np.array([[1., -2., 1.], [1., 2., 1.]])
np.testing.assert_allclose(mps.tensors[1][0], expected)
def test_apply_one_site_gate_wrong_gate_raises_error(backend):
backend = backend_factory.get_backend(backend)
tensor = np.array([[[1., 2., 1.], [1., -2., 1.]],
[[-1., 1., -1.], [-1., 1., -1.]], [[1., 2, 3], [3, 2, 1]]],
dtype=np.float64)
tensors = 6 * [backend.convert_to_tensor(tensor)]
mps = BaseMPS(tensors, backend=backend, center_position=2)
gate1 = backend.convert_to_tensor(np.ones((2, 2, 2), dtype=np.float64))
gate2 = backend.convert_to_tensor(np.ones((2, 2, 2), dtype=np.float64))
with pytest.raises(ValueError):
mps.apply_one_site_gate(gate=gate1, site=1)
with pytest.raises(ValueError):
mps.apply_one_site_gate(gate=gate2, site=1)
def test_apply_one_site_gate_invalid_site_raises_error(backend):
backend = backend_factory.get_backend(backend)
tensor = np.array([[[1., 2., 1.], [1., -2., 1.]],
[[-1., 1., -1.], [-1., 1., -1.]], [[1., 2, 3], [3, 2, 1]]],
dtype=np.float64)
tensors = 6 * [backend.convert_to_tensor(tensor)]
mps = BaseMPS(tensors, backend=backend, center_position=2)
gate = backend.convert_to_tensor(np.ones((2, 2), dtype=np.float64))
with pytest.raises(ValueError):
mps.apply_one_site_gate(gate=gate, site=-1)
with pytest.raises(ValueError):
mps.apply_one_site_gate(gate=gate, site=6)
|
revision = "9f79024fe67b"
down_revision = "ee827d1e1974"
from alembic import op
import sqlalchemy as sa
def upgrade():
op.sync_enum_values(
"public",
"log_type",
["create_cert", "key_view", "revoke_cert", "update_cert"],
["create_cert", "delete_cert", "key_view", "revoke_cert", "update_cert"],
)
def downgrade():
op.sync_enum_values(
"public",
"log_type",
["create_cert", "delete_cert", "key_view", "revoke_cert", "update_cert"],
["create_cert", "key_view", "revoke_cert", "update_cert"],
)
|
import logging
import threading
from uuid import UUID
from pygatt import BLEAddressType
from pygatt.backends import Characteristic, GATTToolBackend
from pygatt.exceptions import BLEError, NotConnectedError, NotificationTimeout
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_MAC,
CONF_NAME,
EVENT_HOMEASSISTANT_STOP,
PERCENTAGE,
STATE_UNKNOWN,
TEMP_CELSIUS,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
ATTR_DEVICE = "device"
ATTR_MODEL = "model"
BLE_TEMP_HANDLE = 0x24
BLE_TEMP_UUID = "0000ff92-0000-1000-8000-00805f9b34fb"
CONNECT_LOCK = threading.Lock()
CONNECT_TIMEOUT = 30
DEFAULT_NAME = "Skybeacon"
SKIP_HANDLE_LOOKUP = True
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_MAC): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Skybeacon sensor."""
name = config.get(CONF_NAME)
mac = config.get(CONF_MAC)
_LOGGER.debug("Setting up...")
mon = Monitor(hass, mac, name)
add_entities([SkybeaconTemp(name, mon)])
add_entities([SkybeaconHumid(name, mon)])
def monitor_stop(_service_or_event):
"""Stop the monitor thread."""
_LOGGER.info("Stopping monitor for %s", name)
mon.terminate()
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, monitor_stop)
mon.start()
class SkybeaconHumid(Entity):
"""Representation of a Skybeacon humidity sensor."""
def __init__(self, name, mon):
"""Initialize a sensor."""
self.mon = mon
self._name = name
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the device."""
return self.mon.data["humid"]
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return PERCENTAGE
@property
def device_state_attributes(self):
"""Return the state attributes of the sensor."""
return {ATTR_DEVICE: "SKYBEACON", ATTR_MODEL: 1}
class SkybeaconTemp(Entity):
"""Representation of a Skybeacon temperature sensor."""
def __init__(self, name, mon):
"""Initialize a sensor."""
self.mon = mon
self._name = name
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the device."""
return self.mon.data["temp"]
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return TEMP_CELSIUS
@property
def device_state_attributes(self):
"""Return the state attributes of the sensor."""
return {ATTR_DEVICE: "SKYBEACON", ATTR_MODEL: 1}
class Monitor(threading.Thread):
"""Connection handling."""
def __init__(self, hass, mac, name):
"""Construct interface object."""
threading.Thread.__init__(self)
self.daemon = False
self.hass = hass
self.mac = mac
self.name = name
self.data = {"temp": STATE_UNKNOWN, "humid": STATE_UNKNOWN}
self.keep_going = True
self.event = threading.Event()
def run(self):
"""Thread that keeps connection alive."""
cached_char = Characteristic(BLE_TEMP_UUID, BLE_TEMP_HANDLE)
adapter = GATTToolBackend()
while True:
try:
_LOGGER.debug("Connecting to %s", self.name)
                # We need concurrent connect, so let's not reset the device
adapter.start(reset_on_start=False)
# Seems only one connection can be initiated at a time
with CONNECT_LOCK:
device = adapter.connect(
self.mac, CONNECT_TIMEOUT, BLEAddressType.random
)
if SKIP_HANDLE_LOOKUP:
# HACK: inject handle mapping collected offline
# pylint: disable=protected-access
device._characteristics[UUID(BLE_TEMP_UUID)] = cached_char
# Magic: writing this makes device happy
device.char_write_handle(0x1B, bytearray([255]), False)
device.subscribe(BLE_TEMP_UUID, self._update)
_LOGGER.info("Subscribed to %s", self.name)
while self.keep_going:
# protect against stale connections, just read temperature
device.char_read(BLE_TEMP_UUID, timeout=CONNECT_TIMEOUT)
self.event.wait(60)
break
except (BLEError, NotConnectedError, NotificationTimeout) as ex:
_LOGGER.error("Exception: %s ", str(ex))
finally:
adapter.stop()
def _update(self, handle, value):
"""Notification callback from pygatt."""
_LOGGER.debug(
"%s: %15s temperature = %-2d.%-2d, humidity = %3d",
handle,
self.name,
value[0],
value[2],
value[1],
)
self.data["temp"] = float("%d.%d" % (value[0], value[2]))
self.data["humid"] = value[1]
def terminate(self):
"""Signal runner to stop and join thread."""
self.keep_going = False
self.event.set()
self.join()
|
from __future__ import unicode_literals
from lib.fun.fun import cool
from lib.fun.decorator import magic
from lib.data.data import pyoptions
def ftp_magic(*args):
"""[keyword1] [keyword2] ..."""
args = list(args[0])
if len(args) == 1:
exit(pyoptions.CRLF + cool.fuchsia("[!] Usage: {} {}".format(args[0], pyoptions.plugins_info.get(args[0]))))
@magic
def ftp():
results = []
default_password = ('ftp', 'anonymous', 'any@', 'craftpw', 'xbox', 'r@p8p0r+', 'pass', 'admin',
'lampp', 'password', 'Exabyte', 'pbxk1064', 'kilo1987', 'help1954', 'tuxalize')
results += default_password
weak_password = ('root', '123456', '111111', '666666', 'ftppass')
results += weak_password
for r in results:
yield r
tails = ['1', '01', '001', '123', 'abc', '!@#', '!QAZ', '1q2w3e', '!@#$', '!', '#', '.', '@123',
'2016', '2017', '2018', '@2016', '@2017', '@2018', ]
for keyword in args:
for tail in tails:
yield keyword + tail
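# Illustrative note (not part of the plugin): for a keyword such as 'admin' the
# generator above first yields the built-in default and weak passwords, then
# keyword/tail combinations such as 'admin1', 'admin123' and 'admin@2018'.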
|
from utils import WriterTC
from logilab.common.testlib import TestCase, unittest_main
from logilab.common.ureports.text_writer import TextWriter
class TextWriterTC(TestCase, WriterTC):
def setUp(self):
self.writer = TextWriter()
# Section tests ###########################################################
section_base = '''
Section title
=============
Section\'s description.
Blabla bla
'''
section_nested = '''
Section title
=============
Section\'s description.
Blabla bla
Subsection
----------
Sub section description
'''
# List tests ##############################################################
list_base = '''
* item1
* item2
* item3
* item4'''
nested_list = '''
* blabla
- 1
- 2
- 3
* an other point'''
# Table tests #############################################################
table_base = '''
+------+------+
|head1 |head2 |
+------+------+
|cell1 |cell2 |
+------+------+
'''
field_table = '''
f1 : v1
f22 : v22
f333: v333
'''
advanced_table = '''
+---------------+------+
|field |value |
+===============+======+
|f1 |v1 |
+---------------+------+
|f22 |v22 |
+---------------+------+
|f333 |v333 |
+---------------+------+
|`toi perdu ?`_ | |
+---------------+------+
'''
# VerbatimText tests ######################################################
verbatim_base = '''::
blablabla
'''
if __name__ == '__main__':
unittest_main()
|
import numpy as np
import unittest
import chainer
from chainer.datasets import TupleDataset
from chainer.iterators import SerialIterator
from chainer import testing
from chainercv.extensions import DetectionCOCOEvaluator
from chainercv.utils import generate_random_bbox
from chainercv.utils.testing import attr
from chainermn import create_communicator
try:
import pycocotools # NOQA
_available = True
except ImportError:
_available = False
class _DetectionStubLink(chainer.Link):
def __init__(self, bboxes, labels, initial_count=0):
super(_DetectionStubLink, self).__init__()
self.count = initial_count
self.bboxes = bboxes
self.labels = labels
def predict(self, imgs):
n_img = len(imgs)
bboxes = self.bboxes[self.count:self.count + n_img]
labels = self.labels[self.count:self.count + n_img]
scores = [np.ones_like(l) for l in labels]
self.count += n_img
return bboxes, labels, scores
@unittest.skipUnless(_available, 'pycocotools is not installed')
class TestDetectionCOCOEvaluator(unittest.TestCase):
def setUp(self):
bboxes = [generate_random_bbox(5, (256, 324), 24, 120)
for _ in range(10)]
        areas = [[np.array([(bb[2] - bb[0]) * (bb[3] - bb[1])]) for bb in bbox]
                 for bbox in bboxes]
labels = 2 * np.ones((10, 5), dtype=np.int32)
crowdeds = np.zeros((10, 5))
self.dataset = TupleDataset(
np.random.uniform(size=(10, 3, 32, 48)),
bboxes, labels, areas, crowdeds)
self.link = _DetectionStubLink(bboxes, labels)
self.iterator = SerialIterator(
self.dataset, 5, repeat=False, shuffle=False)
self.evaluator = DetectionCOCOEvaluator(
self.iterator, self.link, label_names=('cls0', 'cls1', 'cls2'))
self.expected_ap = 1
def test_evaluate(self):
reporter = chainer.Reporter()
reporter.add_observer('target', self.link)
with reporter:
mean = self.evaluator.evaluate()
# No observation is reported to the current reporter. Instead the
# evaluator collect results in order to calculate their mean.
self.assertEqual(len(reporter.observation), 0)
key = 'ap/iou=0.50:0.95/area=all/max_dets=100'
np.testing.assert_equal(
mean['target/m{}'.format(key)], self.expected_ap)
np.testing.assert_equal(mean['target/{}/cls0'.format(key)], np.nan)
np.testing.assert_equal(mean['target/{}/cls1'.format(key)], np.nan)
np.testing.assert_equal(
mean['target/{}/cls2'.format(key)], self.expected_ap)
def test_call(self):
mean = self.evaluator()
# main is used as default
key = 'ap/iou=0.50:0.95/area=all/max_dets=100'
np.testing.assert_equal(mean['main/m{}'.format(key)], self.expected_ap)
np.testing.assert_equal(mean['main/{}/cls0'.format(key)], np.nan)
np.testing.assert_equal(mean['main/{}/cls1'.format(key)], np.nan)
np.testing.assert_equal(
mean['main/{}/cls2'.format(key)], self.expected_ap)
def test_evaluator_name(self):
self.evaluator.name = 'eval'
mean = self.evaluator()
# name is used as a prefix
key = 'ap/iou=0.50:0.95/area=all/max_dets=100'
np.testing.assert_equal(
mean['eval/main/m{}'.format(key)], self.expected_ap)
np.testing.assert_equal(mean['eval/main/{}/cls0'.format(key)], np.nan)
np.testing.assert_equal(mean['eval/main/{}/cls1'.format(key)], np.nan)
np.testing.assert_equal(
mean['eval/main/{}/cls2'.format(key)], self.expected_ap)
def test_current_report(self):
reporter = chainer.Reporter()
with reporter:
mean = self.evaluator()
# The result is reported to the current reporter.
self.assertEqual(reporter.observation, mean)
@unittest.skipUnless(_available, 'pycocotools is not installed')
@attr.mpi
class TestDetectionCOCOEvaluatorMPI(unittest.TestCase):
def setUp(self):
self.comm = create_communicator('naive')
batchsize_per_process = 5
batchsize = batchsize_per_process * self.comm.size
if self.comm.rank == 0:
bboxes = [generate_random_bbox(5, (256, 324), 24, 120)
for _ in range(10)]
labels = [np.random.choice(np.arange(3, dtype=np.int32), size=(5,))
for _ in range(10)]
else:
bboxes = None
labels = None
initial_count = self.comm.rank * batchsize_per_process
bboxes = self.comm.bcast_obj(bboxes)
labels = self.comm.bcast_obj(labels)
self.bboxes = bboxes
self.labels = labels
        areas = [[np.array([(bb[2] - bb[0]) * (bb[3] - bb[1])])
                  for bb in bbox] for bbox in bboxes]
crowdeds = [np.zeros((5,)) for _ in range(10)]
self.dataset = TupleDataset(
np.random.uniform(size=(10, 3, 32, 48)),
bboxes, labels, areas, crowdeds)
self.initial_count = initial_count
self.batchsize = batchsize
def test_consistency(self):
reporter = chainer.Reporter()
if self.comm.rank == 0:
multi_iterator = SerialIterator(
self.dataset, self.batchsize, repeat=False, shuffle=False)
else:
multi_iterator = None
multi_link = _DetectionStubLink(
self.bboxes, self.labels, self.initial_count)
multi_evaluator = DetectionCOCOEvaluator(
multi_iterator, multi_link,
label_names=('cls0', 'cls1', 'cls2'),
comm=self.comm)
reporter.add_observer('target', multi_link)
with reporter:
multi_mean = multi_evaluator.evaluate()
if self.comm.rank != 0:
self.assertEqual(multi_mean, {})
return
single_iterator = SerialIterator(
self.dataset, self.batchsize, repeat=False, shuffle=False)
single_link = _DetectionStubLink(
self.bboxes, self.labels)
single_evaluator = DetectionCOCOEvaluator(
single_iterator, single_link,
label_names=('cls0', 'cls1', 'cls2'))
reporter.add_observer('target', single_link)
with reporter:
single_mean = single_evaluator.evaluate()
self.assertEqual(set(multi_mean.keys()), set(single_mean.keys()))
for key in multi_mean.keys():
np.testing.assert_equal(single_mean[key], multi_mean[key])
testing.run_module(__name__, __file__)
|
from rest_framework import renderers
from rest_framework.exceptions import APIException
from shop.models.cart import CartModel
from shop.serializers.cart import CartSerializer
class TemplateContextMixin:
"""
Alternative implementation which does not pollute the template context with
the serialized data on the root scope.
"""
def get_template_context(self, data, renderer_context):
response = renderer_context['response']
if response.exception:
return dict(data, status_code=response.status_code)
else:
view = renderer_context['view']
key = getattr(view, 'context_data_name', 'data')
return {key: data}
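# Illustrative note (not part of django-SHOP's code): a view may set
# ``context_data_name = 'product'`` so that templates rendered through these
# renderers access the serialized payload as ``{{ product }}`` instead of the
# default ``{{ data }}``.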
class ShopTemplateHTMLRenderer(TemplateContextMixin, renderers.TemplateHTMLRenderer):
"""
Modified TemplateHTMLRenderer, which shall be used to render templates used by django-SHOP.
Instead of polluting the template context with the serialized data, that information is
    stored inside a separate `data` attribute, which makes it possible to add a Cart and Paginator object.
Templates created for this renderer are compatible with the `CMSPageRenderer` (see below).
"""
def render(self, data, accepted_media_type=None, renderer_context=None):
request = renderer_context['request']
response = renderer_context['response']
if response.exception:
template = self.get_exception_template(response)
template_context = self.get_template_context(data, renderer_context)
return template.render(template_context)
view = renderer_context['view']
template_names = self.get_template_names(response, view)
template = self.resolve_template(template_names)
template_context = self.get_template_context(data, renderer_context)
self.update_with_cart_context(renderer_context)
template_context.update(renderer_context, paginator=view.paginator)
return template.render(template_context, request=request)
def update_with_cart_context(self, context):
try:
cart = CartModel.objects.get_from_request(context['request'])
context['is_cart_filled'] = cart.items.exists()
cart_serializer = CartSerializer(cart, context=context, label='cart')
context['cart'] = cart_serializer.data
except (KeyError, CartModel.DoesNotExist):
pass
class CMSPageRenderer(TemplateContextMixin, renderers.TemplateHTMLRenderer):
"""
Modified TemplateHTMLRenderer, which is able to render CMS pages containing the templatetag
``{% render_placeholder ... %}``, in addition to accept ordinary Python objects in their
rendering context.
The serialized data object, as available to other REST renderers, is explicitly added to the
context as ``data``. Therefore keep in mind that templates for REST's
:class:`rest_framework.renderers.TemplateHTMLRenderer` are not compatible with this renderer.
"""
def render(self, data, accepted_media_type=None, renderer_context=None):
renderer_context = renderer_context or {}
view = renderer_context['view']
request = renderer_context['request']
response = renderer_context['response']
if not getattr(request, 'current_page', None):
msg = "APIView class '{}' with 'renderer_class=(CMSPageRenderer, ...)' can only be used by a CMSApp"
response = view.handle_exception(APIException(detail=msg.format(view.__class__)))
if response.exception:
template = self.get_exception_template(response)
template_context = self.get_template_context(data, renderer_context)
return template.render(context=template_context, request=request)
# set edit_mode, so that otherwise invisible placeholders can be edited inline
edit_mode = getattr(request.current_page, 'publisher_is_draft', False)
template_names = [request.current_page.get_template()]
template = self.resolve_template(template_names)
template_context = self.get_template_context(data, renderer_context)
template_context.update(
renderer_context,
paginator=view.paginator,
edit_mode=edit_mode,
)
return template.render(template_context, request=request)
|