from homeassistant.components.panasonic_viera.const import (
ATTR_DEVICE_INFO,
ATTR_FRIENDLY_NAME,
ATTR_MANUFACTURER,
ATTR_MODEL_NUMBER,
ATTR_UDN,
CONF_APP_ID,
CONF_ENCRYPTION_KEY,
CONF_ON_ACTION,
DEFAULT_MANUFACTURER,
DEFAULT_MODEL_NUMBER,
DEFAULT_NAME,
DEFAULT_PORT,
DOMAIN,
)
from homeassistant.config_entries import ENTRY_STATE_NOT_LOADED
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT
from homeassistant.setup import async_setup_component
from tests.async_mock import Mock, patch
from tests.common import MockConfigEntry
MOCK_CONFIG_DATA = {
CONF_HOST: "0.0.0.0",
CONF_NAME: DEFAULT_NAME,
CONF_PORT: DEFAULT_PORT,
CONF_ON_ACTION: None,
}
MOCK_ENCRYPTION_DATA = {
CONF_APP_ID: "mock-app-id",
CONF_ENCRYPTION_KEY: "mock-encryption-key",
}
MOCK_DEVICE_INFO = {
ATTR_FRIENDLY_NAME: DEFAULT_NAME,
ATTR_MANUFACTURER: DEFAULT_MANUFACTURER,
ATTR_MODEL_NUMBER: DEFAULT_MODEL_NUMBER,
ATTR_UDN: "mock-unique-id",
}
def get_mock_remote(device_info=MOCK_DEVICE_INFO):
"""Return a mock remote."""
mock_remote = Mock()
async def async_create_remote_control(during_setup=False):
return
mock_remote.async_create_remote_control = async_create_remote_control
async def async_get_device_info():
return device_info
mock_remote.async_get_device_info = async_get_device_info
return mock_remote
async def test_setup_entry_encrypted(hass):
"""Test setup with encrypted config entry."""
mock_entry = MockConfigEntry(
domain=DOMAIN,
unique_id=MOCK_DEVICE_INFO[ATTR_UDN],
data={**MOCK_CONFIG_DATA, **MOCK_ENCRYPTION_DATA, **MOCK_DEVICE_INFO},
)
mock_entry.add_to_hass(hass)
mock_remote = get_mock_remote()
with patch(
"homeassistant.components.panasonic_viera.Remote",
return_value=mock_remote,
):
await hass.config_entries.async_setup(mock_entry.entry_id)
await hass.async_block_till_done()
state = hass.states.get("media_player.panasonic_viera_tv")
assert state
assert state.name == DEFAULT_NAME
async def test_setup_entry_encrypted_missing_device_info(hass):
"""Test setup with encrypted config entry and missing device info."""
mock_entry = MockConfigEntry(
domain=DOMAIN,
unique_id=MOCK_CONFIG_DATA[CONF_HOST],
data={**MOCK_CONFIG_DATA, **MOCK_ENCRYPTION_DATA},
)
mock_entry.add_to_hass(hass)
mock_remote = get_mock_remote()
with patch(
"homeassistant.components.panasonic_viera.Remote",
return_value=mock_remote,
):
await hass.config_entries.async_setup(mock_entry.entry_id)
await hass.async_block_till_done()
assert mock_entry.data[ATTR_DEVICE_INFO] == MOCK_DEVICE_INFO
assert mock_entry.unique_id == MOCK_DEVICE_INFO[ATTR_UDN]
state = hass.states.get("media_player.panasonic_viera_tv")
assert state
assert state.name == DEFAULT_NAME
async def test_setup_entry_encrypted_missing_device_info_none(hass):
"""Test setup with encrypted config entry and device info set to None."""
mock_entry = MockConfigEntry(
domain=DOMAIN,
unique_id=MOCK_CONFIG_DATA[CONF_HOST],
data={**MOCK_CONFIG_DATA, **MOCK_ENCRYPTION_DATA},
)
mock_entry.add_to_hass(hass)
mock_remote = get_mock_remote(device_info=None)
with patch(
"homeassistant.components.panasonic_viera.Remote",
return_value=mock_remote,
):
await hass.config_entries.async_setup(mock_entry.entry_id)
await hass.async_block_till_done()
assert mock_entry.data[ATTR_DEVICE_INFO] is None
assert mock_entry.unique_id == MOCK_CONFIG_DATA[CONF_HOST]
state = hass.states.get("media_player.panasonic_viera_tv")
assert state
assert state.name == DEFAULT_NAME
async def test_setup_entry_unencrypted(hass):
"""Test setup with unencrypted config entry."""
mock_entry = MockConfigEntry(
domain=DOMAIN,
unique_id=MOCK_DEVICE_INFO[ATTR_UDN],
data={**MOCK_CONFIG_DATA, **MOCK_DEVICE_INFO},
)
mock_entry.add_to_hass(hass)
mock_remote = get_mock_remote()
with patch(
"homeassistant.components.panasonic_viera.Remote",
return_value=mock_remote,
):
await hass.config_entries.async_setup(mock_entry.entry_id)
await hass.async_block_till_done()
state = hass.states.get("media_player.panasonic_viera_tv")
assert state
assert state.name == DEFAULT_NAME
async def test_setup_entry_unencrypted_missing_device_info(hass):
"""Test setup with unencrypted config entry and missing device info."""
mock_entry = MockConfigEntry(
domain=DOMAIN,
unique_id=MOCK_CONFIG_DATA[CONF_HOST],
data=MOCK_CONFIG_DATA,
)
mock_entry.add_to_hass(hass)
mock_remote = get_mock_remote()
with patch(
"homeassistant.components.panasonic_viera.Remote",
return_value=mock_remote,
):
await hass.config_entries.async_setup(mock_entry.entry_id)
await hass.async_block_till_done()
assert mock_entry.data[ATTR_DEVICE_INFO] == MOCK_DEVICE_INFO
assert mock_entry.unique_id == MOCK_DEVICE_INFO[ATTR_UDN]
state = hass.states.get("media_player.panasonic_viera_tv")
assert state
assert state.name == DEFAULT_NAME
async def test_setup_entry_unencrypted_missing_device_info_none(hass):
"""Test setup with unencrypted config entry and device info set to None."""
mock_entry = MockConfigEntry(
domain=DOMAIN,
unique_id=MOCK_CONFIG_DATA[CONF_HOST],
data=MOCK_CONFIG_DATA,
)
mock_entry.add_to_hass(hass)
mock_remote = get_mock_remote(device_info=None)
with patch(
"homeassistant.components.panasonic_viera.Remote",
return_value=mock_remote,
):
await hass.config_entries.async_setup(mock_entry.entry_id)
await hass.async_block_till_done()
assert mock_entry.data[ATTR_DEVICE_INFO] is None
assert mock_entry.unique_id == MOCK_CONFIG_DATA[CONF_HOST]
state = hass.states.get("media_player.panasonic_viera_tv")
assert state
assert state.name == DEFAULT_NAME
async def test_setup_config_flow_initiated(hass):
"""Test if config flow is initiated in setup."""
assert (
await async_setup_component(
hass,
DOMAIN,
{DOMAIN: {CONF_HOST: "0.0.0.0"}},
)
is True
)
assert len(hass.config_entries.flow.async_progress()) == 1
async def test_setup_unload_entry(hass):
"""Test if config entry is unloaded."""
mock_entry = MockConfigEntry(
domain=DOMAIN, unique_id=MOCK_DEVICE_INFO[ATTR_UDN], data=MOCK_CONFIG_DATA
)
mock_entry.add_to_hass(hass)
mock_remote = get_mock_remote()
with patch(
"homeassistant.components.panasonic_viera.Remote",
return_value=mock_remote,
):
await hass.config_entries.async_setup(mock_entry.entry_id)
await hass.async_block_till_done()
await hass.config_entries.async_unload(mock_entry.entry_id)
assert mock_entry.state == ENTRY_STATE_NOT_LOADED
state = hass.states.get("media_player.panasonic_viera_tv")
assert state is None
|
import time
class IntSlider(object):
"""Class to set a integer slider."""
def __init__(self, plotter=None, callback=None, first_call=True):
self.plotter = plotter
self.callback = callback
self.slider_rep = None
self.first_call = first_call
self._first_time = True
def __call__(self, value):
"""Round the label of the slider."""
idx = int(round(value))
if self.slider_rep is not None:
self.slider_rep.SetValue(idx)
self.plotter.update()
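        # Only forward the very first slider event to the callback when
        # first_call was requested; the widget typically fires once on creation.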
if not self._first_time or all([self._first_time, self.first_call]):
self.callback(idx)
if self._first_time:
self._first_time = False
class TimeSlider(object):
"""Class to update the time slider."""
def __init__(self, plotter=None, brain=None, callback=None,
first_call=True):
self.plotter = plotter
self.brain = brain
self.callback = callback
self.slider_rep = None
self.first_call = first_call
self._first_time = True
self.time_label = None
if self.brain is not None and callable(self.brain._data['time_label']):
self.time_label = self.brain._data['time_label']
def __call__(self, value, update_widget=False, time_as_index=True):
"""Update the time slider."""
value = float(value)
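        # Callers may pass either a time index or a time in seconds; convert
        # the latter to an index before updating the brain.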
if not time_as_index:
value = self.brain._to_time_index(value)
if not self._first_time or all([self._first_time, self.first_call]):
self.brain.set_time_point(value)
if self.callback is not None:
self.callback()
current_time = self.brain._current_time
if self.slider_rep is not None:
if self.time_label is not None:
current_time = self.time_label(current_time)
self.slider_rep.SetTitleText(current_time)
if update_widget:
self.slider_rep.SetValue(value)
self.plotter.update()
if self._first_time:
self._first_time = False
class UpdateColorbarScale(object):
"""Class to update the values of the colorbar sliders."""
def __init__(self, plotter=None, brain=None):
self.plotter = plotter
self.brain = brain
self.keys = ('fmin', 'fmid', 'fmax')
self.reps = {key: None for key in self.keys}
self.slider_rep = None
self._first_time = True
def __call__(self, value):
"""Update the colorbar sliders."""
if self._first_time:
self._first_time = False
return
self.brain._update_fscale(value)
for key in self.keys:
if self.reps[key] is not None:
self.reps[key].SetValue(self.brain._data[key])
if self.slider_rep is not None:
self.slider_rep.SetValue(1.0)
self.plotter.update()
class BumpColorbarPoints(object):
"""Class that ensure constraints over the colorbar points."""
def __init__(self, plotter=None, brain=None, name=None):
self.plotter = plotter
self.brain = brain
self.name = name
self.callback = {
"fmin": lambda fmin: brain.update_lut(fmin=fmin),
"fmid": lambda fmid: brain.update_lut(fmid=fmid),
"fmax": lambda fmax: brain.update_lut(fmax=fmax),
}
self.keys = ('fmin', 'fmid', 'fmax')
self.reps = {key: None for key in self.keys}
self.last_update = time.time()
self._first_time = True
def __call__(self, value):
"""Update the colorbar sliders."""
if self._first_time:
self._first_time = False
return
vals = {key: self.brain._data[key] for key in self.keys}
if self.name == "fmin" and self.reps["fmin"] is not None:
if vals['fmax'] < value:
vals['fmax'] = value
self.reps['fmax'].SetValue(value)
if vals['fmid'] < value:
vals['fmid'] = value
self.reps['fmid'].SetValue(value)
self.reps['fmin'].SetValue(value)
elif self.name == "fmid" and self.reps['fmid'] is not None:
if vals['fmin'] > value:
vals['fmin'] = value
self.reps['fmin'].SetValue(value)
if vals['fmax'] < value:
vals['fmax'] = value
self.reps['fmax'].SetValue(value)
self.reps['fmid'].SetValue(value)
elif self.name == "fmax" and self.reps['fmax'] is not None:
if vals['fmin'] > value:
vals['fmin'] = value
self.reps['fmin'].SetValue(value)
if vals['fmid'] > value:
vals['fmid'] = value
self.reps['fmid'].SetValue(value)
self.reps['fmax'].SetValue(value)
self.brain.update_lut(**vals)
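        # Throttle the potentially expensive callback to roughly 60 Hz.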
if time.time() > self.last_update + 1. / 60.:
self.callback[self.name](value)
self.last_update = time.time()
self.plotter.update()
class ShowView(object):
"""Class that selects the correct view."""
def __init__(self, plotter=None, brain=None, orientation=None,
row=None, col=None, hemi=None):
self.plotter = plotter
self.brain = brain
self.orientation = orientation
self.short_orientation = [s[:3] for s in orientation]
self.row = row
self.col = col
self.hemi = hemi
self.slider_rep = None
def __call__(self, value, update_widget=False):
"""Update the view."""
self.brain.show_view(value, row=self.row, col=self.col,
hemi=self.hemi)
if update_widget:
if len(value) > 3:
idx = self.orientation.index(value)
else:
idx = self.short_orientation.index(value)
if self.slider_rep is not None:
self.slider_rep.SetValue(idx)
self.slider_rep.SetTitleText(self.orientation[idx])
self.plotter.update()
class SmartSlider(object):
"""Class to manage smart slider.
It stores it's own slider representation for efficiency
and uses it when necessary.
"""
def __init__(self, plotter=None, callback=None):
self.plotter = plotter
self.callback = callback
self.slider_rep = None
def __call__(self, value, update_widget=False):
"""Update the value."""
self.callback(value)
if update_widget:
if self.slider_rep is not None:
self.slider_rep.SetValue(value)
self.plotter.update()
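# Minimal usage sketch (names assumed, not part of this module): the callback
# classes above are meant to be handed to a pyvista slider widget, with
# ``slider_rep`` filled in afterwards so the callback can drive the widget
# programmatically, e.g.:
#
#     slider = IntSlider(plotter=plotter, callback=on_change, first_call=False)
#     widget = plotter.add_slider_widget(slider, rng=[0, 10], value=0,
#                                        title='index')
#     slider.slider_rep = widget.GetRepresentation()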
|
import numpy as np
import unittest
from chainer import testing
from chainercv.links.model.ssd import random_distort
try:
import cv2 # NOQA
_cv2_available = True
except ImportError:
_cv2_available = False
@unittest.skipUnless(_cv2_available, 'cv2 is not installed')
class TestRandomDistort(unittest.TestCase):
def test_random_distort(self):
img = np.random.randint(0, 256, size=(3, 48, 32)).astype(np.float32)
out = random_distort(img)
self.assertEqual(out.shape, img.shape)
self.assertEqual(out.dtype, img.dtype)
testing.run_module(__name__, __file__)
|
import re
import numpy as np
import datetime
from ..base import BaseRaw
from ..meas_info import create_info
from ..utils import _mult_cal_one
from ...annotations import Annotations
from ...utils import logger, verbose, fill_doc, warn
from ...utils.check import _require_version
from ..constants import FIFF
from .._digitization import _make_dig_points
from ...transforms import _frame_to_str
@fill_doc
def read_raw_snirf(fname, preload=False, verbose=None):
"""Reader for a continuous wave SNIRF data.
.. note:: This reader supports the .snirf file type only,
not the .jnirs version.
Parameters
----------
fname : str
Path to the SNIRF data file.
%(preload)s
%(verbose)s
Returns
-------
raw : instance of RawSNIRF
A Raw object containing SNIRF data.
See Also
--------
mne.io.Raw : Documentation of attribute and methods.
"""
return RawSNIRF(fname, preload, verbose)
def _open(fname):
return open(fname, 'r', encoding='latin-1')
@fill_doc
class RawSNIRF(BaseRaw):
"""Raw object from a continuous wave SNIRF file.
Parameters
----------
fname : str
Path to the SNIRF data file.
%(preload)s
%(verbose)s
See Also
--------
mne.io.Raw : Documentation of attribute and methods.
"""
@verbose
def __init__(self, fname, preload=False, verbose=None):
_require_version('h5py', 'read raw SNIRF data')
from ...externals.pymatreader.utils import _import_h5py
h5py = _import_h5py()
logger.info('Loading %s' % fname)
with h5py.File(fname, 'r') as dat:
if 'data2' in dat['nirs']:
warn("File contains multiple recordings. "
"MNE does not support this feature. "
"Only the first dataset will be processed.")
if np.array(dat.get('nirs/data1/measurementList1/dataType')) != 1:
raise RuntimeError('File does not contain continuous wave '
'data. MNE only supports reading continuous'
' wave amplitude SNIRF files. Expected type'
' code 1 but received type code %d' %
(np.array(dat.get(
'nirs/data1/measurementList1/dataType'
))))
last_samps = dat.get('/nirs/data1/dataTimeSeries').shape[0] - 1
samplingrate_raw = np.array(dat.get('nirs/data1/time'))
sampling_rate = 0
if samplingrate_raw.shape == (2, 1):
# specified as onset/samplerate
warn("Onset/sample rate SNIRF not yet supported.")
else:
# specified as time points
fs_diff = np.around(np.diff(samplingrate_raw), decimals=4)
if len(np.unique(fs_diff)) == 1:
# Uniformly sampled data
sampling_rate = 1. / np.unique(fs_diff)
else:
# print(np.unique(fs_diff))
warn("Non uniform sampled data not supported.")
if sampling_rate == 0:
warn("Unable to extract sample rate from SNIRF file.")
sources = np.array(dat.get('nirs/probe/sourceLabels'))
detectors = np.array(dat.get('nirs/probe/detectorLabels'))
sources = [s.decode('UTF-8') for s in sources]
detectors = [d.decode('UTF-8') for d in detectors]
# Extract source and detector locations
detPos3D = np.array(dat.get('nirs/probe/detectorPos3D'))
srcPos3D = np.array(dat.get('nirs/probe/sourcePos3D'))
assert len(sources) == srcPos3D.shape[0]
assert len(detectors) == detPos3D.shape[0]
# Extract wavelengths
fnirs_wavelengths = np.array(dat.get('nirs/probe/wavelengths'))
fnirs_wavelengths = [int(w) for w in fnirs_wavelengths]
# Extract channels
def atoi(text):
return int(text) if text.isdigit() else text
def natural_keys(text):
return [atoi(c) for c in re.split(r'(\d+)', text)]
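            # Natural sort so that e.g. measurementList10 follows
            # measurementList9 instead of measurementList1.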
channels = np.array([name for name in dat['nirs']['data1'].keys()])
channels_idx = np.array(['measurementList' in n for n in channels])
channels = channels[channels_idx]
channels = sorted(channels, key=natural_keys)
chnames = []
for chan in channels:
src_idx = int(np.array(dat.get('nirs/data1/' +
chan + '/sourceIndex'))[0])
det_idx = int(np.array(dat.get('nirs/data1/' +
chan + '/detectorIndex'))[0])
wve_idx = int(np.array(dat.get('nirs/data1/' +
chan + '/wavelengthIndex'))[0])
ch_name = sources[src_idx - 1] + '_' +\
detectors[det_idx - 1] + ' ' +\
str(fnirs_wavelengths[wve_idx - 1])
chnames.append(ch_name)
# Create mne structure
info = create_info(chnames,
sampling_rate,
ch_types='fnirs_cw_amplitude')
subject_info = {}
names = np.array(dat.get('nirs/metaDataTags/SubjectID'))
subject_info['first_name'] = names[0].decode('UTF-8')
# Read non standard (but allowed) custom metadata tags
if 'lastName' in dat.get('nirs/metaDataTags/'):
ln = dat.get('/nirs/metaDataTags/lastName')[0].decode('UTF-8')
subject_info['last_name'] = ln
if 'middleName' in dat.get('nirs/metaDataTags/'):
m = dat.get('/nirs/metaDataTags/middleName')[0].decode('UTF-8')
subject_info['middle_name'] = m
if 'sex' in dat.get('nirs/metaDataTags/'):
s = dat.get('/nirs/metaDataTags/sex')[0].decode('UTF-8')
if s in {'M', 'Male', '1', 'm'}:
subject_info['sex'] = FIFF.FIFFV_SUBJ_SEX_MALE
elif s in {'F', 'Female', '2', 'f'}:
subject_info['sex'] = FIFF.FIFFV_SUBJ_SEX_FEMALE
elif s in {'0', 'u'}:
subject_info['sex'] = FIFF.FIFFV_SUBJ_SEX_UNKNOWN
# End non standard name reading
# Update info
info.update(subject_info=subject_info)
LengthUnit = np.array(dat.get('/nirs/metaDataTags/LengthUnit'))
LengthUnit = LengthUnit[0].decode('UTF-8')
scal = 1
if "cm" in LengthUnit:
scal = 100
elif "mm" in LengthUnit:
scal = 1000
for idx, chan in enumerate(channels):
src_idx = int(np.array(dat.get('nirs/data1/' +
chan + '/sourceIndex'))[0])
det_idx = int(np.array(dat.get('nirs/data1/' +
chan + '/detectorIndex'))[0])
wve_idx = int(np.array(dat.get('nirs/data1/' +
chan + '/wavelengthIndex'))[0])
info['chs'][idx]['loc'][3:6] = srcPos3D[src_idx - 1, :] / scal
info['chs'][idx]['loc'][6:9] = detPos3D[det_idx - 1, :] / scal
# Store channel as mid point
midpoint = (info['chs'][idx]['loc'][3:6] +
info['chs'][idx]['loc'][6:9]) / 2
info['chs'][idx]['loc'][0:3] = midpoint
info['chs'][idx]['loc'][9] = fnirs_wavelengths[wve_idx - 1]
if 'MNE_coordFrame' in dat.get('nirs/metaDataTags/'):
coord_frame = int(dat.get('/nirs/metaDataTags/MNE_coordFrame')
[0])
else:
coord_frame = FIFF.FIFFV_COORD_UNKNOWN
if 'landmarkPos3D' in dat.get('nirs/probe/'):
diglocs = np.array(dat.get('/nirs/probe/landmarkPos3D'))
digname = np.array(dat.get('/nirs/probe/landmarkLabels'))
nasion, lpa, rpa, hpi = None, None, None, None
extra_ps = dict()
for idx, dign in enumerate(digname):
if dign == b'LPA':
lpa = diglocs[idx, :]
elif dign == b'NASION':
nasion = diglocs[idx, :]
elif dign == b'RPA':
rpa = diglocs[idx, :]
else:
extra_ps[f'EEG{len(extra_ps) + 1:03d}'] = diglocs[idx]
info['dig'] = _make_dig_points(nasion=nasion, lpa=lpa, rpa=rpa,
hpi=hpi, dig_ch_pos=extra_ps,
coord_frame=_frame_to_str[
coord_frame])
str_date = np.array((dat.get(
'/nirs/metaDataTags/MeasurementDate')))[0].decode('UTF-8')
str_time = np.array((dat.get(
'/nirs/metaDataTags/MeasurementTime')))[0].decode('UTF-8')
str_datetime = str_date + str_time
# Several formats have been observed so we try each in turn
for dt_code in ['%Y-%m-%d%H:%M:%SZ',
'%Y-%m-%d%H:%M:%S']:
try:
meas_date = datetime.datetime.strptime(
str_datetime, dt_code)
except ValueError:
pass
else:
break
else:
warn("Extraction of measurement date from SNIRF file failed. "
"The date is being set to January 1st, 2000, "
f"instead of {str_datetime}")
meas_date = datetime.datetime(2000, 1, 1, 0, 0, 0)
meas_date = meas_date.replace(tzinfo=datetime.timezone.utc)
info['meas_date'] = meas_date
if 'DateOfBirth' in dat.get('nirs/metaDataTags/'):
str_birth = np.array((dat.get('/nirs/metaDataTags/'
'DateOfBirth')))[0].decode()
birth_matched = re.fullmatch(r'(\d+)-(\d+)-(\d+)', str_birth)
if birth_matched is not None:
info["subject_info"]['birthday'] = (
int(birth_matched.groups()[0]),
int(birth_matched.groups()[1]),
int(birth_matched.groups()[2]))
super(RawSNIRF, self).__init__(info, preload, filenames=[fname],
last_samps=[last_samps],
verbose=verbose)
# Extract annotations
annot = Annotations([], [], [])
for key in dat['nirs']:
if 'stim' in key:
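                    # Each /nirs/stimX group becomes an annotation: column 0
                    # holds the onsets, the duration is fixed at 1 s and the
                    # group name minus the 'stim' prefix is the description.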
data = np.array(dat.get('/nirs/' + key + '/data'))
if data.size > 0:
annot.append(data[:, 0], 1.0, key[4:])
self.set_annotations(annot)
# Reorder channels to match expected ordering in MNE
num_chans = len(self.ch_names)
chans = []
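        # Interleave the first and second halves of the channel list so the
        # two wavelengths of each source-detector pair end up adjacent.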
for idx in range(num_chans // 2):
chans.append(idx)
chans.append(idx + num_chans // 2)
self.pick(picks=chans)
def _read_segment_file(self, data, idx, fi, start, stop, cals, mult):
"""Read a segment of data from a file."""
import h5py
with h5py.File(self._filenames[0], 'r') as dat:
one = dat['/nirs/data1/dataTimeSeries'][start:stop].T
_mult_cal_one(data, one, idx, cals, mult)
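# Minimal usage sketch (file name assumed, not part of this module):
#
#     raw = read_raw_snirf('recording.snirf', preload=True)
#     print(raw.info['sfreq'], raw.ch_names[:4])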
|
from homeassistant.const import CONF_UNIT_OF_MEASUREMENT
from homeassistant.helpers.entity import Entity
from . import IHC_CONTROLLER, IHC_INFO
from .ihcdevice import IHCDevice
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the IHC sensor platform."""
if discovery_info is None:
return
devices = []
for name, device in discovery_info.items():
ihc_id = device["ihc_id"]
product_cfg = device["product_cfg"]
product = device["product"]
# Find controller that corresponds with device id
ctrl_id = device["ctrl_id"]
ihc_key = f"ihc{ctrl_id}"
info = hass.data[ihc_key][IHC_INFO]
ihc_controller = hass.data[ihc_key][IHC_CONTROLLER]
unit = product_cfg[CONF_UNIT_OF_MEASUREMENT]
sensor = IHCSensor(ihc_controller, name, ihc_id, info, unit, product)
devices.append(sensor)
add_entities(devices)
class IHCSensor(IHCDevice, Entity):
"""Implementation of the IHC sensor."""
def __init__(
self, ihc_controller, name, ihc_id: int, info: bool, unit, product=None
) -> None:
"""Initialize the IHC sensor."""
super().__init__(ihc_controller, name, ihc_id, info, product)
self._state = None
self._unit_of_measurement = unit
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement
def on_ihc_change(self, ihc_id, value):
"""Handle IHC resource change."""
self._state = value
self.schedule_update_ha_state()
|
from weblate.trans.tests.test_views import ViewTestCase
from weblate.utils.state import STATE_APPROVED
class ReviewTest(ViewTestCase):
def setUp(self):
super().setUp()
self.project.translation_review = True
self.project.save()
def approve(self):
unit = self.get_unit()
unit.target = "Ahoj svete!\n"
unit.state = STATE_APPROVED
unit.save()
def check_result(self, fail):
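        # fail=True means the edit should have been rejected, leaving the
        # previously approved translation untouched.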
unit = self.get_unit()
if fail:
self.assertTrue(unit.approved)
self.assertEqual(unit.target, "Ahoj svete!\n")
else:
self.assertFalse(unit.approved)
self.assertEqual(unit.target, "Nazdar svete!\n")
def test_approve(self):
self.make_manager()
self.edit_unit("Hello, world!\n", "Nazdar svete!\n", review=str(STATE_APPROVED))
unit = self.get_unit()
self.assertTrue(unit.approved)
def test_edit_approved(self, fail=True):
self.approve()
self.edit_unit("Hello, world!\n", "Nazdar svete!\n")
self.check_result(fail)
def test_edit_reviewer(self):
self.make_manager()
self.test_edit_approved(False)
def test_suggest(self, fail=True):
self.approve()
self.edit_unit("Hello, world!\n", "Nazdar svete!\n", suggest="yes")
        # Get the id of the created suggestion
suggestion = self.get_unit().suggestions[0].pk
# Accept one of suggestions
self.edit_unit("Hello, world!\n", "", accept_edit=suggestion)
self.check_result(fail)
    def test_suggest_reviewer(self):
self.make_manager()
self.test_suggest(False)
|
from functools import partial
import logging
from numato_gpio import NumatoGpioError
from homeassistant.components.binary_sensor import BinarySensorEntity
from homeassistant.const import DEVICE_DEFAULT_NAME
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect, dispatcher_send
from . import (
CONF_BINARY_SENSORS,
CONF_DEVICES,
CONF_ID,
CONF_INVERT_LOGIC,
CONF_PORTS,
DATA_API,
DOMAIN,
)
_LOGGER = logging.getLogger(__name__)
NUMATO_SIGNAL = "numato_signal_{}_{}"
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the configured Numato USB GPIO binary sensor ports."""
if discovery_info is None:
return
def read_gpio(device_id, port, level):
"""Send signal to entity to have it update state."""
dispatcher_send(hass, NUMATO_SIGNAL.format(device_id, port), level)
api = hass.data[DOMAIN][DATA_API]
binary_sensors = []
devices = hass.data[DOMAIN][CONF_DEVICES]
for device in [d for d in devices if CONF_BINARY_SENSORS in d]:
device_id = device[CONF_ID]
platform = device[CONF_BINARY_SENSORS]
invert_logic = platform[CONF_INVERT_LOGIC]
ports = platform[CONF_PORTS]
for port, port_name in ports.items():
try:
api.setup_input(device_id, port)
api.edge_detect(device_id, port, partial(read_gpio, device_id))
except NumatoGpioError as err:
_LOGGER.error(
"Failed to initialize binary sensor '%s' on Numato device %s port %s: %s",
port_name,
device_id,
port,
err,
)
continue
binary_sensors.append(
NumatoGpioBinarySensor(
port_name,
device_id,
port,
invert_logic,
api,
)
)
add_entities(binary_sensors, True)
class NumatoGpioBinarySensor(BinarySensorEntity):
"""Represents a binary sensor (input) port of a Numato GPIO expander."""
def __init__(self, name, device_id, port, invert_logic, api):
"""Initialize the Numato GPIO based binary sensor object."""
self._name = name or DEVICE_DEFAULT_NAME
self._device_id = device_id
self._port = port
self._invert_logic = invert_logic
self._state = None
self._api = api
async def async_added_to_hass(self):
"""Connect state update callback."""
self.async_on_remove(
async_dispatcher_connect(
self.hass,
NUMATO_SIGNAL.format(self._device_id, self._port),
self._async_update_state,
)
)
@callback
def _async_update_state(self, level):
"""Update entity state."""
self._state = level
self.async_write_ha_state()
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def is_on(self):
"""Return the state of the entity."""
return self._state != self._invert_logic
def update(self):
"""Update the GPIO state."""
try:
self._state = self._api.read_input(self._device_id, self._port)
except NumatoGpioError as err:
self._state = None
_LOGGER.error(
"Failed to update Numato device %s port %s: %s",
self._device_id,
self._port,
err,
)
|
from functools import partial
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
import voluptuous as vol
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_COLOR_TEMP,
ATTR_EFFECT,
ATTR_HS_COLOR,
ATTR_TRANSITION,
ATTR_WHITE_VALUE,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_COLOR_TEMP,
SUPPORT_EFFECT,
SUPPORT_TRANSITION,
SUPPORT_WHITE_VALUE,
LightEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import callback
from homeassistant.helpers import config_validation as cv, entity_platform
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_registry import (
async_get_registry as async_get_entity_registry,
)
from homeassistant.helpers.typing import HomeAssistantType
import homeassistant.util.color as color_util
from . import WLEDDataUpdateCoordinator, WLEDDeviceEntity, wled_exception_handler
from .const import (
ATTR_COLOR_PRIMARY,
ATTR_INTENSITY,
ATTR_ON,
ATTR_PALETTE,
ATTR_PLAYLIST,
ATTR_PRESET,
ATTR_REVERSE,
ATTR_SEGMENT_ID,
ATTR_SPEED,
DOMAIN,
SERVICE_EFFECT,
SERVICE_PRESET,
)
PARALLEL_UPDATES = 1
async def async_setup_entry(
hass: HomeAssistantType,
entry: ConfigEntry,
async_add_entities: Callable[[List[Entity], bool], None],
) -> None:
"""Set up WLED light based on a config entry."""
coordinator: WLEDDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id]
platform = entity_platform.current_platform.get()
platform.async_register_entity_service(
SERVICE_EFFECT,
{
vol.Optional(ATTR_EFFECT): vol.Any(cv.positive_int, cv.string),
vol.Optional(ATTR_INTENSITY): vol.All(
vol.Coerce(int), vol.Range(min=0, max=255)
),
vol.Optional(ATTR_PALETTE): vol.Any(cv.positive_int, cv.string),
vol.Optional(ATTR_REVERSE): cv.boolean,
vol.Optional(ATTR_SPEED): vol.All(
vol.Coerce(int), vol.Range(min=0, max=255)
),
},
"async_effect",
)
platform.async_register_entity_service(
SERVICE_PRESET,
{
vol.Required(ATTR_PRESET): vol.All(
vol.Coerce(int), vol.Range(min=-1, max=65535)
),
},
"async_preset",
)
update_segments = partial(
async_update_segments, entry, coordinator, {}, async_add_entities
)
coordinator.async_add_listener(update_segments)
update_segments()
class WLEDMasterLight(LightEntity, WLEDDeviceEntity):
"""Defines a WLED master light."""
def __init__(self, entry_id: str, coordinator: WLEDDataUpdateCoordinator):
"""Initialize WLED master light."""
super().__init__(
entry_id=entry_id,
coordinator=coordinator,
name=f"{coordinator.data.info.name} Master",
icon="mdi:led-strip-variant",
)
@property
def unique_id(self) -> str:
"""Return the unique ID for this sensor."""
return f"{self.coordinator.data.info.mac_address}"
@property
def supported_features(self) -> int:
"""Flag supported features."""
return SUPPORT_BRIGHTNESS | SUPPORT_TRANSITION
@property
def brightness(self) -> Optional[int]:
"""Return the brightness of this light between 1..255."""
return self.coordinator.data.state.brightness
@property
def is_on(self) -> bool:
"""Return the state of the light."""
return bool(self.coordinator.data.state.on)
@wled_exception_handler
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn off the light."""
data = {ATTR_ON: False}
if ATTR_TRANSITION in kwargs:
# WLED uses 100ms per unit, so 10 = 1 second.
data[ATTR_TRANSITION] = round(kwargs[ATTR_TRANSITION] * 10)
await self.coordinator.wled.master(**data)
@wled_exception_handler
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn on the light."""
data = {ATTR_ON: True}
if ATTR_TRANSITION in kwargs:
# WLED uses 100ms per unit, so 10 = 1 second.
data[ATTR_TRANSITION] = round(kwargs[ATTR_TRANSITION] * 10)
if ATTR_BRIGHTNESS in kwargs:
data[ATTR_BRIGHTNESS] = kwargs[ATTR_BRIGHTNESS]
await self.coordinator.wled.master(**data)
class WLEDSegmentLight(LightEntity, WLEDDeviceEntity):
"""Defines a WLED light based on a segment."""
def __init__(
self, entry_id: str, coordinator: WLEDDataUpdateCoordinator, segment: int
):
"""Initialize WLED segment light."""
self._rgbw = coordinator.data.info.leds.rgbw
self._segment = segment
# If this is the one and only segment, use a simpler name
name = f"{coordinator.data.info.name} Segment {self._segment}"
if len(coordinator.data.state.segments) == 1:
name = coordinator.data.info.name
super().__init__(
entry_id=entry_id,
coordinator=coordinator,
name=name,
icon="mdi:led-strip-variant",
)
@property
def unique_id(self) -> str:
"""Return the unique ID for this sensor."""
return f"{self.coordinator.data.info.mac_address}_{self._segment}"
@property
def available(self) -> bool:
"""Return True if entity is available."""
try:
self.coordinator.data.state.segments[self._segment]
except IndexError:
return False
return super().available
@property
def device_state_attributes(self) -> Optional[Dict[str, Any]]:
"""Return the state attributes of the entity."""
playlist = self.coordinator.data.state.playlist
if playlist == -1:
playlist = None
preset = self.coordinator.data.state.preset
if preset == -1:
preset = None
segment = self.coordinator.data.state.segments[self._segment]
return {
ATTR_INTENSITY: segment.intensity,
ATTR_PALETTE: segment.palette.name,
ATTR_PLAYLIST: playlist,
ATTR_PRESET: preset,
ATTR_REVERSE: segment.reverse,
ATTR_SPEED: segment.speed,
}
@property
def hs_color(self) -> Optional[Tuple[float, float]]:
"""Return the hue and saturation color value [float, float]."""
color = self.coordinator.data.state.segments[self._segment].color_primary
return color_util.color_RGB_to_hs(*color[:3])
@property
def effect(self) -> Optional[str]:
"""Return the current effect of the light."""
return self.coordinator.data.state.segments[self._segment].effect.name
@property
def brightness(self) -> Optional[int]:
"""Return the brightness of this light between 1..255."""
state = self.coordinator.data.state
# If this is the one and only segment, calculate brightness based
# on the master and segment brightness
if len(state.segments) == 1:
return int(
(state.segments[self._segment].brightness * state.brightness) / 255
)
return state.segments[self._segment].brightness
@property
def white_value(self) -> Optional[int]:
"""Return the white value of this light between 0..255."""
color = self.coordinator.data.state.segments[self._segment].color_primary
return color[-1] if self._rgbw else None
@property
def supported_features(self) -> int:
"""Flag supported features."""
flags = (
SUPPORT_BRIGHTNESS
| SUPPORT_COLOR
| SUPPORT_COLOR_TEMP
| SUPPORT_EFFECT
| SUPPORT_TRANSITION
)
if self._rgbw:
flags |= SUPPORT_WHITE_VALUE
return flags
@property
def effect_list(self) -> List[str]:
"""Return the list of supported effects."""
return [effect.name for effect in self.coordinator.data.effects]
@property
def is_on(self) -> bool:
"""Return the state of the light."""
state = self.coordinator.data.state
# If there is a single segment, take master into account
if len(state.segments) == 1 and not state.on:
return False
return bool(state.segments[self._segment].on)
@wled_exception_handler
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn off the light."""
data = {ATTR_ON: False}
if ATTR_TRANSITION in kwargs:
# WLED uses 100ms per unit, so 10 = 1 second.
data[ATTR_TRANSITION] = round(kwargs[ATTR_TRANSITION] * 10)
# If there is a single segment, control via the master
if len(self.coordinator.data.state.segments) == 1:
await self.coordinator.wled.master(**data)
return
data[ATTR_SEGMENT_ID] = self._segment
await self.coordinator.wled.segment(**data)
@wled_exception_handler
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn on the light."""
data = {ATTR_ON: True, ATTR_SEGMENT_ID: self._segment}
if ATTR_COLOR_TEMP in kwargs:
mireds = color_util.color_temperature_kelvin_to_mired(
kwargs[ATTR_COLOR_TEMP]
)
data[ATTR_COLOR_PRIMARY] = tuple(
map(int, color_util.color_temperature_to_rgb(mireds))
)
if ATTR_HS_COLOR in kwargs:
hue, sat = kwargs[ATTR_HS_COLOR]
data[ATTR_COLOR_PRIMARY] = color_util.color_hsv_to_RGB(hue, sat, 100)
if ATTR_TRANSITION in kwargs:
# WLED uses 100ms per unit, so 10 = 1 second.
data[ATTR_TRANSITION] = round(kwargs[ATTR_TRANSITION] * 10)
if ATTR_BRIGHTNESS in kwargs:
data[ATTR_BRIGHTNESS] = kwargs[ATTR_BRIGHTNESS]
if ATTR_EFFECT in kwargs:
data[ATTR_EFFECT] = kwargs[ATTR_EFFECT]
# Support for RGBW strips, adds white value
if self._rgbw and any(
x in (ATTR_COLOR_TEMP, ATTR_HS_COLOR, ATTR_WHITE_VALUE) for x in kwargs
):
# WLED cannot just accept a white value, it needs the color.
            # We use the last known color in case just the white value changes.
if all(x not in (ATTR_COLOR_TEMP, ATTR_HS_COLOR) for x in kwargs):
hue, sat = self.hs_color
data[ATTR_COLOR_PRIMARY] = color_util.color_hsv_to_RGB(hue, sat, 100)
# On a RGBW strip, when the color is pure white, disable the RGB LEDs in
# WLED by setting RGB to 0,0,0
if data[ATTR_COLOR_PRIMARY] == (255, 255, 255):
data[ATTR_COLOR_PRIMARY] = (0, 0, 0)
# Add requested or last known white value
if ATTR_WHITE_VALUE in kwargs:
data[ATTR_COLOR_PRIMARY] += (kwargs[ATTR_WHITE_VALUE],)
else:
data[ATTR_COLOR_PRIMARY] += (self.white_value,)
# When only 1 segment is present, switch along the master, and use
# the master for power/brightness control.
if len(self.coordinator.data.state.segments) == 1:
master_data = {ATTR_ON: True}
if ATTR_BRIGHTNESS in data:
master_data[ATTR_BRIGHTNESS] = data[ATTR_BRIGHTNESS]
data[ATTR_BRIGHTNESS] = 255
if ATTR_TRANSITION in data:
master_data[ATTR_TRANSITION] = data[ATTR_TRANSITION]
del data[ATTR_TRANSITION]
await self.coordinator.wled.segment(**data)
await self.coordinator.wled.master(**master_data)
return
await self.coordinator.wled.segment(**data)
@wled_exception_handler
async def async_effect(
self,
effect: Optional[Union[int, str]] = None,
intensity: Optional[int] = None,
palette: Optional[Union[int, str]] = None,
reverse: Optional[bool] = None,
speed: Optional[int] = None,
) -> None:
"""Set the effect of a WLED light."""
data = {ATTR_SEGMENT_ID: self._segment}
if effect is not None:
data[ATTR_EFFECT] = effect
if intensity is not None:
data[ATTR_INTENSITY] = intensity
if palette is not None:
data[ATTR_PALETTE] = palette
if reverse is not None:
data[ATTR_REVERSE] = reverse
if speed is not None:
data[ATTR_SPEED] = speed
await self.coordinator.wled.segment(**data)
@wled_exception_handler
async def async_preset(
self,
preset: int,
) -> None:
"""Set a WLED light to a saved preset."""
data = {ATTR_PRESET: preset}
await self.coordinator.wled.preset(**data)
@callback
def async_update_segments(
entry: ConfigEntry,
coordinator: WLEDDataUpdateCoordinator,
current: Dict[int, WLEDSegmentLight],
async_add_entities,
) -> None:
"""Update segments."""
segment_ids = {light.segment_id for light in coordinator.data.state.segments}
current_ids = set(current)
# Discard master (if present)
current_ids.discard(-1)
# Process new segments, add them to Home Assistant
new_entities = []
for segment_id in segment_ids - current_ids:
current[segment_id] = WLEDSegmentLight(entry.entry_id, coordinator, segment_id)
new_entities.append(current[segment_id])
# More than 1 segment now? Add master controls
if len(current_ids) < 2 and len(segment_ids) > 1:
current[-1] = WLEDMasterLight(entry.entry_id, coordinator)
new_entities.append(current[-1])
if new_entities:
async_add_entities(new_entities)
# Process deleted segments, remove them from Home Assistant
for segment_id in current_ids - segment_ids:
coordinator.hass.async_create_task(
async_remove_entity(segment_id, coordinator, current)
)
# Remove master if there is only 1 segment left
if len(current_ids) > 1 and len(segment_ids) < 2:
coordinator.hass.async_create_task(
async_remove_entity(-1, coordinator, current)
)
async def async_remove_entity(
index: int,
coordinator: WLEDDataUpdateCoordinator,
current: Dict[int, WLEDSegmentLight],
) -> None:
"""Remove WLED segment light from Home Assistant."""
entity = current[index]
await entity.async_remove()
registry = await async_get_entity_registry(coordinator.hass)
if entity.entity_id in registry.entities:
registry.async_remove(entity.entity_id)
del current[index]
|
import json
import os
import time
import logging
import py.path # pylint: disable=no-name-in-module
from PyQt5.QtCore import QUrl, QUrlQuery
import pytest
from qutebrowser.browser import qutescheme, pdfjs, downloads
from qutebrowser.utils import utils
class TestJavascriptHandler:
"""Test the qute://javascript endpoint."""
# Tuples of fake JS files and their content.
js_files = [
('foo.js', "var a = 'foo';"),
('bar.js', "var a = 'bar';"),
]
@pytest.fixture(autouse=True)
def patch_read_file(self, monkeypatch):
"""Patch utils.read_file to return few fake JS files."""
def _read_file(path, binary=False):
"""Faked utils.read_file."""
assert not binary
for filename, content in self.js_files:
if path == os.path.join('javascript', filename):
return content
raise OSError("File not found {}!".format(path))
monkeypatch.setattr(utils, 'read_file', _read_file)
@pytest.mark.parametrize("filename, content", js_files)
def test_qutejavascript(self, filename, content):
url = QUrl("qute://javascript/{}".format(filename))
_mimetype, data = qutescheme.qute_javascript(url)
assert data == content
def test_qutejavascript_404(self):
url = QUrl("qute://javascript/404.js")
with pytest.raises(qutescheme.SchemeOSError):
qutescheme.data_for_url(url)
def test_qutejavascript_empty_query(self):
url = QUrl("qute://javascript")
with pytest.raises(qutescheme.UrlInvalidError):
qutescheme.qute_javascript(url)
class TestHistoryHandler:
"""Test the qute://history endpoint."""
@pytest.fixture(scope="module")
def now(self):
return int(time.time())
@pytest.fixture
def entries(self, now):
"""Create fake history entries."""
# create 12 history items spaced 6 hours apart, starting from now
entry_count = 12
interval = 6 * 60 * 60
items = []
for i in range(entry_count):
entry_atime = now - i * interval
entry = {"atime": str(entry_atime),
"url": QUrl("http://www.x.com/" + str(i)),
"title": "Page " + str(i)}
items.insert(0, entry)
return items
@pytest.fixture(autouse=True)
def fake_history(self, web_history, fake_args, entries):
"""Create fake history."""
for item in entries:
web_history.add_url(**item)
@pytest.mark.parametrize("start_time_offset, expected_item_count", [
(0, 4),
(24*60*60, 4),
(48*60*60, 4),
(72*60*60, 0)
])
def test_qutehistory_data(self, start_time_offset, expected_item_count,
now):
"""Ensure qute://history/data returns correct items."""
start_time = now - start_time_offset
url = QUrl("qute://history/data?start_time=" + str(start_time))
_mimetype, data = qutescheme.qute_history(url)
items = json.loads(data)
assert len(items) == expected_item_count
# test times
end_time = start_time - 24*60*60
for item in items:
assert item['time'] <= start_time
assert item['time'] > end_time
def test_exclude(self, web_history, now, config_stub):
"""Make sure the completion.web_history.exclude setting is not used."""
config_stub.val.completion.web_history.exclude = ['www.x.com']
url = QUrl("qute://history/data?start_time={}".format(now))
_mimetype, data = qutescheme.qute_history(url)
items = json.loads(data)
assert items
def test_qute_history_benchmark(self, web_history, benchmark, now):
r = range(20000)
entries = {
'atime': [int(now - t) for t in r],
'url': ['www.x.com/{}'.format(t) for t in r],
'title': ['x at {}'.format(t) for t in r],
'redirect': [False for _ in r],
}
web_history.insert_batch(entries)
url = QUrl("qute://history/data?start_time={}".format(now))
_mimetype, data = benchmark(qutescheme.qute_history, url)
assert len(json.loads(data)) > 1
class TestHelpHandler:
"""Tests for qute://help."""
@pytest.fixture
def data_patcher(self, monkeypatch):
def _patch(path, data):
def _read_file(name, binary=False):
assert path == name
if binary:
return data
return data.decode('utf-8')
monkeypatch.setattr(qutescheme.utils, 'read_file', _read_file)
return _patch
def test_unknown_file_type(self, data_patcher):
data_patcher('html/doc/foo.bin', b'\xff')
mimetype, data = qutescheme.qute_help(QUrl('qute://help/foo.bin'))
assert mimetype == 'application/octet-stream'
assert data == b'\xff'
class TestPDFJSHandler:
"""Test the qute://pdfjs endpoint."""
@pytest.fixture(autouse=True)
def fake_pdfjs(self, monkeypatch):
def get_pdfjs_res(path):
if path == '/existing/file.html':
return b'foobar'
raise pdfjs.PDFJSNotFound(path)
monkeypatch.setattr(pdfjs, 'get_pdfjs_res', get_pdfjs_res)
@pytest.fixture
def download_tmpdir(self):
tdir = downloads.temp_download_manager.get_tmpdir()
yield py.path.local(tdir.name) # pylint: disable=no-member
tdir.cleanup()
def test_existing_resource(self):
"""Test with a resource that exists."""
_mimetype, data = qutescheme.data_for_url(
QUrl('qute://pdfjs/existing/file.html'))
assert data == b'foobar'
def test_nonexisting_resource(self, caplog):
"""Test with a resource that does not exist."""
with caplog.at_level(logging.WARNING, 'misc'):
with pytest.raises(qutescheme.NotFoundError):
qutescheme.data_for_url(QUrl('qute://pdfjs/no/file.html'))
expected = 'pdfjs resource requested but not found: /no/file.html'
assert caplog.messages == [expected]
def test_viewer_page(self, data_tmpdir):
"""Load the /web/viewer.html page."""
filename = 'foobar.pdf'
path = qutescheme._pdf_path(filename)
# Make sure that the file exists otherwise the handler will attempt to
# redirect to source (it's not necessary to make sure that it's valid
# PDF content)
with open(path, 'w', encoding='utf-8') as f:
f.write('<pdf content>')
_mimetype, data = qutescheme.data_for_url(
QUrl('qute://pdfjs/web/viewer.html?filename=' + filename))
assert b'PDF.js' in data
def test_viewer_no_filename(self):
with pytest.raises(qutescheme.UrlInvalidError,
match='Missing filename'):
qutescheme.data_for_url(QUrl('qute://pdfjs/web/viewer.html'))
def test_viewer_inexistent_file(self):
with pytest.raises(qutescheme.Redirect):
qutescheme.data_for_url(QUrl('qute://pdfjs/web/viewer.html?'
'filename=foobar&source=example.org'))
def test_viewer_inexistent_file_no_source(self):
with pytest.raises(qutescheme.UrlInvalidError,
match='Missing source'):
qutescheme.data_for_url(
QUrl('qute://pdfjs/web/viewer.html?filename=foobar'))
def test_file(self, download_tmpdir):
"""Load a file via qute://pdfjs/file."""
(download_tmpdir / 'testfile').write_binary(b'foo')
_mimetype, data = qutescheme.data_for_url(
QUrl('qute://pdfjs/file?filename=testfile'))
assert data == b'foo'
def test_file_no_filename(self):
with pytest.raises(qutescheme.UrlInvalidError):
qutescheme.data_for_url(QUrl('qute://pdfjs/file'))
@pytest.mark.parametrize('sep', ['/', os.sep])
def test_file_pathsep(self, sep):
url = QUrl('qute://pdfjs/file')
query = QUrlQuery()
query.addQueryItem('filename', 'foo{}bar'.format(sep))
url.setQuery(query)
with pytest.raises(qutescheme.RequestDeniedError):
qutescheme.data_for_url(url)
|
from typing import Any, Dict
from aiohttp import web
import voluptuous as vol
from homeassistant import config_entries, data_entry_flow
from homeassistant.components.http import HomeAssistantView
from homeassistant.components.http.data_validator import RequestDataValidator
from homeassistant.const import HTTP_BAD_REQUEST, HTTP_NOT_FOUND
import homeassistant.helpers.config_validation as cv
class _BaseFlowManagerView(HomeAssistantView):
"""Foundation for flow manager views."""
def __init__(self, flow_mgr: data_entry_flow.FlowManager) -> None:
"""Initialize the flow manager index view."""
self._flow_mgr = flow_mgr
# pylint: disable=no-self-use
def _prepare_result_json(self, result: Dict[str, Any]) -> Dict[str, Any]:
"""Convert result to JSON."""
if result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY:
data = result.copy()
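            # The created config entry object is not JSON serializable and its
            # data may contain credentials, so drop both from the response.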
data.pop("result")
data.pop("data")
return data
if result["type"] != data_entry_flow.RESULT_TYPE_FORM:
return result
import voluptuous_serialize # pylint: disable=import-outside-toplevel
data = result.copy()
schema = data["data_schema"]
if schema is None:
data["data_schema"] = []
else:
data["data_schema"] = voluptuous_serialize.convert(
schema, custom_serializer=cv.custom_serializer
)
return data
class FlowManagerIndexView(_BaseFlowManagerView):
"""View to create config flows."""
@RequestDataValidator(
vol.Schema(
{
vol.Required("handler"): vol.Any(str, list),
vol.Optional("show_advanced_options", default=False): cv.boolean,
},
extra=vol.ALLOW_EXTRA,
)
)
async def post(self, request: web.Request, data: Dict[str, Any]) -> web.Response:
"""Handle a POST request."""
if isinstance(data["handler"], list):
handler = tuple(data["handler"])
else:
handler = data["handler"]
try:
result = await self._flow_mgr.async_init(
handler, # type: ignore
context={
"source": config_entries.SOURCE_USER,
"show_advanced_options": data["show_advanced_options"],
},
)
except data_entry_flow.UnknownHandler:
return self.json_message("Invalid handler specified", HTTP_NOT_FOUND)
except data_entry_flow.UnknownStep:
return self.json_message("Handler does not support user", HTTP_BAD_REQUEST)
result = self._prepare_result_json(result)
return self.json(result)
class FlowManagerResourceView(_BaseFlowManagerView):
"""View to interact with the flow manager."""
async def get(self, request: web.Request, flow_id: str) -> web.Response:
"""Get the current state of a data_entry_flow."""
try:
result = await self._flow_mgr.async_configure(flow_id)
except data_entry_flow.UnknownFlow:
return self.json_message("Invalid flow specified", HTTP_NOT_FOUND)
result = self._prepare_result_json(result)
return self.json(result)
@RequestDataValidator(vol.Schema(dict), allow_empty=True)
async def post(
self, request: web.Request, flow_id: str, data: Dict[str, Any]
) -> web.Response:
"""Handle a POST request."""
try:
result = await self._flow_mgr.async_configure(flow_id, data)
except data_entry_flow.UnknownFlow:
return self.json_message("Invalid flow specified", HTTP_NOT_FOUND)
except vol.Invalid:
return self.json_message("User input malformed", HTTP_BAD_REQUEST)
result = self._prepare_result_json(result)
return self.json(result)
async def delete(self, request: web.Request, flow_id: str) -> web.Response:
"""Cancel a flow in progress."""
try:
self._flow_mgr.async_abort(flow_id)
except data_entry_flow.UnknownFlow:
return self.json_message("Invalid flow specified", HTTP_NOT_FOUND)
return self.json_message("Flow aborted")
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from diamond.collector import Collector
from unbound import UnboundCollector
##########################################################################
class TestUnboundCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('UnboundCollector', {})
self.collector = UnboundCollector(config, None)
def test_import(self):
self.assertTrue(UnboundCollector)
@patch.object(Collector, 'publish')
    def test_should_work_with_real_data(self, publish_mock):
fixture_data = self.getFixture('unbound_stats').getvalue()
collector_mock = patch.object(UnboundCollector,
'run_command',
Mock(return_value=[fixture_data, '']))
collector_mock.start()
self.collector.collect()
collector_mock.stop()
metrics = {
'thread0.num.queries': 10028,
'thread0.num.cachehits': 10021,
'thread0.num.cachemiss': 7,
'thread0.num.prefetch': 1,
'thread0.num.recursivereplies': 9,
'thread0.requestlist.avg': 1.25,
'thread0.requestlist.max': 2,
'thread0.requestlist.overwritten': 0,
'thread0.requestlist.exceeded': 0,
'thread0.requestlist.current.all': 1,
'thread0.requestlist.current.user': 1,
'thread0.recursion.time.avg': 9.914812,
'thread0.recursion.time.median': 0.08192,
'total.num.queries': 125609,
'total.num.cachehits': 125483,
'total.num.cachemiss': 126,
'total.num.prefetch': 16,
'total.num.recursivereplies': 136,
'total.requestlist.avg': 5.07746,
'total.requestlist.max': 10,
'total.requestlist.overwritten': 0,
'total.requestlist.exceeded': 0,
'total.requestlist.current.all': 23,
'total.requestlist.current.user': 23,
'total.recursion.time.avg': 13.045485,
'total.recursion.time.median': 0.06016,
'time.now': 1361926066.384873,
'time.up': 3006293.632453,
'time.elapsed': 9.981882,
'mem.total.sbrk': 26767360,
'mem.cache.rrset': 142606276,
'mem.cache.message': 71303005,
'mem.mod.iterator': 16532,
'mem.mod.validator': 1114579,
'num.query.type.A': 25596,
'num.query.type.PTR': 39,
'num.query.type.MX': 91,
'num.query.type.AAAA': 99883,
'num.query.class.IN': 125609,
'num.query.opcode.QUERY': 125609,
'num.query.tcp': 0,
'num.query.ipv6': 0,
'num.query.flags.QR': 0,
'num.query.flags.AA': 0,
'num.query.flags.TC': 0,
'num.query.flags.RD': 125609,
'num.query.flags.RA': 0,
'num.query.flags.Z': 0,
'num.query.flags.AD': 0,
'num.query.flags.CD': 62,
'num.query.edns.present': 62,
'num.query.edns.DO': 62,
'num.answer.rcode.NOERROR': 46989,
'num.answer.rcode.SERVFAIL': 55,
'num.answer.rcode.NXDOMAIN': 78575,
'num.answer.rcode.nodata': 20566,
'num.answer.secure': 0,
'num.answer.bogus': 0,
'num.rrset.bogus': 0,
'unwanted.queries': 0,
'unwanted.replies': 0,
'histogram.16s+': 0.0,
'histogram.256ms+': 3.0,
'histogram.4s+': 1.0,
'histogram.2s+': 0.0,
'histogram.1s+': 0.0,
'histogram.2ms+': 0.0,
'histogram.1ms': 39.0,
'histogram.32ms+': 18.0,
'histogram.4ms+': 0.0,
'histogram.16ms+': 10.0,
'histogram.1ms+': 5.0,
'histogram.32s+': 3.0,
'histogram.512ms+': 6.0,
'histogram.128ms+': 19.0,
'histogram.64ms+': 20.0,
'histogram.8ms+': 3.0,
'histogram.64s+': 9.0,
'histogram.8s+': 0.0,
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_should_fail_gracefully(self, publish_mock):
collector_mock = patch.object(UnboundCollector,
'run_command',
Mock(return_value=None))
collector_mock.start()
self.collector.collect()
collector_mock.stop()
self.assertPublishedMany(publish_mock, {})
@patch.object(Collector, 'publish')
def test_exclude_histogram(self, publish_mock):
self.collector.config['histogram'] = False
fixture_data = self.getFixture('unbound_stats').getvalue()
collector_mock = patch.object(UnboundCollector,
'run_command',
Mock(return_value=[fixture_data, '']))
collector_mock.start()
self.collector.collect()
collector_mock.stop()
metrics = {
'thread0.num.queries': 10028,
'thread0.num.cachehits': 10021,
'thread0.num.cachemiss': 7,
'thread0.num.prefetch': 1,
'thread0.num.recursivereplies': 9,
'thread0.requestlist.avg': 1.25,
'thread0.requestlist.max': 2,
'thread0.requestlist.overwritten': 0,
'thread0.requestlist.exceeded': 0,
'thread0.requestlist.current.all': 1,
'thread0.requestlist.current.user': 1,
'thread0.recursion.time.avg': 9.914812,
'thread0.recursion.time.median': 0.08192,
'total.num.queries': 125609,
'total.num.cachehits': 125483,
'total.num.cachemiss': 126,
'total.num.prefetch': 16,
'total.num.recursivereplies': 136,
'total.requestlist.avg': 5.07746,
'total.requestlist.max': 10,
'total.requestlist.overwritten': 0,
'total.requestlist.exceeded': 0,
'total.requestlist.current.all': 23,
'total.requestlist.current.user': 23,
'total.recursion.time.avg': 13.045485,
'total.recursion.time.median': 0.06016,
'time.now': 1361926066.384873,
'time.up': 3006293.632453,
'time.elapsed': 9.981882,
'mem.total.sbrk': 26767360,
'mem.cache.rrset': 142606276,
'mem.cache.message': 71303005,
'mem.mod.iterator': 16532,
'mem.mod.validator': 1114579,
'num.query.type.A': 25596,
'num.query.type.PTR': 39,
'num.query.type.MX': 91,
'num.query.type.AAAA': 99883,
'num.query.class.IN': 125609,
'num.query.opcode.QUERY': 125609,
'num.query.tcp': 0,
'num.query.ipv6': 0,
'num.query.flags.QR': 0,
'num.query.flags.AA': 0,
'num.query.flags.TC': 0,
'num.query.flags.RD': 125609,
'num.query.flags.RA': 0,
'num.query.flags.Z': 0,
'num.query.flags.AD': 0,
'num.query.flags.CD': 62,
'num.query.edns.present': 62,
'num.query.edns.DO': 62,
'num.answer.rcode.NOERROR': 46989,
'num.answer.rcode.SERVFAIL': 55,
'num.answer.rcode.NXDOMAIN': 78575,
'num.answer.rcode.nodata': 20566,
'num.answer.secure': 0,
'num.answer.bogus': 0,
'num.rrset.bogus': 0,
'unwanted.queries': 0,
'unwanted.replies': 0,
}
histogram = {
'histogram.16s+': 0.0,
'histogram.256ms+': 3.0,
'histogram.4s+': 1.0,
'histogram.2s+': 0.0,
'histogram.1s+': 0.0,
'histogram.2ms+': 0.0,
'histogram.1ms': 39.0,
'histogram.32ms+': 18.0,
'histogram.4ms+': 0.0,
'histogram.16ms+': 10.0,
'histogram.1ms+': 5.0,
'histogram.32s+': 3.0,
'histogram.512ms+': 6.0,
'histogram.128ms+': 19.0,
'histogram.64ms+': 20.0,
'histogram.8ms+': 3.0,
'histogram.64s+': 9.0,
'histogram.8s+': 0.0,
}
self.assertPublishedMany(publish_mock, metrics)
self.assertUnpublishedMany(publish_mock, histogram)
##########################################################################
if __name__ == "__main__":
unittest.main()
|
from aiohomekit.model.characteristics import CharacteristicsTypes
from aiohomekit.model.services import ServicesTypes
from homeassistant import config_entries
from homeassistant.components.homekit_controller import async_remove_entry
from homeassistant.components.homekit_controller.const import ENTITY_MAP
from tests.common import flush_store
from tests.components.homekit_controller.common import (
setup_platform,
setup_test_component,
)
async def test_load_from_storage(hass, hass_storage):
"""Test that entity map can be correctly loaded from cache."""
hkid = "00:00:00:00:00:00"
hass_storage["homekit_controller-entity-map"] = {
"version": 1,
"data": {"pairings": {hkid: {"c#": 1, "accessories": []}}},
}
await setup_platform(hass)
assert hkid in hass.data[ENTITY_MAP].storage_data
async def test_storage_is_removed(hass, hass_storage):
"""Test entity map storage removal is idempotent."""
await setup_platform(hass)
entity_map = hass.data[ENTITY_MAP]
hkid = "00:00:00:00:00:01"
entity_map.async_create_or_update_map(hkid, 1, [])
assert hkid in entity_map.storage_data
await flush_store(entity_map.store)
assert hkid in hass_storage[ENTITY_MAP]["data"]["pairings"]
entity_map.async_delete_map(hkid)
assert hkid not in hass.data[ENTITY_MAP].storage_data
await flush_store(entity_map.store)
assert hass_storage[ENTITY_MAP]["data"]["pairings"] == {}
async def test_storage_is_removed_idempotent(hass):
"""Test entity map storage removal is idempotent."""
await setup_platform(hass)
entity_map = hass.data[ENTITY_MAP]
hkid = "00:00:00:00:00:01"
assert hkid not in entity_map.storage_data
entity_map.async_delete_map(hkid)
assert hkid not in entity_map.storage_data
def create_lightbulb_service(accessory):
"""Define lightbulb characteristics."""
service = accessory.add_service(ServicesTypes.LIGHTBULB)
on_char = service.add_char(CharacteristicsTypes.ON)
on_char.value = 0
async def test_storage_is_updated_on_add(hass, hass_storage, utcnow):
"""Test entity map storage is cleaned up on adding an accessory."""
await setup_test_component(hass, create_lightbulb_service)
entity_map = hass.data[ENTITY_MAP]
hkid = "00:00:00:00:00:00"
# Is in memory store updated?
assert hkid in entity_map.storage_data
# Is saved out to store?
await flush_store(entity_map.store)
assert hkid in hass_storage[ENTITY_MAP]["data"]["pairings"]
async def test_storage_is_removed_on_config_entry_removal(hass, utcnow):
"""Test entity map storage is cleaned up on config entry removal."""
await setup_test_component(hass, create_lightbulb_service)
hkid = "00:00:00:00:00:00"
pairing_data = {"AccessoryPairingID": hkid}
entry = config_entries.ConfigEntry(
1,
"homekit_controller",
"TestData",
pairing_data,
"test",
config_entries.CONN_CLASS_LOCAL_PUSH,
system_options={},
)
assert hkid in hass.data[ENTITY_MAP].storage_data
await async_remove_entry(hass, entry)
assert hkid not in hass.data[ENTITY_MAP].storage_data
|
import os
import unittest
from perfkitbenchmarker.linux_packages import wrk2
import six
def _ReadOutputFile(file_name):
data_dir = os.path.join(os.path.dirname(__file__), '..', 'data')
result_path = os.path.join(data_dir, file_name)
with open(result_path) as results_file:
return results_file.read()
class Wrk2Test(unittest.TestCase):
def testParseSuccessfulRun(self):
wrk_output = _ReadOutputFile('wrk2_output.txt')
result = list(wrk2._ParseOutput(wrk_output))
self.assertEqual(
[('p50 latency', 53.31, 'ms'),
('p75 latency', 56.99, 'ms'),
('p90 latency', 62.62, 'ms'),
('p99 latency', 223.23, 'ms'),
('p99.9 latency', 244.22, 'ms'),
('p99.99 latency', 244.22, 'ms'),
('p99.999 latency', 36000., 'ms'),
('p100 latency', 54000., 'ms'),
('requests', 600, ''),
('error_rate', 0, ''),
('errors', 0, '')], result)
def testParseAllRequestsFailed(self):
wrk_output = _ReadOutputFile('wrk2_output_all_error.txt')
with six.assertRaisesRegex(self, ValueError, 'More than 10%'):
list(wrk2._ParseOutput(wrk_output))
def testParseWithErrors(self):
wrk_output = _ReadOutputFile('wrk2_output_errors.txt')
res = list(wrk2._ParseOutput(wrk_output))
self.assertIn(('errors', 14, ''), res)
self.assertIn(('error_rate', 14. / 600, ''), res)
if __name__ == '__main__':
unittest.main()
|
import logging
import requests
from homeassistant.components.binary_sensor import BinarySensorEntity
from . import BINARY_SENSOR_TYPES, DOMAIN as COMPONENT_DOMAIN
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the available OctoPrint binary sensors."""
if discovery_info is None:
return
name = discovery_info["name"]
base_url = discovery_info["base_url"]
monitored_conditions = discovery_info["sensors"]
octoprint_api = hass.data[COMPONENT_DOMAIN][base_url]
devices = []
for octo_type in monitored_conditions:
new_sensor = OctoPrintBinarySensor(
octoprint_api,
octo_type,
BINARY_SENSOR_TYPES[octo_type][2],
name,
BINARY_SENSOR_TYPES[octo_type][3],
BINARY_SENSOR_TYPES[octo_type][0],
BINARY_SENSOR_TYPES[octo_type][1],
"flags",
)
devices.append(new_sensor)
add_entities(devices, True)
class OctoPrintBinarySensor(BinarySensorEntity):
"""Representation an OctoPrint binary sensor."""
def __init__(
self, api, condition, sensor_type, sensor_name, unit, endpoint, group, tool=None
):
"""Initialize a new OctoPrint sensor."""
self.sensor_name = sensor_name
self._name = f"{sensor_name} {condition}"
self.sensor_type = sensor_type
self.api = api
self._state = False
self._unit_of_measurement = unit
self.api_endpoint = endpoint
self.api_group = group
self.api_tool = tool
_LOGGER.debug("Created OctoPrint binary sensor %r", self)
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def is_on(self):
"""Return true if binary sensor is on."""
return bool(self._state)
@property
def device_class(self):
"""Return the class of this sensor, from DEVICE_CLASSES."""
return None
def update(self):
"""Update state of sensor."""
try:
self._state = self.api.update(
self.sensor_type, self.api_endpoint, self.api_group, self.api_tool
)
except requests.exceptions.ConnectionError:
# Error calling the api, already logged in api.update()
return
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from test import run_only
from mock import Mock
from mock import patch
from diamond.collector import Collector
from beanstalkd import BeanstalkdCollector
##########################################################################
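# Decorator: run the wrapped test only when the optional beanstalkc client
# library can be imported; otherwise run_only() skips it.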
def run_only_if_beanstalkc_is_available(func):
try:
import beanstalkc
except ImportError:
beanstalkc = None
pred = lambda: beanstalkc is not None
return run_only(func, pred)
class TestBeanstalkdCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('BeanstalkdCollector', {
'host': 'localhost',
'port': 11300,
})
self.collector = BeanstalkdCollector(config, None)
def test_import(self):
self.assertTrue(BeanstalkdCollector)
@run_only_if_beanstalkc_is_available
@patch.object(Collector, 'publish')
def test_should_work_with_real_data(self, publish_mock):
stats = {
'instance': {
'current-connections': 10,
'max-job-size': 65535,
'cmd-release': 0,
'cmd-reserve': 4386,
'pid': 23703,
'cmd-bury': 0,
'current-producers': 0,
'total-jobs': 4331,
'current-jobs-ready': 0,
'cmd-peek-buried': 0,
'current-tubes': 7,
'current-jobs-delayed': 0,
'uptime': 182954,
'cmd-watch': 55,
'job-timeouts': 0,
'cmd-stats': 1,
'rusage-stime': 295.970497,
'current-jobs-reserved': 0,
'current-jobs-buried': 0,
'cmd-reserve-with-timeout': 0,
'cmd-put': 4331,
'cmd-pause-tube': 0,
'cmd-list-tubes-watched': 0,
'cmd-list-tubes': 0,
'current-workers': 9,
'cmd-list-tube-used': 0,
'cmd-ignore': 0,
'binlog-records-migrated': 0,
'current-waiting': 9,
'cmd-peek': 0,
'cmd-peek-ready': 0,
'cmd-peek-delayed': 0,
'cmd-touch': 0,
'binlog-oldest-index': 0,
'binlog-current-index': 0,
'cmd-use': 4321,
'total-connections': 4387,
'cmd-delete': 4331,
'binlog-max-size': 10485760,
'cmd-stats-job': 0,
'rusage-utime': 125.92787,
'cmd-stats-tube': 0,
'binlog-records-written': 0,
'cmd-kick': 0,
'current-jobs-urgent': 0,
},
'tubes': [
{
'current-jobs-delayed': 0,
'pause': 0,
'name': 'default',
'cmd-pause-tube': 0,
'current-jobs-buried': 0,
'cmd-delete': 10,
'pause-time-left': 0,
'current-waiting': 9,
'current-jobs-ready': 0,
'total-jobs': 10,
'current-watching': 10,
'current-jobs-reserved': 0,
'current-using': 10,
'current-jobs-urgent': 0,
}
]
}
patch_get_stats = patch.object(BeanstalkdCollector,
'_get_stats',
Mock(return_value=stats))
patch_get_stats.start()
self.collector.collect()
patch_get_stats.stop()
metrics = {
'current-connections': 10,
'max-job-size': 65535,
'cmd-release': 0,
'cmd-reserve': 4386,
'pid': 23703,
'cmd-bury': 0,
'current-producers': 0,
'total-jobs': 4331,
'current-jobs-ready': 0,
'cmd-peek-buried': 0,
'current-tubes': 7,
'current-jobs-delayed': 0,
'uptime': 182954,
'cmd-watch': 55,
'job-timeouts': 0,
'cmd-stats': 1,
'rusage-stime': 295.970497,
'current-jobs-reserved': 0,
'current-jobs-buried': 0,
'cmd-reserve-with-timeout': 0,
'cmd-put': 4331,
'cmd-pause-tube': 0,
'cmd-list-tubes-watched': 0,
'cmd-list-tubes': 0,
'current-workers': 9,
'cmd-list-tube-used': 0,
'cmd-ignore': 0,
'binlog-records-migrated': 0,
'current-waiting': 9,
'cmd-peek': 0,
'cmd-peek-ready': 0,
'cmd-peek-delayed': 0,
'cmd-touch': 0,
'binlog-oldest-index': 0,
'binlog-current-index': 0,
'cmd-use': 4321,
'total-connections': 4387,
'cmd-delete': 4331,
'binlog-max-size': 10485760,
'cmd-stats-job': 0,
'rusage-utime': 125.92787,
'cmd-stats-tube': 0,
'binlog-records-written': 0,
'cmd-kick': 0,
'current-jobs-urgent': 0,
'tubes.default.current-jobs-delayed': 0,
'tubes.default.pause': 0,
'tubes.default.cmd-pause-tube': 0,
'tubes.default.current-jobs-buried': 0,
'tubes.default.cmd-delete': 10,
'tubes.default.pause-time-left': 0,
'tubes.default.current-waiting': 9,
'tubes.default.current-jobs-ready': 0,
'tubes.default.total-jobs': 10,
'tubes.default.current-watching': 10,
'tubes.default.current-jobs-reserved': 0,
'tubes.default.current-using': 10,
'tubes.default.current-jobs-urgent': 0,
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
##########################################################################
if __name__ == "__main__":
unittest.main()
|
from homeassistant import config_entries, setup
from homeassistant.components.ring import DOMAIN
from homeassistant.components.ring.config_flow import InvalidAuth
from tests.async_mock import Mock, patch
async def test_form(hass):
"""Test we get the form."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
with patch(
"homeassistant.components.ring.config_flow.Auth",
return_value=Mock(
fetch_token=Mock(return_value={"access_token": "mock-token"})
),
), patch(
"homeassistant.components.ring.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.ring.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"username": "[email protected]", "password": "test-password"},
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == "[email protected]"
assert result2["data"] == {
"username": "[email protected]",
"token": {"access_token": "mock-token"},
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_invalid_auth(hass):
"""Test we handle invalid auth."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"homeassistant.components.ring.config_flow.Auth.fetch_token",
side_effect=InvalidAuth,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"username": "[email protected]", "password": "test-password"},
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "invalid_auth"}
|
from django.contrib.auth.decorators import permission_required
from django.http import Http404
from django.shortcuts import redirect, render
from django.utils.decorators import method_decorator
from django.utils.http import urlencode
from django.utils.translation import gettext as _
from django.views.generic import CreateView, UpdateView
from weblate.lang.forms import LanguageForm, PluralForm
from weblate.lang.models import Language, Plural
from weblate.trans.forms import ProjectLanguageDeleteForm, SearchForm
from weblate.trans.models import Change
from weblate.trans.models.project import prefetch_project_flags
from weblate.trans.util import sort_objects
from weblate.utils import messages
from weblate.utils.stats import (
GlobalStats,
ProjectLanguage,
ProjectLanguageStats,
prefetch_stats,
)
from weblate.utils.views import get_project, optional_form
def show_languages(request):
if request.user.has_perm("language.edit"):
languages = Language.objects.all()
else:
languages = Language.objects.have_translation()
return render(
request,
"languages.html",
{
"allow_index": True,
"languages": prefetch_stats(sort_objects(languages)),
"title": _("Languages"),
"global_stats": GlobalStats(),
},
)
def show_language(request, lang):
try:
obj = Language.objects.get(code=lang)
except Language.DoesNotExist:
obj = Language.objects.fuzzy_get(lang)
if isinstance(obj, Language):
return redirect(obj)
raise Http404("No Language matches the given query.")
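# A POST to this view deletes the language; it requires the language.edit
# permission and is refused while translations are still using the language.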
if request.method == "POST" and request.user.has_perm("language.edit"):
if obj.translation_set.exists():
messages.error(
request, _("Remove all translations using this language first.")
)
else:
obj.delete()
messages.success(request, _("Language %s removed.") % obj)
return redirect("languages")
last_changes = Change.objects.last_changes(request.user).filter(language=obj)[:10]
projects = request.user.allowed_projects
dicts = projects.filter(glossary__term__language=obj).distinct()
projects = prefetch_project_flags(
prefetch_stats(projects.filter(component__translation__language=obj).distinct())
)
projects = [ProjectLanguage(project, obj) for project in projects]
ProjectLanguageStats.prefetch_many([project.stats for project in projects])
return render(
request,
"language.html",
{
"allow_index": True,
"object": obj,
"last_changes": last_changes,
"last_changes_url": urlencode({"lang": obj.code}),
"dicts": dicts,
"projects": projects,
},
)
def show_project(request, lang, project):
try:
language_object = Language.objects.get(code=lang)
except Language.DoesNotExist:
language_object = Language.objects.fuzzy_get(lang)
if isinstance(language_object, Language):
return redirect(language_object)
raise Http404("No Language matches the given query.")
project_object = get_project(request, project)
obj = ProjectLanguage(project_object, language_object)
user = request.user
last_changes = Change.objects.last_changes(user).filter(
language=language_object, project=project_object
)[:10]
return render(
request,
"language-project.html",
{
"allow_index": True,
"language": language_object,
"project": project_object,
"object": obj,
"last_changes": last_changes,
"last_changes_url": urlencode(
{"lang": language_object.code, "project": project_object.slug}
),
"translations": obj.translation_set,
"title": f"{project_object} - {language_object}",
"search_form": SearchForm(user, language=language_object),
"licenses": project_object.component_set.exclude(license="").order_by(
"license"
),
"language_stats": project_object.stats.get_single_language_stats(
language_object
),
"delete_form": optional_form(
ProjectLanguageDeleteForm, user, "translation.delete", obj, obj=obj
),
},
)
@method_decorator(permission_required("language.add"), name="dispatch")
class CreateLanguageView(CreateView):
template_name = "lang/create.html"
def get_form(self, form_class=None):
kwargs = self.get_form_kwargs()
return (LanguageForm(**kwargs), PluralForm(**kwargs))
def post(self, request, *args, **kwargs):
self.object = None
forms = self.get_form()
if all(form.is_valid() for form in forms):
return self.form_valid(forms)
return self.form_invalid(forms)
def form_valid(self, form):
"""If the form is valid, save the associated model."""
self.object = form[0].save()
plural = form[1].instance
plural.language = self.object
plural.save()
return redirect(self.object)
@method_decorator(permission_required("language.edit"), name="dispatch")
class EditLanguageView(UpdateView):
form_class = LanguageForm
model = Language
@method_decorator(permission_required("language.edit"), name="dispatch")
class EditPluralView(UpdateView):
form_class = PluralForm
model = Plural
|
import calendar
from datetime import datetime
import sys
import cherrypy
from cherrypy.lib import sessions
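# The page template below is rendered with the '%' string-formatting operator,
# so literal percent signs in the embedded JavaScript are escaped as '%%'.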
page = """
<html>
<head>
<style type='text/css'>
table { border-collapse: collapse; border: 1px solid #663333; }
th { text-align: right; background-color: #663333; color: white; padding: 0.5em; }
td { white-space: pre-wrap; font-family: monospace; padding: 0.5em;
border: 1px solid #663333; }
.warn { font-family: serif; color: #990000; }
</style>
<script type="text/javascript">
<!--
function twodigit(d) { return d < 10 ? "0" + d : d; }
function formattime(t) {
var month = t.getUTCMonth() + 1;
var day = t.getUTCDate();
var year = t.getUTCFullYear();
var hours = t.getUTCHours();
var minutes = t.getUTCMinutes();
return (year + "/" + twodigit(month) + "/" + twodigit(day) + " " +
hours + ":" + twodigit(minutes) + " UTC");
}
function interval(s) {
// Return the given interval (in seconds) as an English phrase
var seconds = s %% 60;
s = Math.floor(s / 60);
var minutes = s %% 60;
s = Math.floor(s / 60);
var hours = s %% 24;
var v = twodigit(hours) + ":" + twodigit(minutes) + ":" + twodigit(seconds);
var days = Math.floor(s / 24);
if (days != 0) v = days + ' days, ' + v;
return v;
}
var fudge_seconds = 5;
function init() {
// Set the content of the 'btime' cell.
var currentTime = new Date();
var bunixtime = Math.floor(currentTime.getTime() / 1000);
var v = formattime(currentTime);
v += " (Unix time: " + bunixtime + ")";
var diff = Math.abs(%(serverunixtime)s - bunixtime);
if (diff > fudge_seconds) v += "<p class='warn'>Browser and Server times disagree.</p>";
document.getElementById('btime').innerHTML = v;
// Warn if response cookie expires is not close to one hour in the future.
// Yes, we want this to happen when we hit the 'Expire' link, too.
var expires = Date.parse("%(expires)s") / 1000;
var onehour = (60 * 60);
if (Math.abs(expires - (bunixtime + onehour)) > fudge_seconds) {
diff = Math.floor(expires - bunixtime);
if (expires > (bunixtime + onehour)) {
var msg = "Response cookie 'expires' date is " + interval(diff) + " in the future.";
} else {
var msg = "Response cookie 'expires' date is " + interval(0 - diff) + " in the past.";
}
document.getElementById('respcookiewarn').innerHTML = msg;
}
}
//-->
</script>
</head>
<body onload='init()'>
<h2>Session Demo</h2>
<p>Reload this page. The session ID should not change from one reload to the next</p>
<p><a href='../'>Index</a> | <a href='expire'>Expire</a> | <a href='regen'>Regenerate</a></p>
<table>
<tr><th>Session ID:</th><td>%(sessionid)s<p class='warn'>%(changemsg)s</p></td></tr>
<tr><th>Request Cookie</th><td>%(reqcookie)s</td></tr>
<tr><th>Response Cookie</th><td>%(respcookie)s<p id='respcookiewarn' class='warn'></p></td></tr>
<tr><th>Session Data</th><td>%(sessiondata)s</td></tr>
<tr><th>Server Time</th><td id='stime'>%(servertime)s (Unix time: %(serverunixtime)s)</td></tr>
<tr><th>Browser Time</th><td id='btime'> </td></tr>
<tr><th>Cherrypy Version:</th><td>%(cpversion)s</td></tr>
<tr><th>Python Version:</th><td>%(pyversion)s</td></tr>
</table>
</body></html>
""" # noqa E501
class Root(object):
def page(self):
changemsg = []
if cherrypy.session.id != cherrypy.session.originalid:
if cherrypy.session.originalid is None:
changemsg.append(
'Created new session because no session id was given.')
if cherrypy.session.missing:
changemsg.append(
'Created new session due to missing '
'(expired or malicious) session.')
if cherrypy.session.regenerated:
changemsg.append('Application generated a new session.')
try:
expires = cherrypy.response.cookie['session_id']['expires']
except KeyError:
expires = ''
return page % {
'sessionid': cherrypy.session.id,
'changemsg': '<br>'.join(changemsg),
'respcookie': cherrypy.response.cookie.output(),
'reqcookie': cherrypy.request.cookie.output(),
'sessiondata': list(cherrypy.session.items()),
'servertime': (
datetime.utcnow().strftime('%Y/%m/%d %H:%M') + ' UTC'
),
'serverunixtime': calendar.timegm(datetime.utcnow().timetuple()),
'cpversion': cherrypy.__version__,
'pyversion': sys.version,
'expires': expires,
}
@cherrypy.expose
def index(self):
# Must modify data or the session will not be saved.
cherrypy.session['color'] = 'green'
return self.page()
@cherrypy.expose
def expire(self):
sessions.expire()
return self.page()
@cherrypy.expose
def regen(self):
cherrypy.session.regenerate()
# Must modify data or the session will not be saved.
cherrypy.session['color'] = 'yellow'
return self.page()
if __name__ == '__main__':
cherrypy.config.update({
# 'environment': 'production',
'log.screen': True,
'tools.sessions.on': True,
})
cherrypy.quickstart(Root())
|
import colorsys
import math
from typing import List, Optional, Tuple
import attr
# Official CSS3 colors from w3.org:
# https://www.w3.org/TR/2010/PR-css3-color-20101028/#html4
# names do not have spaces in them so that we can compare against
# requests more easily (by removing spaces from the requests as well).
# This lets "dark seagreen" and "dark sea green" both match the same
# color "darkseagreen".
COLORS = {
"aliceblue": (240, 248, 255),
"antiquewhite": (250, 235, 215),
"aqua": (0, 255, 255),
"aquamarine": (127, 255, 212),
"azure": (240, 255, 255),
"beige": (245, 245, 220),
"bisque": (255, 228, 196),
"black": (0, 0, 0),
"blanchedalmond": (255, 235, 205),
"blue": (0, 0, 255),
"blueviolet": (138, 43, 226),
"brown": (165, 42, 42),
"burlywood": (222, 184, 135),
"cadetblue": (95, 158, 160),
"chartreuse": (127, 255, 0),
"chocolate": (210, 105, 30),
"coral": (255, 127, 80),
"cornflowerblue": (100, 149, 237),
"cornsilk": (255, 248, 220),
"crimson": (220, 20, 60),
"cyan": (0, 255, 255),
"darkblue": (0, 0, 139),
"darkcyan": (0, 139, 139),
"darkgoldenrod": (184, 134, 11),
"darkgray": (169, 169, 169),
"darkgreen": (0, 100, 0),
"darkgrey": (169, 169, 169),
"darkkhaki": (189, 183, 107),
"darkmagenta": (139, 0, 139),
"darkolivegreen": (85, 107, 47),
"darkorange": (255, 140, 0),
"darkorchid": (153, 50, 204),
"darkred": (139, 0, 0),
"darksalmon": (233, 150, 122),
"darkseagreen": (143, 188, 143),
"darkslateblue": (72, 61, 139),
"darkslategray": (47, 79, 79),
"darkslategrey": (47, 79, 79),
"darkturquoise": (0, 206, 209),
"darkviolet": (148, 0, 211),
"deeppink": (255, 20, 147),
"deepskyblue": (0, 191, 255),
"dimgray": (105, 105, 105),
"dimgrey": (105, 105, 105),
"dodgerblue": (30, 144, 255),
"firebrick": (178, 34, 34),
"floralwhite": (255, 250, 240),
"forestgreen": (34, 139, 34),
"fuchsia": (255, 0, 255),
"gainsboro": (220, 220, 220),
"ghostwhite": (248, 248, 255),
"gold": (255, 215, 0),
"goldenrod": (218, 165, 32),
"gray": (128, 128, 128),
"green": (0, 128, 0),
"greenyellow": (173, 255, 47),
"grey": (128, 128, 128),
"honeydew": (240, 255, 240),
"hotpink": (255, 105, 180),
"indianred": (205, 92, 92),
"indigo": (75, 0, 130),
"ivory": (255, 255, 240),
"khaki": (240, 230, 140),
"lavender": (230, 230, 250),
"lavenderblush": (255, 240, 245),
"lawngreen": (124, 252, 0),
"lemonchiffon": (255, 250, 205),
"lightblue": (173, 216, 230),
"lightcoral": (240, 128, 128),
"lightcyan": (224, 255, 255),
"lightgoldenrodyellow": (250, 250, 210),
"lightgray": (211, 211, 211),
"lightgreen": (144, 238, 144),
"lightgrey": (211, 211, 211),
"lightpink": (255, 182, 193),
"lightsalmon": (255, 160, 122),
"lightseagreen": (32, 178, 170),
"lightskyblue": (135, 206, 250),
"lightslategray": (119, 136, 153),
"lightslategrey": (119, 136, 153),
"lightsteelblue": (176, 196, 222),
"lightyellow": (255, 255, 224),
"lime": (0, 255, 0),
"limegreen": (50, 205, 50),
"linen": (250, 240, 230),
"magenta": (255, 0, 255),
"maroon": (128, 0, 0),
"mediumaquamarine": (102, 205, 170),
"mediumblue": (0, 0, 205),
"mediumorchid": (186, 85, 211),
"mediumpurple": (147, 112, 219),
"mediumseagreen": (60, 179, 113),
"mediumslateblue": (123, 104, 238),
"mediumspringgreen": (0, 250, 154),
"mediumturquoise": (72, 209, 204),
"mediumvioletred": (199, 21, 133),
"midnightblue": (25, 25, 112),
"mintcream": (245, 255, 250),
"mistyrose": (255, 228, 225),
"moccasin": (255, 228, 181),
"navajowhite": (255, 222, 173),
"navy": (0, 0, 128),
"navyblue": (0, 0, 128),
"oldlace": (253, 245, 230),
"olive": (128, 128, 0),
"olivedrab": (107, 142, 35),
"orange": (255, 165, 0),
"orangered": (255, 69, 0),
"orchid": (218, 112, 214),
"palegoldenrod": (238, 232, 170),
"palegreen": (152, 251, 152),
"paleturquoise": (175, 238, 238),
"palevioletred": (219, 112, 147),
"papayawhip": (255, 239, 213),
"peachpuff": (255, 218, 185),
"peru": (205, 133, 63),
"pink": (255, 192, 203),
"plum": (221, 160, 221),
"powderblue": (176, 224, 230),
"purple": (128, 0, 128),
"red": (255, 0, 0),
"rosybrown": (188, 143, 143),
"royalblue": (65, 105, 225),
"saddlebrown": (139, 69, 19),
"salmon": (250, 128, 114),
"sandybrown": (244, 164, 96),
"seagreen": (46, 139, 87),
"seashell": (255, 245, 238),
"sienna": (160, 82, 45),
"silver": (192, 192, 192),
"skyblue": (135, 206, 235),
"slateblue": (106, 90, 205),
"slategray": (112, 128, 144),
"slategrey": (112, 128, 144),
"snow": (255, 250, 250),
"springgreen": (0, 255, 127),
"steelblue": (70, 130, 180),
"tan": (210, 180, 140),
"teal": (0, 128, 128),
"thistle": (216, 191, 216),
"tomato": (255, 99, 71),
"turquoise": (64, 224, 208),
"violet": (238, 130, 238),
"wheat": (245, 222, 179),
"white": (255, 255, 255),
"whitesmoke": (245, 245, 245),
"yellow": (255, 255, 0),
"yellowgreen": (154, 205, 50),
}
@attr.s()
class XYPoint:
"""Represents a CIE 1931 XY coordinate pair."""
x: float = attr.ib() # pylint: disable=invalid-name
y: float = attr.ib() # pylint: disable=invalid-name
@attr.s()
class GamutType:
"""Represents the Gamut of a light."""
# ColorGamut = gamut(xypoint(xR,yR),xypoint(xG,yG),xypoint(xB,yB))
red: XYPoint = attr.ib()
green: XYPoint = attr.ib()
blue: XYPoint = attr.ib()
def color_name_to_rgb(color_name: str) -> Tuple[int, int, int]:
"""Convert color name to RGB hex value."""
# COLORS map has no spaces in it, so make the color_name have no
# spaces in it as well for matching purposes
hex_value = COLORS.get(color_name.replace(" ", "").lower())
if not hex_value:
raise ValueError("Unknown color")
return hex_value
# pylint: disable=invalid-name
def color_RGB_to_xy(
iR: int, iG: int, iB: int, Gamut: Optional[GamutType] = None
) -> Tuple[float, float]:
"""Convert from RGB color to XY color."""
return color_RGB_to_xy_brightness(iR, iG, iB, Gamut)[:2]
# Taken from:
# http://www.developers.meethue.com/documentation/color-conversions-rgb-xy
# License: Code is given as is. Use at your own risk and discretion.
# pylint: disable=invalid-name
def color_RGB_to_xy_brightness(
iR: int, iG: int, iB: int, Gamut: Optional[GamutType] = None
) -> Tuple[float, float, int]:
"""Convert from RGB color to XY color."""
if iR + iG + iB == 0:
return 0.0, 0.0, 0
R = iR / 255
B = iB / 255
G = iG / 255
# Gamma correction
R = pow((R + 0.055) / (1.0 + 0.055), 2.4) if (R > 0.04045) else (R / 12.92)
G = pow((G + 0.055) / (1.0 + 0.055), 2.4) if (G > 0.04045) else (G / 12.92)
B = pow((B + 0.055) / (1.0 + 0.055), 2.4) if (B > 0.04045) else (B / 12.92)
# Wide RGB D65 conversion formula
X = R * 0.664511 + G * 0.154324 + B * 0.162028
Y = R * 0.283881 + G * 0.668433 + B * 0.047685
Z = R * 0.000088 + G * 0.072310 + B * 0.986039
# Convert XYZ to xy
x = X / (X + Y + Z)
y = Y / (X + Y + Z)
# Brightness
Y = 1 if Y > 1 else Y
brightness = round(Y * 255)
# Check if the given xy value is within the color-reach of the lamp.
if Gamut:
in_reach = check_point_in_lamps_reach((x, y), Gamut)
if not in_reach:
xy_closest = get_closest_point_to_point((x, y), Gamut)
x = xy_closest[0]
y = xy_closest[1]
return round(x, 3), round(y, 3), brightness
def color_xy_to_RGB(
vX: float, vY: float, Gamut: Optional[GamutType] = None
) -> Tuple[int, int, int]:
"""Convert from XY to a normalized RGB."""
return color_xy_brightness_to_RGB(vX, vY, 255, Gamut)
# Converted to Python from Obj-C, original source from:
# http://www.developers.meethue.com/documentation/color-conversions-rgb-xy
def color_xy_brightness_to_RGB(
vX: float, vY: float, ibrightness: int, Gamut: Optional[GamutType] = None
) -> Tuple[int, int, int]:
"""Convert from XYZ to RGB."""
if Gamut:
if not check_point_in_lamps_reach((vX, vY), Gamut):
xy_closest = get_closest_point_to_point((vX, vY), Gamut)
vX = xy_closest[0]
vY = xy_closest[1]
brightness = ibrightness / 255.0
if brightness == 0.0:
return (0, 0, 0)
Y = brightness
if vY == 0.0:
vY += 0.00000000001
X = (Y / vY) * vX
Z = (Y / vY) * (1 - vX - vY)
# Convert to RGB using Wide RGB D65 conversion.
r = X * 1.656492 - Y * 0.354851 - Z * 0.255038
g = -X * 0.707196 + Y * 1.655397 + Z * 0.036152
b = X * 0.051713 - Y * 0.121364 + Z * 1.011530
# Apply reverse gamma correction.
r, g, b = map(
lambda x: (12.92 * x)
if (x <= 0.0031308)
else ((1.0 + 0.055) * pow(x, (1.0 / 2.4)) - 0.055),
[r, g, b],
)
# Bring all negative components to zero.
r, g, b = map(lambda x: max(0, x), [r, g, b])
# If one component is greater than 1, weight components by that value.
max_component = max(r, g, b)
if max_component > 1:
r, g, b = map(lambda x: x / max_component, [r, g, b])
ir, ig, ib = map(lambda x: int(x * 255), [r, g, b])
return (ir, ig, ib)
def color_hsb_to_RGB(fH: float, fS: float, fB: float) -> Tuple[int, int, int]:
"""Convert a hsb into its rgb representation."""
if fS == 0.0:
fV = int(fB * 255)
return fV, fV, fV
r = g = b = 0
h = fH / 60
f = h - float(math.floor(h))
p = fB * (1 - fS)
q = fB * (1 - fS * f)
t = fB * (1 - (fS * (1 - f)))
if int(h) == 0:
r = int(fB * 255)
g = int(t * 255)
b = int(p * 255)
elif int(h) == 1:
r = int(q * 255)
g = int(fB * 255)
b = int(p * 255)
elif int(h) == 2:
r = int(p * 255)
g = int(fB * 255)
b = int(t * 255)
elif int(h) == 3:
r = int(p * 255)
g = int(q * 255)
b = int(fB * 255)
elif int(h) == 4:
r = int(t * 255)
g = int(p * 255)
b = int(fB * 255)
elif int(h) == 5:
r = int(fB * 255)
g = int(p * 255)
b = int(q * 255)
return (r, g, b)
def color_RGB_to_hsv(iR: float, iG: float, iB: float) -> Tuple[float, float, float]:
"""Convert an rgb color to its hsv representation.
Hue is scaled 0-360
Sat is scaled 0-100
Val is scaled 0-100
"""
fHSV = colorsys.rgb_to_hsv(iR / 255.0, iG / 255.0, iB / 255.0)
return round(fHSV[0] * 360, 3), round(fHSV[1] * 100, 3), round(fHSV[2] * 100, 3)
def color_RGB_to_hs(iR: float, iG: float, iB: float) -> Tuple[float, float]:
"""Convert an rgb color to its hs representation."""
return color_RGB_to_hsv(iR, iG, iB)[:2]
def color_hsv_to_RGB(iH: float, iS: float, iV: float) -> Tuple[int, int, int]:
"""Convert an hsv color into its rgb representation.
Hue is scaled 0-360
Sat is scaled 0-100
Val is scaled 0-100
"""
fRGB = colorsys.hsv_to_rgb(iH / 360, iS / 100, iV / 100)
return (int(fRGB[0] * 255), int(fRGB[1] * 255), int(fRGB[2] * 255))
def color_hs_to_RGB(iH: float, iS: float) -> Tuple[int, int, int]:
"""Convert an hsv color into its rgb representation."""
return color_hsv_to_RGB(iH, iS, 100)
def color_xy_to_hs(
vX: float, vY: float, Gamut: Optional[GamutType] = None
) -> Tuple[float, float]:
"""Convert an xy color to its hs representation."""
h, s, _ = color_RGB_to_hsv(*color_xy_to_RGB(vX, vY, Gamut))
return h, s
def color_hs_to_xy(
iH: float, iS: float, Gamut: Optional[GamutType] = None
) -> Tuple[float, float]:
"""Convert an hs color to its xy representation."""
return color_RGB_to_xy(*color_hs_to_RGB(iH, iS), Gamut)
def _match_max_scale(input_colors: Tuple, output_colors: Tuple) -> Tuple:
"""Match the maximum value of the output to the input."""
max_in = max(input_colors)
max_out = max(output_colors)
if max_out == 0:
factor = 0.0
else:
factor = max_in / max_out
return tuple(int(round(i * factor)) for i in output_colors)
def color_rgb_to_rgbw(r: int, g: int, b: int) -> Tuple[int, int, int, int]:
"""Convert an rgb color to an rgbw representation."""
# Calculate the white channel as the minimum of input rgb channels.
# Subtract the white portion from the remaining rgb channels.
w = min(r, g, b)
rgbw = (r - w, g - w, b - w, w)
# Match the output maximum value to the input. This ensures the full
# channel range is used.
return _match_max_scale((r, g, b), rgbw) # type: ignore
def color_rgbw_to_rgb(r: int, g: int, b: int, w: int) -> Tuple[int, int, int]:
"""Convert an rgbw color to an rgb representation."""
# Add the white channel back into the rgb channels.
rgb = (r + w, g + w, b + w)
# Match the output maximum value to the input. This ensures the
# output doesn't overflow.
return _match_max_scale((r, g, b, w), rgb) # type: ignore
def color_rgb_to_hex(r: int, g: int, b: int) -> str:
"""Return a RGB color from a hex color string."""
return "{:02x}{:02x}{:02x}".format(round(r), round(g), round(b))
def rgb_hex_to_rgb_list(hex_string: str) -> List[int]:
"""Return an RGB color value list from a hex color string."""
return [
int(hex_string[i : i + len(hex_string) // 3], 16)
for i in range(0, len(hex_string), len(hex_string) // 3)
]
def color_temperature_to_hs(color_temperature_kelvin: float) -> Tuple[float, float]:
"""Return an hs color from a color temperature in Kelvin."""
return color_RGB_to_hs(*color_temperature_to_rgb(color_temperature_kelvin))
def color_temperature_to_rgb(
color_temperature_kelvin: float,
) -> Tuple[float, float, float]:
"""
Return an RGB color from a color temperature in Kelvin.
This is a rough approximation based on the formula provided by T. Helland
http://www.tannerhelland.com/4435/convert-temperature-rgb-algorithm-code/
"""
# range check
if color_temperature_kelvin < 1000:
color_temperature_kelvin = 1000
elif color_temperature_kelvin > 40000:
color_temperature_kelvin = 40000
tmp_internal = color_temperature_kelvin / 100.0
red = _get_red(tmp_internal)
green = _get_green(tmp_internal)
blue = _get_blue(tmp_internal)
return red, green, blue
def _bound(color_component: float, minimum: float = 0, maximum: float = 255) -> float:
"""
Bound the given color component value between the given min and max values.
The minimum and maximum values will be included in the valid output.
i.e. Given a color_component of 0 and a minimum of 10, the returned value
will be 10.
"""
color_component_out = max(color_component, minimum)
return min(color_component_out, maximum)
def _get_red(temperature: float) -> float:
"""Get the red component of the temperature in RGB space."""
if temperature <= 66:
return 255
tmp_red = 329.698727446 * math.pow(temperature - 60, -0.1332047592)
return _bound(tmp_red)
def _get_green(temperature: float) -> float:
"""Get the green component of the given color temp in RGB space."""
if temperature <= 66:
green = 99.4708025861 * math.log(temperature) - 161.1195681661
else:
green = 288.1221695283 * math.pow(temperature - 60, -0.0755148492)
return _bound(green)
def _get_blue(temperature: float) -> float:
"""Get the blue component of the given color temperature in RGB space."""
if temperature >= 66:
return 255
if temperature <= 19:
return 0
blue = 138.5177312231 * math.log(temperature - 10) - 305.0447927307
return _bound(blue)
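# Mireds and kelvin are reciprocals of each other: mired = 1,000,000 / kelvin.
# For example, 500 mired corresponds to 2000 K and 250 mired to 4000 K.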
def color_temperature_mired_to_kelvin(mired_temperature: float) -> float:
"""Convert absolute mired shift to degrees kelvin."""
return math.floor(1000000 / mired_temperature)
def color_temperature_kelvin_to_mired(kelvin_temperature: float) -> float:
"""Convert degrees kelvin to mired shift."""
return math.floor(1000000 / kelvin_temperature)
# The following 5 functions are adapted from rgbxy provided by Benjamin Knight
# License: The MIT License (MIT), 2014.
# https://github.com/benknight/hue-python-rgb-converter
def cross_product(p1: XYPoint, p2: XYPoint) -> float:
"""Calculate the cross product of two XYPoints."""
return float(p1.x * p2.y - p1.y * p2.x)
def get_distance_between_two_points(one: XYPoint, two: XYPoint) -> float:
"""Calculate the distance between two XYPoints."""
dx = one.x - two.x
dy = one.y - two.y
return math.sqrt(dx * dx + dy * dy)
def get_closest_point_to_line(A: XYPoint, B: XYPoint, P: XYPoint) -> XYPoint:
"""
Find the closest point from P to a line defined by A and B.
This point will be reproducible by the lamp
as it is on the edge of the gamut.
"""
AP = XYPoint(P.x - A.x, P.y - A.y)
AB = XYPoint(B.x - A.x, B.y - A.y)
ab2 = AB.x * AB.x + AB.y * AB.y
ap_ab = AP.x * AB.x + AP.y * AB.y
t = ap_ab / ab2
if t < 0.0:
t = 0.0
elif t > 1.0:
t = 1.0
return XYPoint(A.x + AB.x * t, A.y + AB.y * t)
def get_closest_point_to_point(
xy_tuple: Tuple[float, float], Gamut: GamutType
) -> Tuple[float, float]:
"""
Get the closest matching color within the gamut of the light.
Should only be used if the supplied color is outside of the color gamut.
"""
xy_point = XYPoint(xy_tuple[0], xy_tuple[1])
# find the closest point on each line in the CIE 1931 'triangle'.
pAB = get_closest_point_to_line(Gamut.red, Gamut.green, xy_point)
pAC = get_closest_point_to_line(Gamut.blue, Gamut.red, xy_point)
pBC = get_closest_point_to_line(Gamut.green, Gamut.blue, xy_point)
# Get the distances per point and see which point is closer to our Point.
dAB = get_distance_between_two_points(xy_point, pAB)
dAC = get_distance_between_two_points(xy_point, pAC)
dBC = get_distance_between_two_points(xy_point, pBC)
lowest = dAB
closest_point = pAB
if dAC < lowest:
lowest = dAC
closest_point = pAC
if dBC < lowest:
lowest = dBC
closest_point = pBC
# Change the xy value to a value which is within the reach of the lamp.
cx = closest_point.x
cy = closest_point.y
return (cx, cy)
def check_point_in_lamps_reach(p: Tuple[float, float], Gamut: GamutType) -> bool:
"""Check if the provided XYPoint can be recreated by a Hue lamp."""
v1 = XYPoint(Gamut.green.x - Gamut.red.x, Gamut.green.y - Gamut.red.y)
v2 = XYPoint(Gamut.blue.x - Gamut.red.x, Gamut.blue.y - Gamut.red.y)
q = XYPoint(p[0] - Gamut.red.x, p[1] - Gamut.red.y)
s = cross_product(q, v2) / cross_product(v1, v2)
t = cross_product(v1, q) / cross_product(v1, v2)
return (s >= 0.0) and (t >= 0.0) and (s + t <= 1.0)
def check_valid_gamut(Gamut: GamutType) -> bool:
"""Check if the supplied gamut is valid."""
# Check if the three points of the supplied gamut are not on the same line.
v1 = XYPoint(Gamut.green.x - Gamut.red.x, Gamut.green.y - Gamut.red.y)
v2 = XYPoint(Gamut.blue.x - Gamut.red.x, Gamut.blue.y - Gamut.red.y)
not_on_line = cross_product(v1, v2) > 0.0001
# Check if all six coordinates of the gamut lie between 0 and 1.
red_valid = (
Gamut.red.x >= 0 and Gamut.red.x <= 1 and Gamut.red.y >= 0 and Gamut.red.y <= 1
)
green_valid = (
Gamut.green.x >= 0
and Gamut.green.x <= 1
and Gamut.green.y >= 0
and Gamut.green.y <= 1
)
blue_valid = (
Gamut.blue.x >= 0
and Gamut.blue.x <= 1
and Gamut.blue.y >= 0
and Gamut.blue.y <= 1
)
return not_on_line and red_valid and green_valid and blue_valid
|
from base64 import b64decode
from homeassistant.components.broadlink.const import DOMAIN, REMOTE_DOMAIN
from homeassistant.components.remote import (
SERVICE_SEND_COMMAND,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
)
from homeassistant.const import STATE_OFF, STATE_ON
from homeassistant.helpers.entity_registry import async_entries_for_device
from . import get_device
from tests.async_mock import call
from tests.common import mock_device_registry, mock_registry
REMOTE_DEVICES = ["Entrance", "Living Room", "Office", "Garage"]
IR_PACKET = (
"JgBGAJKVETkRORA6ERQRFBEUERQRFBE5ETkQOhAVEBUQFREUEBUQ"
"OhEUERQRORE5EBURFBA6EBUQOhE5EBUQFRA6EDoRFBEADQUAAA=="
)
async def test_remote_setup_works(hass):
"""Test a successful setup with all remotes."""
for device in map(get_device, REMOTE_DEVICES):
device_registry = mock_device_registry(hass)
entity_registry = mock_registry(hass)
mock_api, mock_entry = await device.setup_entry(hass)
device_entry = device_registry.async_get_device(
{(DOMAIN, mock_entry.unique_id)}, set()
)
entries = async_entries_for_device(entity_registry, device_entry.id)
remotes = {entry for entry in entries if entry.domain == REMOTE_DOMAIN}
assert len(remotes) == 1
remote = remotes.pop()
assert remote.original_name == f"{device.name} Remote"
assert hass.states.get(remote.entity_id).state == STATE_ON
assert mock_api.auth.call_count == 1
async def test_remote_send_command(hass):
"""Test sending a command with all remotes."""
for device in map(get_device, REMOTE_DEVICES):
device_registry = mock_device_registry(hass)
entity_registry = mock_registry(hass)
mock_api, mock_entry = await device.setup_entry(hass)
device_entry = device_registry.async_get_device(
{(DOMAIN, mock_entry.unique_id)}, set()
)
entries = async_entries_for_device(entity_registry, device_entry.id)
remotes = {entry for entry in entries if entry.domain == REMOTE_DOMAIN}
assert len(remotes) == 1
remote = remotes.pop()
await hass.services.async_call(
REMOTE_DOMAIN,
SERVICE_SEND_COMMAND,
{"entity_id": remote.entity_id, "command": "b64:" + IR_PACKET},
blocking=True,
)
assert mock_api.send_data.call_count == 1
assert mock_api.send_data.call_args == call(b64decode(IR_PACKET))
assert mock_api.auth.call_count == 1
async def test_remote_turn_off_turn_on(hass):
"""Test we do not send commands if the remotes are off."""
for device in map(get_device, REMOTE_DEVICES):
device_registry = mock_device_registry(hass)
entity_registry = mock_registry(hass)
mock_api, mock_entry = await device.setup_entry(hass)
device_entry = device_registry.async_get_device(
{(DOMAIN, mock_entry.unique_id)}, set()
)
entries = async_entries_for_device(entity_registry, device_entry.id)
remotes = {entry for entry in entries if entry.domain == REMOTE_DOMAIN}
assert len(remotes) == 1
remote = remotes.pop()
await hass.services.async_call(
REMOTE_DOMAIN,
SERVICE_TURN_OFF,
{"entity_id": remote.entity_id},
blocking=True,
)
assert hass.states.get(remote.entity_id).state == STATE_OFF
await hass.services.async_call(
REMOTE_DOMAIN,
SERVICE_SEND_COMMAND,
{"entity_id": remote.entity_id, "command": "b64:" + IR_PACKET},
blocking=True,
)
assert mock_api.send_data.call_count == 0
await hass.services.async_call(
REMOTE_DOMAIN,
SERVICE_TURN_ON,
{"entity_id": remote.entity_id},
blocking=True,
)
assert hass.states.get(remote.entity_id).state == STATE_ON
await hass.services.async_call(
REMOTE_DOMAIN,
SERVICE_SEND_COMMAND,
{"entity_id": remote.entity_id, "command": "b64:" + IR_PACKET},
blocking=True,
)
assert mock_api.send_data.call_count == 1
assert mock_api.send_data.call_args == call(b64decode(IR_PACKET))
assert mock_api.auth.call_count == 1
|
import abc
from absl import flags
from perfkitbenchmarker import resource
flags.DEFINE_string('tpu_cidr_range', None, """CIDR Range for the TPU. The IP
range that the TPU will select an IP address from. Must be
in CIDR notation and a /29 range, for example
192.168.0.0/29. Errors will occur if the CIDR range has
already been used for a currently existing TPU, the CIDR
range conflicts with any networks in the user's provided
network, or the provided network is peered with another
network that is using that CIDR range.""")
flags.DEFINE_string('tpu_accelerator_type', 'tpu-v2',
'TPU accelerator type for the TPU.')
flags.DEFINE_string('tpu_description', None,
'Specifies a text description of the TPU.')
flags.DEFINE_string('tpu_network', None,
'Specifies the network that this TPU will be a part of.')
flags.DEFINE_string('tpu_tf_version', None,
'TensorFlow version for the TPU.')
flags.DEFINE_string('tpu_zone', None,
'The zone in which the TPU is created and lives.')
flags.DEFINE_string('tpu_name', None,
'The name of the TPU to create.')
flags.DEFINE_boolean('tpu_preemptible', False,
'Use preemptible TPU or not.')
flags.DEFINE_integer('tpu_cores_per_donut', 8,
'The number of cores per TPU donut. This is 8 because each'
' TPU has 4 chips each with 2 cores.')
FLAGS = flags.FLAGS
def GetTpuClass(cloud):
"""Gets the TPU class corresponding to 'cloud'.
Args:
cloud: String. Name of the cloud to get the class for.
Returns:
Implementation class corresponding to the argument cloud
Raises:
Exception: An invalid TPU was provided
"""
return resource.GetResourceClass(BaseTpu, CLOUD=cloud)
class BaseTpu(resource.BaseResource):
"""Object representing a TPU."""
RESOURCE_TYPE = 'BaseTpu'
def __init__(self, tpu_spec):
"""Initialize the TPU object.
Args:
tpu_spec: spec of the TPU.
"""
super(BaseTpu, self).__init__()
self.spec = tpu_spec
def _Create(self):
"""Creates the TPU."""
raise NotImplementedError()
def _Delete(self):
"""Deletes the TPU."""
raise NotImplementedError()
@abc.abstractmethod
def GetName(self):
raise NotImplementedError()
@abc.abstractmethod
def GetMasterGrpcAddress(self):
"""Gets the master grpc address of the TPU."""
raise NotImplementedError()
@abc.abstractmethod
def GetNumShards(self):
"""Gets the number of TPU shards."""
raise NotImplementedError()
@abc.abstractmethod
def GetZone(self):
"""Gets the TPU zone."""
raise NotImplementedError()
@abc.abstractmethod
def GetAcceleratorType(self):
"""Gets the TPU accelerator type."""
raise NotImplementedError()
def GetResourceMetadata(self):
"""Returns a dictionary of cluster metadata."""
metadata = {
'cidr_range': self.spec.tpu_cidr_range,
'accelerator_type': self.spec.tpu_accelerator_type,
'description': self.spec.tpu_description,
'network': self.spec.tpu_network,
'tf_version': self.spec.tpu_tf_version,
'zone': self.spec.tpu_zone,
'name': self.spec.tpu_name,
'preemptible': self.spec.tpu_preemptible
}
return metadata
|
from __future__ import print_function
import argparse
import os
import sys
def main(args):
p = argparse.ArgumentParser(description=__doc__)
p.add_argument("-p", "--parents", action="store_true", help="create parent directories as necessary")
p.add_argument("dir", action="store", nargs="+", help="the directory to be created")
ns = p.parse_args(args)
status = 0
for dir in ns.dir:
try:
(os.makedirs if ns.parents else os.mkdir)(dir)
except Exception as err:
print("mkdir: {}: {!s}".format(type(err).__name__, err), file=sys.stderr)
status = 1
sys.exit(status)
if __name__ == "__main__":
main(sys.argv[1:])
|
from typing import Any, Dict, Optional
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.const import (
DEVICE_CLASS_PRESSURE,
DEVICE_CLASS_TEMPERATURE,
PRESSURE_BAR,
TEMP_CELSIUS,
)
from homeassistant.util import slugify
from . import DOMAIN, IncomfortChild
INCOMFORT_HEATER_TEMP = "CV Temp"
INCOMFORT_PRESSURE = "CV Pressure"
INCOMFORT_TAP_TEMP = "Tap Temp"
INCOMFORT_MAP_ATTRS = {
INCOMFORT_HEATER_TEMP: ["heater_temp", "is_pumping"],
INCOMFORT_PRESSURE: ["pressure", None],
INCOMFORT_TAP_TEMP: ["tap_temp", "is_tapping"],
}
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up an InComfort/InTouch sensor device."""
if discovery_info is None:
return
client = hass.data[DOMAIN]["client"]
heaters = hass.data[DOMAIN]["heaters"]
async_add_entities(
[IncomfortPressure(client, h, INCOMFORT_PRESSURE) for h in heaters]
+ [IncomfortTemperature(client, h, INCOMFORT_HEATER_TEMP) for h in heaters]
+ [IncomfortTemperature(client, h, INCOMFORT_TAP_TEMP) for h in heaters]
)
class IncomfortSensor(IncomfortChild):
"""Representation of an InComfort/InTouch sensor device."""
def __init__(self, client, heater, name) -> None:
"""Initialize the sensor."""
super().__init__()
self._client = client
self._heater = heater
self._unique_id = f"{heater.serial_no}_{slugify(name)}"
self.entity_id = f"{SENSOR_DOMAIN}.{DOMAIN}_{slugify(name)}"
self._name = f"Boiler {name}"
self._device_class = None
self._state_attr = INCOMFORT_MAP_ATTRS[name][0]
self._unit_of_measurement = None
@property
def state(self) -> Optional[str]:
"""Return the state of the sensor."""
return self._heater.status[self._state_attr]
@property
def device_class(self) -> Optional[str]:
"""Return the device class of the sensor."""
return self._device_class
@property
def unit_of_measurement(self) -> Optional[str]:
"""Return the unit of measurement of the sensor."""
return self._unit_of_measurement
class IncomfortPressure(IncomfortSensor):
"""Representation of an InTouch CV Pressure sensor."""
def __init__(self, client, heater, name) -> None:
"""Initialize the sensor."""
super().__init__(client, heater, name)
self._device_class = DEVICE_CLASS_PRESSURE
self._unit_of_measurement = PRESSURE_BAR
class IncomfortTemperature(IncomfortSensor):
"""Representation of an InTouch Temperature sensor."""
def __init__(self, client, heater, name) -> None:
"""Initialize the signal strength sensor."""
super().__init__(client, heater, name)
self._attr = INCOMFORT_MAP_ATTRS[name][1]
self._device_class = DEVICE_CLASS_TEMPERATURE
self._unit_of_measurement = TEMP_CELSIUS
@property
def device_state_attributes(self) -> Optional[Dict[str, Any]]:
"""Return the device state attributes."""
return {self._attr: self._heater.status[self._attr]}
|
import os
import re
from itertools import chain
from django.db.models import Q
from django.utils.functional import cached_property
from django.utils.text import slugify
from weblate.logger import LOGGER
from weblate.trans.defines import COMPONENT_NAME_LENGTH
from weblate.trans.models import Component
from weblate.trans.tasks import create_component
from weblate.trans.util import path_separator
from weblate.utils.render import render_template
# Attributes to copy from main component
COPY_ATTRIBUTES = (
"project",
"vcs",
"license",
"agreement",
"source_language",
"report_source_bugs",
"allow_translation_propagation",
"enable_suggestions",
"suggestion_voting",
"suggestion_autoaccept",
"check_flags",
"new_lang",
"language_code_style",
"commit_message",
"add_message",
"delete_message",
"merge_message",
"addon_message",
"committer_name",
"committer_email",
"push_on_commit",
"commit_pending_age",
"edit_template",
"variant_regex",
)
class ComponentDiscovery:
def __init__(
self,
component,
match,
name_template,
file_format,
language_regex="^[^.]+$",
base_file_template="",
new_base_template="",
path=None,
copy_addons=True,
):
self.component = component
if path is None:
self.path = self.component.full_path
else:
self.path = path
self.path_match = self.compile_match(match)
self.name_template = name_template
self.base_file_template = base_file_template
self.new_base_template = new_base_template
self.language_re = language_regex
self.language_match = re.compile(language_regex)
self.file_format = file_format
self.copy_addons = copy_addons
@staticmethod
def extract_kwargs(params):
"""Extract kwargs for discovery from wider dict."""
attrs = (
"match",
"name_template",
"language_regex",
"base_file_template",
"new_base_template",
"file_format",
"copy_addons",
)
return {k: v for k, v in params.items() if k in attrs}
def compile_match(self, match):
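# Rewrite each (?P=language) backreference in the user-supplied mask into a
# uniquely named group _language_N, so that the span of every occurrence can
# be located later when building the file mask (see matches()).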
parts = match.split("(?P=language)")
offset = 1
while len(parts) > 1:
parts[0:2] = [
"{}(?P<_language_{}>(?P=language)){}".format(parts[0], offset, parts[1])
]
offset += 1
return re.compile("^{}$".format(parts[0]))
@cached_property
def matches(self):
"""Return matched files together with match groups and mask."""
result = []
base = os.path.realpath(self.path)
for root, dirnames, filenames in os.walk(self.path, followlinks=True):
for filename in chain(filenames, dirnames):
fullname = os.path.join(root, filename)
# Skip files outside our root
if not os.path.realpath(fullname).startswith(base):
continue
# Calculate relative path
path = path_separator(os.path.relpath(fullname, self.path))
# Check match against our regexp
matches = self.path_match.match(path)
if not matches:
continue
# Check language regexp
if not self.language_match.match(matches.group("language")):
continue
# Calculate file mask for match
replacements = [(matches.start("language"), matches.end("language"))]
for group in matches.groupdict().keys():
if group.startswith("_language_"):
replacements.append((matches.start(group), matches.end(group)))
maskparts = []
maskpath = path
for start, end in sorted(replacements, reverse=True):
maskparts.append(maskpath[end:])
maskpath = maskpath[:start]
maskparts.append(maskpath)
mask = "*".join(reversed(maskparts))
result.append((path, matches.groupdict(), mask))
return result
@cached_property
def matched_files(self):
"""Return list of matched files."""
return [x[0] for x in self.matches]
@cached_property
def matched_components(self):
"""Return list of matched components."""
result = {}
for path, groups, mask in self.matches:
if mask not in result:
name = render_template(self.name_template, **groups)
result[mask] = {
"files": {path},
"languages": {groups["language"]},
"files_langs": {(path, groups["language"])},
"base_file": render_template(self.base_file_template, **groups),
"new_base": render_template(self.new_base_template, **groups),
"mask": mask,
"name": name,
"slug": slugify(name),
}
else:
result[mask]["files"].add(path)
result[mask]["languages"].add(groups["language"])
result[mask]["files_langs"].add((path, groups["language"]))
return result
def log(self, *args):
if self.component:
self.component.log_info(*args)
else:
LOGGER.info(*args)
def create_component(self, main, match, background=False, **kwargs):
max_length = COMPONENT_NAME_LENGTH
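# Trim matched values so they fit the component name/slug length limit,
# optionally reserving `extra` characters for a numeric uniqueness suffix.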
def get_val(key, extra=0):
result = match[key]
if len(result) > max_length - extra:
result = result[: max_length - extra]
return result
# Get name and slug
name = get_val("name")
slug = get_val("slug")
# Copy attributes from main component
for key in COPY_ATTRIBUTES:
if key not in kwargs and main is not None:
kwargs[key] = getattr(main, key)
# Fill in repository
if "repo" not in kwargs:
kwargs["repo"] = main.get_repo_link_url()
# Deal with duplicate name or slug
components = Component.objects.filter(project=kwargs["project"])
if components.filter(Q(slug__iexact=slug) | Q(name__iexact=name)).exists():
base_name = get_val("name", 4)
base_slug = get_val("slug", 4)
for i in range(1, 1000):
name = f"{base_name} {i}"
slug = f"{base_slug}-{i}"
if components.filter(
Q(slug__iexact=slug) | Q(name__iexact=name)
).exists():
continue
break
# Fill in remaining attributes
kwargs.update(
{
"name": name,
"slug": slug,
"template": match["base_file"],
"filemask": match["mask"],
"new_base": match["new_base"],
"file_format": self.file_format,
"language_regex": self.language_re,
"addons_from": main.pk if self.copy_addons and main else None,
}
)
self.log("Creating component %s", name)
# Can't pass objects, pass only IDs
kwargs["project"] = kwargs["project"].pk
kwargs["source_language"] = kwargs["source_language"].pk
if background:
create_component.delay(**kwargs, in_task=True)
return None
return create_component(**kwargs)
def cleanup(self, main, processed, preview=False):
deleted = []
for component in main.linked_childs.exclude(pk__in=processed):
if component.has_template():
# Valid template?
if os.path.exists(component.get_template_filename()):
continue
elif component.new_base:
# Valid new base?
if os.path.exists(component.get_new_base_filename()):
continue
else:
if component.get_mask_matches():
continue
# Delete the component, as the files it needs appear to be missing
deleted.append((None, component))
if not preview:
component.delete()
return deleted
def check_valid(self, match):
def valid_file(name):
if not name:
return True
fullname = os.path.join(self.component.full_path, name)
return os.path.exists(fullname)
# Skip matches to main component
if match["mask"] == self.component.filemask:
return False
if not valid_file(match["base_file"]):
return False
if not valid_file(match["new_base"]):
return False
return True
def perform(self, preview=False, remove=False, background=False):
created = []
matched = []
deleted = []
processed = set()
main = self.component
for match in self.matched_components.values():
# Skip invalid matches
if not self.check_valid(match):
continue
try:
found = main.linked_childs.filter(filemask=match["mask"])[0]
# Component exists
matched.append((match, found))
processed.add(found.id)
except IndexError:
# Create new component
component = None
if not preview:
component = self.create_component(main, match, background)
if component:
processed.add(component.id)
created.append((match, component))
if remove:
deleted = self.cleanup(main, processed, preview)
return created, matched, deleted
|
from flexx import app, event, ui
class SpeedTest(app.PyComponent):
def init(self):
self.widget = SpeedTestWidget(self)
@event.action
def echo(self, data):
self.widget.receive_data(data)
class SpeedTestWidget(ui.Widget):
def init(self, pycomp):
self.pycomp = pycomp
self._start_time = 0
self._start_times = []
with ui.VBox():
with ui.HBox() as self.buttons:
ui.Button(text='1 x 1 MiB roundtrip')
ui.Button(text='1 x 5 MiB roundtrip')
ui.Button(text='10 x 1 MiB roundtrip')
ui.Button(text='10 x 5 MiB roundtrip')
ui.Button(text='100 x 1 MiB roundtrip')
ui.Button(text='100 x 5 MiB roundtrip')
self.progress = ui.ProgressBar()
self.status = ui.Label(text='Status: waiting for button press ...',
wrap=1, flex=1, style='overflow-y:scroll;')
@event.reaction('buttons.children*.pointer_down')
def run_test(self, *events):
global window, perf_counter
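# window and perf_counter are not imported: this widget code is transpiled to
# JavaScript by PScript, which supplies both names at runtime.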
self.status.set_text('Test results: ')
self.progress.set_value(0)
tests = []
for ev in events:
if isinstance(ev.source, ui.Button):
sze = 5 if '5' in ev.source.text else 1
n = int(ev.source.text.split(' ')[0])
for i in range(n):
tests.append(sze)
self.progress.set_max(len(tests))
self._start_time = perf_counter()
for n in tests:
data = window.Uint8Array(n * 1024 * 1024).buffer
self.send_data(data)
@event.action
def send_data(self, data):
global perf_counter
self._start_times.append(perf_counter())
self.pycomp.echo(data)
@event.action
def receive_data(self, data):
global perf_counter
t = perf_counter() - self._start_times.pop(0)
mib = data.byteLength / 1024 / 1024
text = 'Received %i MiB in %s seconds.' % (mib, str(t)[:5])
self.status.set_html(self.status.html + ' ' + text)
self.progress.set_value(self.progress.value + 1)
if len(self._start_times) == 0:
t = perf_counter() - self._start_time
text = 'Total time %s.' % str(t)[:5]
self.status.set_html(self.status.html + ' ' + text)
if __name__ == '__main__':
m = app.launch(SpeedTest, 'firefox-browser')
app.run()
|
from typing import Any
from homeassistant.components.scene import Scene
from . import ElkAttachedEntity, create_elk_entities
from .const import DOMAIN
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Create the Elk-M1 scene platform."""
elk_data = hass.data[DOMAIN][config_entry.entry_id]
entities = []
elk = elk_data["elk"]
create_elk_entities(elk_data, elk.tasks, "task", ElkTask, entities)
async_add_entities(entities, True)
class ElkTask(ElkAttachedEntity, Scene):
"""Elk-M1 task as scene."""
async def async_activate(self, **kwargs: Any) -> None:
"""Activate the task."""
self._element.activate()
|
from __future__ import print_function
import sys
import random
import time
import math
if sys.argv[1] == "slow":
sys.settrace(sys.gettrace())
random.seed(1)
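# hash_str is the workload being benchmarked: a simple polynomial rolling hash
# of the string, truncated to 64 bits at every step.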
def hash_str(s):
h = 0
for c in s:
h = (h * 31 + ord(c)) & (2 ** 64 - 1)
return h
data = [
hex(random.getrandbits(1024)) for _ in range(500)
]
N_SAMPLES = 100
def mean(xs):
xs = list(xs)
return sum(xs) / len(xs)
def sd(xs):
return math.sqrt(mean(x ** 2 for x in xs) - mean(xs) ** 2)
if __name__ == '__main__':
timing = []
for _ in range(N_SAMPLES):
start = time.time()
for d in data:
hash_str(d)
timing.append(1000000 * (time.time() - start) / len(data))
print("Runtime per example:", "%.2f +/- %.2f us" % (mean(timing), sd(timing)))
|
import logging
from btsmarthub_devicelist import BTSmartHub
import voluptuous as vol
from homeassistant.components.device_tracker import (
DOMAIN,
PLATFORM_SCHEMA,
DeviceScanner,
)
from homeassistant.const import CONF_HOST
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
CONF_DEFAULT_IP = "192.168.1.254"
CONF_SMARTHUB_MODEL = "smarthub_model"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_HOST, default=CONF_DEFAULT_IP): cv.string,
vol.Optional(CONF_SMARTHUB_MODEL): vol.In([1, 2]),
}
)
def get_scanner(hass, config):
"""Return a BT Smart Hub scanner if successful."""
info = config[DOMAIN]
smarthub_client = BTSmartHub(
router_ip=info[CONF_HOST], smarthub_model=info.get(CONF_SMARTHUB_MODEL)
)
scanner = BTSmartHubScanner(smarthub_client)
return scanner if scanner.success_init else None
class BTSmartHubScanner(DeviceScanner):
"""This class queries a BT Smart Hub."""
def __init__(self, smarthub_client):
"""Initialise the scanner."""
self.smarthub = smarthub_client
self.last_results = {}
self.success_init = False
# Test the router is accessible
data = self.get_bt_smarthub_data()
if data:
self.success_init = True
else:
_LOGGER.info("Failed to connect to %s", self.smarthub.router_ip)
def scan_devices(self):
"""Scan for new devices and return a list with found device IDs."""
self._update_info()
return [client["mac"] for client in self.last_results]
def get_device_name(self, device):
"""Return the name of the given device or None if we don't know."""
if not self.last_results:
return None
for client in self.last_results:
if client["mac"] == device:
return client["host"]
return None
def _update_info(self):
"""Ensure the information from the BT Smart Hub is up to date."""
if not self.success_init:
return
_LOGGER.info("Scanning")
data = self.get_bt_smarthub_data()
if not data:
_LOGGER.warning("Error scanning devices")
return
clients = list(data.values())
self.last_results = clients
def get_bt_smarthub_data(self):
"""Retrieve data from BT Smart Hub and return parsed result."""
# Request data from bt smarthub into a list of dicts.
data = self.smarthub.get_devicelist(only_active_devices=True)
# Renaming keys from parsed result.
devices = {}
for device in data:
try:
devices[device["UserHostName"]] = {
"ip": device["IPAddress"],
"mac": device["PhysAddress"],
"host": device["UserHostName"],
"status": device["Active"],
}
except KeyError:
pass
return devices
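# A hedged configuration sketch for this scanner (the platform name and the
# model value are assumptions inferred from this module, not verified
# defaults):
#
#     device_tracker:
#       - platform: bt_smarthub
#         host: 192.168.1.254
#         smarthub_model: 2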
|
import diamond.collector
import socket
import re
from diamond.collector import str_to_bool
class DarnerCollector(diamond.collector.Collector):
GAUGES = [
'curr_connections',
'curr_items',
'uptime'
]
def get_default_config_help(self):
config_help = super(DarnerCollector, self).get_default_config_help()
config_help.update({
'publish':
"Which rows of 'status' you would like to publish." +
" Telnet host port' and type stats and hit enter to see " +
" the list of possibilities. Leave unset to publish all.",
'hosts':
"List of hosts, and ports to collect. Set an alias by " +
" prefixing the host:port with alias@",
'publish_queues':
"Publish queue stats (defaults to True)",
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(DarnerCollector, self).get_default_config()
config.update({
'path': 'darner',
# Which rows of 'status' you would like to publish.
# 'telnet host port' and type stats and hit enter to see the list
# of possibilities.
# Leave unset to publish all
# 'publish': ''
'publish_queues': True,
# Connection settings
'hosts': ['localhost:22133']
})
return config
def get_raw_stats(self, host, port):
data = ''
# connect
try:
if port is None:
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.connect(host)
else:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((host, int(port)))
# request stats
sock.send('stats\n')
# something big enough to get whatever is sent back
data = sock.recv(4096)
except socket.error:
self.log.exception('Failed to get stats from %s:%s',
host, port)
return data
def get_stats(self, host, port):
        # keys that are always ignored because they aren't real stats
ignored = ('time', 'version')
stats = {}
queues = {}
data = self.get_raw_stats(host, port)
# parse stats
for line in data.splitlines():
pieces = line.split(' ')
if pieces[0] != 'STAT' or pieces[1] in ignored:
continue
if re.match(r'^queue', pieces[1]):
queue_match = re.match(
r'^queue_(.*)_(items|waiters|open_transactions)$',
pieces[1])
queue_name = queue_match.group(1).replace('.', '_')
if queue_name not in queues:
queues[queue_name] = {}
queues[queue_name][queue_match.group(2)] = int(pieces[2])
else:
stats[pieces[1]] = int(pieces[2])
return stats, queues
def collect(self):
hosts = self.config.get('hosts')
# Convert a string config value to be an array
if isinstance(hosts, basestring):
hosts = [hosts]
for host in hosts:
            matches = re.search(r'((.+)@)?([^:]+)(:(\d+))?', host)
alias = matches.group(2)
hostname = matches.group(3)
port = matches.group(5)
if alias is None:
alias = hostname
stats, queues = self.get_stats(hostname, port)
# Publish queue stats if configured
if str_to_bool(self.config['publish_queues']):
for queue in queues:
for queue_stat in queues[queue]:
self.publish_gauge(
alias + ".queues." + queue + "." + queue_stat,
queues[queue][queue_stat])
# figure out what we're configured to get, defaulting to everything
desired = self.config.get('publish', stats.keys())
# for everything we want
for stat in desired:
if stat in stats:
# we have it
if stat in self.GAUGES:
self.publish_gauge(alias + "." + stat, stats[stat])
else:
self.publish_counter(alias + "." + stat, stats[stat])
else:
# we don't, must be something configured in publish so we
# should log an error about it
self.log.error("No such key '%s' available, issue 'stats' "
"for a full list", stat)
|
from unittest import TestCase
import numpy as np
import pandas as pd
from scattertext import CorpusFromFeatureDict
class TestCorpusFromFeatureDict(TestCase):
def test_build(self):
df = pd.DataFrame([
{
'text': '''The President opened the speech by welcoming the Speaker, Vice President, Members of Congress, and fellow Americans. He noted that this was his eighth speech, and promised it would be shorter than usual, joking that he knew "some of you are antsy to get back to Iowa." He recognized people's generally low expectations for meaningful legislature due to 2016 being an election year, and thanked the House Speaker Paul Ryan for his help passing the budget and making tax cuts permanent for working families. He expressed hope that progress could be made on "bipartisan priorities like criminal justice reform, and helping people who are battling prescription drug abuse." He then listed proposals for the year ahead, per tradition. They included helping students learn to write computer code, personalizing medical treatments for patients, fixing the immigration system he called broken, protecting US children from gun violence, achieving equal pay for equal work in a nod towards gender equality, implementing paid leave, and raising the minimum wage.''',
'feats': {'president': 3, 'he': 2},
'category': '2016'},
{
'text': '''He then addressed the third question, how to ensure America's safety without either becoming isolationist or having to nation-build across the world. He highlighted the strength of the US military, and criticized those who claimed America was getting weaker as its enemies were getting stronger. He pointed out that failing states were the biggest threat to the US, not evil empires. He listed as his top priority "protecting the American people and going after terrorist networks." He discussed the threat of al Qaeda and ISIL, but pointed out that they did not threaten "our national existence," and dismissed claims otherwise as harmful propaganda. He then detailed the American and 60 country coalition efforts to defeat terrorism and to "cut off ISIL’s financing, disrupt their plots, stop the flow of terrorist fighters, and stamp out their vicious ideology. With nearly 10,000 air strikes, we are taking out their leadership, their oil, their training camps, and their weapons. We are training, arming, and supporting forces who are steadily reclaiming territory in Iraq and Syria."''',
'feats': {'addressed': 5, 'he': 2},
'category': '2016'},
{
'text': '''Senator Bernie Sanders of Vermont (an independent who caucuses with the Democrats in the Senate) responded to the speech in a 14-minute video posted to Facebook, in which he criticized Trump for failing to make any mention of income inequality, criminal justice reform, or climate change.[23] Sanders also stated: "President Trump once again made it clear he plans on working with Republicans in Congress who want to repeal the Affordable Care Act, throw 20 million Americans off of health insurance, privatize Medicare, make massive cuts in Medicaid, raise the cost of prescription drugs to seniors, eliminate funding for Planned Parenthood, while at the same time, he wants to give another massive tax break to the wealthiest Americans."[23]."''',
'feats': {'medicare': 2, 'Trump': 3, 'senator bernie sanders': 8, 'he': 2},
'category': '2017'
},
{
'text': '''The 45th President of the United States, Donald Trump, gave his first public address before a joint session of the United States Congress on Tuesday, February 28, 2017. Similar to a State of the Union address, it was delivered before the 115th United States Congress in the Chamber of the United States House of Representatives in the United States Capitol.[6] Presiding over this joint session was the House Speaker, Paul Ryan. Accompanying the Speaker of the House was the President of the United States Senate, Mike Pence, the Vice President of the United States."''',
'feats': {'trump': 9, 'president': 8, 'he': 2},
'category': '2017'
},
])
corpus = CorpusFromFeatureDict(
df=df,
category_col='category',
text_col='text',
feature_col='feats'
).build()
        self.assertEqual(len(corpus.get_terms()), 7)
self.assertEqual(len(corpus.get_categories()), 2)
self.assertEqual(len(corpus.get_texts()), 4)
self.assertEqual(corpus.get_texts()[0], df.text.iloc[0])
self.assertEqual(corpus.get_texts()[3], df.text.iloc[3])
        self.assertFalse(np.array_equal(corpus._X[0, :], corpus._X[1, :]))
corpus.get_df()
def test_metadata(self):
df = pd.DataFrame([
{
'text': '''The President opened the speech by welcoming the Speaker, Vice President, Members of Congress, and fellow Americans. He noted that this was his eighth speech, and promised it would be shorter than usual, joking that he knew "some of you are antsy to get back to Iowa." He recognized people's generally low expectations for meaningful legislature due to 2016 being an election year, and thanked the House Speaker Paul Ryan for his help passing the budget and making tax cuts permanent for working families. He expressed hope that progress could be made on "bipartisan priorities like criminal justice reform, and helping people who are battling prescription drug abuse." He then listed proposals for the year ahead, per tradition. They included helping students learn to write computer code, personalizing medical treatments for patients, fixing the immigration system he called broken, protecting US children from gun violence, achieving equal pay for equal work in a nod towards gender equality, implementing paid leave, and raising the minimum wage.''',
'feats': {'president': 3, 'he': 2},
'meta': {'word_count': 32},
'category': '2016'},
{
'text': '''He then addressed the third question, how to ensure America's safety without either becoming isolationist or having to nation-build across the world. He highlighted the strength of the US military, and criticized those who claimed America was getting weaker as its enemies were getting stronger. He pointed out that failing states were the biggest threat to the US, not evil empires. He listed as his top priority "protecting the American people and going after terrorist networks." He discussed the threat of al Qaeda and ISIL, but pointed out that they did not threaten "our national existence," and dismissed claims otherwise as harmful propaganda. He then detailed the American and 60 country coalition efforts to defeat terrorism and to "cut off ISIL’s financing, disrupt their plots, stop the flow of terrorist fighters, and stamp out their vicious ideology. With nearly 10,000 air strikes, we are taking out their leadership, their oil, their training camps, and their weapons. We are training, arming, and supporting forces who are steadily reclaiming territory in Iraq and Syria."''',
'feats': {'addressed': 5, 'he': 2},
'meta': {'word_count': 44},
'category': '2016'},
{
'text': '''Senator Bernie Sanders of Vermont (an independent who caucuses with the Democrats in the Senate) responded to the speech in a 14-minute video posted to Facebook, in which he criticized Trump for failing to make any mention of income inequality, criminal justice reform, or climate change.[23] Sanders also stated: "President Trump once again made it clear he plans on working with Republicans in Congress who want to repeal the Affordable Care Act, throw 20 million Americans off of health insurance, privatize Medicare, make massive cuts in Medicaid, raise the cost of prescription drugs to seniors, eliminate funding for Planned Parenthood, while at the same time, he wants to give another massive tax break to the wealthiest Americans."[23]."''',
'feats': {'medicare': 2, 'Trump': 3, 'senator bernie sanders': 8, 'he': 2},
'meta': {'word_count': 20},
'category': '2017'
},
{
'text': '''The 45th President of the United States, Donald Trump, gave his first public address before a joint session of the United States Congress on Tuesday, February 28, 2017. Similar to a State of the Union address, it was delivered before the 115th United States Congress in the Chamber of the United States House of Representatives in the United States Capitol.[6] Presiding over this joint session was the House Speaker, Paul Ryan. Accompanying the Speaker of the House was the President of the United States Senate, Mike Pence, the Vice President of the United States."''',
'feats': {'trump': 9, 'president': 8, 'he': 2},
'meta': {'word_count': 10},
'category': '2017'
},
])
corpus = CorpusFromFeatureDict(
df=df,
category_col='category',
text_col='text',
feature_col='feats',
metadata_col='meta'
).build()
        self.assertEqual(len(corpus.get_terms()), 7)
self.assertEqual(len(corpus.get_categories()), 2)
self.assertEqual(len(corpus.get_texts()), 4)
self.assertEqual(corpus.get_texts()[0], df.text.iloc[0])
self.assertEqual(corpus.get_texts()[3], df.text.iloc[3])
        self.assertFalse(np.array_equal(corpus._X[0, :], corpus._X[1, :]))
expected = pd.DataFrame([{'term': 'word_count', '2016 freq': np.int32(76), '2017 freq': np.int32(30)}]).set_index('term').astype(np.int32)
pd.testing.assert_frame_equal(corpus.get_metadata_freq_df(), expected)
|
from datetime import timedelta
import logging
from time import mktime
import steam
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_API_KEY
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import track_time_interval
from homeassistant.util.dt import utc_from_timestamp
_LOGGER = logging.getLogger(__name__)
CONF_ACCOUNTS = "accounts"
ICON = "mdi:steam"
STATE_OFFLINE = "offline"
STATE_ONLINE = "online"
STATE_BUSY = "busy"
STATE_AWAY = "away"
STATE_SNOOZE = "snooze"
STATE_LOOKING_TO_TRADE = "looking_to_trade"
STATE_LOOKING_TO_PLAY = "looking_to_play"
STEAM_API_URL = "https://steamcdn-a.akamaihd.net/steam/apps/"
STEAM_HEADER_IMAGE_FILE = "header.jpg"
STEAM_MAIN_IMAGE_FILE = "capsule_616x353.jpg"
STEAM_ICON_URL = (
"https://steamcdn-a.akamaihd.net/steamcommunity/public/images/apps/%d/%s.jpg"
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_ACCOUNTS, default=[]): vol.All(cv.ensure_list, [cv.string]),
}
)
APP_LIST_KEY = "steam_online.app_list"
BASE_INTERVAL = timedelta(minutes=1)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Steam platform."""
steam.api.key.set(config.get(CONF_API_KEY))
    # Initialize the steam module's app list before creating sensors
# to benefit from internal caching of the list.
hass.data[APP_LIST_KEY] = steam.apps.app_list()
entities = [SteamSensor(account, steam) for account in config.get(CONF_ACCOUNTS)]
if not entities:
return
add_entities(entities, True)
# Only one sensor update once every 60 seconds to avoid
# flooding steam and getting disconnected.
entity_next = 0
@callback
def do_update(time):
nonlocal entity_next
entities[entity_next].async_schedule_update_ha_state(True)
entity_next = (entity_next + 1) % len(entities)
track_time_interval(hass, do_update, BASE_INTERVAL)
class SteamSensor(Entity):
"""A class for the Steam account."""
def __init__(self, account, steamod):
"""Initialize the sensor."""
self._steamod = steamod
self._account = account
self._profile = None
self._game = None
self._game_id = None
self._extra_game_info = None
self._state = None
self._name = None
self._avatar = None
self._last_online = None
self._level = None
self._owned_games = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def entity_id(self):
"""Return the entity ID."""
return f"sensor.steam_{self._account}"
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def should_poll(self):
"""Turn off polling, will do ourselves."""
return False
def update(self):
"""Update device state."""
try:
self._profile = self._steamod.user.profile(self._account)
# Only if need be, get the owned games
if not self._owned_games:
self._owned_games = self._steamod.api.interface(
"IPlayerService"
).GetOwnedGames(steamid=self._account, include_appinfo=1)
self._game = self._get_current_game()
self._game_id = self._profile.current_game[0]
self._extra_game_info = self._get_game_info()
self._state = {
1: STATE_ONLINE,
2: STATE_BUSY,
3: STATE_AWAY,
4: STATE_SNOOZE,
5: STATE_LOOKING_TO_TRADE,
6: STATE_LOOKING_TO_PLAY,
}.get(self._profile.status, STATE_OFFLINE)
self._name = self._profile.persona
self._avatar = self._profile.avatar_medium
self._last_online = self._get_last_online()
self._level = self._profile.level
except self._steamod.api.HTTPTimeoutError as error:
_LOGGER.warning(error)
self._game = None
self._game_id = None
self._state = None
self._name = None
self._avatar = None
self._last_online = None
self._level = None
def _get_current_game(self):
"""Gather current game name from APP ID."""
game_id = self._profile.current_game[0]
game_extra_info = self._profile.current_game[2]
if game_extra_info:
return game_extra_info
if not game_id:
return None
app_list = self.hass.data[APP_LIST_KEY]
try:
_, res = app_list[game_id]
return res
except KeyError:
pass
# Try reloading the app list, must be a new app
app_list = self._steamod.apps.app_list()
self.hass.data[APP_LIST_KEY] = app_list
try:
_, res = app_list[game_id]
return res
except KeyError:
pass
_LOGGER.error("Unable to find name of app with ID=%s", game_id)
return repr(game_id)
def _get_game_info(self):
game_id = self._profile.current_game[0]
if game_id is not None:
for game in self._owned_games["response"]["games"]:
if game["appid"] == game_id:
return game
return None
def _get_last_online(self):
"""Convert last_online from the steam module into timestamp UTC."""
last_online = utc_from_timestamp(mktime(self._profile.last_online))
if last_online:
return last_online
return None
@property
def device_state_attributes(self):
"""Return the state attributes."""
attr = {}
if self._game is not None:
attr["game"] = self._game
if self._game_id is not None:
attr["game_id"] = self._game_id
game_url = f"{STEAM_API_URL}{self._game_id}/"
attr["game_image_header"] = f"{game_url}{STEAM_HEADER_IMAGE_FILE}"
attr["game_image_main"] = f"{game_url}{STEAM_MAIN_IMAGE_FILE}"
if self._extra_game_info is not None and self._game_id is not None:
attr["game_icon"] = STEAM_ICON_URL % (
self._game_id,
self._extra_game_info["img_icon_url"],
)
if self._last_online is not None:
attr["last_online"] = self._last_online
if self._level is not None:
attr["level"] = self._level
return attr
@property
def entity_picture(self):
"""Avatar of the account."""
return self._avatar
@property
def icon(self):
"""Return the icon to use in the frontend."""
return ICON
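# A minimal configuration sketch for this platform (key and account values are
# placeholders; the platform name is inferred from this module's constants).
# CONF_API_KEY is required and CONF_ACCOUNTS is a list of account identifiers:
#
#     sensor:
#       - platform: steam_online
#         api_key: YOUR_STEAM_API_KEY
#         accounts:
#           - "76561198012345678"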
|
import asyncio
from ProgettiHWSW.ProgettiHWSWAPI import ProgettiHWSWAPI
from ProgettiHWSW.input import Input
from ProgettiHWSW.relay import Relay
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from .const import DOMAIN
PLATFORMS = ["switch", "binary_sensor"]
async def async_setup(hass, config):
"""Set up the ProgettiHWSW Automation component."""
hass.data[DOMAIN] = {}
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Set up ProgettiHWSW Automation from a config entry."""
hass.data[DOMAIN][entry.entry_id] = ProgettiHWSWAPI(
f'{entry.data["host"]}:{entry.data["port"]}'
)
# Check board validation again to load new values to API.
await hass.data[DOMAIN][entry.entry_id].check_board()
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
def setup_input(api: ProgettiHWSWAPI, input_number: int) -> Input:
"""Initialize the input pin."""
return api.get_input(input_number)
def setup_switch(api: ProgettiHWSWAPI, switch_number: int, mode: str) -> Relay:
"""Initialize the output pin."""
return api.get_relay(switch_number, mode)
|
import numpy as np
import pandas as pd
from pandas.util.testing import assert_frame_equal
from arctic.chunkstore.tools import segment_id_repair
def test_segment_repair_tool(chunkstore_lib):
"""
Issue 442 - Data already written with -1 as the segment needs to be updated on update and appends
"""
def generate_data(date):
"""
Generates a dataframe that is almost exactly the size of
a segment in chunkstore
"""
df = pd.DataFrame(np.random.randn(10000*16, 12),
columns=['beta', 'btop', 'earnyild', 'growth', 'industry', 'leverage',
'liquidty', 'momentum', 'resvol', 'sid', 'size', 'sizenl'])
df['date'] = date
return df
def get_segments():
return sorted(chunkstore_lib._collection.distinct('sg', {'sy': 'test'}))
date = pd.Timestamp('2000-01-01')
df = generate_data(date)
chunkstore_lib.write('test', df, chunk_size='A')
chunkstore_lib.write('other_data', generate_data(date), chunk_size='D')
other_data = chunkstore_lib.read('other_data')
chunkstore_lib.write('more_data', generate_data(date), chunk_size='Q')
more_data = chunkstore_lib.read('more_data')
assert(get_segments() == [0])
chunkstore_lib._collection.update_one({'sy': 'test'}, {'$set': {'sg': -1}})
assert(get_segments() == [-1])
symbols = segment_id_repair(chunkstore_lib)
assert(symbols == ['test'])
assert(get_segments() == [0])
date += pd.Timedelta(1, unit='D')
df2 = generate_data(date)
chunkstore_lib.append('test', df2)
assert(get_segments() == [0, 1])
read = chunkstore_lib.read('test')
assert_frame_equal(read, pd.concat([df, df2], ignore_index=True))
chunkstore_lib._collection.update_one({'sy': 'test', 'sg': 0}, {'$set': {'sg': -1}})
chunkstore_lib._collection.update_one({'sy': 'test', 'sg': 1}, {'$set': {'sg': 0}})
assert(get_segments() == [-1, 0])
symbols = segment_id_repair(chunkstore_lib, 'test')
assert(get_segments() == [0, 1])
assert(symbols == ['test'])
assert_frame_equal(chunkstore_lib.read('more_data'), more_data)
assert_frame_equal(chunkstore_lib.read('other_data'), other_data)
|
from fractions import Fraction
import io
import av
import numpy as np
from homeassistant.components.stream import Stream
from homeassistant.components.stream.const import ATTR_STREAMS, DOMAIN
AUDIO_SAMPLE_RATE = 8000
def generate_h264_video(container_format="mp4", audio_codec=None):
"""
Generate a test video.
See: http://docs.mikeboers.com/pyav/develop/cookbook/numpy.html
"""
def generate_audio_frame(pcm_mulaw=False):
"""Generate a blank audio frame."""
if pcm_mulaw:
audio_frame = av.AudioFrame(format="s16", layout="mono", samples=1)
audio_bytes = b"\x00\x00"
else:
audio_frame = av.AudioFrame(format="dbl", layout="mono", samples=1024)
audio_bytes = b"\x00\x00\x00\x00\x00\x00\x00\x00" * 1024
audio_frame.planes[0].update(audio_bytes)
audio_frame.sample_rate = AUDIO_SAMPLE_RATE
audio_frame.time_base = Fraction(1, AUDIO_SAMPLE_RATE)
return audio_frame
duration = 5
fps = 24
total_frames = duration * fps
output = io.BytesIO()
output.name = "test.mov" if container_format == "mov" else "test.mp4"
container = av.open(output, mode="w", format=container_format)
stream = container.add_stream("libx264", rate=fps)
stream.width = 480
stream.height = 320
stream.pix_fmt = "yuv420p"
a_packet = None
last_a_dts = -1
if audio_codec is not None:
if audio_codec == "empty": # empty we add a stream but don't mux any audio
astream = container.add_stream("aac", AUDIO_SAMPLE_RATE)
else:
astream = container.add_stream(audio_codec, AUDIO_SAMPLE_RATE)
        # The encoder may buffer frames, so call encode() until it emits a packet
while not a_packet:
a_packets = astream.encode(
generate_audio_frame(pcm_mulaw=audio_codec == "pcm_mulaw")
)
if a_packets:
a_packet = a_packets[0]
for frame_i in range(total_frames):
img = np.empty((480, 320, 3))
img[:, :, 0] = 0.5 + 0.5 * np.sin(2 * np.pi * (0 / 3 + frame_i / total_frames))
img[:, :, 1] = 0.5 + 0.5 * np.sin(2 * np.pi * (1 / 3 + frame_i / total_frames))
img[:, :, 2] = 0.5 + 0.5 * np.sin(2 * np.pi * (2 / 3 + frame_i / total_frames))
        img = np.clip(np.round(255 * img), 0, 255).astype(np.uint8)
frame = av.VideoFrame.from_ndarray(img, format="rgb24")
for packet in stream.encode(frame):
container.mux(packet)
if a_packet is not None:
a_packet.pts = int(frame_i / (fps * a_packet.time_base))
while a_packet.pts * a_packet.time_base * fps < frame_i + 1:
a_packet.dts = a_packet.pts
if (
a_packet.dts > last_a_dts
): # avoid writing same dts twice in case of rounding
container.mux(a_packet)
last_a_dts = a_packet.dts
a_packet.pts += a_packet.duration
# Flush stream
for packet in stream.encode():
container.mux(packet)
# Close the file
container.close()
output.seek(0)
return output
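# A hedged usage sketch (format and codec as used above): the returned BytesIO
# can be opened again with PyAV to inspect the generated streams.
#
#     source = generate_h264_video(container_format="mp4", audio_codec="aac")
#     probe = av.open(source)
#     assert probe.streams.video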
def preload_stream(hass, stream_source):
"""Preload a stream for use in tests."""
stream = Stream(hass, stream_source)
hass.data[DOMAIN][ATTR_STREAMS][stream_source] = stream
return stream
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from diamond.collector import Collector
from varnish import VarnishCollector
###############################################################################
class TestVarnishCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('VarnishCollector', {})
self.collector = VarnishCollector(config, None)
def test_import(self):
self.assertTrue(VarnishCollector)
@patch.object(Collector, 'publish')
def test_should_work_with_real_data_3_dot_0(self, publish_mock):
collector_mock = patch.object(VarnishCollector, 'poll', Mock(
return_value=self.getFixture('3.0/varnish_stats').getvalue()))
collector_mock.start()
self.collector.collect()
collector_mock.stop()
metrics = {
'client_conn': 10799,
'client_drop': 0,
'client_req': 10796,
'cache_hit': 6580,
'cache_hitpass': 0,
'cache_miss': 2566,
'backend_conn': 13363,
'backend_unhealthy': 0,
'backend_busy': 0,
'backend_fail': 0,
'backend_reuse': 0,
'backend_toolate': 0,
'backend_recycle': 0,
'backend_retry': 0,
'fetch_head': 0,
'fetch_length': 12986,
'fetch_chunked': 0,
'fetch_eof': 0,
'fetch_bad': 0,
'fetch_close': 331,
'fetch_oldhttp': 0,
'fetch_zero': 0,
'fetch_failed': 0,
'fetch_1xx': 0,
'fetch_204': 0,
'fetch_304': 45,
'n_sess_mem': 19,
'n_sess': 1,
'n_object': 9,
'n_vampireobject': 0,
'n_objectcore': 17,
'n_objecthead': 27,
'n_waitinglist': 10,
'n_vbc': 1,
'n_wrk': 10,
'n_wrk_create': 10,
'n_wrk_failed': 0,
'n_wrk_max': 11451,
'n_wrk_lqueue': 0,
'n_wrk_queued': 0,
'n_wrk_drop': 0,
'n_backend': 4,
'n_expired': 2557,
'n_lru_nuked': 0,
'n_lru_moved': 5588,
'losthdr': 0,
'n_objsendfile': 0,
'n_objwrite': 2546,
'n_objoverflow': 0,
's_sess': 10798,
's_req': 10796,
's_pipe': 0,
's_pass': 10796,
's_fetch': 13362,
's_hdrbytes': 4764593,
's_bodybytes': 23756354,
'sess_closed': 10798,
'sess_pipeline': 0,
'sess_readahead': 0,
'sess_linger': 0,
'sess_herd': 0,
'shm_records': 1286246,
'shm_writes': 102894,
'shm_flushes': 0,
'shm_cont': 0,
'shm_cycles': 0,
'sms_nreq': 0,
'sms_nobj': 0,
'sms_nbytes': 0,
'sms_balloc': 0,
'sms_bfree': 0,
'backend_req': 13363,
'n_vcl': 1,
'n_vcl_avail': 1,
'n_vcl_discard': 0,
'n_ban': 1,
'n_ban_add': 1,
'n_ban_retire': 0,
'n_ban_obj_test': 0,
'n_ban_re_test': 0,
'n_ban_dups': 0,
'hcb_nolock': 9146,
'hcb_lock': 2379,
'hcb_insert': 2379,
'esi_errors': 0,
'esi_warnings': 0,
'accept_fail': 0,
'client_drop_late': 0,
'uptime': 35440,
'dir_dns_lookups': 0,
'dir_dns_failed': 0,
'dir_dns_hit': 0,
'dir_dns_cache_full': 0,
'n_gzip': 8277,
'n_gunzip': 11982,
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_should_work_with_real_data_4_dot_0(self, publish_mock):
collector_mock = patch.object(VarnishCollector, 'poll', Mock(
return_value=self.getFixture('4.0/varnish_stats').getvalue()))
collector_mock.start()
self.collector.collect()
collector_mock.stop()
metrics = {
'MAIN.uptime': 851,
'MAIN.sess_conn': 3013382,
'MAIN.sess_drop': 0,
'MAIN.sess_fail': 0,
'MAIN.sess_pipe_overflow': 0,
'MAIN.client_req_400': 17,
'MAIN.client_req_411': 0,
'MAIN.client_req_413': 0,
'MAIN.client_req_417': 0,
'MAIN.client_req': 4089775,
'MAIN.cache_hit': 2341983,
'MAIN.cache_hitpass': 154893,
'MAIN.cache_miss': 1300621,
'MAIN.backend_conn': 4499,
'MAIN.backend_unhealthy': 0,
'MAIN.backend_busy': 0,
'MAIN.backend_fail': 0,
'MAIN.backend_reuse': 2419103,
'MAIN.backend_toolate': 0,
'MAIN.backend_recycle': 2419604,
'MAIN.backend_retry': 13,
'MAIN.fetch_head': 25,
'MAIN.fetch_length': 702690,
'MAIN.fetch_chunked': 1216071,
'MAIN.fetch_eof': 0,
'MAIN.fetch_bad': 0,
'MAIN.fetch_close': 137,
'MAIN.fetch_oldhttp': 0,
'MAIN.fetch_zero': 0,
'MAIN.fetch_1xx': 0,
'MAIN.fetch_204': 0,
'MAIN.fetch_304': 504930,
'MAIN.fetch_failed': 0,
'MAIN.fetch_no_thread': 0,
'MAIN.pools': 2,
'MAIN.threads': 4000,
'MAIN.threads_limited': 0,
'MAIN.threads_created': 4000,
'MAIN.threads_destroyed': 0,
'MAIN.threads_failed': 0,
'MAIN.thread_queue_len': 0,
'MAIN.busy_sleep': 1193,
'MAIN.busy_wakeup': 1193,
'MAIN.sess_queued': 0,
'MAIN.sess_dropped': 0,
'MAIN.n_object': 1120124,
'MAIN.n_vampireobject': 0,
'MAIN.n_objectcore': 1120528,
'MAIN.n_objecthead': 1176948,
'MAIN.n_waitinglist': 1380,
'MAIN.n_backend': 2,
'MAIN.n_expired': 180652,
'MAIN.n_lru_nuked': 0,
'MAIN.n_lru_moved': 1552209,
'MAIN.losthdr': 0,
'MAIN.s_sess': 3013384,
'MAIN.s_req': 4089779,
'MAIN.s_pipe': 0,
'MAIN.s_pass': 352238,
'MAIN.s_fetch': 1652859,
'MAIN.s_synth': 94934,
'MAIN.s_req_hdrbytes': 4784250600,
'MAIN.s_req_bodybytes': 52901418,
'MAIN.s_resp_hdrbytes': 2508258336,
'MAIN.s_resp_bodybytes': 16343773348,
'MAIN.s_pipe_hdrbytes': 0,
'MAIN.s_pipe_in': 0,
'MAIN.s_pipe_out': 0,
'MAIN.sess_closed': 58799,
'MAIN.sess_closed_err': 6412,
'MAIN.sess_pipeline': 53,
'MAIN.sess_readahead': 17,
'MAIN.sess_herd': 4924320,
'MAIN.shm_records': 630017674,
'MAIN.shm_writes': 162187536,
'MAIN.shm_flushes': 133825475,
'MAIN.shm_cont': 6593666,
'MAIN.shm_cycles': 472,
'MAIN.sms_nreq': 0,
'MAIN.sms_nobj': 0,
'MAIN.sms_nbytes': 0,
'MAIN.sms_balloc': 0,
'MAIN.sms_bfree': 0,
'MAIN.backend_req': 2423591,
'MAIN.n_vcl': 1,
'MAIN.n_vcl_avail': 1,
'MAIN.n_vcl_discard': 0,
'MAIN.bans': 1,
'MAIN.bans_completed': 1,
'MAIN.bans_obj': 0,
'MAIN.bans_req': 0,
'MAIN.bans_added': 1,
'MAIN.bans_deleted': 0,
'MAIN.bans_tested': 0,
'MAIN.bans_obj_killed': 0,
'MAIN.bans_lurker_tested': 0,
'MAIN.bans_tests_tested': 0,
'MAIN.bans_lurker_tests_tested': 0,
'MAIN.bans_lurker_obj_killed': 0,
'MAIN.bans_dups': 0,
'MAIN.bans_lurker_contention': 0,
'MAIN.bans_persisted_bytes': 13,
'MAIN.bans_persisted_fragmentation': 0,
'MAIN.n_purges': 0,
'MAIN.n_obj_purged': 0,
'MAIN.exp_mailed': 2842439,
'MAIN.exp_received': 2842439,
'MAIN.hcb_nolock': 3797501,
'MAIN.hcb_lock': 1266489,
'MAIN.hcb_insert': 1266484,
'MAIN.esi_errors': 0,
'MAIN.esi_warnings': 0,
'MAIN.vmods': 2,
'MAIN.n_gzip': 0,
'MAIN.n_gunzip': 1809822,
'MAIN.vsm_free': 972304,
'MAIN.vsm_used': 83962304,
'MAIN.vsm_cooling': 0,
'MAIN.vsm_overflow': 0,
'MAIN.vsm_overflowed': 0,
'MGT.uptime': 851,
'MGT.child_start': 1,
'MGT.child_exit': 0,
'MGT.child_stop': 0,
'MGT.child_died': 0,
'MGT.child_dump': 0,
'MGT.child_panic': 0,
'LCK.sms.creat': 0,
'LCK.sms.destroy': 0,
'LCK.sms.locks': 0,
'LCK.smp.creat': 0,
'LCK.smp.destroy': 0,
'LCK.smp.locks': 0,
'LCK.sma.creat': 2,
'LCK.sma.destroy': 0,
'LCK.sma.locks': 8079812,
'LCK.smf.creat': 0,
'LCK.smf.destroy': 0,
'LCK.smf.locks': 0,
'LCK.hsl.creat': 0,
'LCK.hsl.destroy': 0,
'LCK.hsl.locks': 0,
'LCK.hcb.creat': 1,
'LCK.hcb.destroy': 0,
'LCK.hcb.locks': 1430122,
'LCK.hcl.creat': 0,
'LCK.hcl.destroy': 0,
'LCK.hcl.locks': 0,
'LCK.vcl.creat': 1,
'LCK.vcl.destroy': 0,
'LCK.vcl.locks': 4852573,
'LCK.sessmem.creat': 0,
'LCK.sessmem.destroy': 0,
'LCK.sessmem.locks': 0,
'LCK.sess.creat': 3011371,
'LCK.sess.destroy': 2995725,
'LCK.sess.locks': 154803,
'LCK.wstat.creat': 1,
'LCK.wstat.destroy': 0,
'LCK.wstat.locks': 10626608,
'LCK.herder.creat': 0,
'LCK.herder.destroy': 0,
'LCK.herder.locks': 0,
'LCK.wq.creat': 3,
'LCK.wq.destroy': 0,
'LCK.wq.locks': 24257096,
'LCK.objhdr.creat': 1266966,
'LCK.objhdr.destroy': 90130,
'LCK.objhdr.locks': 40326652,
'LCK.exp.creat': 1,
'LCK.exp.destroy': 0,
'LCK.exp.locks': 10787337,
'LCK.lru.creat': 2,
'LCK.lru.destroy': 0,
'LCK.lru.locks': 7418755,
'LCK.cli.creat': 1,
'LCK.cli.destroy': 0,
'LCK.cli.locks': 297,
'LCK.ban.creat': 1,
'LCK.ban.destroy': 0,
'LCK.ban.locks': 3025891,
'LCK.vbp.creat': 1,
'LCK.vbp.destroy': 0,
'LCK.vbp.locks': 1705,
'LCK.backend.creat': 2,
'LCK.backend.destroy': 0,
'LCK.backend.locks': 4852203,
'LCK.vcapace.creat': 1,
'LCK.vcapace.destroy': 0,
'LCK.vcapace.locks': 0,
'LCK.nbusyobj.creat': 0,
'LCK.nbusyobj.destroy': 0,
'LCK.nbusyobj.locks': 0,
'LCK.busyobj.creat': 2423785,
'LCK.busyobj.destroy': 2423780,
'LCK.busyobj.locks': 20093877,
'LCK.mempool.creat': 6,
'LCK.mempool.destroy': 0,
'LCK.mempool.locks': 22218269,
'LCK.vxid.creat': 1,
'LCK.vxid.destroy': 0,
'LCK.vxid.locks': 1385,
'LCK.pipestat.creat': 1,
'LCK.pipestat.destroy': 0,
'LCK.pipestat.locks': 0,
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_should_fail_gracefully(self, publish_mock):
collector_mock = patch.object(VarnishCollector, 'poll', Mock(
return_value=self.getFixture(
'varnish_stats_blank').getvalue()))
collector_mock.start()
self.collector.collect()
collector_mock.stop()
self.assertPublishedMany(publish_mock, {})
###############################################################################
if __name__ == "__main__":
unittest.main()
|
import sys
from lark import Lark, Transformer, v_args
json_grammar = r"""
?start: value
?value: object
| array
| string
| SIGNED_NUMBER -> number
| "true" -> true
| "false" -> false
| "null" -> null
array : "[" [value ("," value)*] "]"
object : "{" [pair ("," pair)*] "}"
pair : string ":" value
string : ESCAPED_STRING
%import common.ESCAPED_STRING
%import common.SIGNED_NUMBER
%import common.WS
%ignore WS
"""
class TreeToJson(Transformer):
@v_args(inline=True)
def string(self, s):
return s[1:-1].replace('\\"', '"')
array = list
pair = tuple
object = dict
number = v_args(inline=True)(float)
null = lambda self, _: None
true = lambda self, _: True
false = lambda self, _: False
### Create the JSON parser with Lark, using the Earley algorithm
# json_parser = Lark(json_grammar, parser='earley', lexer='standard')
# def parse(x):
# return TreeToJson().transform(json_parser.parse(x))
### Create the JSON parser with Lark, using the LALR algorithm
json_parser = Lark(json_grammar, parser='lalr',
# Using the standard lexer isn't required, and isn't usually recommended.
# But, it's good enough for JSON, and it's slightly faster.
lexer='standard',
# Disabling propagate_positions and placeholders slightly improves speed
propagate_positions=False,
maybe_placeholders=False,
# Using an internal transformer is faster and more memory efficient
transformer=TreeToJson())
parse = json_parser.parse
def test():
test_json = '''
{
"empty_object" : {},
"empty_array" : [],
"booleans" : { "YES" : true, "NO" : false },
"numbers" : [ 0, 1, -2, 3.3, 4.4e5, 6.6e-7 ],
"strings" : [ "This", [ "And" , "That", "And a \\"b" ] ],
"nothing" : null
}
'''
j = parse(test_json)
print(j)
import json
assert j == json.loads(test_json)
if __name__ == '__main__':
# test()
with open(sys.argv[1]) as f:
print(parse(f.read()))
|
import atexit
try:
import ctypes
except ImportError:
"""Google AppEngine is shipped without ctypes
:seealso: http://stackoverflow.com/a/6523777/70170
"""
ctypes = None
import operator
import os
import sys
import threading
import time
import traceback as _traceback
import warnings
import subprocess
import functools
from more_itertools import always_iterable
# Here I save the value of os.getcwd(), which, if I am imported early enough,
# will be the directory from which the startup script was run. This is needed
# by _do_execv(), to change back to the original directory before execv()ing a
# new process. This is a defense against the application having changed the
# current working directory (which could make sys.executable "not found" if
# sys.executable is a relative-path, and/or cause other problems).
_startup_cwd = os.getcwd()
class ChannelFailures(Exception):
"""Exception raised during errors on Bus.publish()."""
delimiter = '\n'
def __init__(self, *args, **kwargs):
"""Initialize ChannelFailures errors wrapper."""
super(ChannelFailures, self).__init__(*args, **kwargs)
self._exceptions = list()
def handle_exception(self):
"""Append the current exception to self."""
self._exceptions.append(sys.exc_info()[1])
def get_instances(self):
"""Return a list of seen exception instances."""
return self._exceptions[:]
def __str__(self):
"""Render the list of errors, which happened in channel."""
exception_strings = map(repr, self.get_instances())
return self.delimiter.join(exception_strings)
__repr__ = __str__
def __bool__(self):
"""Determine whether any error happened in channel."""
return bool(self._exceptions)
__nonzero__ = __bool__
# Use a flag to indicate the state of the bus.
class _StateEnum(object):
class State(object):
name = None
def __repr__(self):
return 'states.%s' % self.name
def __setattr__(self, key, value):
if isinstance(value, self.State):
value.name = key
object.__setattr__(self, key, value)
states = _StateEnum()
states.STOPPED = states.State()
states.STARTING = states.State()
states.STARTED = states.State()
states.STOPPING = states.State()
states.EXITING = states.State()
try:
import fcntl
except ImportError:
max_files = 0
else:
try:
max_files = os.sysconf('SC_OPEN_MAX')
except AttributeError:
max_files = 1024
class Bus(object):
"""Process state-machine and messenger for HTTP site deployment.
All listeners for a given channel are guaranteed to be called even
if others at the same channel fail. Each failure is logged, but
execution proceeds on to the next listener. The only way to stop all
processing from inside a listener is to raise SystemExit and stop the
whole server.
"""
states = states
state = states.STOPPED
execv = False
max_cloexec_files = max_files
def __init__(self):
"""Initialize pub/sub bus."""
self.execv = False
self.state = states.STOPPED
channels = 'start', 'stop', 'exit', 'graceful', 'log', 'main'
self.listeners = dict(
(channel, set())
for channel in channels
)
self._priorities = {}
def subscribe(self, channel, callback=None, priority=None):
"""Add the given callback at the given channel (if not present).
If callback is None, return a partial suitable for decorating
the callback.
"""
if callback is None:
return functools.partial(
self.subscribe,
channel,
priority=priority,
)
ch_listeners = self.listeners.setdefault(channel, set())
ch_listeners.add(callback)
if priority is None:
priority = getattr(callback, 'priority', 50)
self._priorities[(channel, callback)] = priority
def unsubscribe(self, channel, callback):
"""Discard the given callback (if present)."""
listeners = self.listeners.get(channel)
if listeners and callback in listeners:
listeners.discard(callback)
del self._priorities[(channel, callback)]
def publish(self, channel, *args, **kwargs):
"""Return output of all subscribers for the given channel."""
if channel not in self.listeners:
return []
exc = ChannelFailures()
output = []
raw_items = (
(self._priorities[(channel, listener)], listener)
for listener in self.listeners[channel]
)
items = sorted(raw_items, key=operator.itemgetter(0))
for priority, listener in items:
try:
output.append(listener(*args, **kwargs))
except KeyboardInterrupt:
raise
except SystemExit:
e = sys.exc_info()[1]
# If we have previous errors ensure the exit code is non-zero
if exc and e.code == 0:
e.code = 1
raise
except Exception:
exc.handle_exception()
if channel == 'log':
# Assume any further messages to 'log' will fail.
pass
else:
self.log('Error in %r listener %r' % (channel, listener),
level=40, traceback=True)
if exc:
raise exc
return output
def _clean_exit(self):
"""Assert that the Bus is not running in atexit handler callback."""
if self.state != states.EXITING:
warnings.warn(
'The main thread is exiting, but the Bus is in the %r state; '
'shutting it down automatically now. You must either call '
'bus.block() after start(), or call bus.exit() before the '
'main thread exits.' % self.state, RuntimeWarning)
self.exit()
def start(self):
"""Start all services."""
atexit.register(self._clean_exit)
self.state = states.STARTING
self.log('Bus STARTING')
try:
self.publish('start')
self.state = states.STARTED
self.log('Bus STARTED')
except (KeyboardInterrupt, SystemExit):
raise
except Exception:
self.log('Shutting down due to error in start listener:',
level=40, traceback=True)
e_info = sys.exc_info()[1]
try:
self.exit()
except Exception:
# Any stop/exit errors will be logged inside publish().
pass
# Re-raise the original error
raise e_info
def exit(self):
"""Stop all services and prepare to exit the process."""
exitstate = self.state
EX_SOFTWARE = 70
try:
self.stop()
self.state = states.EXITING
self.log('Bus EXITING')
self.publish('exit')
# This isn't strictly necessary, but it's better than seeing
# "Waiting for child threads to terminate..." and then nothing.
self.log('Bus EXITED')
except Exception:
# This method is often called asynchronously (whether thread,
# signal handler, console handler, or atexit handler), so we
# can't just let exceptions propagate out unhandled.
# Assume it's been logged and just die.
os._exit(EX_SOFTWARE)
if exitstate == states.STARTING:
# exit() was called before start() finished, possibly due to
# Ctrl-C because a start listener got stuck. In this case,
# we could get stuck in a loop where Ctrl-C never exits the
            # process, so we just call os._exit here.
os._exit(EX_SOFTWARE)
def restart(self):
"""Restart the process (may close connections).
This method does not restart the process from the calling thread;
instead, it stops the bus and asks the main thread to call execv.
"""
self.execv = True
self.exit()
def graceful(self):
"""Advise all services to reload."""
self.log('Bus graceful')
self.publish('graceful')
def block(self, interval=0.1):
"""Wait for the EXITING state, KeyboardInterrupt or SystemExit.
This function is intended to be called only by the main thread.
After waiting for the EXITING state, it also waits for all threads
to terminate, and then calls os.execv if self.execv is True. This
design allows another thread to call bus.restart, yet have the main
thread perform the actual execv call (required on some platforms).
"""
try:
self.wait(states.EXITING, interval=interval, channel='main')
except (KeyboardInterrupt, IOError):
# The time.sleep call might raise
# "IOError: [Errno 4] Interrupted function call" on KBInt.
self.log('Keyboard Interrupt: shutting down bus')
self.exit()
except SystemExit:
self.log('SystemExit raised: shutting down bus')
self.exit()
raise
# Waiting for ALL child threads to finish is necessary on OS X.
# See https://github.com/cherrypy/cherrypy/issues/581.
# It's also good to let them all shut down before allowing
# the main thread to call atexit handlers.
# See https://github.com/cherrypy/cherrypy/issues/751.
self.log('Waiting for child threads to terminate...')
for t in threading.enumerate():
            # Make sure we're not trying to join the MainThread, which
            # would cause a deadlock. This can happen when the bus is run
            # as a Windows service or when another thread executes
            # cherrypy.engine.exit().
if (
t != threading.currentThread() and
not isinstance(t, threading._MainThread) and
# Note that any dummy (external) threads are
# always daemonic.
not t.daemon
):
self.log('Waiting for thread %s.' % t.getName())
t.join()
if self.execv:
self._do_execv()
def wait(self, state, interval=0.1, channel=None):
"""Poll for the given state(s) at intervals; publish to channel."""
states = set(always_iterable(state))
while self.state not in states:
time.sleep(interval)
self.publish(channel)
def _do_execv(self):
"""Re-execute the current process.
This must be called from the main thread, because certain platforms
(OS X) don't allow execv to be called in a child thread very well.
"""
try:
args = self._get_true_argv()
except NotImplementedError:
"""It's probably win32 or GAE"""
args = [sys.executable] + self._get_interpreter_argv() + sys.argv
self.log('Re-spawning %s' % ' '.join(args))
self._extend_pythonpath(os.environ)
if sys.platform[:4] == 'java':
from _systemrestart import SystemRestart
raise SystemRestart
else:
if sys.platform == 'win32':
args = ['"%s"' % arg for arg in args]
os.chdir(_startup_cwd)
if self.max_cloexec_files:
self._set_cloexec()
os.execv(sys.executable, args)
@staticmethod
def _get_interpreter_argv():
"""Retrieve current Python interpreter's arguments.
Returns empty tuple in case of frozen mode, uses built-in arguments
reproduction function otherwise.
        Frozen mode applies when the app has been packaged into a binary
        executable using py2exe. In this case the interpreter's arguments are
        already built into that executable.
:seealso: https://github.com/cherrypy/cherrypy/issues/1526
Ref: https://pythonhosted.org/PyInstaller/runtime-information.html
"""
return ([]
if getattr(sys, 'frozen', False)
else subprocess._args_from_interpreter_flags())
@staticmethod
def _get_true_argv():
"""Retrieve all real arguments of the python interpreter.
...even those not listed in ``sys.argv``
:seealso: http://stackoverflow.com/a/28338254/595220
:seealso: http://stackoverflow.com/a/6683222/595220
:seealso: http://stackoverflow.com/a/28414807/595220
"""
try:
char_p = ctypes.c_wchar_p
argv = ctypes.POINTER(char_p)()
argc = ctypes.c_int()
ctypes.pythonapi.Py_GetArgcArgv(
ctypes.byref(argc),
ctypes.byref(argv),
)
_argv = argv[:argc.value]
# The code below is trying to correctly handle special cases.
# `-c`'s argument interpreted by Python itself becomes `-c` as
# well. Same applies to `-m`. This snippet is trying to survive
# at least the case with `-m`
# Ref: https://github.com/cherrypy/cherrypy/issues/1545
# Ref: python/cpython@418baf9
argv_len, is_command, is_module = len(_argv), False, False
try:
m_ind = _argv.index('-m')
if m_ind < argv_len - 1 and _argv[m_ind + 1] in ('-c', '-m'):
"""
In some older Python versions `-m`'s argument may be
substituted with `-c`, not `-m`
"""
is_module = True
except (IndexError, ValueError):
m_ind = None
try:
c_ind = _argv.index('-c')
if c_ind < argv_len - 1 and _argv[c_ind + 1] == '-c':
is_command = True
except (IndexError, ValueError):
c_ind = None
if is_module:
"""It's containing `-m -m` sequence of arguments"""
if is_command and c_ind < m_ind:
"""There's `-c -c` before `-m`"""
raise RuntimeError(
"Cannot reconstruct command from '-c'. Ref: "
'https://github.com/cherrypy/cherrypy/issues/1545')
# Survive module argument here
original_module = sys.argv[0]
if not os.access(original_module, os.R_OK):
"""There's no such module exist"""
raise AttributeError(
"{} doesn't seem to be a module "
'accessible by current user'.format(original_module))
del _argv[m_ind:m_ind + 2] # remove `-m -m`
# ... and substitute it with the original module path:
_argv.insert(m_ind, original_module)
elif is_command:
"""It's containing just `-c -c` sequence of arguments"""
raise RuntimeError(
"Cannot reconstruct command from '-c'. "
'Ref: https://github.com/cherrypy/cherrypy/issues/1545')
except AttributeError:
"""It looks Py_GetArgcArgv is completely absent in some environments
It is known, that there's no Py_GetArgcArgv in MS Windows and
``ctypes`` module is completely absent in Google AppEngine
:seealso: https://github.com/cherrypy/cherrypy/issues/1506
:seealso: https://github.com/cherrypy/cherrypy/issues/1512
:ref: http://bit.ly/2gK6bXK
"""
raise NotImplementedError
else:
return _argv
@staticmethod
def _extend_pythonpath(env):
"""Prepend current working dir to PATH environment variable if needed.
If sys.path[0] is an empty string, the interpreter was likely
invoked with -m and the effective path is about to change on
re-exec. Add the current directory to $PYTHONPATH to ensure
that the new process sees the same path.
This issue cannot be addressed in the general case because
Python cannot reliably reconstruct the
original command line (http://bugs.python.org/issue14208).
(This idea filched from tornado.autoreload)
"""
path_prefix = '.' + os.pathsep
existing_path = env.get('PYTHONPATH', '')
needs_patch = (
sys.path[0] == '' and
not existing_path.startswith(path_prefix)
)
if needs_patch:
env['PYTHONPATH'] = path_prefix + existing_path
def _set_cloexec(self):
"""Set the CLOEXEC flag on all open files (except stdin/out/err).
If self.max_cloexec_files is an integer (the default), then on
platforms which support it, it represents the max open files setting
for the operating system. This function will be called just before
the process is restarted via os.execv() to prevent open files
from persisting into the new process.
Set self.max_cloexec_files to 0 to disable this behavior.
"""
for fd in range(3, self.max_cloexec_files): # skip stdin/out/err
try:
flags = fcntl.fcntl(fd, fcntl.F_GETFD)
except IOError:
continue
fcntl.fcntl(fd, fcntl.F_SETFD, flags | fcntl.FD_CLOEXEC)
def stop(self):
"""Stop all services."""
self.state = states.STOPPING
self.log('Bus STOPPING')
self.publish('stop')
self.state = states.STOPPED
self.log('Bus STOPPED')
def start_with_callback(self, func, args=None, kwargs=None):
"""Start 'func' in a new thread T, then start self (and return T)."""
if args is None:
args = ()
if kwargs is None:
kwargs = {}
args = (func,) + args
def _callback(func, *a, **kw):
self.wait(states.STARTED)
func(*a, **kw)
t = threading.Thread(target=_callback, args=args, kwargs=kwargs)
t.setName('Bus Callback ' + t.getName())
t.start()
self.start()
return t
def log(self, msg='', level=20, traceback=False):
"""Log the given message. Append the last traceback if requested."""
if traceback:
msg += '\n' + ''.join(_traceback.format_exception(*sys.exc_info()))
self.publish('log', msg, level)
bus = Bus()
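# A hedged usage sketch of the module-level bus (the listener body is
# illustrative): listeners are registered per channel, start() publishes
# 'start', and block() waits for the EXITING state before handling any
# requested re-exec.
#
#     def on_start():
#         bus.log('site starting')
#
#     bus.subscribe('start', on_start, priority=10)
#     bus.start()
#     bus.block()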
|
from typing import Any, Dict, List
import voluptuous as vol
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_SUPPORTED_FEATURES,
CONF_ABOVE,
CONF_BELOW,
CONF_CONDITION,
CONF_DEVICE_ID,
CONF_DOMAIN,
CONF_ENTITY_ID,
CONF_TYPE,
STATE_CLOSED,
STATE_CLOSING,
STATE_OPEN,
STATE_OPENING,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import (
condition,
config_validation as cv,
entity_registry,
template,
)
from homeassistant.helpers.config_validation import DEVICE_CONDITION_BASE_SCHEMA
from homeassistant.helpers.typing import ConfigType, TemplateVarsType
from . import (
DOMAIN,
SUPPORT_CLOSE,
SUPPORT_OPEN,
SUPPORT_SET_POSITION,
SUPPORT_SET_TILT_POSITION,
)
POSITION_CONDITION_TYPES = {"is_position", "is_tilt_position"}
STATE_CONDITION_TYPES = {"is_open", "is_closed", "is_opening", "is_closing"}
POSITION_CONDITION_SCHEMA = vol.All(
DEVICE_CONDITION_BASE_SCHEMA.extend(
{
vol.Required(CONF_ENTITY_ID): cv.entity_id,
vol.Required(CONF_TYPE): vol.In(POSITION_CONDITION_TYPES),
vol.Optional(CONF_ABOVE): vol.All(
vol.Coerce(int), vol.Range(min=0, max=100)
),
vol.Optional(CONF_BELOW): vol.All(
vol.Coerce(int), vol.Range(min=0, max=100)
),
}
),
cv.has_at_least_one_key(CONF_BELOW, CONF_ABOVE),
)
STATE_CONDITION_SCHEMA = DEVICE_CONDITION_BASE_SCHEMA.extend(
{
vol.Required(CONF_ENTITY_ID): cv.entity_id,
vol.Required(CONF_TYPE): vol.In(STATE_CONDITION_TYPES),
}
)
CONDITION_SCHEMA = vol.Any(POSITION_CONDITION_SCHEMA, STATE_CONDITION_SCHEMA)
async def async_get_conditions(hass: HomeAssistant, device_id: str) -> List[dict]:
"""List device conditions for Cover devices."""
registry = await entity_registry.async_get_registry(hass)
conditions: List[Dict[str, Any]] = []
# Get all the integrations entities for this device
for entry in entity_registry.async_entries_for_device(registry, device_id):
if entry.domain != DOMAIN:
continue
state = hass.states.get(entry.entity_id)
if not state or ATTR_SUPPORTED_FEATURES not in state.attributes:
continue
supported_features = state.attributes[ATTR_SUPPORTED_FEATURES]
supports_open_close = supported_features & (SUPPORT_OPEN | SUPPORT_CLOSE)
# Add conditions for each entity that belongs to this integration
if supports_open_close:
conditions.append(
{
CONF_CONDITION: "device",
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "is_open",
}
)
conditions.append(
{
CONF_CONDITION: "device",
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "is_closed",
}
)
conditions.append(
{
CONF_CONDITION: "device",
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "is_opening",
}
)
conditions.append(
{
CONF_CONDITION: "device",
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "is_closing",
}
)
if supported_features & SUPPORT_SET_POSITION:
conditions.append(
{
CONF_CONDITION: "device",
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "is_position",
}
)
if supported_features & SUPPORT_SET_TILT_POSITION:
conditions.append(
{
CONF_CONDITION: "device",
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "is_tilt_position",
}
)
return conditions
async def async_get_condition_capabilities(hass: HomeAssistant, config: dict) -> dict:
"""List condition capabilities."""
if config[CONF_TYPE] not in ["is_position", "is_tilt_position"]:
return {}
return {
"extra_fields": vol.Schema(
{
vol.Optional(CONF_ABOVE, default=0): vol.All(
vol.Coerce(int), vol.Range(min=0, max=100)
),
vol.Optional(CONF_BELOW, default=100): vol.All(
vol.Coerce(int), vol.Range(min=0, max=100)
),
}
)
}
@callback
def async_condition_from_config(
config: ConfigType, config_validation: bool
) -> condition.ConditionCheckerType:
"""Create a function to test a device condition."""
if config_validation:
config = CONDITION_SCHEMA(config)
if config[CONF_TYPE] in STATE_CONDITION_TYPES:
if config[CONF_TYPE] == "is_open":
state = STATE_OPEN
elif config[CONF_TYPE] == "is_closed":
state = STATE_CLOSED
elif config[CONF_TYPE] == "is_opening":
state = STATE_OPENING
elif config[CONF_TYPE] == "is_closing":
state = STATE_CLOSING
def test_is_state(hass: HomeAssistant, variables: TemplateVarsType) -> bool:
"""Test if an entity is a certain state."""
return condition.state(hass, config[ATTR_ENTITY_ID], state)
return test_is_state
if config[CONF_TYPE] == "is_position":
position = "current_position"
if config[CONF_TYPE] == "is_tilt_position":
position = "current_tilt_position"
min_pos = config.get(CONF_ABOVE)
max_pos = config.get(CONF_BELOW)
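    # The position check is evaluated as a numeric-state condition: the template
    # below renders the cover's current (tilt) position attribute, which is then
    # compared against the configured above/below bounds.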
value_template = template.Template( # type: ignore
f"{{{{ state.attributes.{position} }}}}"
)
@callback
def template_if(hass: HomeAssistant, variables: TemplateVarsType = None) -> bool:
"""Validate template based if-condition."""
value_template.hass = hass
return condition.async_numeric_state(
hass, config[ATTR_ENTITY_ID], max_pos, min_pos, value_template
)
return template_if
|
import unittest
from pgmpy.factors import FactorSet
from pgmpy.factors.discrete import DiscreteFactor
class TestFactorSet(unittest.TestCase):
def setUp(self):
self.phi1 = DiscreteFactor(["x1", "x2", "x3"], [2, 3, 2], range(12))
self.phi2 = DiscreteFactor(["x3", "x4", "x1"], [2, 2, 2], range(8))
self.phi3 = DiscreteFactor(["x5", "x6", "x7"], [2, 2, 2], range(8))
self.phi4 = DiscreteFactor(["x5", "x7", "x8"], [2, 2, 2], range(8))
def test_class_init(self):
phi1 = DiscreteFactor(["x1", "x2", "x3"], [2, 3, 2], range(12))
phi2 = DiscreteFactor(["x3", "x4", "x1"], [2, 2, 2], range(8))
factor_set1 = FactorSet(phi1, phi2)
self.assertEqual({phi1, phi2}, factor_set1.get_factors())
def test_factorset_add_remove_factors(self):
self.factor_set1 = FactorSet()
self.factor_set1.add_factors(self.phi1, self.phi2)
self.assertEqual({self.phi1, self.phi2}, self.factor_set1.get_factors())
self.factor_set1.remove_factors(self.phi2)
self.assertEqual({self.phi1}, self.factor_set1.get_factors())
def test_factorset_product(self):
factor_set1 = FactorSet(self.phi1, self.phi2)
factor_set2 = FactorSet(self.phi3, self.phi4)
factor_set3 = factor_set2.product(factor_set1, inplace=False)
self.assertEqual(
{self.phi1, self.phi2, self.phi3, self.phi4}, factor_set3.factors
)
def test_factorset_divide(self):
phi1 = DiscreteFactor(["x1", "x2", "x3"], [2, 3, 2], range(1, 13))
phi2 = DiscreteFactor(["x3", "x4", "x1"], [2, 2, 2], range(1, 9))
factor_set1 = FactorSet(phi1, phi2)
phi3 = DiscreteFactor(["x5", "x6", "x7"], [2, 2, 2], range(1, 9))
phi4 = DiscreteFactor(["x5", "x7", "x8"], [2, 2, 2], range(1, 9))
factor_set2 = FactorSet(phi3, phi4)
factor_set3 = factor_set2.divide(factor_set1, inplace=False)
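        # Dividing by a factor set adds the reciprocal (identity_factor / phi)
        # of each factor in the divisor to the resulting set.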
self.assertEqual(
{phi3, phi4, phi1.identity_factor() / phi1, phi2.identity_factor() / phi2},
factor_set3.factors,
)
def test_factorset_marginalize_inplace(self):
factor_set = FactorSet(self.phi1, self.phi2, self.phi3, self.phi4)
factor_set.marginalize(["x1", "x5"], inplace=True)
phi1_equivalent_in_factor_set = list(
filter(lambda x: set(x.scope()) == {"x2", "x3"}, factor_set.factors)
)[0]
self.assertEqual(
self.phi1.marginalize(["x1"], inplace=False), phi1_equivalent_in_factor_set
)
phi2_equivalent_in_factor_set = list(
filter(lambda x: set(x.scope()) == {"x4", "x3"}, factor_set.factors)
)[0]
self.assertEqual(
self.phi2.marginalize(["x1"], inplace=False), phi2_equivalent_in_factor_set
)
phi3_equivalent_in_factor_set = list(
filter(lambda x: set(x.scope()) == {"x6", "x7"}, factor_set.factors)
)[0]
self.assertEqual(
self.phi3.marginalize(["x5"], inplace=False), phi3_equivalent_in_factor_set
)
phi4_equivalent_in_factor_set = list(
filter(lambda x: set(x.scope()) == {"x8", "x7"}, factor_set.factors)
)[0]
self.assertEqual(
self.phi4.marginalize(["x5"], inplace=False), phi4_equivalent_in_factor_set
)
def test_factorset_marginalize_not_inplace(self):
factor_set = FactorSet(self.phi1, self.phi2, self.phi3, self.phi4)
new_factor_set = factor_set.marginalize(["x1", "x5"], inplace=False)
phi1_equivalent_in_factor_set = list(
filter(lambda x: set(x.scope()) == {"x2", "x3"}, new_factor_set.factors)
)[0]
self.assertEqual(
self.phi1.marginalize(["x1"], inplace=False), phi1_equivalent_in_factor_set
)
phi2_equivalent_in_factor_set = list(
filter(lambda x: set(x.scope()) == {"x4", "x3"}, new_factor_set.factors)
)[0]
self.assertEqual(
self.phi2.marginalize(["x1"], inplace=False), phi2_equivalent_in_factor_set
)
phi3_equivalent_in_factor_set = list(
filter(lambda x: set(x.scope()) == {"x6", "x7"}, new_factor_set.factors)
)[0]
self.assertEqual(
self.phi3.marginalize(["x5"], inplace=False), phi3_equivalent_in_factor_set
)
phi4_equivalent_in_factor_set = list(
filter(lambda x: set(x.scope()) == {"x8", "x7"}, new_factor_set.factors)
)[0]
self.assertEqual(
self.phi4.marginalize(["x5"], inplace=False), phi4_equivalent_in_factor_set
)
|
from .const import ERR_CHALLENGE_NEEDED
class SmartHomeError(Exception):
"""Google Assistant Smart Home errors.
https://developers.google.com/actions/smarthome/create-app#error_responses
"""
def __init__(self, code, msg):
"""Log error code."""
super().__init__(msg)
self.code = code
def to_response(self):
"""Convert to a response format."""
return {"errorCode": self.code}
class ChallengeNeeded(SmartHomeError):
"""Google Assistant Smart Home errors.
https://developers.google.com/actions/smarthome/create-app#error_responses
"""
def __init__(self, challenge_type):
"""Initialize challenge needed error."""
super().__init__(ERR_CHALLENGE_NEEDED, f"Challenge needed: {challenge_type}")
self.challenge_type = challenge_type
def to_response(self):
"""Convert to a response format."""
return {
"errorCode": self.code,
"challengeNeeded": {"type": self.challenge_type},
}
|
import diamond.collector
import os
import re
_RE = re.compile(r'([A-Za-z0-9._-]+)[\s=:]+(-?[0-9]+)(\.?\d*)')
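# Matches lines such as "metric.name: 42" or "metric_name = -3.14"; group 1 is
# the metric name, group 2 the integer part and group 3 the optional fractional
# part (including the leading dot).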
class FilesCollector(diamond.collector.Collector):
def get_default_config_help(self):
config_help = super(FilesCollector, self).get_default_config_help()
config_help.update({
'path': 'Prefix added to all stats collected by this module, a '
                    "single dot means don't add prefix",
'dir': 'The directory that the performance files are in',
'delete': 'Delete files after they are picked up',
})
return config_help
def get_default_config(self):
"""
Returns default collector settings.
"""
config = super(FilesCollector, self).get_default_config()
config.update({
'path': '.',
'dir': '/tmp/diamond',
'delete': False,
})
return config
def collect(self):
if os.path.exists(self.config['dir']):
for fn in os.listdir(self.config['dir']):
if os.path.isfile(os.path.join(self.config['dir'], fn)):
try:
fh = open(os.path.join(self.config['dir'], fn))
found = False
for line in fh:
m = _RE.match(line)
if (m):
self.publish(
m.groups()[0],
m.groups()[1] + m.groups()[2],
precision=max(0, len(m.groups()[2]) - 1))
found = True
fh.close()
if (found and self.config['delete']):
os.unlink(os.path.join(self.config['dir'], fn))
except:
pass
|
import diamond.collector
import diamond.convertor
import os
import re
_KEY_MAPPING = [
'cache',
'rss',
'swap',
'total_rss',
'total_cache',
'total_swap',
]
class MemoryCgroupCollector(diamond.collector.Collector):
def process_config(self):
super(MemoryCgroupCollector, self).process_config()
self.memory_path = self.config['memory_path']
self.skip = self.config['skip']
if not isinstance(self.skip, list):
self.skip = [self.skip]
self.skip = [re.compile(e) for e in self.skip]
def should_skip(self, path):
for skip_re in self.skip:
if skip_re.search(path):
return True
return False
def get_default_config_help(self):
config_help = super(
MemoryCgroupCollector, self).get_default_config_help()
config_help.update({
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(MemoryCgroupCollector, self).get_default_config()
config.update({
'path': 'memory_cgroup',
'memory_path': '/sys/fs/cgroup/memory/',
'skip': [],
})
return config
def collect(self):
# find all memory.stat files
matches = []
for root, dirnames, filenames in os.walk(self.memory_path):
if not self.should_skip(root):
for filename in filenames:
if filename == 'memory.stat':
                        # matches will contain tuples of (parent cgroup name,
                        # path to its memory.stat file)
parent = root.replace(self.memory_path,
"").replace("/", ".")
if parent == '':
parent = 'system'
matches.append((parent, os.path.join(root, filename)))
        # Read metrics from the collected memory.stat files
results = {}
for match in matches:
results[match[0]] = {}
stat_file = open(match[1])
elements = [line.split() for line in stat_file]
stat_file.close()
for el in elements:
name, value = el
if name not in _KEY_MAPPING:
continue
for unit in self.config['byte_unit']:
value = diamond.convertor.binary.convert(
value=value, oldUnit='B', newUnit=unit)
results[match[0]][name] = value
# TODO: We only support one unit node here. Fix it!
break
        # Publish a gauge metric for each collected memory value per cgroup
        for parent, memory_stats in results.iteritems():
            for key, value in memory_stats.iteritems():
metric_name = '.'.join([parent, key])
self.publish(metric_name, value, metric_type='GAUGE')
return True
|
DOMAIN = "cloud"
REQUEST_TIMEOUT = 10
PREF_ENABLE_ALEXA = "alexa_enabled"
PREF_ENABLE_GOOGLE = "google_enabled"
PREF_ENABLE_REMOTE = "remote_enabled"
PREF_GOOGLE_SECURE_DEVICES_PIN = "google_secure_devices_pin"
PREF_CLOUDHOOKS = "cloudhooks"
PREF_CLOUD_USER = "cloud_user"
PREF_GOOGLE_ENTITY_CONFIGS = "google_entity_configs"
PREF_GOOGLE_REPORT_STATE = "google_report_state"
PREF_ALEXA_ENTITY_CONFIGS = "alexa_entity_configs"
PREF_ALEXA_REPORT_STATE = "alexa_report_state"
PREF_OVERRIDE_NAME = "override_name"
PREF_DISABLE_2FA = "disable_2fa"
PREF_ALIASES = "aliases"
PREF_SHOULD_EXPOSE = "should_expose"
PREF_GOOGLE_LOCAL_WEBHOOK_ID = "google_local_webhook_id"
PREF_USERNAME = "username"
PREF_ALEXA_DEFAULT_EXPOSE = "alexa_default_expose"
PREF_GOOGLE_DEFAULT_EXPOSE = "google_default_expose"
DEFAULT_DISABLE_2FA = False
DEFAULT_ALEXA_REPORT_STATE = False
DEFAULT_GOOGLE_REPORT_STATE = False
DEFAULT_EXPOSED_DOMAINS = [
"climate",
"cover",
"fan",
"humidifier",
"light",
"lock",
"scene",
"script",
"sensor",
"switch",
"vacuum",
"water_heater",
]
CONF_ALEXA = "alexa"
CONF_ALIASES = "aliases"
CONF_COGNITO_CLIENT_ID = "cognito_client_id"
CONF_ENTITY_CONFIG = "entity_config"
CONF_FILTER = "filter"
CONF_GOOGLE_ACTIONS = "google_actions"
CONF_RELAYER = "relayer"
CONF_USER_POOL_ID = "user_pool_id"
CONF_SUBSCRIPTION_INFO_URL = "subscription_info_url"
CONF_CLOUDHOOK_CREATE_URL = "cloudhook_create_url"
CONF_REMOTE_API_URL = "remote_api_url"
CONF_ACME_DIRECTORY_SERVER = "acme_directory_server"
CONF_ALEXA_ACCESS_TOKEN_URL = "alexa_access_token_url"
CONF_GOOGLE_ACTIONS_REPORT_STATE_URL = "google_actions_report_state_url"
CONF_ACCOUNT_LINK_URL = "account_link_url"
CONF_VOICE_API_URL = "voice_api_url"
MODE_DEV = "development"
MODE_PROD = "production"
DISPATCHER_REMOTE_UPDATE = "cloud_remote_update"
class InvalidTrustedNetworks(Exception):
"""Raised when invalid trusted networks config."""
class InvalidTrustedProxies(Exception):
"""Raised when invalid trusted proxies config."""
class RequireRelink(Exception):
"""The skill needs to be relinked."""
|
from homeassistant import config_entries, data_entry_flow, setup
from homeassistant.components.home_connect.const import (
DOMAIN,
OAUTH2_AUTHORIZE,
OAUTH2_TOKEN,
)
from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET
from homeassistant.helpers import config_entry_oauth2_flow
from tests.async_mock import patch
CLIENT_ID = "1234"
CLIENT_SECRET = "5678"
async def test_full_flow(hass, aiohttp_client, aioclient_mock, current_request):
"""Check full flow."""
assert await setup.async_setup_component(
hass,
"home_connect",
{
"home_connect": {
CONF_CLIENT_ID: CLIENT_ID,
CONF_CLIENT_SECRET: CLIENT_SECRET,
},
"http": {"base_url": "https://example.com"},
},
)
result = await hass.config_entries.flow.async_init(
"home_connect", context={"source": config_entries.SOURCE_USER}
)
state = config_entry_oauth2_flow._encode_jwt(hass, {"flow_id": result["flow_id"]})
assert result["type"] == data_entry_flow.RESULT_TYPE_EXTERNAL_STEP
assert result["url"] == (
f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}"
"&redirect_uri=https://example.com/auth/external/callback"
f"&state={state}"
)
client = await aiohttp_client(hass.http.app)
resp = await client.get(f"/auth/external/callback?code=abcd&state={state}")
assert resp.status == 200
assert resp.headers["content-type"] == "text/html; charset=utf-8"
aioclient_mock.post(
OAUTH2_TOKEN,
json={
"refresh_token": "mock-refresh-token",
"access_token": "mock-access-token",
"type": "Bearer",
"expires_in": 60,
},
)
with patch(
"homeassistant.components.home_connect.async_setup_entry", return_value=True
) as mock_setup_entry:
result = await hass.config_entries.flow.async_configure(result["flow_id"])
await hass.async_block_till_done()
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
assert len(mock_setup_entry.mock_calls) == 1
|
import zigpy.zcl.clusters.protocol as protocol
from .. import registries
from .base import ZigbeeChannel
@registries.ZIGBEE_CHANNEL_REGISTRY.register(protocol.AnalogInputExtended.cluster_id)
class AnalogInputExtended(ZigbeeChannel):
"""Analog Input Extended channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(protocol.AnalogInputRegular.cluster_id)
class AnalogInputRegular(ZigbeeChannel):
"""Analog Input Regular channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(protocol.AnalogOutputExtended.cluster_id)
class AnalogOutputExtended(ZigbeeChannel):
"""Analog Output Regular channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(protocol.AnalogOutputRegular.cluster_id)
class AnalogOutputRegular(ZigbeeChannel):
"""Analog Output Regular channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(protocol.AnalogValueExtended.cluster_id)
class AnalogValueExtended(ZigbeeChannel):
"""Analog Value Extended edition channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(protocol.AnalogValueRegular.cluster_id)
class AnalogValueRegular(ZigbeeChannel):
"""Analog Value Regular channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(protocol.BacnetProtocolTunnel.cluster_id)
class BacnetProtocolTunnel(ZigbeeChannel):
"""Bacnet Protocol Tunnel channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(protocol.BinaryInputExtended.cluster_id)
class BinaryInputExtended(ZigbeeChannel):
"""Binary Input Extended channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(protocol.BinaryInputRegular.cluster_id)
class BinaryInputRegular(ZigbeeChannel):
"""Binary Input Regular channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(protocol.BinaryOutputExtended.cluster_id)
class BinaryOutputExtended(ZigbeeChannel):
"""Binary Output Extended channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(protocol.BinaryOutputRegular.cluster_id)
class BinaryOutputRegular(ZigbeeChannel):
"""Binary Output Regular channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(protocol.BinaryValueExtended.cluster_id)
class BinaryValueExtended(ZigbeeChannel):
"""Binary Value Extended channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(protocol.BinaryValueRegular.cluster_id)
class BinaryValueRegular(ZigbeeChannel):
"""Binary Value Regular channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(protocol.GenericTunnel.cluster_id)
class GenericTunnel(ZigbeeChannel):
"""Generic Tunnel channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(
protocol.MultistateInputExtended.cluster_id
)
class MultiStateInputExtended(ZigbeeChannel):
"""Multistate Input Extended channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(protocol.MultistateInputRegular.cluster_id)
class MultiStateInputRegular(ZigbeeChannel):
"""Multistate Input Regular channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(
protocol.MultistateOutputExtended.cluster_id
)
class MultiStateOutputExtended(ZigbeeChannel):
"""Multistate Output Extended channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(
protocol.MultistateOutputRegular.cluster_id
)
class MultiStateOutputRegular(ZigbeeChannel):
"""Multistate Output Regular channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(
protocol.MultistateValueExtended.cluster_id
)
class MultiStateValueExtended(ZigbeeChannel):
"""Multistate Value Extended channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(protocol.MultistateValueRegular.cluster_id)
class MultiStateValueRegular(ZigbeeChannel):
"""Multistate Value Regular channel."""
|
from pyhap.accessory_driver import AccessoryDriver
import pytest
from homeassistant.components.homekit.const import EVENT_HOMEKIT_CHANGED
from homeassistant.core import callback as ha_callback
from tests.async_mock import patch
@pytest.fixture
def hk_driver(loop):
"""Return a custom AccessoryDriver instance for HomeKit accessory init."""
with patch("pyhap.accessory_driver.Zeroconf"), patch(
"pyhap.accessory_driver.AccessoryEncoder"
), patch("pyhap.accessory_driver.HAPServer"), patch(
"pyhap.accessory_driver.AccessoryDriver.publish"
), patch(
"pyhap.accessory_driver.AccessoryDriver.persist"
):
yield AccessoryDriver(pincode=b"123-45-678", address="127.0.0.1", loop=loop)
@pytest.fixture
def events(hass):
"""Yield caught homekit_changed events."""
events = []
hass.bus.async_listen(
EVENT_HOMEKIT_CHANGED, ha_callback(lambda e: events.append(e))
)
yield events
|
from lemur.plugins.base import Plugin
class SourcePlugin(Plugin):
type = "source"
default_options = [
{
"name": "pollRate",
"type": "int",
"required": False,
"helpMessage": "Rate in seconds to poll source for new information.",
"default": "60",
}
]
def get_certificates(self, options, **kwargs):
raise NotImplementedError
def get_endpoints(self, options, **kwargs):
raise NotImplementedError
def clean(self, certificate, options, **kwargs):
raise NotImplementedError
@property
def options(self):
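        """Return the default options merged with the plugin's additional options."""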
return self.default_options + self.additional_options
|
from homeassistant.components import smhi
from .common import AsyncMock
from tests.async_mock import Mock
TEST_CONFIG = {
"config": {
"name": "0123456789ABCDEF",
"longitude": "62.0022",
"latitude": "17.0022",
}
}
async def test_setup_always_return_true() -> None:
"""Test async_setup always returns True."""
hass = Mock()
# Returns true with empty config
assert await smhi.async_setup(hass, {}) is True
# Returns true with a config provided
assert await smhi.async_setup(hass, TEST_CONFIG) is True
async def test_forward_async_setup_entry() -> None:
"""Test that it will forward setup entry."""
hass = Mock()
assert await smhi.async_setup_entry(hass, {}) is True
assert len(hass.config_entries.async_forward_entry_setup.mock_calls) == 1
async def test_forward_async_unload_entry() -> None:
"""Test that it will forward unload entry."""
hass = AsyncMock()
assert await smhi.async_unload_entry(hass, {}) is True
assert len(hass.config_entries.async_forward_entry_unload.mock_calls) == 1
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import flags
from absl.flags import _helpers
from absl.flags.tests import module_bar
FLAGS = flags.FLAGS
DECLARED_KEY_FLAGS = ['tmod_bar_x', 'tmod_bar_z', 'tmod_bar_t',
# Special (not user-defined) flag:
'flagfile']
def define_flags(flag_values=FLAGS):
"""Defines a few flags."""
module_bar.define_flags(flag_values=flag_values)
# The 'tmod_foo_' prefix (short for 'test_module_foo') ensures that we
# have no name clash with existing flags.
flags.DEFINE_boolean('tmod_foo_bool', True, 'Boolean flag from module foo.',
flag_values=flag_values)
flags.DEFINE_string('tmod_foo_str', 'default', 'String flag.',
flag_values=flag_values)
flags.DEFINE_integer('tmod_foo_int', 3, 'Sample int flag.',
flag_values=flag_values)
def declare_key_flags(flag_values=FLAGS):
"""Declares a few key flags."""
for flag_name in DECLARED_KEY_FLAGS:
flags.declare_key_flag(flag_name, flag_values=flag_values)
def declare_extra_key_flags(flag_values=FLAGS):
"""Declares some extra key flags."""
flags.adopt_module_key_flags(module_bar, flag_values=flag_values)
def names_of_defined_flags():
"""Returns: list of names of flags defined by this module."""
return ['tmod_foo_bool', 'tmod_foo_str', 'tmod_foo_int']
def names_of_declared_key_flags():
"""Returns: list of names of key flags for this module."""
return names_of_defined_flags() + DECLARED_KEY_FLAGS
def names_of_declared_extra_key_flags():
"""Returns the list of names of additional key flags for this module.
These are the flags that became key for this module only as a result
of a call to declare_extra_key_flags() above. I.e., the flags declared
by module_bar, that were not already declared as key for this
module.
Returns:
The list of names of additional key flags for this module.
"""
names_of_extra_key_flags = list(module_bar.names_of_defined_flags())
for flag_name in names_of_declared_key_flags():
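    # Use a while loop so that every occurrence of the name is removed.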
while flag_name in names_of_extra_key_flags:
names_of_extra_key_flags.remove(flag_name)
return names_of_extra_key_flags
def remove_flags(flag_values=FLAGS):
"""Deletes the flag definitions done by the above define_flags()."""
for flag_name in names_of_defined_flags():
module_bar.remove_one_flag(flag_name, flag_values=flag_values)
module_bar.remove_flags(flag_values=flag_values)
def get_module_name():
"""Uses get_calling_module() to return the name of this module.
For checking that _get_calling_module works as expected.
Returns:
A string, the name of this module.
"""
return _helpers.get_calling_module()
def duplicate_flags(flagnames=None):
"""Returns a new FlagValues object with the requested flagnames.
Used to test DuplicateFlagError detection.
Args:
    flagnames: list of str, the flag names to create.
Returns:
A FlagValues object with one boolean flag for each name in flagnames.
"""
flag_values = flags.FlagValues()
for name in flagnames:
flags.DEFINE_boolean(name, False, 'Flag named %s' % (name,),
flag_values=flag_values)
return flag_values
def define_bar_flags(flag_values=FLAGS):
"""Defines flags from module_bar."""
module_bar.define_flags(flag_values)
|
from datetime import timedelta
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import (
CONF_HOST,
CONF_PORT,
DEVICE_CLASS_TIMESTAMP,
EVENT_HOMEASSISTANT_START,
TIME_DAYS,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import async_call_later
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from homeassistant.util import dt
from .const import DEFAULT_PORT, DOMAIN
SCAN_INTERVAL = timedelta(hours=12)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up certificate expiry sensor."""
@callback
def schedule_import(_):
"""Schedule delayed import after HA is fully started."""
async_call_later(hass, 10, do_import)
@callback
def do_import(_):
"""Process YAML import."""
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=dict(config)
)
)
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, schedule_import)
async def async_setup_entry(hass, entry, async_add_entities):
"""Add cert-expiry entry."""
coordinator = hass.data[DOMAIN][entry.entry_id]
sensors = [
SSLCertificateDays(coordinator),
SSLCertificateTimestamp(coordinator),
]
async_add_entities(sensors, True)
class CertExpiryEntity(CoordinatorEntity):
"""Defines a base Cert Expiry entity."""
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return "mdi:certificate"
@property
def device_state_attributes(self):
"""Return additional sensor state attributes."""
return {
"is_valid": self.coordinator.is_cert_valid,
"error": str(self.coordinator.cert_error),
}
class SSLCertificateDays(CertExpiryEntity):
"""Implementation of the Cert Expiry days sensor."""
@property
def name(self):
"""Return the name of the sensor."""
return f"Cert Expiry ({self.coordinator.name})"
@property
def state(self):
"""Return the state of the sensor."""
if not self.coordinator.is_cert_valid:
return 0
expiry = self.coordinator.data - dt.utcnow()
return expiry.days
@property
def unique_id(self):
"""Return a unique id for the sensor."""
return f"{self.coordinator.host}:{self.coordinator.port}"
@property
def unit_of_measurement(self):
"""Return the unit this state is expressed in."""
return TIME_DAYS
class SSLCertificateTimestamp(CertExpiryEntity):
"""Implementation of the Cert Expiry timestamp sensor."""
@property
def device_class(self):
"""Return the device class of the sensor."""
return DEVICE_CLASS_TIMESTAMP
@property
def name(self):
"""Return the name of the sensor."""
return f"Cert Expiry Timestamp ({self.coordinator.name})"
@property
def state(self):
"""Return the state of the sensor."""
if self.coordinator.data:
return self.coordinator.data.isoformat()
return None
@property
def unique_id(self):
"""Return a unique id for the sensor."""
return f"{self.coordinator.host}:{self.coordinator.port}-timestamp"
|
import json
import os
import time
import mock
import requests
import requests_cache
import service_configuration_lib
from marathon import NotFoundError
from paasta_tools import marathon_tools
from paasta_tools import mesos_tools
from paasta_tools.marathon_tools import app_has_tasks
from paasta_tools.marathon_tools import MarathonServiceConfig
from paasta_tools.utils import timeout
def update_context_marathon_config(context):
whitelist_keys = {
"id",
"backoff_factor",
"backoff_seconds",
"max_instances",
"mem",
"cpus",
"instances",
"marathon_shard",
"previous_marathon_shards",
}
with mock.patch.object(
MarathonServiceConfig, "get_min_instances", autospec=True, return_value=1
), mock.patch.object(
MarathonServiceConfig, "get_max_instances", autospec=True
) as mock_get_max_instances:
mock_get_max_instances.return_value = (
context.max_instances if "max_instances" in context else None
)
service_configuration_lib._yaml_cache = {}
context.job_config = marathon_tools.load_marathon_service_config_no_cache(
service=context.service,
instance=context.instance,
cluster=context.system_paasta_config.get_cluster(),
soa_dir=context.soa_dir,
)
context.current_client = context.marathon_clients.get_current_client_for_service(
context.job_config
)
context.marathon_complete_config = {
key: value
for key, value in context.job_config.format_marathon_app_dict().items()
if key in whitelist_keys
}
context.marathon_complete_config.update(
{
"cmd": "/bin/sleep 1m",
"constraints": None,
"container": {
"type": "DOCKER",
"docker": {"network": "BRIDGE", "image": "busybox"},
},
}
)
if "max_instances" not in context:
context.marathon_complete_config["instances"] = context.instances
def get_service_connection_string(service):
"""Given a container name this function returns
the host and ephemeral port that you need to use to connect to. For example
if you are spinning up a 'web' container that inside listens on 80, this
function would return 0.0.0.0:23493 or whatever ephemeral forwarded port
it has from docker-compose"""
service = service.upper()
raw_host_port = os.environ["%s_PORT" % service]
# Remove leading tcp:// or similar
host_port = raw_host_port.split("://")[1]
return host_port
@timeout(
30, error_message="Marathon service is not available. Cancelling integration tests"
)
def wait_for_marathon():
"""Waits for marathon to start. Maximum 30 seconds"""
marathon_service = get_service_connection_string("marathon")
while True:
print("Connecting marathon on %s" % marathon_service)
try:
response = requests.get("http://%s/ping" % marathon_service, timeout=5)
except (requests.exceptions.ConnectionError, requests.exceptions.Timeout):
time.sleep(5)
continue
if response.status_code == 200:
print("Marathon is up and running!")
break
@timeout(30)
def wait_for_app_to_launch_tasks(
client, app_id, expected_tasks, exact_matches_only=False
):
""" Wait for an app to have num_tasks tasks launched. If the app isn't found, then this will swallow the exception
and retry. Times out after 30 seconds.
:param client: The marathon client
:param app_id: The app id to which the tasks belong
:param expected_tasks: The number of tasks to wait for
:param exact_matches_only: a boolean indicating whether we require exactly expected_tasks to be running
"""
found = False
with requests_cache.disabled():
while not found:
try:
found = app_has_tasks(
client, app_id, expected_tasks, exact_matches_only
)
except NotFoundError:
pass
if found:
time.sleep(3) # Give it a bit more time to actually launch
return
else:
print(
"waiting for app %s to have %d tasks. retrying"
% (app_id, expected_tasks)
)
time.sleep(0.5)
def setup_mesos_cli_config(config_file, cluster):
"""Creates a mesos-cli.json config file for mesos.cli module.
Sets up the environment dictionary to point to that file"""
zookeeper_service = get_service_connection_string("zookeeper")
mesos_cli_config = {
"profile": "default",
"default": {
"master": f"zk://{zookeeper_service}/mesos-{cluster}",
"log_file": "None",
"response_timeout": 5,
},
}
print("Generating mesos.cli config file: %s" % config_file)
with open(config_file, "w") as fp:
json.dump(mesos_cli_config, fp)
os.environ["MESOS_CLI_CONFIG"] = config_file
def cleanup_file(path_to_file):
"""Removes the given file"""
print("Removing generated file: %s" % path_to_file)
os.remove(path_to_file)
def clear_mesos_tools_cache():
try:
del mesos_tools.master.CURRENT._cache
print("cleared mesos_tools.master.CURRENT._cache")
except AttributeError:
pass
|
import asyncio
from datetime import timedelta
import logging
from aio_geojson_geonetnz_quakes import GeonetnzQuakesFeedManager
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import (
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_RADIUS,
CONF_SCAN_INTERVAL,
CONF_UNIT_SYSTEM_IMPERIAL,
LENGTH_MILES,
)
from homeassistant.core import callback
from homeassistant.helpers import aiohttp_client, config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.util.unit_system import METRIC_SYSTEM
from .const import (
CONF_MINIMUM_MAGNITUDE,
CONF_MMI,
DEFAULT_FILTER_TIME_INTERVAL,
DEFAULT_MINIMUM_MAGNITUDE,
DEFAULT_MMI,
DEFAULT_RADIUS,
DEFAULT_SCAN_INTERVAL,
DOMAIN,
FEED,
PLATFORMS,
)
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Inclusive(CONF_LATITUDE, "coordinates"): cv.latitude,
vol.Inclusive(CONF_LONGITUDE, "coordinates"): cv.longitude,
vol.Optional(CONF_MMI, default=DEFAULT_MMI): vol.All(
vol.Coerce(int), vol.Range(min=-1, max=8)
),
vol.Optional(CONF_RADIUS, default=DEFAULT_RADIUS): vol.Coerce(float),
vol.Optional(
CONF_MINIMUM_MAGNITUDE, default=DEFAULT_MINIMUM_MAGNITUDE
): cv.positive_float,
vol.Optional(
CONF_SCAN_INTERVAL, default=DEFAULT_SCAN_INTERVAL
): cv.time_period,
}
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, config):
"""Set up the GeoNet NZ Quakes component."""
if DOMAIN not in config:
return True
conf = config[DOMAIN]
latitude = conf.get(CONF_LATITUDE, hass.config.latitude)
longitude = conf.get(CONF_LONGITUDE, hass.config.longitude)
mmi = conf[CONF_MMI]
scan_interval = conf[CONF_SCAN_INTERVAL]
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={
CONF_LATITUDE: latitude,
CONF_LONGITUDE: longitude,
CONF_RADIUS: conf[CONF_RADIUS],
CONF_MINIMUM_MAGNITUDE: conf[CONF_MINIMUM_MAGNITUDE],
CONF_MMI: mmi,
CONF_SCAN_INTERVAL: scan_interval,
},
)
)
return True
async def async_setup_entry(hass, config_entry):
"""Set up the GeoNet NZ Quakes component as config entry."""
hass.data.setdefault(DOMAIN, {})
feeds = hass.data[DOMAIN].setdefault(FEED, {})
radius = config_entry.data[CONF_RADIUS]
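    # The feed manager expects the radius in kilometres, so convert it if
    # Home Assistant is configured for the imperial unit system.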
if hass.config.units.name == CONF_UNIT_SYSTEM_IMPERIAL:
radius = METRIC_SYSTEM.length(radius, LENGTH_MILES)
# Create feed entity manager for all platforms.
manager = GeonetnzQuakesFeedEntityManager(hass, config_entry, radius)
feeds[config_entry.entry_id] = manager
_LOGGER.debug("Feed entity manager added for %s", config_entry.entry_id)
await manager.async_init()
return True
async def async_unload_entry(hass, config_entry):
"""Unload an GeoNet NZ Quakes component config entry."""
manager = hass.data[DOMAIN][FEED].pop(config_entry.entry_id)
await manager.async_stop()
await asyncio.wait(
[
hass.config_entries.async_forward_entry_unload(config_entry, domain)
for domain in PLATFORMS
]
)
return True
class GeonetnzQuakesFeedEntityManager:
"""Feed Entity Manager for GeoNet NZ Quakes feed."""
def __init__(self, hass, config_entry, radius_in_km):
"""Initialize the Feed Entity Manager."""
self._hass = hass
self._config_entry = config_entry
coordinates = (
config_entry.data[CONF_LATITUDE],
config_entry.data[CONF_LONGITUDE],
)
websession = aiohttp_client.async_get_clientsession(hass)
self._feed_manager = GeonetnzQuakesFeedManager(
websession,
self._generate_entity,
self._update_entity,
self._remove_entity,
coordinates,
mmi=config_entry.data[CONF_MMI],
filter_radius=radius_in_km,
filter_minimum_magnitude=config_entry.data[CONF_MINIMUM_MAGNITUDE],
filter_time=DEFAULT_FILTER_TIME_INTERVAL,
status_callback=self._status_update,
)
self._config_entry_id = config_entry.entry_id
self._scan_interval = timedelta(seconds=config_entry.data[CONF_SCAN_INTERVAL])
self._track_time_remove_callback = None
self._status_info = None
self.listeners = []
async def async_init(self):
"""Schedule initial and regular updates based on configured time interval."""
for domain in PLATFORMS:
self._hass.async_create_task(
self._hass.config_entries.async_forward_entry_setup(
self._config_entry, domain
)
)
async def update(event_time):
"""Update."""
await self.async_update()
# Trigger updates at regular intervals.
self._track_time_remove_callback = async_track_time_interval(
self._hass, update, self._scan_interval
)
_LOGGER.debug("Feed entity manager initialized")
async def async_update(self):
"""Refresh data."""
await self._feed_manager.update()
_LOGGER.debug("Feed entity manager updated")
async def async_stop(self):
"""Stop this feed entity manager from refreshing."""
for unsub_dispatcher in self.listeners:
unsub_dispatcher()
self.listeners = []
if self._track_time_remove_callback:
self._track_time_remove_callback()
_LOGGER.debug("Feed entity manager stopped")
@callback
def async_event_new_entity(self):
"""Return manager specific event to signal new entity."""
return f"geonetnz_quakes_new_geolocation_{self._config_entry_id}"
def get_entry(self, external_id):
"""Get feed entry by external id."""
return self._feed_manager.feed_entries.get(external_id)
def status_info(self):
"""Return latest status update info received."""
return self._status_info
async def _generate_entity(self, external_id):
"""Generate new entity."""
async_dispatcher_send(
self._hass,
self.async_event_new_entity(),
self,
self._config_entry.unique_id,
external_id,
)
async def _update_entity(self, external_id):
"""Update entity."""
async_dispatcher_send(self._hass, f"geonetnz_quakes_update_{external_id}")
async def _remove_entity(self, external_id):
"""Remove entity."""
async_dispatcher_send(self._hass, f"geonetnz_quakes_delete_{external_id}")
async def _status_update(self, status_info):
"""Propagate status update."""
_LOGGER.debug("Status update received: %s", status_info)
self._status_info = status_info
async_dispatcher_send(
self._hass, f"geonetnz_quakes_status_{self._config_entry_id}"
)
|
import logging
import posixpath
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker import flag_util
from perfkitbenchmarker import linux_packages
from perfkitbenchmarker import regex_util
from perfkitbenchmarker import sample
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.linux_packages import tensorflow_serving
FLAGS = flags.FLAGS
CLIENT_SCRIPT = 'tensorflow_serving_client_workload.py'
RESNET_NHWC_SAVEDMODEL_TGZ = 'resnet_v2_fp32_savedmodel_NHWC_jpg.tar.gz'
ILSVRC_VALIDATION_IMAGES_TAR = 'ILSVRC2012_img_val.tar'
SERVER_PORT = 8500
TF_SERVING_BASE_DIRECTORY = tensorflow_serving.TF_SERVING_BASE_DIRECTORY
BENCHMARK_DATA = {
# This ResNet SavedModel (ResNet-50 v2, fp32, Accuracy 76.47%) is from the
# official TF models repo. It takes in JPG as input and is channels-last
# (NHWC), which is generally better for CPU. It is available here:
# http://download.tensorflow.org/models/official/20181001_resnet/savedmodels/resnet_v2_fp32_savedmodel_NHWC_jpg.tar.gz
RESNET_NHWC_SAVEDMODEL_TGZ:
'545965f0f85c87386e51076abc7ef4f9f1decaf641e8a90906f98c6774547e3f',
# Collection of 50,000 imagenet 2012 validation images.
# Available here:
# http://www.image-net.org/challenges/LSVRC/2012/nnoupb/ILSVRC2012_img_val.tar
ILSVRC_VALIDATION_IMAGES_TAR:
'c7e06a6c0baccf06d8dbeb6577d71efff84673a5dbdd50633ab44f8ea0456ae0',
}
BENCHMARK_NAME = 'tensorflow_serving'
BENCHMARK_CONFIG = """
tensorflow_serving:
description: Runs a Tensorflow Serving benchmark.
vm_groups:
clients:
vm_spec:
GCP:
boot_disk_size: 200
machine_type: n1-standard-8
zone: us-central1-a
Azure:
machine_type: Standard_F8s_v2
zone: eastus2
AWS:
boot_disk_size: 200
machine_type: m5.2xlarge
zone: us-east-1f
os_type: ubuntu1604
servers:
vm_spec:
GCP:
boot_disk_size: 200
machine_type: n1-standard-8
zone: us-central1-a
min_cpu_platform: skylake
Azure:
machine_type: Standard_F8s_v2
zone: eastus2
AWS:
boot_disk_size: 200
machine_type: m5.2xlarge
zone: us-east-1f
os_type: ubuntu1604
"""
flags.DEFINE_integer(
'tf_serving_runtime', 60, 'benchmark runtime in seconds', lower_bound=1)
flag_util.DEFINE_integerlist(
'tf_serving_client_thread_counts', [16, 32],
'number of client worker threads',
module_name=__name__)
class ClientWorkloadScriptExecutionError(Exception):
pass
def GetConfig(user_config):
"""Loads and returns benchmark config.
Args:
user_config: user supplied configuration (flags and config file)
Returns:
loaded benchmark configuration
"""
return configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
def CheckPrerequisites(_):
"""Verifies that the required resources are present.
Raises:
perfkitbenchmarker.data.ResourceNotFound: On missing resource.
"""
pass
def _UpdateBenchmarkSpecWithFlags(benchmark_spec):
"""Update the benchmark_spec with supplied command line flags.
Args:
benchmark_spec: benchmark specification to update
"""
del benchmark_spec
def _PrepareClient(vm):
"""Installs Tensorflow Serving on a single client vm.
Args:
vm: client vm to operate on
"""
logging.info('Installing Tensorflow Serving on client %s', vm)
vm.Install('tensorflow_serving')
vm.InstallPreprovisionedBenchmarkData(BENCHMARK_NAME,
[ILSVRC_VALIDATION_IMAGES_TAR],
linux_packages.INSTALL_DIR)
  # The image tarball does not contain a subfolder, so create one named after
  # the tarball (minus the extension) and extract the images into it.
extract_dir = posixpath.join(
linux_packages.INSTALL_DIR,
posixpath.splitext(ILSVRC_VALIDATION_IMAGES_TAR)[0])
vm.RemoteCommand('mkdir {0}'.format(extract_dir))
vm.RemoteCommand('cd {0} && tar xvf {1} --directory {2}'.format(
linux_packages.INSTALL_DIR, ILSVRC_VALIDATION_IMAGES_TAR, extract_dir))
def _PrepareServer(vm):
"""Installs Tensorflow Serving on a single server vm.
Args:
vm: server vm to operate on
"""
logging.info('Installing Tensorflow Serving on server %s', vm)
vm.Install('tensorflow_serving')
vm.InstallPreprovisionedBenchmarkData(
BENCHMARK_NAME, [RESNET_NHWC_SAVEDMODEL_TGZ], TF_SERVING_BASE_DIRECTORY)
extract_dir = posixpath.join(
TF_SERVING_BASE_DIRECTORY, "resnet")
vm.RemoteCommand('mkdir {0}'.format(extract_dir))
vm.RemoteCommand('cd {0} && tar --strip-components=2 --directory {1} -xvzf '
'{2}'.format(TF_SERVING_BASE_DIRECTORY, extract_dir,
RESNET_NHWC_SAVEDMODEL_TGZ))
def Prepare(benchmark_spec):
"""Installs and prepares Tensorflow Serving on the target vms.
Clients and servers are prepared in parallel using RunThreaded.
Args:
benchmark_spec: The benchmark specification
"""
servers = benchmark_spec.vm_groups['servers']
clients = benchmark_spec.vm_groups['clients']
vms = []
# Create tuples of (function_to_run, vm) in order to dispatch
# to the appropriate prepare function in parallel.
for s in servers:
vms.append(((_PrepareServer, s), {}))
for c in clients:
vms.append(((_PrepareClient, c), {}))
vm_util.RunThreaded(lambda prepare_function, vm: prepare_function(vm), vms)
def _CreateMetadataDict(benchmark_spec, client_thread_count):
"""Creates a metadata dict to be added to run results samples.
Args:
benchmark_spec: The benchmark specification.
client_thread_count: The client thread count used for this particular run.
Returns:
A dict of metadata to be added to samples.
"""
del benchmark_spec
metadata = dict()
metadata['scheduled_runtime'] = FLAGS.tf_serving_runtime
metadata['client_thread_count'] = client_thread_count
return metadata
def _StartServer(vm):
"""Starts the tensorflow_model_server binary.
Args:
vm: The server VM.
"""
model_download_directory = posixpath.join(TF_SERVING_BASE_DIRECTORY, 'resnet')
  # Run the TensorFlow Serving docker image, serving the ResNet model on
  # SERVER_PORT.
vm.RemoteCommand(
'sudo docker run -d --rm --name tfserving-server --network host '
'--mount type=bind,source={0},target=/models/resnet '
'-e MODEL_NAME=resnet '
'-t benchmarks/tensorflow-serving --port={1}'.format(
model_download_directory, SERVER_PORT),
should_log=True)
def _StartClient(vm, server_ip, client_thread_count):
"""Pushes and starts the client workload script.
Args:
vm: The client VM.
server_ip: The server's ip address.
client_thread_count: The client thread count used for this particular run.
Returns:
Stdout from CLIENT_SCRIPT
Raises:
ClientWorkloadScriptExecutionError: if an error occurred during execution
of CLIENT_SCRIPT (detected by looking at stderr).
"""
stdout, stderr = vm.RemoteCommand(
'python {0} --server={1}:{2} --image_directory={3} '
'--runtime={4} --num_threads={5}'.format(
posixpath.join(linux_packages.INSTALL_DIR,
CLIENT_SCRIPT), server_ip, SERVER_PORT,
posixpath.join(linux_packages.INSTALL_DIR,
posixpath.splitext(ILSVRC_VALIDATION_IMAGES_TAR)[0]),
FLAGS.tf_serving_runtime, client_thread_count),
should_log=True)
# Ensure that stderr from the client script is empty.
# If it is, stderr from the remote command should contain a single line:
# Warning: Permanently added {ip} (ECDSA) to the list of known hosts.
if len(stderr.splitlines()) > 1:
raise ClientWorkloadScriptExecutionError(
'Exception occurred during execution of client script: {0}'.format(
stderr))
return stdout
def _CreateSingleSample(sample_name, sample_units, metadata, client_stdout):
"""Creates a sample from the tensorflow_serving_client_workload stdout.
client_stdout is expected to contain output in the following format:
key1: int_or_float_value_1
key2: int_or_float_value_2
Args:
sample_name: Name of the sample. Used to create a regex to extract the value
from client_stdout. Also used as the returned sample's name.
sample_units: Units to be specified in the returned sample
metadata: Metadata to be added to the returned sample
client_stdout: Stdout from tensorflow_serving_client_workload.py
Returns:
A single floating point sample.
Raises:
regex_util.NoMatchError: when no line beginning with sample_name: is found
in client_stdout
"""
regex = sample_name + r'\:\s*(\w+\.?\w*)'
value = regex_util.ExtractFloat(regex, client_stdout)
return sample.Sample(sample_name, value, sample_units, metadata)
def _CreateLatenciesSample(metadata, client_stdout):
"""Extracts latency samples from client_stdout.
  Assumes latency samples start one line after the 'Latency:' line
and continue until the end of the file, and that each latency sample
is on its own line.
Args:
metadata: Metadata to be added to the returned sample
client_stdout: Stdout from tensorflow_serving_client_workload.py
Returns:
A single sample containing an array of latencies.
"""
updated_metadata = metadata.copy()
lines = client_stdout.splitlines()
latency_start = lines.index('Latency:') + 1
latencies = [float(line) for line in lines[latency_start:]]
updated_metadata.update({'latency_array': latencies})
return sample.Sample('Latency', -1, 'seconds', updated_metadata)
def _MakeSamplesFromClientOutput(benchmark_spec, client_stdout,
client_thread_count):
"""Returns an array of samples extracted from client_stdout.
Args:
benchmark_spec: The benchmark specification.
client_stdout: Stdout from tensorflow_serving_client_workload.py.
client_thread_count: The client thread count used for this particular run.
Returns:
A list of samples extracted from client_stdout.
"""
metadata = _CreateMetadataDict(benchmark_spec, client_thread_count)
samples = []
metrics_to_extract = [
# (sample_name, units)
('Completed requests', 'requests'),
('Failed requests', 'requests'),
('Throughput', 'images_per_second'),
('Runtime', 'seconds'),
]
for metric in metrics_to_extract:
samples.append(
_CreateSingleSample(metric[0], metric[1], metadata, client_stdout))
samples.append(_CreateLatenciesSample(metadata, client_stdout))
return samples
def Run(benchmark_spec):
"""Runs Tensorflow Serving benchmark.
Args:
benchmark_spec: The benchmark specification.
Returns:
A list of sample.Sample objects.
"""
server = benchmark_spec.vm_groups['servers'][0]
client = benchmark_spec.vm_groups['clients'][0]
_StartServer(server)
client.PushDataFile(
CLIENT_SCRIPT, remote_path=linux_packages.INSTALL_DIR)
samples = []
for thread_count in FLAGS.tf_serving_client_thread_counts:
client_stdout = _StartClient(client, server.internal_ip, thread_count)
samples.extend(
_MakeSamplesFromClientOutput(benchmark_spec, client_stdout,
thread_count))
return samples
def Cleanup(benchmark_spec):
"""Cleans up Tensorflow Serving.
Args:
benchmark_spec: The benchmark specification.
"""
servers = benchmark_spec.vm_groups['servers']
clients = benchmark_spec.vm_groups['clients']
def _CleanupServer(vm):
vm.RemoteCommand('sudo docker stop tfserving-server || true')
vm.Uninstall('tensorflow_serving')
def _CleanupClient(vm):
vm.Uninstall('tensorflow_serving')
vms = []
# Create tuples of (function_to_run, vm) in order to dispatch
# to the appropriate prepare function in parallel.
for s in servers:
vms.append(((_CleanupServer, s), {}))
for c in clients:
vms.append(((_CleanupClient, c), {}))
vm_util.RunThreaded(lambda cleanup_function, vm: cleanup_function(vm), vms)
del benchmark_spec
|
from django.conf import settings
from weblate.machinery.base import MachineTranslation
class MyMemoryTranslation(MachineTranslation):
"""MyMemory machine translation support."""
name = "MyMemory"
do_cleanup = False
def map_language_code(self, code):
"""Convert language to service specific code."""
return super().map_language_code(code).replace("_", "-")
def is_supported(self, source, language):
"""Check whether given language combination is supported."""
return (
self.lang_supported(source)
and self.lang_supported(language)
and source != language
)
@staticmethod
def lang_supported(language):
"""Almost any language without modifiers is supported."""
if language in ("ia", "tt", "ug"):
return False
return "@" not in language
def format_match(self, match):
"""Reformat match to (translation, quality) tuple."""
if isinstance(match["quality"], int):
quality = match["quality"]
elif match["quality"] is not None and match["quality"].isdigit():
quality = int(match["quality"])
else:
quality = 0
result = {
"text": match["translation"],
"quality": int(quality * match["match"]),
"service": self.name,
"source": match["segment"],
}
if match["last-updated-by"]:
result["origin"] = match["last-updated-by"]
if match["reference"]:
result["origin_detail"] = match["reference"]
return result
def download_translations(
self,
source,
language,
text: str,
unit,
user,
search: bool,
threshold: int = 75,
):
"""Download list of possible translations from MyMemory."""
args = {
"q": text.split(". ")[0][:500],
"langpair": f"{source}|{language}",
}
if settings.MT_MYMEMORY_EMAIL is not None:
args["de"] = settings.MT_MYMEMORY_EMAIL
if settings.MT_MYMEMORY_USER is not None:
args["user"] = settings.MT_MYMEMORY_USER
if settings.MT_MYMEMORY_KEY is not None:
args["key"] = settings.MT_MYMEMORY_KEY
response = self.request_status(
"get", "https://mymemory.translated.net/api/get", params=args
)
for match in response["matches"]:
yield self.format_match(match)
|
import logging
from homeassistant.components.sensor import ENTITY_ID_FORMAT
from homeassistant.const import CONF_ID
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity, async_generate_entity_id
from . import DOMAIN
from .const import DATA_GATEWAYS, DATA_OPENTHERM_GW, SENSOR_INFO
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the OpenTherm Gateway sensors."""
sensors = []
for var, info in SENSOR_INFO.items():
device_class = info[0]
unit = info[1]
friendly_name_format = info[2]
sensors.append(
OpenThermSensor(
hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][config_entry.data[CONF_ID]],
var,
device_class,
unit,
friendly_name_format,
)
)
async_add_entities(sensors)
class OpenThermSensor(Entity):
"""Representation of an OpenTherm Gateway sensor."""
def __init__(self, gw_dev, var, device_class, unit, friendly_name_format):
"""Initialize the OpenTherm Gateway sensor."""
self.entity_id = async_generate_entity_id(
ENTITY_ID_FORMAT, f"{var}_{gw_dev.gw_id}", hass=gw_dev.hass
)
self._gateway = gw_dev
self._var = var
self._value = None
self._device_class = device_class
self._unit = unit
self._friendly_name = friendly_name_format.format(gw_dev.name)
self._unsub_updates = None
async def async_added_to_hass(self):
"""Subscribe to updates from the component."""
_LOGGER.debug("Added OpenTherm Gateway sensor %s", self._friendly_name)
self._unsub_updates = async_dispatcher_connect(
self.hass, self._gateway.update_signal, self.receive_report
)
async def async_will_remove_from_hass(self):
"""Unsubscribe from updates from the component."""
_LOGGER.debug("Removing OpenTherm Gateway sensor %s", self._friendly_name)
self._unsub_updates()
@property
def available(self):
"""Return availability of the sensor."""
return self._value is not None
@property
def entity_registry_enabled_default(self):
"""Disable sensors by default."""
return False
@callback
def receive_report(self, status):
"""Handle status updates from the component."""
value = status.get(self._var)
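        # Render float values with a single decimal place for display.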
if isinstance(value, float):
value = f"{value:2.1f}"
self._value = value
self.async_write_ha_state()
@property
def name(self):
"""Return the friendly name of the sensor."""
return self._friendly_name
@property
def device_info(self):
"""Return device info."""
return {
"identifiers": {(DOMAIN, self._gateway.gw_id)},
"name": self._gateway.name,
"manufacturer": "Schelte Bron",
"model": "OpenTherm Gateway",
"sw_version": self._gateway.gw_version,
}
@property
def unique_id(self):
"""Return a unique ID."""
return f"{self._gateway.gw_id}-{self._var}"
@property
def device_class(self):
"""Return the device class."""
return self._device_class
@property
def state(self):
"""Return the state of the device."""
return self._value
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return self._unit
@property
def should_poll(self):
"""Return False because entity pushes its state."""
return False
|
import unittest
import mock
from kalliope.core import OrderListener
from kalliope.core.Models import Resources
from kalliope.core.Models.settings.Stt import Stt
class TestOrderListener(unittest.TestCase):
"""Test case for the OrderListener Class"""
def setUp(self):
pass
def test_load_stt_plugin(self):
# Test getting default stt
ol = OrderListener()
stt1 = Stt(name="default-stt",
parameters=dict())
stt2 = Stt(name="second-stt",
parameters=dict())
stt3 = Stt(name="third-stt",
parameters=dict())
resources = Resources(stt_folder="/tmp")
ol.settings = mock.MagicMock(default_stt_name="default-stt",
stts=[stt1, stt2, stt3],
resources=resources)
callback = mock.MagicMock()
ol.callback = callback
with mock.patch("kalliope.core.Utils.get_dynamic_class_instantiation") as mock_get_dynamic_class_instantiation:
mock_get_dynamic_class_instantiation.return_value = 'class_instance'
self.assertEqual(ol.load_stt_plugin(),
"class_instance",
"Fail getting the proper value")
mock_get_dynamic_class_instantiation.assert_called_once_with(package_name="stt",
module_name="Default-stt",
parameters={'callback': callback,
'audio_file_path': None},
resources_dir="/tmp")
if __name__ == '__main__':
unittest.main()
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from ip import IPCollector
###############################################################################
class TestIPCollector(CollectorTestCase):
def setUp(self, allowed_names=None):
if not allowed_names:
allowed_names = []
config = get_collector_config('IPCollector', {
'allowed_names': allowed_names,
'interval': 1,
})
self.collector = IPCollector(config, None)
def test_import(self):
self.assertTrue(IPCollector)
@patch('os.access', Mock(return_value=True))
@patch('__builtin__.open')
@patch('diamond.collector.Collector.publish')
def test_should_open_proc_net_snmp(self, publish_mock, open_mock):
IPCollector.PROC = ['/proc/net/snmp']
open_mock.return_value = StringIO('')
self.collector.collect()
open_mock.assert_called_once_with('/proc/net/snmp')
@patch('os.access', Mock(return_value=True))
@patch('__builtin__.open')
@patch('diamond.collector.Collector.publish')
def test_should_work_with_synthetic_data(self, publish_mock, open_mock):
IPCollector.PROC = ['/proc/net/snmp']
self.setUp(['A', 'C'])
open_mock.return_value = StringIO('''
Ip: A B C
Ip: 0 0 0
'''.strip())
self.collector.collect()
open_mock.return_value = StringIO('''
Ip: A B C
Ip: 0 1 2
'''.strip())
publish_mock.call_args_list = []
self.collector.collect()
self.assertEqual(len(publish_mock.call_args_list), 2)
metrics = {
'A': 0,
'C': 2,
}
self.assertPublishedMany(publish_mock, metrics)
@patch('diamond.collector.Collector.publish')
def test_should_work_with_real_data(self, publish_mock):
self.setUp(['InDiscards', 'InReceives', 'OutDiscards', 'OutRequests'])
IPCollector.PROC = [self.getFixturePath('proc_net_snmp_1')]
self.collector.collect()
self.assertPublishedMany(publish_mock, {})
IPCollector.PROC = [self.getFixturePath('proc_net_snmp_2')]
self.collector.collect()
metrics = {
'InDiscards': 0,
'InReceives': 2,
'OutDiscards': 0,
'OutRequests': 1,
}
self.assertPublishedMany(publish_mock, metrics)
@patch('diamond.collector.Collector.publish')
def test_should_work_with_all_data(self, publish_mock):
metrics = {
'Forwarding': 2,
'DefaultTTL': 64,
'InReceives': 2,
'InHdrErrors': 0,
'InAddrErrors': 0,
'ForwDatagrams': 0,
'InUnknownProtos': 0,
'InDiscards': 0,
'InDelivers': 2,
'OutRequests': 1,
'OutDiscards': 0,
'OutNoRoutes': 0,
'ReasmTimeout': 0,
'ReasmReqds': 0,
'ReasmOKs': 0,
'ReasmFails': 0,
'FragOKs': 0,
'FragFails': 0,
'FragCreates': 0,
}
self.setUp(allowed_names=metrics.keys())
IPCollector.PROC = [
self.getFixturePath('proc_net_snmp_1'),
]
self.collector.collect()
self.assertPublishedMany(publish_mock, {})
IPCollector.PROC = [
self.getFixturePath('proc_net_snmp_2'),
]
self.collector.collect()
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
###############################################################################
if __name__ == '__main__':
unittest.main()
|
import logging
import voluptuous as vol
from homeassistant.components import lock, mqtt
from homeassistant.components.lock import LockEntity
from homeassistant.const import (
CONF_DEVICE,
CONF_NAME,
CONF_OPTIMISTIC,
CONF_UNIQUE_ID,
CONF_VALUE_TEMPLATE,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.reload import async_setup_reload_service
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from . import (
ATTR_DISCOVERY_HASH,
CONF_COMMAND_TOPIC,
CONF_QOS,
CONF_RETAIN,
CONF_STATE_TOPIC,
DOMAIN,
PLATFORMS,
MqttAttributes,
MqttAvailability,
MqttDiscoveryUpdate,
MqttEntityDeviceInfo,
subscription,
)
from .debug_info import log_messages
from .discovery import MQTT_DISCOVERY_NEW, clear_discovery_hash
_LOGGER = logging.getLogger(__name__)
CONF_PAYLOAD_LOCK = "payload_lock"
CONF_PAYLOAD_UNLOCK = "payload_unlock"
CONF_STATE_LOCKED = "state_locked"
CONF_STATE_UNLOCKED = "state_unlocked"
DEFAULT_NAME = "MQTT Lock"
DEFAULT_OPTIMISTIC = False
DEFAULT_PAYLOAD_LOCK = "LOCK"
DEFAULT_PAYLOAD_UNLOCK = "UNLOCK"
DEFAULT_STATE_LOCKED = "LOCKED"
DEFAULT_STATE_UNLOCKED = "UNLOCKED"
PLATFORM_SCHEMA = (
mqtt.MQTT_RW_PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_DEVICE): mqtt.MQTT_ENTITY_DEVICE_INFO_SCHEMA,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
vol.Optional(CONF_PAYLOAD_LOCK, default=DEFAULT_PAYLOAD_LOCK): cv.string,
vol.Optional(
CONF_PAYLOAD_UNLOCK, default=DEFAULT_PAYLOAD_UNLOCK
): cv.string,
vol.Optional(CONF_STATE_LOCKED, default=DEFAULT_STATE_LOCKED): cv.string,
vol.Optional(
CONF_STATE_UNLOCKED, default=DEFAULT_STATE_UNLOCKED
): cv.string,
vol.Optional(CONF_UNIQUE_ID): cv.string,
}
)
.extend(mqtt.MQTT_AVAILABILITY_SCHEMA.schema)
.extend(mqtt.MQTT_JSON_ATTRS_SCHEMA.schema)
)
async def async_setup_platform(
hass: HomeAssistantType, config: ConfigType, async_add_entities, discovery_info=None
):
"""Set up MQTT lock panel through configuration.yaml."""
await async_setup_reload_service(hass, DOMAIN, PLATFORMS)
await _async_setup_entity(hass, config, async_add_entities)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up MQTT lock dynamically through MQTT discovery."""
async def async_discover(discovery_payload):
"""Discover and add an MQTT lock."""
discovery_data = discovery_payload.discovery_data
try:
config = PLATFORM_SCHEMA(discovery_payload)
await _async_setup_entity(
hass, config, async_add_entities, config_entry, discovery_data
)
except Exception:
clear_discovery_hash(hass, discovery_data[ATTR_DISCOVERY_HASH])
raise
async_dispatcher_connect(
hass, MQTT_DISCOVERY_NEW.format(lock.DOMAIN, "mqtt"), async_discover
)
async def _async_setup_entity(
hass, config, async_add_entities, config_entry=None, discovery_data=None
):
"""Set up the MQTT Lock platform."""
async_add_entities([MqttLock(hass, config, config_entry, discovery_data)])
class MqttLock(
MqttAttributes,
MqttAvailability,
MqttDiscoveryUpdate,
MqttEntityDeviceInfo,
LockEntity,
):
"""Representation of a lock that can be toggled using MQTT."""
def __init__(self, hass, config, config_entry, discovery_data):
"""Initialize the lock."""
self.hass = hass
self._unique_id = config.get(CONF_UNIQUE_ID)
self._state = False
self._sub_state = None
self._optimistic = False
# Load config
self._setup_from_config(config)
device_config = config.get(CONF_DEVICE)
MqttAttributes.__init__(self, config)
MqttAvailability.__init__(self, config)
MqttDiscoveryUpdate.__init__(self, discovery_data, self.discovery_update)
MqttEntityDeviceInfo.__init__(self, device_config, config_entry)
async def async_added_to_hass(self):
"""Subscribe to MQTT events."""
await super().async_added_to_hass()
await self._subscribe_topics()
async def discovery_update(self, discovery_payload):
"""Handle updated discovery message."""
config = PLATFORM_SCHEMA(discovery_payload)
self._setup_from_config(config)
await self.attributes_discovery_update(config)
await self.availability_discovery_update(config)
await self.device_info_discovery_update(config)
await self._subscribe_topics()
self.async_write_ha_state()
def _setup_from_config(self, config):
"""(Re)Setup the entity."""
self._config = config
self._optimistic = config[CONF_OPTIMISTIC]
value_template = self._config.get(CONF_VALUE_TEMPLATE)
if value_template is not None:
value_template.hass = self.hass
async def _subscribe_topics(self):
"""(Re)Subscribe to topics."""
@callback
@log_messages(self.hass, self.entity_id)
def message_received(msg):
"""Handle new MQTT messages."""
payload = msg.payload
value_template = self._config.get(CONF_VALUE_TEMPLATE)
if value_template is not None:
payload = value_template.async_render_with_possible_json_value(payload)
if payload == self._config[CONF_STATE_LOCKED]:
self._state = True
elif payload == self._config[CONF_STATE_UNLOCKED]:
self._state = False
self.async_write_ha_state()
if self._config.get(CONF_STATE_TOPIC) is None:
# Force into optimistic mode.
self._optimistic = True
else:
self._sub_state = await subscription.async_subscribe_topics(
self.hass,
self._sub_state,
{
"state_topic": {
"topic": self._config.get(CONF_STATE_TOPIC),
"msg_callback": message_received,
"qos": self._config[CONF_QOS],
}
},
)
async def async_will_remove_from_hass(self):
"""Unsubscribe when removed."""
self._sub_state = await subscription.async_unsubscribe_topics(
self.hass, self._sub_state
)
await MqttAttributes.async_will_remove_from_hass(self)
await MqttAvailability.async_will_remove_from_hass(self)
await MqttDiscoveryUpdate.async_will_remove_from_hass(self)
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def name(self):
"""Return the name of the lock."""
return self._config[CONF_NAME]
@property
def unique_id(self):
"""Return a unique ID."""
return self._unique_id
@property
def is_locked(self):
"""Return true if lock is locked."""
return self._state
@property
def assumed_state(self):
"""Return true if we do optimistic updates."""
return self._optimistic
async def async_lock(self, **kwargs):
"""Lock the device.
This method is a coroutine.
"""
mqtt.async_publish(
self.hass,
self._config[CONF_COMMAND_TOPIC],
self._config[CONF_PAYLOAD_LOCK],
self._config[CONF_QOS],
self._config[CONF_RETAIN],
)
if self._optimistic:
# Optimistically assume that the lock has changed state.
self._state = True
self.async_write_ha_state()
async def async_unlock(self, **kwargs):
"""Unlock the device.
This method is a coroutine.
"""
mqtt.async_publish(
self.hass,
self._config[CONF_COMMAND_TOPIC],
self._config[CONF_PAYLOAD_UNLOCK],
self._config[CONF_QOS],
self._config[CONF_RETAIN],
)
if self._optimistic:
# Optimistically assume that the lock has changed state.
self._state = False
self.async_write_ha_state()
|
import json
import os
from distutils.dir_util import copy_tree
from paasta_tools.cli.utils import pick_random_port
def main():
print("-------------------------------------------------------")
print(
"Please run export PAASTA_SYSTEM_CONFIG_DIR=etc_paasta_for_development to continue"
)
print(
"Please set environment variable PAASTA_TEST_CLUSTER to the cluster you want to use."
)
print("This is necessary for tron jobs")
print("-------------------------------------------------------")
cluster = os.environ.get("PAASTA_TEST_CLUSTER", "norcal-devc")
config_path = "etc_paasta_for_development"
copy_tree("/etc/paasta", os.path.join(os.getcwd(), config_path))
# Generate tron.json
tron_config = {"tron": {"url": f"http://tron-{cluster}:8089"}}
with open(config_path + "/tron.json", "w") as f:
json.dump(tron_config, f)
# find unused port
port = pick_random_port("paasta-dev-api")
# Generate api endpoints
api_endpoints = {"api_endpoints": {cluster: f"http://localhost:{port}"}}
api_endpoints_path = os.path.join(os.getcwd(), config_path, "api_endpoints.json")
os.chmod(api_endpoints_path, 0o777)
with open(api_endpoints_path, "w") as f:
json.dump(api_endpoints, f)
# export config path
os.environ["PAASTA_SYSTEM_CONFIG_DIR"] = config_path
os.execl(
".tox/py36-linux/bin/python",
".tox/py36-linux/bin/python",
"-m",
"paasta_tools.api.api",
*["-D", "-c", cluster, str(port)],
)
if __name__ == "__main__":
main()
|
from django.conf import settings
from django.core.management import call_command
from django.core.management.base import CommandError
from django.urls import reverse
from weblate.trans.models import Component
from weblate.trans.tests.test_views import ViewTestCase
class AutoTranslationTest(ViewTestCase):
def setUp(self):
super().setUp()
# Need extra power
self.user.is_superuser = True
self.user.save()
self.component2 = Component.objects.create(
name="Test 2",
slug="test-2",
project=self.project,
repo=self.git_repo_path,
push=self.git_repo_path,
vcs="git",
filemask="po/*.po",
template="",
file_format="po",
new_base="",
allow_translation_propagation=False,
)
def test_none(self):
"""Test for automatic translation with no content."""
response = self.client.post(
reverse("auto_translation", kwargs=self.kw_translation)
)
self.assertRedirects(response, self.translation_url)
def make_different(self):
self.edit_unit("Hello, world!\n", "Nazdar svete!\n")
def perform_auto(self, expected=1, expected_count=None, **kwargs):
self.make_different()
params = {"project": "test", "lang": "cs", "component": "test-2"}
url = reverse("auto_translation", kwargs=params)
kwargs["auto_source"] = "others"
kwargs["threshold"] = "100"
if "filter_type" not in kwargs:
kwargs["filter_type"] = "todo"
if "mode" not in kwargs:
kwargs["mode"] = "translate"
response = self.client.post(url, kwargs, follow=True)
if expected == 1:
self.assertContains(
response, "Automatic translation completed, 1 string was updated."
)
else:
self.assertContains(
response, "Automatic translation completed, no strings were updated."
)
self.assertRedirects(response, reverse("translation", kwargs=params))
# Check we've translated something
translation = self.component2.translation_set.get(language_code="cs")
translation.invalidate_cache()
if expected_count is None:
expected_count = expected
if kwargs["mode"] == "suggest":
self.assertEqual(translation.stats.suggestions, expected_count)
else:
self.assertEqual(translation.stats.translated, expected_count)
def test_different(self):
"""Test for automatic translation with different content."""
self.perform_auto()
def test_suggest(self):
"""Test for automatic suggestion."""
self.perform_auto(mode="suggest")
self.perform_auto(0, 1, mode="suggest")
def test_inconsistent(self):
self.perform_auto(0, filter_type="check:inconsistent")
def test_overwrite(self):
self.perform_auto(overwrite="1")
def test_command(self):
call_command("auto_translate", "test", "test", "cs")
def test_command_add_error(self):
with self.assertRaises(CommandError):
call_command("auto_translate", "test", "test", "ia", add=True)
def test_command_mt(self):
call_command("auto_translate", "--mt", "weblate", "test", "test", "cs")
def test_command_mt_error(self):
with self.assertRaises(CommandError):
call_command("auto_translate", "--mt", "invalid", "test", "test", "ia")
with self.assertRaises(CommandError):
call_command(
"auto_translate", "--threshold", "invalid", "test", "test", "ia"
)
def test_command_add(self):
self.component.file_format = "po"
self.component.new_lang = "add"
self.component.new_base = "po/cs.po"
self.component.clean()
self.component.save()
call_command("auto_translate", "test", "test", "ia", add=True)
self.assertTrue(
self.component.translation_set.filter(language__code="ia").exists()
)
def test_command_different(self):
self.make_different()
call_command("auto_translate", "test", "test-2", "cs", source="test/test")
def test_command_errors(self):
with self.assertRaises(CommandError):
call_command("auto_translate", "test", "test", "cs", user="invalid")
with self.assertRaises(CommandError):
call_command("auto_translate", "test", "test", "cs", source="invalid")
with self.assertRaises(CommandError):
call_command("auto_translate", "test", "test", "cs", source="test/invalid")
with self.assertRaises(CommandError):
call_command("auto_translate", "test", "test", "xxx")
class AutoTranslationMtTest(ViewTestCase):
@classmethod
def _databases_support_transactions(cls):
        # This is a workaround for MySQL, as the FULL TEXT index does not work
        # well inside a transaction, so we avoid using transactions for
        # tests. Otherwise we end up with no matches for the query.
# See https://dev.mysql.com/doc/refman/5.6/en/innodb-fulltext-index.html
if settings.DATABASES["default"]["ENGINE"] == "django.db.backends.mysql":
return False
return super()._databases_support_transactions()
def setUp(self):
super().setUp()
# Need extra power
self.user.is_superuser = True
self.user.save()
self.component3 = Component.objects.create(
name="Test 3",
slug="test-3",
project=self.project,
repo=self.git_repo_path,
push=self.git_repo_path,
vcs="git",
filemask="po/*.po",
template="",
file_format="po",
new_base="",
allow_translation_propagation=False,
)
self.update_fulltext_index()
def test_none(self):
"""Test for automatic translation with no content."""
url = reverse("auto_translation", kwargs=self.kw_translation)
response = self.client.post(url)
self.assertRedirects(response, self.translation_url)
def make_different(self):
self.edit_unit("Hello, world!\n", "Nazdar svete!\n")
def perform_auto(self, expected=1, **kwargs):
self.make_different()
params = {"project": "test", "lang": "cs", "component": "test-3"}
url = reverse("auto_translation", kwargs=params)
kwargs["auto_source"] = "mt"
if "filter_type" not in kwargs:
kwargs["filter_type"] = "todo"
if "mode" not in kwargs:
kwargs["mode"] = "translate"
response = self.client.post(url, kwargs, follow=True)
if expected == 1:
self.assertContains(
response, "Automatic translation completed, 1 string was updated."
)
else:
self.assertContains(
response, "Automatic translation completed, no strings were updated."
)
self.assertRedirects(response, reverse("translation", kwargs=params))
# Check we've translated something
translation = self.component3.translation_set.get(language_code="cs")
translation.invalidate_cache()
self.assertEqual(translation.stats.translated, expected)
def test_different(self):
"""Test for automatic translation with different content."""
self.perform_auto(engines=["weblate"], threshold=80)
def test_inconsistent(self):
self.perform_auto(
0, filter_type="check:inconsistent", engines=["weblate"], threshold=80
)
def test_overwrite(self):
self.perform_auto(overwrite="1", engines=["weblate"], threshold=80)
|
import filelock
import numpy as np
import os
from chainer.dataset import download
from chainercv.chainer_experimental.datasets.sliceable import GetterDataset
from chainercv import utils
root = 'pfnet/chainercv/online_products'
url = 'http://ftp.cs.stanford.edu/cs/cvgl/Stanford_Online_Products.zip'
online_products_super_label_names = (
'bicycle',
'cabinet',
'chair',
'coffee_maker',
'fan',
'kettle',
'lamp',
'mug',
'sofa',
'stapler',
'table',
'toaster'
)
def _get_online_products():
# To support ChainerMN, the target directory should be locked.
with filelock.FileLock(os.path.join(download.get_dataset_directory(
'pfnet/chainercv/.lock'), 'online_products.lock')):
data_root = download.get_dataset_directory(root)
base_path = os.path.join(data_root, 'Stanford_Online_Products')
if os.path.exists(base_path):
# skip downloading
return base_path
download_file_path = utils.cached_download(url)
ext = os.path.splitext(url)[1]
utils.extractall(download_file_path, data_root, ext)
return base_path
class OnlineProductsDataset(GetterDataset):
"""Dataset class for `Stanford Online Products Dataset`_.
.. _`Stanford Online Products Dataset`:
http://cvgl.stanford.edu/projects/lifted_struct
The :obj:`split` selects train and test split of the dataset as done in
[#]_. The train split contains the first 11318 classes and the test
split contains the remaining 11316 classes.
.. [#] Hyun Oh Song, Yu Xiang, Stefanie Jegelka, Silvio Savarese.
`Deep Metric Learning via Lifted Structured Feature Embedding\
<https://arxiv.org/abs/1511.06452>`_. arXiv 2015.
Args:
data_dir (string): Path to the root of the training data. If this is
:obj:`auto`, this class will automatically download data for you
under :obj:`$CHAINER_DATASET_ROOT/pfnet/chainercv/online_products`.
split ({'train', 'test'}): Select a split of the dataset.
This dataset returns the following data.
.. csv-table::
:header: name, shape, dtype, format
:obj:`img`, ":math:`(3, H, W)`", :obj:`float32`, \
"RGB, :math:`[0, 255]`"
:obj:`label`, scalar, :obj:`int32`, ":math:`[0, \#class - 1]`"
:obj:`super_label`, scalar, :obj:`int32`, \
":math:`[0, \#super\_class - 1]`"
"""
def __init__(self, data_dir='auto', split='train'):
super(OnlineProductsDataset, self).__init__()
if data_dir == 'auto':
data_dir = _get_online_products()
self.data_dir = data_dir
self.paths = []
# for split in ['train', 'test']:
id_list_file = os.path.join(data_dir, 'Ebay_{}.txt'.format(split))
ids_tmp = [id_.strip().split() for id_ in open(id_list_file)][1:]
# ids start from 0
self.class_ids = np.array(
[int(id_[1]) - 1 for id_ in ids_tmp], dtype=np.int32)
self.super_class_ids = np.array(
[int(id_[2]) - 1 for id_ in ids_tmp], dtype=np.int32)
self.paths += [os.path.join(data_dir, id_[3]) for id_ in ids_tmp]
self.add_getter('img', self._get_image)
self.add_getter('label', self._get_label)
self.add_getter('super_label', self._get_super_label)
def __len__(self):
return len(self.paths)
def _get_image(self, i):
return utils.read_image(self.paths[i], color=True)
def _get_label(self, i):
return self.class_ids[i]
def _get_super_label(self, i):
return self.super_class_ids[i]
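# Usage sketch (illustrative; with data_dir='auto' the dataset is downloaded on
# first use). Indexing a GetterDataset returns its registered getters in order,
# i.e. (img, label, super_label) here:
#
#     dataset = OnlineProductsDataset(split='train')
#     img, label, super_label = dataset[0]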
|
import os.path as op
import numpy as np
import mne
from mne.datasets import sample
print(__doc__)
# In this example, raw data will be simulated for the sample subject, so its
# information needs to be loaded. This step will download the data if it is not
# already on your machine. The subjects directory is also set so it doesn't need
# to be given to functions.
data_path = sample.data_path()
subjects_dir = op.join(data_path, 'subjects')
subject = 'sample'
meg_path = op.join(data_path, 'MEG', subject)
# First, we get an info structure from the sample subject.
fname_info = op.join(meg_path, 'sample_audvis_raw.fif')
info = mne.io.read_info(fname_info)
tstep = 1 / info['sfreq']
# To simulate sources, we also need a source space. It can be obtained from the
# forward solution of the sample subject.
fwd_fname = op.join(meg_path, 'sample_audvis-meg-eeg-oct-6-fwd.fif')
fwd = mne.read_forward_solution(fwd_fname)
src = fwd['src']
# To simulate raw data, we need to define when the activity occurs using an
# events matrix and specify the IDs of each event.
# Noise covariance matrix also needs to be defined.
# Here, both are loaded from the sample dataset, but they can also be specified
# by the user.
fname_event = op.join(meg_path, 'sample_audvis_raw-eve.fif')
fname_cov = op.join(meg_path, 'sample_audvis-cov.fif')
events = mne.read_events(fname_event)
noise_cov = mne.read_cov(fname_cov)
# Standard sample event IDs. These values will correspond to the third column
# in the events matrix.
event_id = {'auditory/left': 1, 'auditory/right': 2, 'visual/left': 3,
'visual/right': 4, 'smiley': 5, 'button': 32}
# Take only a few events for speed
events = events[:80]
###############################################################################
# In order to simulate source time courses, labels of desired active regions
# need to be specified for each of the 4 simulation conditions.
# Make a dictionary that maps conditions to activation strengths within
# aparc.a2009s [1]_ labels. In the aparc.a2009s parcellation:
#
# - 'G_temp_sup-G_T_transv' is the label for primary auditory area
# - 'S_calcarine' is the label for primary visual area
#
# In each of the 4 conditions, only the primary area is activated. This means
# that during the activations of auditory areas, there are no activations in
# visual areas and vice versa.
# Moreover, for each condition, contralateral region is more active (here, 2
# times more) than the ipsilateral.
activations = {
'auditory/left':
[('G_temp_sup-G_T_transv-lh', 30), # label, activation (nAm)
('G_temp_sup-G_T_transv-rh', 60)],
'auditory/right':
[('G_temp_sup-G_T_transv-lh', 60),
('G_temp_sup-G_T_transv-rh', 30)],
'visual/left':
[('S_calcarine-lh', 30),
('S_calcarine-rh', 60)],
'visual/right':
[('S_calcarine-lh', 60),
('S_calcarine-rh', 30)],
}
annot = 'aparc.a2009s'
# Load the 4 necessary label names.
label_names = sorted(set(activation[0]
for activation_list in activations.values()
for activation in activation_list))
region_names = list(activations.keys())
###############################################################################
# Create simulated source activity
# --------------------------------
#
# Generate source time courses for each region. In this example, we want to
# simulate source activity for a single condition at a time. Therefore, each
# evoked response will be parametrized by latency and duration.
def data_fun(times, latency, duration):
"""Function to generate source time courses for evoked responses,
parametrized by latency and duration."""
f = 15 # oscillating frequency, beta band [Hz]
sigma = 0.375 * duration
sinusoid = np.sin(2 * np.pi * f * (times - latency))
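    # Gaussian envelope centered near the latency (with a small random jitter
    # drawn from ``rng``), so each evoked response is a short windowed burst
    # rather than a continuous oscillation.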
gf = np.exp(- (times - latency - (sigma / 4.) * rng.rand(1)) ** 2 /
(2 * (sigma ** 2)))
return 1e-9 * sinusoid * gf
###############################################################################
# Here, :class:`~mne.simulation.SourceSimulator` is used, which allows us to
# specify where (label), what (source_time_series), and when (events) each
# event type will occur.
#
# We will add data for 4 areas, each of which contains 2 labels. Since the
# add_data method accepts 1 label per call, it will be called 2 times per area.
#
# Evoked responses are generated such that the main component peaks at 100 ms
# with a duration of around 30 ms, appearing first in the contralateral
# cortex. This is followed by a response in the ipsilateral cortex peaking
# about 15 ms later. The amplitude of the activations will be 2 times higher in
# the contralateral region, as explained before.
#
# When the activity occurs is defined using events. In this case, they are
# taken from the original raw data. The first column is the sample of the
# event, the second is not used. The third one is the event id, which is
# different for each of the 4 areas.
times = np.arange(150, dtype=np.float64) / info['sfreq']
duration = 0.03
rng = np.random.RandomState(7)
source_simulator = mne.simulation.SourceSimulator(src, tstep=tstep)
for region_id, region_name in enumerate(region_names, 1):
events_tmp = events[np.where(events[:, 2] == region_id)[0], :]
for i in range(2):
label_name = activations[region_name][i][0]
label_tmp = mne.read_labels_from_annot(subject, annot,
subjects_dir=subjects_dir,
regexp=label_name,
verbose=False)
label_tmp = label_tmp[0]
amplitude_tmp = activations[region_name][i][1]
if region_name.split('/')[1][0] == label_tmp.hemi[0]:
latency_tmp = 0.115
else:
latency_tmp = 0.1
wf_tmp = data_fun(times, latency_tmp, duration)
source_simulator.add_data(label_tmp,
amplitude_tmp * wf_tmp,
events_tmp)
# To obtain a SourceEstimate object, we need to use `get_stc()` method of
# SourceSimulator class.
stc_data = source_simulator.get_stc()
###############################################################################
# Simulate raw data
# -----------------
#
# Project the source time series to sensor space. Three types of noise will be
# added to the simulated raw data:
#
# - multivariate Gaussian noise obtained from the noise covariance from the
# sample data
# - blink (EOG) noise
# - ECG noise
#
# The :class:`~mne.simulation.SourceSimulator` can be given directly to the
# :func:`~mne.simulation.simulate_raw` function.
raw_sim = mne.simulation.simulate_raw(info, source_simulator, forward=fwd)
raw_sim.set_eeg_reference(projection=True)
mne.simulation.add_noise(raw_sim, cov=noise_cov, random_state=0)
mne.simulation.add_eog(raw_sim, random_state=0)
mne.simulation.add_ecg(raw_sim, random_state=0)
# Plot original and simulated raw data.
raw_sim.plot(title='Simulated raw data')
###############################################################################
# Extract epochs and compute evoked responses
# --------------------------------------------
#
epochs = mne.Epochs(raw_sim, events, event_id, tmin=-0.2, tmax=0.3,
baseline=(None, 0))
evoked_aud_left = epochs['auditory/left'].average()
evoked_vis_right = epochs['visual/right'].average()
# Visualize the evoked data
evoked_aud_left.plot(spatial_colors=True)
evoked_vis_right.plot(spatial_colors=True)
###############################################################################
# Reconstruct simulated source time courses using dSPM inverse operator
# ---------------------------------------------------------------------
#
# Here, source time courses for auditory and visual areas are reconstructed
# separately and their difference is shown. This was done merely for better
# visual representation of source reconstruction.
# As expected, when high activations appear in primary auditory areas, primary
# visual areas will have low activations and vice versa.
method, lambda2 = 'dSPM', 1. / 9.
inv = mne.minimum_norm.make_inverse_operator(epochs.info, fwd, noise_cov)
stc_aud = mne.minimum_norm.apply_inverse(
evoked_aud_left, inv, lambda2, method)
stc_vis = mne.minimum_norm.apply_inverse(
evoked_vis_right, inv, lambda2, method)
stc_diff = stc_aud - stc_vis
brain = stc_diff.plot(subjects_dir=subjects_dir, initial_time=0.1,
hemi='split', views=['lat', 'med'])
###############################################################################
# References
# ----------
# .. [1] Destrieux C, Fischl B, Dale A, Halgren E (2010). Automatic
# parcellation of human cortical gyri and sulci using standard
# anatomical nomenclature, vol. 53(1), 1-15, NeuroImage.
|
import xarray as xr
@xr.register_dataset_accessor("geo")
class GeoAccessor:
def __init__(self, xarray_obj):
self._obj = xarray_obj
self._center = None
@property
def center(self):
"""Return the geographic center point of this dataset."""
if self._center is None:
# we can use a cache on our accessor objects, because accessors
# themselves are cached on instances that access them.
            lon = self._obj.longitude
            lat = self._obj.latitude
self._center = (float(lon.mean()), float(lat.mean()))
return self._center
def plot(self):
"""Plot data on a map."""
return "plotting!"
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import matplotlib.pyplot as plt
from math import sqrt
import numpy as np
from filterpy.kalman import ExtendedKalmanFilter
from numpy import array, eye, asarray
from filterpy.common import Saver
from filterpy.examples import RadarSim
from pytest import approx
from scipy.spatial.distance import mahalanobis as scipy_mahalanobis
DO_PLOT = False
def test_ekf():
def H_of(x):
""" compute Jacobian of H matrix for state x """
horiz_dist = x[0]
altitude = x[2]
denom = sqrt(horiz_dist**2 + altitude**2)
return array([[horiz_dist/denom, 0., altitude/denom]])
def hx(x):
""" takes a state variable and returns the measurement that would
correspond to that state.
"""
return sqrt(x[0]**2 + x[2]**2)
dt = 0.05
    process_error = 0.05
rk = ExtendedKalmanFilter(dim_x=3, dim_z=1)
rk.F = eye(3) + array ([[0, 1, 0],
[0, 0, 0],
[0, 0, 0]])*dt
def fx(x, dt):
return np.dot(rk.F, x)
rk.x = array([-10., 90., 1100.])
rk.R *= 10
rk.Q = array([[0, 0, 0],
[0, 1, 0],
[0, 0, 1]]) * 0.001
rk.P *= 50
rs = []
xs = []
radar = RadarSim(dt)
ps = []
pos = []
s = Saver(rk)
for i in range(int(20/dt)):
        z = radar.get_range(process_error)
pos.append(radar.pos)
        rk.update(asarray([z]), H_of, hx, R=hx(rk.x) * process_error)
ps.append(rk.P)
rk.predict()
xs.append(rk.x)
rs.append(z)
s.save()
# test mahalanobis
a = np.zeros(rk.y.shape)
maha = scipy_mahalanobis(a, rk.y, rk.SI)
assert rk.mahalanobis == approx(maha)
s.to_array()
xs = asarray(xs)
ps = asarray(ps)
rs = asarray(rs)
p_pos = ps[:, 0, 0]
p_vel = ps[:, 1, 1]
p_alt = ps[:, 2, 2]
pos = asarray(pos)
if DO_PLOT:
plt.subplot(311)
plt.plot(xs[:, 0])
plt.ylabel('position')
plt.subplot(312)
plt.plot(xs[:, 1])
plt.ylabel('velocity')
plt.subplot(313)
#plt.plot(xs[:,2])
#plt.ylabel('altitude')
plt.plot(p_pos)
plt.plot(-p_pos)
plt.plot(xs[:, 0] - pos)
if __name__ == '__main__':
test_ekf()
|
import responses
from django.test import SimpleTestCase
from weblate.trans.tests.utils import get_test_file
from weblate.utils.version import (
PYPI,
download_version_info,
flush_version_cache,
get_latest_version,
get_version_info,
)
class VersionTest(SimpleTestCase):
def setUp(self):
super().setUp()
flush_version_cache()
@staticmethod
def mock_pypi():
with open(get_test_file("pypi.json")) as handle:
responses.add(responses.GET, PYPI, body=handle.read())
@responses.activate
def test_download(self):
self.mock_pypi()
data = download_version_info()
self.assertEqual(len(data), 47)
@responses.activate
def test_get(self):
self.mock_pypi()
data = get_version_info()
self.assertEqual(len(data), 47)
responses.replace(responses.GET, PYPI, body="")
data = get_version_info()
self.assertEqual(len(data), 47)
@responses.activate
def test_latest(self):
self.mock_pypi()
latest = get_latest_version()
self.assertEqual(latest.version, "3.10.3")
|
from homeassistant.components.met.const import DOMAIN
from homeassistant.const import CONF_ELEVATION, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME
from tests.async_mock import patch
from tests.common import MockConfigEntry
async def init_integration(hass) -> MockConfigEntry:
"""Set up the Met integration in Home Assistant."""
entry_data = {
CONF_NAME: "test",
CONF_LATITUDE: 0,
CONF_LONGITUDE: 0,
CONF_ELEVATION: 0,
}
entry = MockConfigEntry(domain=DOMAIN, data=entry_data)
with patch(
"homeassistant.components.met.metno.MetWeatherData.fetching_data",
return_value=True,
):
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
return entry
|
import logging
from typing import Any, Dict
from garminconnect import (
GarminConnectAuthenticationError,
GarminConnectConnectionError,
GarminConnectTooManyRequestsError,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_ATTRIBUTION, CONF_ID
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.typing import HomeAssistantType
from .alarm_util import calculate_next_active_alarms
from .const import ATTRIBUTION, DOMAIN, GARMIN_ENTITY_LIST
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
hass: HomeAssistantType, entry: ConfigEntry, async_add_entities
) -> None:
"""Set up Garmin Connect sensor based on a config entry."""
garmin_data = hass.data[DOMAIN][entry.entry_id]
unique_id = entry.data[CONF_ID]
try:
await garmin_data.async_update()
except (
GarminConnectConnectionError,
GarminConnectAuthenticationError,
GarminConnectTooManyRequestsError,
) as err:
_LOGGER.error("Error occurred during Garmin Connect Client update: %s", err)
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unknown error occurred during Garmin Connect Client update")
entities = []
for (
sensor_type,
(name, unit, icon, device_class, enabled_by_default),
) in GARMIN_ENTITY_LIST.items():
_LOGGER.debug(
"Registering entity: %s, %s, %s, %s, %s, %s",
sensor_type,
name,
unit,
icon,
device_class,
enabled_by_default,
)
entities.append(
GarminConnectSensor(
garmin_data,
unique_id,
sensor_type,
name,
unit,
icon,
device_class,
enabled_by_default,
)
)
async_add_entities(entities, True)
class GarminConnectSensor(Entity):
"""Representation of a Garmin Connect Sensor."""
def __init__(
self,
data,
unique_id,
sensor_type,
name,
unit,
icon,
device_class,
enabled_default: bool = True,
):
"""Initialize."""
self._data = data
self._unique_id = unique_id
self._type = sensor_type
self._name = name
self._unit = unit
self._icon = icon
self._device_class = device_class
self._enabled_default = enabled_default
self._available = True
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def icon(self):
"""Return the icon to use in the frontend."""
return self._icon
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unique_id(self) -> str:
"""Return the unique ID for this sensor."""
return f"{self._unique_id}_{self._type}"
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return self._unit
@property
def device_state_attributes(self):
"""Return attributes for sensor."""
if not self._data.data:
return {}
attributes = {
"source": self._data.data["source"],
"last_synced": self._data.data["lastSyncTimestampGMT"],
ATTR_ATTRIBUTION: ATTRIBUTION,
}
if self._type == "nextAlarm":
attributes["next_alarms"] = calculate_next_active_alarms(
self._data.data[self._type]
)
return attributes
@property
def device_info(self) -> Dict[str, Any]:
"""Return device information."""
return {
"identifiers": {(DOMAIN, self._unique_id)},
"name": "Garmin Connect",
"manufacturer": "Garmin Connect",
}
@property
def entity_registry_enabled_default(self) -> bool:
"""Return if the entity should be enabled when first added to the entity registry."""
return self._enabled_default
@property
def available(self) -> bool:
"""Return True if entity is available."""
return self._available
@property
def device_class(self):
"""Return the device class of the sensor."""
return self._device_class
async def async_update(self):
"""Update the data from Garmin Connect."""
if not self.enabled:
return
await self._data.async_update()
data = self._data.data
if not data:
_LOGGER.error("Didn't receive data from Garmin Connect")
return
if data.get(self._type) is None:
_LOGGER.debug("Entity type %s not set in fetched data", self._type)
self._available = False
return
self._available = True
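        # Normalize the raw values: duration/seconds fields are shown as
        # minutes, and mass fields (presumably reported in grams) as kilograms.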
if "Duration" in self._type or "Seconds" in self._type:
self._state = data[self._type] // 60
elif "Mass" in self._type or self._type == "weight":
self._state = round((data[self._type] / 1000), 2)
elif (
self._type == "bodyFat" or self._type == "bodyWater" or self._type == "bmi"
):
self._state = round(data[self._type], 2)
elif self._type == "nextAlarm":
active_alarms = calculate_next_active_alarms(data[self._type])
if active_alarms:
self._state = active_alarms[0]
else:
self._available = False
else:
self._state = data[self._type]
_LOGGER.debug(
"Entity %s set to state %s %s", self._type, self._state, self._unit
)
|
import os
import click
import click_completion
import molecule
from molecule import command
from molecule.config import MOLECULE_DEBUG
click_completion.init()
LOCAL_CONFIG = os.path.expanduser('~/.config/molecule/config.yml')
ENV_FILE = '.env.yml'
@click.group()
@click.option(
'--debug/--no-debug',
default=MOLECULE_DEBUG,
help='Enable or disable debug mode. Default is disabled.')
@click.option(
'--base-config',
'-c',
default=LOCAL_CONFIG,
help=('Path to a base config. If provided Molecule will load '
"this config first, and deep merge each scenario's "
'molecule.yml on top. ({})').format(LOCAL_CONFIG))
@click.option(
'--env-file',
'-e',
default=ENV_FILE,
help=('The file to read variables from when rendering molecule.yml. '
'(.env.yml)'))
@click.version_option(version=molecule.__version__)
@click.pass_context
def main(ctx, debug, base_config, env_file): # pragma: no cover
"""
\b
     _____     _             _
    |     |___| |___ ___ _ _| |___
    | | | | . | | -_|  _| | | | -_|
    |_|_|_|___|_|___|___|___|_|___|
Molecule aids in the development and testing of Ansible roles.
Enable autocomplete issue:
eval "$(_MOLECULE_COMPLETE=source molecule)"
"""
ctx.obj = {}
ctx.obj['args'] = {}
ctx.obj['args']['debug'] = debug
ctx.obj['args']['base_config'] = base_config
ctx.obj['args']['env_file'] = env_file
main.add_command(command.cleanup.cleanup)
main.add_command(command.check.check)
main.add_command(command.converge.converge)
main.add_command(command.create.create)
main.add_command(command.dependency.dependency)
main.add_command(command.destroy.destroy)
main.add_command(command.idempotence.idempotence)
main.add_command(command.init.init)
main.add_command(command.lint.lint)
main.add_command(command.list.list)
main.add_command(command.login.login)
main.add_command(command.matrix.matrix)
main.add_command(command.prepare.prepare)
main.add_command(command.side_effect.side_effect)
main.add_command(command.syntax.syntax)
main.add_command(command.test.test)
main.add_command(command.verify.verify)
|
from test import unittest
from mock import patch
from diamond.metric import Metric
import urllib2
import configobj
import StringIO
import gzip
import contextlib
from diamond.handler.tsdb import TSDBHandler
@patch('diamond.handler.tsdb.urllib2.urlopen')
@patch('diamond.handler.tsdb.urllib2.Request')
class TestTSDBdHandler(unittest.TestCase):
def setUp(self):
self.url = 'http://127.0.0.1:4242/api/put'
def decompress(self, input):
infile = StringIO.StringIO()
infile.write(input)
with contextlib.closing(gzip.GzipFile(fileobj=infile, mode="r")) as f:
f.rewind()
out = f.read()
return out
def test_HTTPError(self, mock_urlopen, mock_request):
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
metric = Metric('servers.myhostname.cpu.cpu_count',
123, raw_value=123, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
header = {'Content-Type': 'application/json'}
exception = urllib2.HTTPError(url=self.url, code=404, msg="Error",
hdrs=header, fp=None)
handler.side_effect = exception
handler.process(metric)
def test_single_metric(self, mock_urlopen, mock_request):
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
metric = Metric('servers.myhostname.cpu.cpu_count',
123, raw_value=123, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
body = ('[{"timestamp": 1234567, "metric": "cpu.cpu_count", "value": '
'123, "tags": {"hostname": "myhostname"}}]')
header = {'Content-Type': 'application/json'}
mock_urlopen.assert_called_with(self.url, body, header)
def test_compression(self, mock_urlopen, mock_request):
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['compression'] = 1
metric = Metric('servers.myhostname.cpu.cpu_count',
123, raw_value=123, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
body = ('[{"timestamp": 1234567, "metric": "cpu.cpu_count", "value": '
'123, "tags": {"hostname": "myhostname"}}]')
passed_headers = mock_urlopen.call_args[0][2]
passed_body = mock_urlopen.call_args[0][1]
assert passed_headers['Content-Encoding'] == 'gzip'
assert passed_headers['Content-Type'] == 'application/json'
assert self.decompress(passed_body) == body
def test_user_password(self, mock_urlopen, mock_request):
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['user'] = 'John Doe'
config['password'] = '123456789'
metric = Metric('servers.myhostname.cpu.cpu_count',
123, raw_value=123, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
body = ('[{"timestamp": 1234567, "metric": "cpu.cpu_count", "value": '
'123, "tags": {"hostname": "myhostname"}}]')
header = {'Content-Type': 'application/json',
'Authorization': 'Basic Sm9obiBEb2U6MTIzNDU2Nzg5'}
mock_urlopen.assert_called_with(self.url, body, header)
def test_batch(self, mock_urlopen, mock_request):
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['batch'] = 2
metric = Metric('servers.myhostname.cpu.cpu_count',
123, raw_value=123, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
metric2 = Metric('servers.myhostname.cpu.cpu_time',
123, raw_value=456, timestamp=5678910,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
handler.process(metric2)
body = ('[{"timestamp": 1234567, "metric": "cpu.cpu_count", "value": '
'123, "tags": {"hostname": "myhostname"}}, {"timestamp": 567891'
'0, "metric": "cpu.cpu_time", "value": 123, "tags": {"hostname"'
': "myhostname"}}]')
header = {'Content-Type': 'application/json'}
mock_urlopen.assert_called_with(self.url, body, header)
def test_tags(self, mock_urlopen, mock_request):
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['tags'] = 'tag1=tagv1 tag2=tagv2'
metric = Metric('servers.myhostname.cpu.cpu_count',
123, raw_value=123, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
body = ('[{"timestamp": 1234567, "metric": "cpu.cpu_count", "value": '
'123, "tags": {"hostname": "myhostname", "tag1": "tagv1", '
'"tag2": "tagv2"}}]')
header = {'Content-Type': 'application/json'}
mock_urlopen.assert_called_with(self.url, body, header)
def test_prefix(self, mock_urlopen, mock_request):
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['prefix'] = 'diamond'
metric = Metric('servers.myhostname.cpu.cpu_count',
123, raw_value=123, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
body = ('[{"timestamp": 1234567, "metric": "diamond.cpu.cpu_count", '
'"value": 123, "tags": {"hostname": "myhostname"}}]')
header = {'Content-Type': 'application/json'}
mock_urlopen.assert_called_with(self.url, body, header)
def test_cpu_metrics_taghandling_default(self, mock_urlopen, mock_request):
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['tags'] = ['myFirstTag=myValue']
metric = Metric('servers.myhostname.cpu.cpu0.user',
123, raw_value=123, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
body = ('[{"timestamp": 1234567, "metric": "cpu.user", "value": '
'123, "tags": {"cpuId": "cpu0", "myFirstTag": "myValue", '
'"hostname": "myhostname"}}]')
header = {'Content-Type': 'application/json'}
mock_urlopen.assert_called_with(self.url, body, header)
def test_cpu_metrics_taghandling_0(self, mock_urlopen, mock_request):
"""
        cleanMetrics deactivated: cpu0 stays in the metric name instead of a cpuId tag
"""
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['tags'] = ['myFirstTag=myValue']
config['cleanMetrics'] = False
metric = Metric('servers.myhostname.cpu.cpu0.user',
123, raw_value=123, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
body = ('[{"timestamp": 1234567, "metric": "cpu.cpu0.user", "value": '
'123, "tags": {"myFirstTag": "myValue", "hostname": '
'"myhostname"}}]')
header = {'Content-Type': 'application/json'}
mock_urlopen.assert_called_with(self.url, body, header)
def test_cpu_metrics_taghandling_default2(self, mock_urlopen, mock_request):
"""
        aggregate default: cpu.total metrics are skipped and not published
"""
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['tags'] = ['myFirstTag=myValue']
metric = Metric('servers.myhostname.cpu.total.user',
123, raw_value=123, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
assert not mock_urlopen.called, "should not process"
def test_cpu_metrics_taghandling_1(self, mock_urlopen, mock_request):
"""
        cleanMetrics deactivated: cpu.total metrics are published untouched
"""
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['tags'] = ['myFirstTag=myValue']
config['cleanMetrics'] = False
metric = Metric('servers.myhostname.cpu.total.user',
123, raw_value=123, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
body = ('[{"timestamp": 1234567, "metric": "cpu.total.user", "value": '
'123, "tags": {"myFirstTag": "myValue", "hostname": '
'"myhostname"}}]')
header = {'Content-Type': 'application/json'}
mock_urlopen.assert_called_with(self.url, body, header)
def test_cpu_metrics_taghandling_2(self, mock_urlopen, mock_request):
"""
        skipAggregates deactivated: cpu.total metrics are cleaned and published with cpuId=total
"""
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['tags'] = ['myFirstTag=myValue']
config['cleanMetrics'] = True
config['skipAggregates'] = False
metric = Metric('servers.myhostname.cpu.total.user',
123, raw_value=123, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
body = ('[{"timestamp": 1234567, "metric": "cpu.user", "value": '
'123, "tags": {"cpuId": "total", "myFirstTag": "myValue", '
'"hostname": "myhostname"}}]')
header = {'Content-Type': 'application/json'}
mock_urlopen.assert_called_with(self.url, body, header)
def test_haproxy_metrics_default(self, mock_urlopen, mock_request):
"""
taghandling default
"""
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['tags'] = ['myFirstTag=myValue']
metric = Metric('servers.myhostname.haproxy.SOME-BACKEND.SOME-SERVER.'
'bin',
123, raw_value=123, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
body = ('[{"timestamp": 1234567, "metric": "haproxy.bin",'
' "value": 123, "tags": {"backend": "SOME-BACKEND",'
' "myFirstTag": "myValue", "hostname": "myhostname", "server": '
'"SOME-SERVER"}}]')
header = {'Content-Type': 'application/json'}
mock_urlopen.assert_called_with(self.url, body, header)
def test_haproxy_metrics(self, mock_urlopen, mock_request):
"""
taghandling deactivate
"""
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['tags'] = ['myFirstTag=myValue']
config['cleanMetrics'] = False
metric = Metric('servers.myhostname.haproxy.SOME-BACKEND.SOME-SERVER.'
'bin',
123, raw_value=123, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
body = ('[{"timestamp": 1234567, "metric": "haproxy.SOME-BACKEND.SOME-'
'SERVER.bin", "value": 123, "tags": {"myFirstTag": "myValue", '
'"hostname": "myhostname"}}]')
header = {'Content-Type': 'application/json'}
mock_urlopen.assert_called_with(self.url, body, header)
def test_diskspace_metrics_default(self, mock_urlopen, mock_request):
"""
taghandling default
"""
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['tags'] = ['myFirstTag=myValue']
metric = Metric('servers.myhostname.diskspace.MOUNT_POINT.byte_percent'
'free',
80, raw_value=80, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
body = ('[{"timestamp": 1234567, "metric": "diskspace.'
'byte_percentfree", "value": 80, "tags": {"mountpoint": '
'"MOUNT_POINT", "myFirstTag": "myValue", "hostname": '
'"myhostname"}}]')
header = {'Content-Type': 'application/json'}
mock_urlopen.assert_called_with(self.url, body, header)
def test_diskspace_metrics(self, mock_urlopen, mock_request):
"""
taghandling deactivate
"""
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['tags'] = ['myFirstTag=myValue']
config['cleanMetrics'] = False
metric = Metric('servers.myhostname.diskspace.MOUNT_POINT.byte_'
'percentfree',
80, raw_value=80, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
body = ('[{"timestamp": 1234567, "metric": "diskspace.MOUNT_POINT'
'.byte_percentfree", "value": 80, "tags": {"myFirstTag": '
'"myValue", "hostname": "myhostname"}}]')
header = {'Content-Type': 'application/json'}
mock_urlopen.assert_called_with(self.url, body, header)
def test_iostat_metrics_default(self, mock_urlopen, mock_request):
"""
taghandling default
"""
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['tags'] = ['myFirstTag=myValue']
metric = Metric('servers.myhostname.iostat.DEV.io_in_progress',
80, raw_value=80, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
body = ('[{"timestamp": 1234567, "metric": "iostat.io_in_progress", '
'"value": 80, "tags": {"device": "DEV", "myFirstTag": '
'"myValue", "hostname": "myhostname"}}]')
header = {'Content-Type': 'application/json'}
mock_urlopen.assert_called_with(self.url, body, header)
def test_iostat_metrics(self, mock_urlopen, mock_request):
"""
taghandling deactivate
"""
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['tags'] = ['myFirstTag=myValue']
config['cleanMetrics'] = False
metric = Metric('servers.myhostname.iostat.DEV.io_in_progress',
80, raw_value=80, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
body = ('[{"timestamp": 1234567, "metric": "iostat.DEV.io_in_progress"'
', "value": 80, "tags": {"myFirstTag": "myValue", "hostname": '
'"myhostname"}}]')
header = {'Content-Type': 'application/json'}
mock_urlopen.assert_called_with(self.url, body, header)
def test_network_metrics_default(self, mock_urlopen, mock_request):
"""
taghandling default
"""
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['tags'] = ['myFirstTag=myValue']
metric = Metric('servers.myhostname.network.IF.rx_packets',
80, raw_value=80, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
body = ('[{"timestamp": 1234567, "metric": "network.rx_packets", '
'"value": 80, "tags": {"interface": "IF", "myFirstTag": '
'"myValue", "hostname": "myhostname"}}]')
header = {'Content-Type': 'application/json'}
mock_urlopen.assert_called_with(self.url, body, header)
def test_network_metrics(self, mock_urlopen, mock_request):
"""
taghandling deactivate
"""
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['tags'] = ['myFirstTag=myValue']
config['cleanMetrics'] = False
metric = Metric('servers.myhostname.network.IF.rx_packets',
80, raw_value=80, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
body = ('[{"timestamp": 1234567, "metric": "network.IF.rx_packets", '
'"value": 80, "tags": {"myFirstTag": "myValue", "hostname": '
'"myhostname"}}]')
header = {'Content-Type': 'application/json'}
mock_urlopen.assert_called_with(self.url, body, header)
|
from django.core.exceptions import ImproperlyConfigured
class CallableQuerysetMixin(object):
"""
    Mixin for handling a callable queryset,
    which forces the queryset to be re-evaluated on each access.
Related to issue http://code.djangoproject.com/ticket/8378
"""
queryset = None
def get_queryset(self):
"""
Check that the queryset is defined and call it.
"""
if self.queryset is None:
raise ImproperlyConfigured(
"'%s' must define 'queryset'" % self.__class__.__name__)
return self.queryset()
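# Usage sketch (hypothetical ``Entry`` model and generic ``ListView``): assign
# the manager method itself, not its result, so the queryset is re-evaluated
# on every request instead of being frozen at class-definition time.
#
#     class EntryListView(CallableQuerysetMixin, ListView):
#         queryset = Entry.objects.all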
|
from kalliope.core.Utils import Utils
class APIResponse(object):
def __init__(self):
self.user_order = None
self.list_processed_matched_synapse = list()
self.status = None
def __str__(self):
return str(self.serialize())
def serialize(self):
"""
        This method serializes this object in a proper way.
:return: A dict of name and parameters
:rtype: Dict
"""
self.user_order = Utils.encode_text_utf8(self.user_order)
return {
'user_order': self.user_order,
'matched_synapses': [e.serialize() for e in self.list_processed_matched_synapse],
'status': self.status
}
|
from formtools.wizard.views import normalize_name
from django.contrib.auth import get_user_model
from django.core.exceptions import ValidationError
from django.forms import widgets
from django.utils.encoding import force_str
from django.utils.functional import cached_property
from django.utils.safestring import mark_safe
from django.utils.translation import gettext_lazy as _
from cms.utils.helpers import classproperty
from djng.forms import fields, NgModelFormMixin, NgFormValidationMixin
from djng.styling.bootstrap3.forms import Bootstrap3Form, Bootstrap3ModelForm
class DialogFormMixin(NgModelFormMixin, NgFormValidationMixin):
required_css_class = 'djng-field-required'
def __init__(self, *args, **kwargs):
kwargs.pop('cart', None) # cart object must be removed, otherwise underlying methods complain
auto_name = self.form_name ## .replace('_form', '')
kwargs.setdefault('auto_id', '{}-%s'.format(auto_name))
super().__init__(*args, **kwargs)
@classproperty
def form_name(cls):
return normalize_name(cls.__name__)
def clean(self):
cleaned_data = dict(super().clean())
cleaned_data.pop('plugin_id', None)
if cleaned_data.pop('plugin_order', None) is None:
msg = "Field 'plugin_order' is a hidden but required field in each form inheriting from DialogFormMixin"
raise ValidationError(msg)
return cleaned_data
def as_text(self):
"""
Dialog Forms rendered as summary just display their values instead of input fields.
This is useful to render a summary of a previously filled out form.
"""
try:
return mark_safe(self.instance.as_text())
except (AttributeError, TypeError):
output = []
for name in self.fields.keys():
bound_field = self[name]
value = bound_field.value()
if bound_field.is_hidden:
continue
if isinstance(value, (list, tuple)):
line = []
cast_to = type(tuple(bound_field.field.choices)[0][0])
for v in value:
try:
line.append(dict(bound_field.field.choices)[cast_to(v)])
except (AttributeError, KeyError):
pass
output.append(force_str(', '.join(line)))
elif value:
try:
value = dict(bound_field.field.choices)[value]
except (AttributeError, KeyError):
pass
output.append(force_str(value))
return mark_safe('\n'.join(output))
def get_response_data(self):
"""
        Hook to respond with an updated version of the form data. This response
        then overrides the form's content.
"""
class DialogForm(DialogFormMixin, Bootstrap3Form):
"""
Base class for all dialog forms used with a DialogFormPlugin.
"""
label_css_classes = 'control-label font-weight-bold'
plugin_id = fields.CharField(
widget=widgets.HiddenInput,
required=False,
)
plugin_order = fields.CharField(
widget=widgets.HiddenInput,
)
class DialogModelForm(DialogFormMixin, Bootstrap3ModelForm):
"""
Base class for all dialog model forms used with a DialogFormPlugin.
"""
plugin_id = fields.CharField(
widget=widgets.HiddenInput,
required=False,
)
plugin_order = fields.CharField(widget=widgets.HiddenInput)
@cached_property
def field_css_classes(self):
css_classes = {'*': getattr(Bootstrap3ModelForm, 'field_css_classes')}
for name, field in self.fields.items():
if not field.widget.is_hidden:
css_classes[name] = [css_classes['*']]
css_classes[name].append('{}-{}'.format(self.scope_prefix, name))
return css_classes
class UniqueEmailValidationMixin:
"""
A mixin added to forms which have to validate for the uniqueness of email addresses.
"""
def clean_email(self):
if not self.cleaned_data['email']:
raise ValidationError(_("Please provide a valid e-mail address"))
# check for uniqueness of email address
if get_user_model().objects.filter(is_active=True, email=self.cleaned_data['email']).exists():
msg = _("A customer with the e-mail address '{email}' already exists.\n"
"If you have used this address previously, try to reset the password.")
raise ValidationError(msg.format(**self.cleaned_data))
return self.cleaned_data['email']
|
from homeassistant.helpers.entity import ToggleEntity
from . import (
ATTR_DISCOVER_CONFIG,
ATTR_DISCOVER_DEVICES,
DATA_TELLSTICK,
DEFAULT_SIGNAL_REPETITIONS,
TellstickDevice,
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up Tellstick switches."""
if discovery_info is None or discovery_info[ATTR_DISCOVER_DEVICES] is None:
return
# Allow platform level override, fallback to module config
signal_repetitions = discovery_info.get(
ATTR_DISCOVER_CONFIG, DEFAULT_SIGNAL_REPETITIONS
)
add_entities(
[
TellstickSwitch(hass.data[DATA_TELLSTICK][tellcore_id], signal_repetitions)
for tellcore_id in discovery_info[ATTR_DISCOVER_DEVICES]
],
True,
)
class TellstickSwitch(TellstickDevice, ToggleEntity):
"""Representation of a Tellstick switch."""
def _parse_ha_data(self, kwargs):
"""Turn the value from HA into something useful."""
def _parse_tellcore_data(self, tellcore_data):
"""Turn the value received from tellcore into something useful."""
def _update_model(self, new_state, data):
"""Update the device entity state to match the arguments."""
self._state = new_state
def _send_device_command(self, requested_state, requested_data):
"""Let tellcore update the actual device to the requested state."""
if requested_state:
self._tellcore_device.turn_on()
else:
self._tellcore_device.turn_off()
@property
def force_update(self) -> bool:
"""Will trigger anytime the state property is updated."""
return True
|
from pydeconz.sensor import CarbonMonoxide, Fire, OpenClose, Presence, Vibration, Water
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_GAS,
DEVICE_CLASS_MOISTURE,
DEVICE_CLASS_MOTION,
DEVICE_CLASS_OPENING,
DEVICE_CLASS_SMOKE,
DEVICE_CLASS_VIBRATION,
DOMAIN,
BinarySensorEntity,
)
from homeassistant.const import ATTR_TEMPERATURE
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import ATTR_DARK, ATTR_ON, NEW_SENSOR
from .deconz_device import DeconzDevice
from .gateway import get_gateway_from_config_entry
ATTR_ORIENTATION = "orientation"
ATTR_TILTANGLE = "tiltangle"
ATTR_VIBRATIONSTRENGTH = "vibrationstrength"
DEVICE_CLASS = {
CarbonMonoxide: DEVICE_CLASS_GAS,
Fire: DEVICE_CLASS_SMOKE,
OpenClose: DEVICE_CLASS_OPENING,
Presence: DEVICE_CLASS_MOTION,
Vibration: DEVICE_CLASS_VIBRATION,
Water: DEVICE_CLASS_MOISTURE,
}
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the deCONZ binary sensor."""
gateway = get_gateway_from_config_entry(hass, config_entry)
gateway.entities[DOMAIN] = set()
@callback
def async_add_sensor(sensors):
"""Add binary sensor from deCONZ."""
entities = []
for sensor in sensors:
if (
sensor.BINARY
and sensor.uniqueid not in gateway.entities[DOMAIN]
and (
gateway.option_allow_clip_sensor
or not sensor.type.startswith("CLIP")
)
):
entities.append(DeconzBinarySensor(sensor, gateway))
if entities:
async_add_entities(entities)
gateway.listeners.append(
async_dispatcher_connect(
hass, gateway.async_signal_new_device(NEW_SENSOR), async_add_sensor
)
)
async_add_sensor(
[gateway.api.sensors[key] for key in sorted(gateway.api.sensors, key=int)]
)
class DeconzBinarySensor(DeconzDevice, BinarySensorEntity):
"""Representation of a deCONZ binary sensor."""
TYPE = DOMAIN
@callback
def async_update_callback(self, force_update=False):
"""Update the sensor's state."""
keys = {"on", "reachable", "state"}
if force_update or self._device.changed_keys.intersection(keys):
super().async_update_callback(force_update=force_update)
@property
def is_on(self):
"""Return true if sensor is on."""
return self._device.is_tripped
@property
def device_class(self):
"""Return the class of the sensor."""
return DEVICE_CLASS.get(type(self._device))
@property
def device_state_attributes(self):
"""Return the state attributes of the sensor."""
attr = {}
if self._device.on is not None:
attr[ATTR_ON] = self._device.on
if self._device.secondary_temperature is not None:
attr[ATTR_TEMPERATURE] = self._device.secondary_temperature
if self._device.type in Presence.ZHATYPE:
if self._device.dark is not None:
attr[ATTR_DARK] = self._device.dark
elif self._device.type in Vibration.ZHATYPE:
attr[ATTR_ORIENTATION] = self._device.orientation
attr[ATTR_TILTANGLE] = self._device.tiltangle
attr[ATTR_VIBRATIONSTRENGTH] = self._device.vibrationstrength
return attr
|
import string
import pytest
import hypothesis
import hypothesis.strategies as hst
from PyQt5.QtCore import QUrl
from qutebrowser.utils import urlmatch
@pytest.mark.parametrize('pattern, error', [
### Chromium: kMissingSchemeSeparator
## TEST(ExtensionURLPatternTest, ParseInvalid)
# ("http", "No scheme given"),
("http:", "Invalid port: Port is empty"),
("http:/", "Invalid port: Port is empty"),
("about://", "Pattern without path"),
("http:/bar", "Invalid port: Port is empty"),
### Chromium: kEmptyHost
## TEST(ExtensionURLPatternTest, ParseInvalid)
("http://", "Pattern without host"),
("http:///", "Pattern without host"),
("http://:1234/", "Pattern without host"),
("http://*./", "Pattern without host"),
## TEST(ExtensionURLPatternTest, IPv6Patterns)
("http://[]:8888/*", "Pattern without host"),
### Chromium: kEmptyPath
## TEST(ExtensionURLPatternTest, ParseInvalid)
# We deviate from Chromium and allow this for ease of use
# ("http://bar", "..."),
### Chromium: kInvalidHost
## TEST(ExtensionURLPatternTest, ParseInvalid)
("http://\0www/", "May not contain NUL byte"),
## TEST(ExtensionURLPatternTest, IPv6Patterns)
# No closing bracket (`]`).
("http://[2607:f8b0:4005:805::200e/*", "Invalid IPv6 URL"),
# Two closing brackets (`]]`).
pytest.param("http://[2607:f8b0:4005:805::200e]]/*", "Invalid IPv6 URL", marks=pytest.mark.xfail(reason="https://bugs.python.org/issue34360")),
# Two open brackets (`[[`).
("http://[[2607:f8b0:4005:805::200e]/*", r"""Expected '\]' to match '\[' in hostname; source was "\[2607:f8b0:4005:805::200e"; host = """""),
# Too few colons in the last chunk.
("http://[2607:f8b0:4005:805:200e]/*", 'Invalid IPv6 address; source was "2607:f8b0:4005:805:200e"; host = ""'),
# Non-hex piece.
("http://[2607:f8b0:4005:805:200e:12:bogus]/*", 'Invalid IPv6 address; source was "2607:f8b0:4005:805:200e:12:bogus"; host = ""'),
### Chromium: kInvalidHostWildcard
## TEST(ExtensionURLPatternTest, ParseInvalid)
("http://*foo/bar", "Invalid host wildcard"),
("http://foo.*.bar/baz", "Invalid host wildcard"),
("http://fo.*.ba:123/baz", "Invalid host wildcard"),
("http://foo.*/bar", "Invalid host wildcard"),
### Chromium: kInvalidPort
## TEST(ExtensionURLPatternTest, Ports)
("http://foo:/", "Invalid port: Port is empty"),
("http://*.foo:/", "Invalid port: Port is empty"),
("http://foo:com/", "Invalid port: .* 'com'"),
("http://foo:123456/", "Invalid port: Port out of range 0-65535"),
("http://foo:80:80/monkey", "Invalid port: .* '80:80'"),
("chrome://foo:1234/bar", "Ports are unsupported with chrome scheme"),
# No port specified, but port separator.
("http://[2607:f8b0:4005:805::200e]:/*", "Invalid port: Port is empty"),
### Additional tests
("http://[", "Invalid IPv6 URL"),
("http://[fc2e::bb88::edac]", 'Invalid IPv6 address; source was "fc2e::bb88::edac"; host = ""'),
("http://[fc2e:0e35:bb88::edac:fc2e:0e35:bb88:edac]", 'Invalid IPv6 address; source was "fc2e:0e35:bb88::edac:fc2e:0e35:bb88:edac"; host = ""'),
("http://[fc2e:0e35:bb88:af:edac:fc2e:0e35:bb88:edac]", 'Invalid IPv6 address; source was "fc2e:0e35:bb88:af:edac:fc2e:0e35:bb88:edac"; host = ""'),
("http://[127.0.0.1:fc2e::bb88:edac]", r'Invalid IPv6 address; source was "127\.0\.0\.1:fc2e::bb88:edac'),
("http://[fc2e::bb88", "Invalid IPv6 URL"),
("http://[fc2e:bb88:edac]", 'Invalid IPv6 address; source was "fc2e:bb88:edac"; host = ""'),
("http://[fc2e:bb88:edac::z]", 'Invalid IPv6 address; source was "fc2e:bb88:edac::z"; host = ""'),
("http://[fc2e:bb88:edac::2]:2a2", "Invalid port: .* '2a2'"),
("://", "Missing scheme"),
])
def test_invalid_patterns(pattern, error):
with pytest.raises(urlmatch.ParseError, match=error):
urlmatch.UrlPattern(pattern)
@pytest.mark.parametrize('host', ['.', ' ', ' .', '. ', '. .', '. . .', ' . '])
def test_whitespace_hosts(host):
"""Test that whitespace dot hosts are invalid.
This is a deviation from Chromium.
"""
template = 'https://{}/*'
url = QUrl(template.format(host))
assert not url.isValid()
with pytest.raises(urlmatch.ParseError,
match='Invalid host|Pattern without host'):
urlmatch.UrlPattern(template.format(host))
@pytest.mark.parametrize('pattern, port', [
## TEST(ExtensionURLPatternTest, Ports)
("http://foo:1234/", 1234),
("http://foo:1234/bar", 1234),
("http://*.foo:1234/", 1234),
("http://*.foo:1234/bar", 1234),
("http://*:1234/", 1234),
("http://*:*/", None),
("http://foo:*/", None),
("file://foo:1234/bar", None),
# Port-like strings in the path should not trigger a warning.
("http://*/:1234", None),
("http://*.foo/bar:1234", None),
("http://foo/bar:1234/path", None),
])
def test_port(pattern, port):
up = urlmatch.UrlPattern(pattern)
assert up._port == port
@pytest.mark.parametrize('pattern, path', [
("http://foo/", '/'),
("http://foo/*", None),
])
def test_parse_path(pattern, path):
up = urlmatch.UrlPattern(pattern)
assert up._path == path
@pytest.mark.parametrize('pattern, scheme, host, path', [
("http://example.com", 'http', 'example.com', None), # no path
("example.com/path", None, 'example.com', '/path'), # no scheme
("example.com", None, 'example.com', None), # no scheme and no path
("example.com:1234", None, 'example.com', None), # no scheme/path but port
("data:monkey", 'data', None, 'monkey'), # existing scheme
])
def test_lightweight_patterns(pattern, scheme, host, path):
"""Make sure we can leave off parts of a URL.
This is a deviation from Chromium to make patterns more user-friendly.
"""
up = urlmatch.UrlPattern(pattern)
assert up._scheme == scheme
assert up.host == host
assert up._path == path
class TestMatchAllPagesForGivenScheme:
"""Based on TEST(ExtensionURLPatternTest, Match1)."""
@pytest.fixture
def up(self):
return urlmatch.UrlPattern("http://*/*")
def test_attrs(self, up):
assert up._scheme == 'http'
assert up.host is None
assert up._match_subdomains
assert not up._match_all
assert up._path is None
@pytest.mark.parametrize('url, expected', [
("http://google.com", True),
("http://yahoo.com", True),
("http://google.com/foo", True),
("https://google.com", False),
("http://74.125.127.100/search", True),
# Additional tests
("http://google.com:80", True),
("http://google.com.", True),
("http://[fc2e:0e35:bb88::edac]", True),
("http://[fc2e:e35:bb88::edac]", True),
("http://[fc2e:e35:bb88::127.0.0.1]", True),
("http://[::1]/bar", True),
])
def test_urls(self, up, url, expected):
assert up.matches(QUrl(url)) == expected
class TestMatchAllDomains:
"""Based on TEST(ExtensionURLPatternTest, Match2)."""
@pytest.fixture
def up(self):
return urlmatch.UrlPattern("https://*/foo*")
def test_attrs(self, up):
assert up._scheme == 'https'
assert up.host is None
assert up._match_subdomains
assert not up._match_all
assert up._path == '/foo*'
@pytest.mark.parametrize('url, expected', [
("https://google.com/foo", True),
("https://google.com/foobar", True),
("http://google.com/foo", False),
("https://google.com/", False),
])
def test_urls(self, up, url, expected):
assert up.matches(QUrl(url)) == expected
class TestMatchSubdomains:
"""Based on TEST(ExtensionURLPatternTest, Match3)."""
@pytest.fixture
def up(self):
return urlmatch.UrlPattern("http://*.google.com/foo*bar")
def test_attrs(self, up):
assert up._scheme == 'http'
assert up.host == 'google.com'
assert up._match_subdomains
assert not up._match_all
assert up._path == '/foo*bar'
@pytest.mark.parametrize('url, expected', [
("http://google.com/foobar", True),
# FIXME The ?bar seems to be treated as path by GURL but as query by
# QUrl.
# ("http://www.google.com/foo?bar", True),
("http://monkey.images.google.com/foooobar", True),
("http://yahoo.com/foobar", False),
])
def test_urls(self, up, url, expected):
assert up.matches(QUrl(url)) == expected
class TestMatchGlobEscaping:
"""Based on TEST(ExtensionURLPatternTest, Match5)."""
@pytest.fixture
def up(self):
return urlmatch.UrlPattern(r"file:///foo-bar\*baz")
def test_attrs(self, up):
assert up._scheme == 'file'
assert up.host is None
assert not up._match_subdomains
assert not up._match_all
assert up._path == r'/foo-bar\*baz'
@pytest.mark.parametrize('url, expected', [
## TEST(ExtensionURLPatternTest, Match5)
# We use - instead of ? so it doesn't get treated as query
(r"file:///foo-bar\hellobaz", True),
(r"file:///fooXbar\hellobaz", False),
])
def test_urls(self, up, url, expected):
assert up.matches(QUrl(url)) == expected
class TestMatchIpAddresses:
"""Based on TEST(ExtensionURLPatternTest, Match6/7)."""
@pytest.mark.parametrize('pattern, host, match_subdomains', [
("http://127.0.0.1/*", "127.0.0.1", False),
("http://*.0.0.1/*", "0.0.1", True),
## Others
("http://[::1]/*", "::1", False),
("http://[0::1]/*", "::1", False),
("http://[::01]/*", "::1", False),
("http://[0:0:0:0:20::1]/*", "::20:0:0:1", False),
])
def test_attrs(self, pattern, host, match_subdomains):
up = urlmatch.UrlPattern(pattern)
assert up._scheme == 'http'
assert up.host == host
assert up._match_subdomains == match_subdomains
assert not up._match_all
assert up._path is None
@pytest.mark.parametrize('pattern, expected', [
("http://127.0.0.1/*", True),
# No subdomain matching is done with IPs
("http://*.0.0.1/*", False),
])
def test_urls(self, pattern, expected):
up = urlmatch.UrlPattern(pattern)
assert up.matches(QUrl("http://127.0.0.1")) == expected
## FIXME Missing TEST(ExtensionURLPatternTest, Match8) (unicode)?
class TestMatchChromeUrls:
"""Based on TEST(ExtensionURLPatternTest, Match9/10)."""
@pytest.fixture
def up(self):
return urlmatch.UrlPattern("chrome://favicon/*")
def test_attrs(self, up):
assert up._scheme == 'chrome'
assert up.host == 'favicon'
assert not up._match_subdomains
assert not up._match_all
assert up._path is None
@pytest.mark.parametrize('url, expected', [
("chrome://favicon/http://google.com", True),
("chrome://favicon/https://google.com", True),
("chrome://history", False),
])
def test_urls(self, up, url, expected):
assert up.matches(QUrl(url)) == expected
class TestMatchAnything:
"""Based on TEST(ExtensionURLPatternTest, Match10/11)."""
@pytest.fixture(params=['*://*/*', '*://*:*/*', '<all_urls>', '*://*'])
def up(self, request):
return urlmatch.UrlPattern(request.param)
def test_attrs_common(self, up):
assert up._scheme is None
assert up.host is None
assert up._path is None
def test_attrs_wildcard(self):
up = urlmatch.UrlPattern('*://*/*')
assert up._match_subdomains
assert not up._match_all
def test_attrs_all(self):
up = urlmatch.UrlPattern('<all_urls>')
assert not up._match_subdomains
assert up._match_all
@pytest.mark.parametrize('url', [
"http://127.0.0.1",
# We deviate from Chromium as we allow other schemes as well
"chrome://favicon/http://google.com",
"file:///foo/bar",
"file://localhost/foo/bar",
"qute://version",
"about:blank",
"data:text/html;charset=utf-8,<html>asdf</html>",
"javascript:",
])
def test_urls(self, up, url):
assert up.matches(QUrl(url))
@pytest.mark.parametrize('pattern, url, expected', [
("about:*", "about:blank", True),
("about:blank", "about:blank", True),
("about:*", "about:version", True),
("data:*", "data:monkey", True),
("javascript:*", "javascript:atemyhomework", True),
("data:*", "about:blank", False),
])
def test_special_schemes(pattern, url, expected):
"""Based on TEST(ExtensionURLPatternTest, Match13)."""
assert urlmatch.UrlPattern(pattern).matches(QUrl(url)) == expected
class TestFileScheme:
"""Based on TEST(ExtensionURLPatternTest, Match14/15/16)."""
@pytest.fixture(params=[
'file:///foo*',
'file://foo*',
# FIXME This doesn't pass all tests
pytest.param('file://localhost/foo*', marks=pytest.mark.skip(
reason="We're not handling this correctly in all cases"))
])
def up(self, request):
return urlmatch.UrlPattern(request.param)
def test_attrs(self, up):
assert up._scheme == 'file'
assert up.host is None
assert not up._match_subdomains
assert not up._match_all
assert up._path == '/foo*'
@pytest.mark.parametrize('url, expected', [
("file://foo", False),
("file://foobar", False),
("file:///foo", True),
("file:///foobar", True),
("file://localhost/foo", True),
])
def test_urls(self, up, url, expected):
assert up.matches(QUrl(url)) == expected
class TestMatchSpecificPort:
"""Based on TEST(ExtensionURLPatternTest, Match17)."""
@pytest.fixture
def up(self):
return urlmatch.UrlPattern("http://www.example.com:80/foo")
def test_attrs(self, up):
assert up._scheme == 'http'
assert up.host == 'www.example.com'
assert not up._match_subdomains
assert not up._match_all
assert up._path == '/foo'
assert up._port == 80
@pytest.mark.parametrize('url, expected', [
("http://www.example.com:80/foo", True),
("http://www.example.com/foo", True),
("http://www.example.com:8080/foo", False),
])
def test_urls(self, up, url, expected):
assert up.matches(QUrl(url)) == expected
class TestExplicitPortWildcard:
"""Based on TEST(ExtensionURLPatternTest, Match18)."""
@pytest.fixture
def up(self):
return urlmatch.UrlPattern("http://www.example.com:*/foo")
def test_attrs(self, up):
assert up._scheme == 'http'
assert up.host == 'www.example.com'
assert not up._match_subdomains
assert not up._match_all
assert up._path == '/foo'
assert up._port is None
@pytest.mark.parametrize('url, expected', [
("http://www.example.com:80/foo", True),
("http://www.example.com/foo", True),
("http://www.example.com:8080/foo", True),
])
def test_urls(self, up, url, expected):
assert up.matches(QUrl(url)) == expected
def test_ignore_missing_slashes():
"""Based on TEST(ExtensionURLPatternTest, IgnoreMissingBackslashes)."""
pattern1 = urlmatch.UrlPattern("http://www.example.com/example")
pattern2 = urlmatch.UrlPattern("http://www.example.com/example/*")
url1 = QUrl('http://www.example.com/example')
url2 = QUrl('http://www.example.com/example/')
# Same patterns should match same URLs.
assert pattern1.matches(url1)
    assert pattern2.matches(url2)
    # The non-terminated path should match the terminated pattern.
assert pattern2.matches(url1)
# The terminated path however should not match the unterminated pattern.
assert not pattern1.matches(url2)
def test_trailing_slash():
"""Contrary to Chromium, we allow to leave off a trailing slash."""
url = QUrl('http://www.example.com/')
pattern = urlmatch.UrlPattern('http://www.example.com')
assert pattern.matches(url)
@pytest.mark.parametrize('pattern', ['*://example.com/*',
'*://example.com./*'])
@pytest.mark.parametrize('url', ['http://example.com/',
'http://example.com./'])
def test_trailing_dot_domain(pattern, url):
"""Both patterns should match trailing dot and non trailing dot domains.
More information about this not obvious behavior can be found in [1].
RFC 1738 [2] specifies clearly that the <host> part of a URL is supposed to
contain a fully qualified domain name:
3.1. Common Internet Scheme Syntax
//<user>:<password>@<host>:<port>/<url-path>
host
The fully qualified domain name of a network host
[1] http://www.dns-sd.org./TrailingDotsInDomainNames.html
[2] http://www.ietf.org/rfc/rfc1738.txt
"""
assert urlmatch.UrlPattern(pattern).matches(QUrl(url))
class TestUncanonicalizedUrl:
"""Test that URLPattern properly canonicalizes uncanonicalized hosts.
Equivalent to Chromium's TEST(ExtensionURLPatternTest, UncanonicalizedUrl).
"""
@pytest.mark.parametrize('url', [
'https://google.com',
'https://maps.google.com',
])
def test_lowercase(self, url):
"""Simple case: canonicalization should lowercase the host.
This is important, since gOoGle.com would never be matched in
practice.
"""
pattern = urlmatch.UrlPattern('*://*.gOoGle.com/*')
assert pattern.matches(QUrl(url))
@pytest.mark.parametrize('url', [
'https://ɡoogle.com',
'https://xn--oogle-qmc.com/',
])
def test_punycode(self, url):
"""Trickier case: internationalization with UTF8 characters.
The first 'g' isn't actually a 'g'.
"""
pattern = urlmatch.UrlPattern('https://*.ɡoogle.com/*')
assert pattern.matches(QUrl(url))
@pytest.mark.xfail(reason="Gets accepted by urllib.parse")
def test_failing_canonicalization(self):
"""Sometimes, canonicalization can fail.
Such as here, where we have invalid unicode characters. In that case,
URLPattern parsing should also fail.
This fails in Chromium, but Python's urllib.parse.urlparse happily
tries to parse it...
"""
with pytest.raises(urlmatch.ParseError):
urlmatch.UrlPattern('https://\xef\xb7\x90zyx.com/*')
@pytest.mark.xfail(reason="We return the original string")
@pytest.mark.parametrize('pattern_str, string, host', [
('*://*.gOoGle.com/*',
'*://*.google.com/*',
'google.com'),
('https://*.ɡoogle.com/*',
'https://*.xn--oogle-qmc.com/*',
'xn--oogle-qmc.com'),
])
def test_str(self, pattern_str, string, host):
"""Test that str() and .host get the canonicalized string.
Contrary to Chromium, we return the original values here.
"""
pattern = urlmatch.UrlPattern(pattern_str)
assert str(pattern) == string
assert pattern.host == host
def test_urlpattern_benchmark(benchmark):
url = QUrl('https://www.example.com/barfoobar')
def run():
up = urlmatch.UrlPattern('https://*.example.com/*foo*')
up.matches(url)
benchmark(run)
URL_TEXT = hst.text(alphabet=string.ascii_letters)
@hypothesis.given(pattern=hst.builds(
lambda *a: ''.join(a),
# Scheme
hst.sampled_from(['*', 'http', 'file']),
# Separator
hst.sampled_from([':', '://']),
# Host
hst.one_of(hst.just('*'),
hst.builds(lambda *a: ''.join(a), hst.just('*.'), URL_TEXT),
URL_TEXT),
# Port
hst.one_of(hst.just(''),
hst.builds(lambda *a: ''.join(a), hst.just(':'),
hst.integers(min_value=0,
max_value=65535).map(str))),
# Path
hst.one_of(hst.just(''),
hst.builds(lambda *a: ''.join(a), hst.just('/'), URL_TEXT))
))
def test_urlpattern_hypothesis(pattern):
try:
up = urlmatch.UrlPattern(pattern)
except urlmatch.ParseError:
return
up.matches(QUrl('https://www.example.com/'))
@pytest.mark.parametrize('text1, text2, equal', [
# schemes
("http://en.google.com/blah/*/foo",
"https://en.google.com/blah/*/foo",
False),
("https://en.google.com/blah/*/foo",
"https://en.google.com/blah/*/foo",
True),
("https://en.google.com/blah/*/foo",
"ftp://en.google.com/blah/*/foo",
False),
# subdomains
("https://en.google.com/blah/*/foo",
"https://fr.google.com/blah/*/foo",
False),
("https://www.google.com/blah/*/foo",
"https://*.google.com/blah/*/foo",
False),
("https://*.google.com/blah/*/foo",
"https://*.google.com/blah/*/foo",
True),
# domains
("http://en.example.com/blah/*/foo",
"http://en.google.com/blah/*/foo",
False),
# ports
("http://en.google.com:8000/blah/*/foo",
"http://en.google.com/blah/*/foo",
False),
("http://fr.google.com:8000/blah/*/foo",
"http://fr.google.com:8000/blah/*/foo",
True),
("http://en.google.com:8000/blah/*/foo",
"http://en.google.com:8080/blah/*/foo",
False),
# paths
("http://en.google.com/blah/*/foo",
"http://en.google.com/blah/*",
False),
("http://en.google.com/*",
"http://en.google.com/",
False),
("http://en.google.com/*",
"http://en.google.com/*",
True),
# all_urls
("<all_urls>",
"<all_urls>",
True),
("<all_urls>",
"http://*/*",
False)
])
def test_equal(text1, text2, equal):
pat1 = urlmatch.UrlPattern(text1)
pat2 = urlmatch.UrlPattern(text2)
assert (pat1 == pat2) == equal
assert (hash(pat1) == hash(pat2)) == equal
def test_equal_string():
assert urlmatch.UrlPattern("<all_urls>") != '<all_urls>'
def test_repr():
pat = urlmatch.UrlPattern('https://www.example.com/')
expected = ("qutebrowser.utils.urlmatch.UrlPattern("
"pattern='https://www.example.com/')")
assert repr(pat) == expected
def test_str():
text = 'https://www.example.com/'
pat = urlmatch.UrlPattern(text)
assert str(pat) == text
|
import chainer
import chainer.functions as F
from chainercv.links import Conv2DBNActiv
from chainercv.links import PickableSequentialChain
from chainercv.links import SEBlock
class ResBlock(PickableSequentialChain):
"""A building block for ResNets.
in --> Bottleneck with residual_conv --> Bottleneck * (n_layer - 1) --> out
Args:
n_layer (int): The number of layers used in the building block.
in_channels (int): The number of channels of the input array.
mid_channels (int): The number of channels of intermediate arrays.
out_channels (int): The number of channels of the output array.
stride (int or tuple of ints): Stride of filter application.
dilate (int or tuple of ints): Dilation factor of filter applications.
:obj:`dilate=d` and :obj:`dilate=(d, d)` are equivalent.
groups (int): The number of groups to use grouped convolution in the
second layer of each bottleneck. The default is one, where
grouped convolution is not used.
initialW (callable): Initial weight value used in
the convolutional layers.
bn_kwargs (dict): Keyword arguments passed to initialize
:class:`chainer.links.BatchNormalization`.
stride_first (bool): This determines the behavior of the
bottleneck with a shortcut. If :obj:`True`, apply strided
convolution with the first convolution layer.
Otherwise, apply strided convolution with the
second convolution layer.
add_seblock (bool): If :obj:`True`, apply a squeeze-and-excitation
block to each residual block.
"""
def __init__(self, n_layer, in_channels, mid_channels,
out_channels, stride, dilate=1, groups=1, initialW=None,
bn_kwargs={}, stride_first=False, add_seblock=False):
super(ResBlock, self).__init__()
# Dilate option is applied to all bottlenecks.
with self.init_scope():
self.a = Bottleneck(
in_channels, mid_channels, out_channels, stride, dilate,
groups, initialW, bn_kwargs=bn_kwargs, residual_conv=True,
stride_first=stride_first, add_seblock=add_seblock)
for i in range(n_layer - 1):
name = 'b{}'.format(i + 1)
bottleneck = Bottleneck(
out_channels, mid_channels, out_channels, stride=1,
dilate=dilate, initialW=initialW, bn_kwargs=bn_kwargs,
residual_conv=False, add_seblock=add_seblock,
groups=groups)
setattr(self, name, bottleneck)
class Bottleneck(chainer.Chain):
"""A bottleneck layer.
Args:
in_channels (int): The number of channels of the input array.
mid_channels (int): The number of channels of intermediate arrays.
out_channels (int): The number of channels of the output array.
stride (int or tuple of ints): Stride of filter application.
dilate (int or tuple of ints): Dilation factor of filter applications.
:obj:`dilate=d` and :obj:`dilate=(d, d)` are equivalent.
groups (int): The number of groups to use grouped convolution in the
second layer. The default is one, where grouped convolution is
not used.
initialW (callable): Initial weight value used in
the convolutional layers.
bn_kwargs (dict): Keyword arguments passed to initialize
:class:`chainer.links.BatchNormalization`.
residual_conv (bool): If :obj:`True`, apply a 1x1 convolution
to the residual.
stride_first (bool): If :obj:`True`, apply strided convolution
with the first convolution layer. Otherwise, apply
strided convolution with the second convolution layer.
add_seblock (bool): If :obj:`True`, apply a squeeze-and-excitation
block to each residual block.
"""
def __init__(self, in_channels, mid_channels, out_channels,
stride=1, dilate=1, groups=1, initialW=None, bn_kwargs={},
residual_conv=False, stride_first=False, add_seblock=False):
if stride_first:
first_stride = stride
second_stride = 1
else:
first_stride = 1
second_stride = stride
super(Bottleneck, self).__init__()
with self.init_scope():
self.conv1 = Conv2DBNActiv(in_channels, mid_channels,
1, first_stride, 0,
nobias=True, initialW=initialW,
bn_kwargs=bn_kwargs)
            # The padding equals the dilation so the 3x3 conv keeps the spatial size.
self.conv2 = Conv2DBNActiv(mid_channels, mid_channels,
3, second_stride, dilate, dilate,
groups, nobias=True, initialW=initialW,
bn_kwargs=bn_kwargs)
self.conv3 = Conv2DBNActiv(mid_channels, out_channels, 1, 1, 0,
nobias=True, initialW=initialW,
activ=None, bn_kwargs=bn_kwargs)
if add_seblock:
self.se = SEBlock(out_channels)
if residual_conv:
self.residual_conv = Conv2DBNActiv(
in_channels, out_channels, 1, stride, 0,
nobias=True, initialW=initialW,
activ=None, bn_kwargs=bn_kwargs)
def forward(self, x):
h = self.conv1(x)
h = self.conv2(h)
h = self.conv3(h)
if hasattr(self, 'se'):
h = self.se(h)
if hasattr(self, 'residual_conv'):
residual = self.residual_conv(x)
else:
residual = x
h += residual
h = F.relu(h)
return h
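# Illustrative sketch (not part of chainercv): build a small ResBlock and run a
# forward pass. The layer sizes and the input shape below are assumptions chosen
# only to show the expected tensor shapes.
def _example_resblock():
    import numpy as np
    block = ResBlock(n_layer=2, in_channels=64, mid_channels=64,
                     out_channels=256, stride=1)
    x = np.zeros((1, 64, 32, 32), dtype=np.float32)
    with chainer.using_config('train', False):
        y = block(x)
    # Two stride-1 bottlenecks keep the spatial size and expand the channels.
    assert y.shape == (1, 256, 32, 32)
    return y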
|
from collections import defaultdict
import logging
import time
from typing import Generator, Tuple, Iterable, Dict, List, Union
from dedupe._typing import Record, RecordID, Data
import dedupe.predicates
logger = logging.getLogger(__name__)
Docs = Union[Iterable[str], Iterable[Iterable[str]]]
def index_list():
return defaultdict(list)
class Fingerprinter(object):
'''Takes in a record and returns all blocks that record belongs to'''
def __init__(self, predicates: List[dedupe.predicates.Predicate]) -> None:
self.predicates = predicates
self.index_fields: Dict[str,
Dict[str,
List[dedupe.predicates.IndexPredicate]]]
self.index_fields = defaultdict(index_list)
'''
A dictionary of all the fingerprinter methods that use an
index of data field values. The keys are the field names,
which can be useful to know for indexing the data.
'''
self.index_predicates = []
for full_predicate in predicates:
for predicate in full_predicate:
if hasattr(predicate, 'index'):
self.index_fields[predicate.field][predicate.type].append(
predicate)
self.index_predicates.append(predicate)
def __call__(self,
records: Iterable[Record],
target: bool = False) -> Generator[Tuple[str, RecordID], None, None]:
'''
Generate the predicates for records. Yields tuples of (predicate,
record_id).
Args:
records: A sequence of tuples of (record_id,
record_dict). Can often be created by
`data_dict.items()`.
target: Indicates whether the data should be treated as
                    the target data. This affects the behavior of
                    search predicates. If `target` is set to
                    `True`, a search predicate will return the
                    value itself. If `target` is set to `False`, the
search predicate will return all possible
values within the specified search distance.
Let's say we have a
`LevenshteinSearchPredicate` with an associated
distance of `1` on a `"name"` field; and we
have a record like `{"name": "thomas"}`. If the
`target` is set to `True` then the predicate
will return `"thomas"`. If `target` is set to
`False`, then the blocker could return
`"thomas"`, `"tomas"`, and `"thoms"`. By using
the `target` argument on one of your datasets,
you will dramatically reduce the total number
of comparisons without a loss of accuracy.
.. code:: python
> data = [(1, {'name' : 'bob'}), (2, {'name' : 'suzanne'})]
> blocked_ids = deduper.fingerprinter(data)
> print list(blocked_ids)
[('foo:1', 1), ..., ('bar:1', 100)]
'''
start_time = time.perf_counter()
predicates = [(':' + str(i), predicate)
for i, predicate
in enumerate(self.predicates)]
for i, record in enumerate(records):
record_id, instance = record
for pred_id, predicate in predicates:
block_keys = predicate(instance, target=target)
for block_key in block_keys:
yield block_key + pred_id, record_id
if i and i % 10000 == 0:
                logger.info('%(iteration)d, %(elapsed).2f seconds',
{'iteration': i,
'elapsed': time.perf_counter() - start_time})
def reset_indices(self) -> None:
'''
        Fingerprinter indices can take up a lot of memory. If you are
        done with blocking, this method will reset the indices to free up
        that memory. If you need to block again, the data will need to be re-indexed.
'''
for predicate in self.index_predicates:
predicate.reset()
def index(self,
docs: Docs,
field: str) -> None:
'''
Add docs to the indices used by fingerprinters.
Some fingerprinter methods depend upon having an index of
values that a field may have in the data. This method adds
those values to the index. If you don't have any fingerprinter
methods that use an index, this method will do nothing.
Args:
docs: an iterator of values from your data to index. While
not required, it is recommended that docs be a unique
set of of those values. Indexing can be an expensive
operation.
field: fieldname or key associated with the values you are
indexing
'''
indices = extractIndices(self.index_fields[field])
for doc in docs:
if doc:
for _, index, preprocess in indices:
index.index(preprocess(doc))
for index_type, index, _ in indices:
index.initSearch()
for predicate in self.index_fields[field][index_type]:
logger.debug("Canopy: %s", str(predicate))
predicate.index = index
predicate.bust_cache()
def unindex(self, docs: Docs, field: str) -> None:
'''Remove docs from indices used by fingerprinters
Args:
docs: an iterator of values from your data to remove. While
not required, it is recommended that docs be a unique
                set of those values. Indexing can be an expensive
operation.
field: fieldname or key associated with the values you are
unindexing
'''
indices = extractIndices(self.index_fields[field])
for doc in docs:
if doc:
for _, index, preprocess in indices:
try:
index.unindex(preprocess(doc))
except KeyError:
pass
for index_type, index, _ in indices:
index.initSearch()
for predicate in self.index_fields[field][index_type]:
logger.debug("Canopy: %s", str(predicate))
predicate.index = index
predicate.bust_cache()
def index_all(self, data: Data):
for field in self.index_fields:
unique_fields = {record[field]
for record
in data.values()
if record[field]}
self.index(unique_fields, field)
def extractIndices(index_fields):
indices = []
for index_type, predicates in index_fields.items():
predicate = predicates[0]
index = predicate.index
preprocess = predicate.preprocess
if predicate.index is None:
index = predicate.initIndex()
indices.append((index_type, index, preprocess))
return indices
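# Illustrative sketch (not part of this module): fingerprint a couple of records
# with a single whole-field predicate. The field name and the records are made
# up; only predicates with an index would additionally require calling index().
def _example_fingerprint():
    predicate = dedupe.predicates.SimplePredicate(
        dedupe.predicates.wholeFieldPredicate, 'name')
    fingerprinter = Fingerprinter([predicate])
    data = {1: {'name': 'bob'}, 2: {'name': 'suzanne'}}
    # Yields (block_key, record_id) pairs, e.g. ('bob:0', 1).
    return list(fingerprinter(data.items()))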
|
from datetime import timedelta
import logging
import requests
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import HTTP_OK
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
CONF_LOCATIONS = "locations"
SCAN_INTERVAL = timedelta(minutes=30)
AUTHORITIES = [
"Barking and Dagenham",
"Bexley",
"Brent",
"Camden",
"City of London",
"Croydon",
"Ealing",
"Enfield",
"Greenwich",
"Hackney",
"Haringey",
"Harrow",
"Havering",
"Hillingdon",
"Islington",
"Kensington and Chelsea",
"Kingston",
"Lambeth",
"Lewisham",
"Merton",
"Redbridge",
"Richmond",
"Southwark",
"Sutton",
"Tower Hamlets",
"Wandsworth",
"Westminster",
]
URL = (
"http://api.erg.kcl.ac.uk/AirQuality/Hourly/"
"MonitoringIndex/GroupName=London/Json"
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_LOCATIONS, default=AUTHORITIES): vol.All(
cv.ensure_list, [vol.In(AUTHORITIES)]
)
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the London Air sensor."""
data = APIData()
data.update()
sensors = []
for name in config.get(CONF_LOCATIONS):
sensors.append(AirSensor(name, data))
add_entities(sensors, True)
class APIData:
"""Get the latest data for all authorities."""
def __init__(self):
"""Initialize the AirData object."""
self.data = None
# Update only once in scan interval.
@Throttle(SCAN_INTERVAL)
def update(self):
"""Get the latest data from TFL."""
response = requests.get(URL, timeout=10)
if response.status_code != HTTP_OK:
_LOGGER.warning("Invalid response from API")
else:
self.data = parse_api_response(response.json())
class AirSensor(Entity):
"""Single authority air sensor."""
ICON = "mdi:cloud-outline"
def __init__(self, name, APIdata):
"""Initialize the sensor."""
self._name = name
self._api_data = APIdata
self._site_data = None
self._state = None
self._updated = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def site_data(self):
"""Return the dict of sites data."""
return self._site_data
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return self.ICON
@property
def device_state_attributes(self):
"""Return other details about the sensor state."""
attrs = {}
attrs["updated"] = self._updated
attrs["sites"] = len(self._site_data) if self._site_data is not None else 0
attrs["data"] = self._site_data
return attrs
def update(self):
"""Update the sensor."""
sites_status = []
self._api_data.update()
if self._api_data.data:
self._site_data = self._api_data.data[self._name]
self._updated = self._site_data[0]["updated"]
for site in self._site_data:
if site["pollutants_status"] != "no_species_data":
sites_status.append(site["pollutants_status"])
if sites_status:
self._state = max(set(sites_status), key=sites_status.count)
else:
self._state = None
def parse_species(species_data):
"""Iterate over list of species at each site."""
parsed_species_data = []
quality_list = []
for species in species_data:
if species["@AirQualityBand"] != "No data":
species_dict = {}
species_dict["description"] = species["@SpeciesDescription"]
species_dict["code"] = species["@SpeciesCode"]
species_dict["quality"] = species["@AirQualityBand"]
species_dict["index"] = species["@AirQualityIndex"]
species_dict[
"summary"
] = f"{species_dict['code']} is {species_dict['quality']}"
parsed_species_data.append(species_dict)
quality_list.append(species_dict["quality"])
return parsed_species_data, quality_list
def parse_site(entry_sites_data):
"""Iterate over all sites at an authority."""
authority_data = []
for site in entry_sites_data:
site_data = {}
species_data = []
site_data["updated"] = site["@BulletinDate"]
site_data["latitude"] = site["@Latitude"]
site_data["longitude"] = site["@Longitude"]
site_data["site_code"] = site["@SiteCode"]
site_data["site_name"] = site["@SiteName"].split("-")[-1].lstrip()
site_data["site_type"] = site["@SiteType"]
if isinstance(site["Species"], dict):
species_data = [site["Species"]]
else:
species_data = site["Species"]
parsed_species_data, quality_list = parse_species(species_data)
if not parsed_species_data:
parsed_species_data.append("no_species_data")
site_data["pollutants"] = parsed_species_data
if quality_list:
site_data["pollutants_status"] = max(
set(quality_list), key=quality_list.count
)
site_data["number_of_pollutants"] = len(quality_list)
else:
site_data["pollutants_status"] = "no_species_data"
site_data["number_of_pollutants"] = 0
authority_data.append(site_data)
return authority_data
def parse_api_response(response):
"""Parse return dict or list of data from API."""
data = dict.fromkeys(AUTHORITIES)
for authority in AUTHORITIES:
for entry in response["HourlyAirQualityIndex"]["LocalAuthority"]:
if entry["@LocalAuthorityName"] == authority:
if isinstance(entry["Site"], dict):
entry_sites_data = [entry["Site"]]
else:
entry_sites_data = entry["Site"]
data[authority] = parse_site(entry_sites_data)
return data
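# Illustrative sketch (not part of the integration): feed a hand-built response
# dict to parse_api_response. The key names mirror those accessed above; all of
# the values are made up.
def _example_parse_api_response():
    response = {
        "HourlyAirQualityIndex": {
            "LocalAuthority": [
                {
                    "@LocalAuthorityName": "Camden",
                    "Site": {
                        "@BulletinDate": "2020-01-01 12:00:00",
                        "@Latitude": "51.54",
                        "@Longitude": "-0.14",
                        "@SiteCode": "CD1",
                        "@SiteName": "Camden - Swiss Cottage",
                        "@SiteType": "Kerbside",
                        "Species": {
                            "@SpeciesCode": "NO2",
                            "@SpeciesDescription": "Nitrogen Dioxide",
                            "@AirQualityIndex": "2",
                            "@AirQualityBand": "Low",
                        },
                    },
                }
            ]
        }
    }
    data = parse_api_response(response)
    # Only "Camden" is populated; every other authority maps to None.
    assert data["Camden"][0]["pollutants_status"] == "Low"
    return data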
|
import tempfile
from django.test.utils import override_settings
from weblate.utils.files import remove_tree
# Lowercase name to be consistent with Django
# pylint: disable=invalid-name
class tempdir_setting(override_settings): # noqa
def __init__(self, setting):
kwargs = {setting: None}
super().__init__(**kwargs)
self._tempdir = None
self._setting = setting
def enable(self):
self._tempdir = tempfile.mkdtemp()
self.options[self._setting] = self._tempdir
super().enable()
def disable(self):
super().disable()
if self._tempdir is not None:
remove_tree(self._tempdir)
self._tempdir = None
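# Illustrative sketch (not part of the module): point a filesystem setting at a
# throw-away directory for the duration of a block. "DATA_DIR" is only an
# example setting name.
def _example_tempdir_setting():
    with tempdir_setting("DATA_DIR"):
        # Inside the block the setting holds a fresh temporary directory;
        # it is removed again when the block exits.
        pass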
|
import logging
import voluptuous as vol
from homeassistant.components import mqtt
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_COLOR_TEMP,
ATTR_EFFECT,
ATTR_FLASH,
ATTR_HS_COLOR,
ATTR_TRANSITION,
ATTR_WHITE_VALUE,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_COLOR_TEMP,
SUPPORT_EFFECT,
SUPPORT_FLASH,
SUPPORT_TRANSITION,
SUPPORT_WHITE_VALUE,
LightEntity,
)
from homeassistant.components.mqtt import (
CONF_COMMAND_TOPIC,
CONF_QOS,
CONF_RETAIN,
CONF_STATE_TOPIC,
MqttAttributes,
MqttAvailability,
MqttDiscoveryUpdate,
MqttEntityDeviceInfo,
subscription,
)
from homeassistant.const import (
CONF_DEVICE,
CONF_NAME,
CONF_OPTIMISTIC,
CONF_UNIQUE_ID,
STATE_OFF,
STATE_ON,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.restore_state import RestoreEntity
import homeassistant.util.color as color_util
from ..debug_info import log_messages
from .schema import MQTT_LIGHT_SCHEMA_SCHEMA
_LOGGER = logging.getLogger(__name__)
DOMAIN = "mqtt_template"
DEFAULT_NAME = "MQTT Template Light"
DEFAULT_OPTIMISTIC = False
CONF_BLUE_TEMPLATE = "blue_template"
CONF_BRIGHTNESS_TEMPLATE = "brightness_template"
CONF_COLOR_TEMP_TEMPLATE = "color_temp_template"
CONF_COMMAND_OFF_TEMPLATE = "command_off_template"
CONF_COMMAND_ON_TEMPLATE = "command_on_template"
CONF_EFFECT_LIST = "effect_list"
CONF_EFFECT_TEMPLATE = "effect_template"
CONF_GREEN_TEMPLATE = "green_template"
CONF_MAX_MIREDS = "max_mireds"
CONF_MIN_MIREDS = "min_mireds"
CONF_RED_TEMPLATE = "red_template"
CONF_STATE_TEMPLATE = "state_template"
CONF_WHITE_VALUE_TEMPLATE = "white_value_template"
PLATFORM_SCHEMA_TEMPLATE = (
mqtt.MQTT_RW_PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_BLUE_TEMPLATE): cv.template,
vol.Optional(CONF_BRIGHTNESS_TEMPLATE): cv.template,
vol.Optional(CONF_COLOR_TEMP_TEMPLATE): cv.template,
vol.Required(CONF_COMMAND_OFF_TEMPLATE): cv.template,
vol.Required(CONF_COMMAND_ON_TEMPLATE): cv.template,
vol.Optional(CONF_DEVICE): mqtt.MQTT_ENTITY_DEVICE_INFO_SCHEMA,
vol.Optional(CONF_EFFECT_LIST): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_EFFECT_TEMPLATE): cv.template,
vol.Optional(CONF_GREEN_TEMPLATE): cv.template,
vol.Optional(CONF_MAX_MIREDS): cv.positive_int,
vol.Optional(CONF_MIN_MIREDS): cv.positive_int,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
vol.Optional(CONF_RED_TEMPLATE): cv.template,
vol.Optional(CONF_STATE_TEMPLATE): cv.template,
vol.Optional(CONF_UNIQUE_ID): cv.string,
vol.Optional(CONF_WHITE_VALUE_TEMPLATE): cv.template,
}
)
.extend(mqtt.MQTT_AVAILABILITY_SCHEMA.schema)
.extend(mqtt.MQTT_JSON_ATTRS_SCHEMA.schema)
.extend(MQTT_LIGHT_SCHEMA_SCHEMA.schema)
)
async def async_setup_entity_template(
hass, config, async_add_entities, config_entry, discovery_data
):
"""Set up a MQTT Template light."""
async_add_entities([MqttLightTemplate(config, config_entry, discovery_data)])
class MqttLightTemplate(
MqttAttributes,
MqttAvailability,
MqttDiscoveryUpdate,
MqttEntityDeviceInfo,
LightEntity,
RestoreEntity,
):
"""Representation of a MQTT Template light."""
def __init__(self, config, config_entry, discovery_data):
"""Initialize a MQTT Template light."""
self._state = False
self._sub_state = None
self._topics = None
self._templates = None
self._optimistic = False
# features
self._brightness = None
self._color_temp = None
self._white_value = None
self._hs = None
self._effect = None
self._unique_id = config.get(CONF_UNIQUE_ID)
# Load config
self._setup_from_config(config)
device_config = config.get(CONF_DEVICE)
MqttAttributes.__init__(self, config)
MqttAvailability.__init__(self, config)
MqttDiscoveryUpdate.__init__(self, discovery_data, self.discovery_update)
MqttEntityDeviceInfo.__init__(self, device_config, config_entry)
async def async_added_to_hass(self):
"""Subscribe to MQTT events."""
await super().async_added_to_hass()
await self._subscribe_topics()
async def discovery_update(self, discovery_payload):
"""Handle updated discovery message."""
config = PLATFORM_SCHEMA_TEMPLATE(discovery_payload)
self._setup_from_config(config)
await self.attributes_discovery_update(config)
await self.availability_discovery_update(config)
await self.device_info_discovery_update(config)
await self._subscribe_topics()
self.async_write_ha_state()
def _setup_from_config(self, config):
"""(Re)Setup the entity."""
self._config = config
self._topics = {
key: config.get(key) for key in (CONF_STATE_TOPIC, CONF_COMMAND_TOPIC)
}
self._templates = {
key: config.get(key)
for key in (
CONF_BLUE_TEMPLATE,
CONF_BRIGHTNESS_TEMPLATE,
CONF_COLOR_TEMP_TEMPLATE,
CONF_COMMAND_OFF_TEMPLATE,
CONF_COMMAND_ON_TEMPLATE,
CONF_EFFECT_TEMPLATE,
CONF_GREEN_TEMPLATE,
CONF_RED_TEMPLATE,
CONF_STATE_TEMPLATE,
CONF_WHITE_VALUE_TEMPLATE,
)
}
optimistic = config[CONF_OPTIMISTIC]
self._optimistic = (
optimistic
or self._topics[CONF_STATE_TOPIC] is None
or self._templates[CONF_STATE_TEMPLATE] is None
)
async def _subscribe_topics(self):
"""(Re)Subscribe to topics."""
for tpl in self._templates.values():
if tpl is not None:
tpl.hass = self.hass
last_state = await self.async_get_last_state()
@callback
@log_messages(self.hass, self.entity_id)
def state_received(msg):
"""Handle new MQTT messages."""
state = self._templates[
CONF_STATE_TEMPLATE
].async_render_with_possible_json_value(msg.payload)
if state == STATE_ON:
self._state = True
elif state == STATE_OFF:
self._state = False
else:
_LOGGER.warning("Invalid state value received")
if self._templates[CONF_BRIGHTNESS_TEMPLATE] is not None:
try:
self._brightness = int(
self._templates[
CONF_BRIGHTNESS_TEMPLATE
].async_render_with_possible_json_value(msg.payload)
)
except ValueError:
_LOGGER.warning("Invalid brightness value received")
if self._templates[CONF_COLOR_TEMP_TEMPLATE] is not None:
try:
self._color_temp = int(
self._templates[
CONF_COLOR_TEMP_TEMPLATE
].async_render_with_possible_json_value(msg.payload)
)
except ValueError:
_LOGGER.warning("Invalid color temperature value received")
if (
self._templates[CONF_RED_TEMPLATE] is not None
and self._templates[CONF_GREEN_TEMPLATE] is not None
and self._templates[CONF_BLUE_TEMPLATE] is not None
):
try:
red = int(
self._templates[
CONF_RED_TEMPLATE
].async_render_with_possible_json_value(msg.payload)
)
green = int(
self._templates[
CONF_GREEN_TEMPLATE
].async_render_with_possible_json_value(msg.payload)
)
blue = int(
self._templates[
CONF_BLUE_TEMPLATE
].async_render_with_possible_json_value(msg.payload)
)
self._hs = color_util.color_RGB_to_hs(red, green, blue)
except ValueError:
_LOGGER.warning("Invalid color value received")
if self._templates[CONF_WHITE_VALUE_TEMPLATE] is not None:
try:
self._white_value = int(
self._templates[
CONF_WHITE_VALUE_TEMPLATE
].async_render_with_possible_json_value(msg.payload)
)
except ValueError:
_LOGGER.warning("Invalid white value received")
if self._templates[CONF_EFFECT_TEMPLATE] is not None:
effect = self._templates[
CONF_EFFECT_TEMPLATE
].async_render_with_possible_json_value(msg.payload)
if effect in self._config.get(CONF_EFFECT_LIST):
self._effect = effect
else:
_LOGGER.warning("Unsupported effect value received")
self.async_write_ha_state()
if self._topics[CONF_STATE_TOPIC] is not None:
self._sub_state = await subscription.async_subscribe_topics(
self.hass,
self._sub_state,
{
"state_topic": {
"topic": self._topics[CONF_STATE_TOPIC],
"msg_callback": state_received,
"qos": self._config[CONF_QOS],
}
},
)
if self._optimistic and last_state:
self._state = last_state.state == STATE_ON
if last_state.attributes.get(ATTR_BRIGHTNESS):
self._brightness = last_state.attributes.get(ATTR_BRIGHTNESS)
if last_state.attributes.get(ATTR_HS_COLOR):
self._hs = last_state.attributes.get(ATTR_HS_COLOR)
if last_state.attributes.get(ATTR_COLOR_TEMP):
self._color_temp = last_state.attributes.get(ATTR_COLOR_TEMP)
if last_state.attributes.get(ATTR_EFFECT):
self._effect = last_state.attributes.get(ATTR_EFFECT)
if last_state.attributes.get(ATTR_WHITE_VALUE):
self._white_value = last_state.attributes.get(ATTR_WHITE_VALUE)
async def async_will_remove_from_hass(self):
"""Unsubscribe when removed."""
self._sub_state = await subscription.async_unsubscribe_topics(
self.hass, self._sub_state
)
await MqttAttributes.async_will_remove_from_hass(self)
await MqttAvailability.async_will_remove_from_hass(self)
await MqttDiscoveryUpdate.async_will_remove_from_hass(self)
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
return self._brightness
@property
def color_temp(self):
"""Return the color temperature in mired."""
return self._color_temp
@property
def min_mireds(self):
"""Return the coldest color_temp that this light supports."""
return self._config.get(CONF_MIN_MIREDS, super().min_mireds)
@property
def max_mireds(self):
"""Return the warmest color_temp that this light supports."""
return self._config.get(CONF_MAX_MIREDS, super().max_mireds)
@property
def hs_color(self):
"""Return the hs color value [int, int]."""
return self._hs
@property
def white_value(self):
"""Return the white property."""
return self._white_value
@property
def should_poll(self):
"""Return True if entity has to be polled for state.
False if entity pushes its state to HA.
"""
return False
@property
def name(self):
"""Return the name of the entity."""
return self._config[CONF_NAME]
@property
def unique_id(self):
"""Return a unique ID."""
return self._unique_id
@property
def is_on(self):
"""Return True if entity is on."""
return self._state
@property
def assumed_state(self):
"""Return True if unable to access real state of the entity."""
return self._optimistic
@property
def effect_list(self):
"""Return the list of supported effects."""
return self._config.get(CONF_EFFECT_LIST)
@property
def effect(self):
"""Return the current effect."""
return self._effect
async def async_turn_on(self, **kwargs):
"""Turn the entity on.
This method is a coroutine.
"""
values = {"state": True}
if self._optimistic:
self._state = True
if ATTR_BRIGHTNESS in kwargs:
values["brightness"] = int(kwargs[ATTR_BRIGHTNESS])
if self._optimistic:
self._brightness = kwargs[ATTR_BRIGHTNESS]
if ATTR_COLOR_TEMP in kwargs:
values["color_temp"] = int(kwargs[ATTR_COLOR_TEMP])
if self._optimistic:
self._color_temp = kwargs[ATTR_COLOR_TEMP]
if ATTR_HS_COLOR in kwargs:
hs_color = kwargs[ATTR_HS_COLOR]
            # If a brightness template is set, we don't want to scale the
            # RGB values by the requested brightness.
if self._templates[CONF_BRIGHTNESS_TEMPLATE] is not None:
brightness = 255
else:
brightness = kwargs.get(
ATTR_BRIGHTNESS,
self._brightness if self._brightness is not None else 255,
)
rgb = color_util.color_hsv_to_RGB(
hs_color[0], hs_color[1], brightness / 255 * 100
)
values["red"] = rgb[0]
values["green"] = rgb[1]
values["blue"] = rgb[2]
if self._optimistic:
self._hs = kwargs[ATTR_HS_COLOR]
if ATTR_WHITE_VALUE in kwargs:
values["white_value"] = int(kwargs[ATTR_WHITE_VALUE])
if self._optimistic:
self._white_value = kwargs[ATTR_WHITE_VALUE]
if ATTR_EFFECT in kwargs:
values["effect"] = kwargs.get(ATTR_EFFECT)
if self._optimistic:
self._effect = kwargs[ATTR_EFFECT]
if ATTR_FLASH in kwargs:
values["flash"] = kwargs.get(ATTR_FLASH)
if ATTR_TRANSITION in kwargs:
values["transition"] = int(kwargs[ATTR_TRANSITION])
mqtt.async_publish(
self.hass,
self._topics[CONF_COMMAND_TOPIC],
self._templates[CONF_COMMAND_ON_TEMPLATE].async_render(
parse_result=False, **values
),
self._config[CONF_QOS],
self._config[CONF_RETAIN],
)
if self._optimistic:
self.async_write_ha_state()
async def async_turn_off(self, **kwargs):
"""Turn the entity off.
This method is a coroutine.
"""
values = {"state": False}
if self._optimistic:
self._state = False
if ATTR_TRANSITION in kwargs:
values["transition"] = int(kwargs[ATTR_TRANSITION])
mqtt.async_publish(
self.hass,
self._topics[CONF_COMMAND_TOPIC],
self._templates[CONF_COMMAND_OFF_TEMPLATE].async_render(
parse_result=False, **values
),
self._config[CONF_QOS],
self._config[CONF_RETAIN],
)
if self._optimistic:
self.async_write_ha_state()
@property
def supported_features(self):
"""Flag supported features."""
features = SUPPORT_FLASH | SUPPORT_TRANSITION
if self._templates[CONF_BRIGHTNESS_TEMPLATE] is not None:
features = features | SUPPORT_BRIGHTNESS
if (
self._templates[CONF_RED_TEMPLATE] is not None
and self._templates[CONF_GREEN_TEMPLATE] is not None
and self._templates[CONF_BLUE_TEMPLATE] is not None
):
features = features | SUPPORT_COLOR
if self._config.get(CONF_EFFECT_LIST) is not None:
features = features | SUPPORT_EFFECT
if self._templates[CONF_COLOR_TEMP_TEMPLATE] is not None:
features = features | SUPPORT_COLOR_TEMP
if self._templates[CONF_WHITE_VALUE_TEMPLATE] is not None:
features = features | SUPPORT_WHITE_VALUE
return features
|
from datetime import timedelta
from django.utils import timezone
from shop.conf import app_settings
import factory.fuzzy
import pytest
from pytest_factoryboy import register
from conftest import CommodityFactory
from testshop.models import MyProduct, MyProductInventory
class MyProductFactory(CommodityFactory):
class Meta:
model = MyProduct
@register
class InventoryFactory(factory.django.DjangoModelFactory):
class Meta:
model = MyProductInventory
product = factory.SubFactory(MyProductFactory)
datetime_min = timezone.datetime.min.replace(tzinfo=timezone.get_current_timezone())
datetime_max = timezone.datetime.max.replace(tzinfo=timezone.get_current_timezone())
@pytest.mark.django_db
def test_availability(api_rf, inventory_factory):
request = api_rf.get('/add-to-cart')
now = timezone.now()
earliest = now - timedelta(days=1)
inventory = inventory_factory(earliest=earliest, quantity=10)
availability = inventory.product.get_availability(request)
assert availability.quantity == 10
assert availability.earliest == earliest
assert availability.latest == datetime_max
assert availability.sell_short is False
assert availability.limited_offer is False
@pytest.mark.django_db
def test_sell_short(api_rf, inventory_factory):
request = api_rf.get('/add-to-cart')
now = timezone.now()
earliest = now + app_settings.SHOP_SELL_SHORT_PERIOD / 2
inventory = inventory_factory(earliest=earliest, quantity=10)
availability = inventory.product.get_availability(request)
assert availability.quantity == 10
assert availability.earliest == earliest
assert availability.latest == datetime_max
assert availability.sell_short is True
assert availability.limited_offer is False
@pytest.mark.django_db
def test_limited_offer(api_rf, inventory_factory):
request = api_rf.get('/add-to-cart')
now = timezone.now()
earliest = now
latest = now + app_settings.SHOP_LIMITED_OFFER_PERIOD / 2
inventory = inventory_factory(earliest=earliest, latest=latest, quantity=10)
availability = inventory.product.get_availability(request)
assert availability.quantity == 10
assert availability.earliest == earliest
assert availability.latest == latest
assert availability.sell_short is False
assert availability.limited_offer is True
|
import logging
from perfkitbenchmarker import errors
from perfkitbenchmarker.providers import profitbricks
from perfkitbenchmarker.providers.profitbricks import \
profitbricks_machine_types
import requests
# Global Values
PROFITBRICKS_API = profitbricks.PROFITBRICKS_API
FLAVORS = profitbricks_machine_types.FLAVORS
def PerformRequest(action, url, header, json=None):
"""Makes an HTTP request to the ProfitBricks REST API."""
# Make HTTP call
if action == 'get':
r = requests.get(url, headers=header)
elif action == 'post':
r = requests.post(url, headers=header, json=json)
elif action == 'delete':
r = requests.delete(url, headers=header)
# Check Response Status Code
if r.status_code >= 300:
action = action.upper()
logging.info(r.text)
raise errors.Error('%s call to %s failed, see log.' % (action,
url))
return r
def ReturnImage(header, location):
"""Returns Ubuntu image based on zone location."""
# Retrieve list of provider images
url = '%s/images?depth=5' % PROFITBRICKS_API
r = PerformRequest('get', url, header)
response = r.json()
logging.info('Fetching image for new VM.')
# Search for Ubuntu image in preferred zone
for image in response['items']:
if('Ubuntu-14' in image['properties']['name'] and
image['properties']['location'] == location):
return image['id']
def ReturnFlavor(machine_type):
"""Returns RAM and Core values based on machine_type selection."""
logging.info('Fetching flavor specs for new VM.')
for flavor in FLAVORS:
if(machine_type == flavor['name']):
return flavor['ram'], flavor['cores']
def CreateDatacenter(header, location):
"""Creates a Datacenter."""
# Build new DC body
new_dc = {
'properties': {
'name': 'Perfkit DC',
'location': location,
},
}
# Make call
logging.info('Creating Datacenter: %s in Location: %s' %
(new_dc['properties']['name'], location))
url = '%s/datacenters' % PROFITBRICKS_API
r = PerformRequest('post', url, header, json=new_dc)
# Parse Required values from response
status_url = r.headers['Location']
response = r.json()
datacenter_id = response['id']
return datacenter_id, status_url
def CreateLan(header, datacenter):
"""Creates a LAN with public IP address."""
# Build new LAN body
new_lan = {
'properties': {
'name': 'lan1',
'public': True,
},
}
# Make call
logging.info('Creating LAN')
url = '%s/datacenters/%s/lans' % (PROFITBRICKS_API, datacenter)
r = PerformRequest('post', url, header, json=new_lan)
# Parse Required values from response
status_url = r.headers['Location']
response = r.json()
lan_id = response['id']
return lan_id, status_url
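# Illustrative sketch (not part of the provider module): build a basic-auth
# header and create a datacenter. The credentials and location are placeholders,
# and the exact header layout is an assumption; running this issues real API calls.
def _example_create_datacenter():
    import base64
    credentials = base64.b64encode(b'user@example.com:password').decode()
    header = {
        'Authorization': 'Basic %s' % credentials,
        'Content-Type': 'application/json',
    }
    return CreateDatacenter(header, 'us/las')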
|
import pytest
import numpy as np
from numpy.testing import assert_array_equal
from mne.stats import combine_adjacency
from mne.utils import requires_sklearn
@requires_sklearn
@pytest.mark.parametrize('shape', [
(1,),
(2,),
(1, 1),
(1, 2),
(2, 1),
(3, 4),
(1, 1, 1),
(1, 1, 2),
(3, 4, 5),
])
def test_adjacency_equiv(shape):
"""Test adjacency equivalence for lattice adjacency."""
from sklearn.feature_extraction import grid_to_graph
# sklearn requires at least two dimensions
sk_shape = shape if len(shape) > 1 else (shape + (1,))
conn_sk = grid_to_graph(*sk_shape).toarray()
conn = combine_adjacency(*shape)
want_shape = (np.prod(shape),) * 2
assert conn.shape == conn_sk.shape == want_shape
assert (conn.data == 1.).all()
conn = conn.toarray()
# we end up with some duplicates that can turn into 2's and 3's,
# eventually we might want to keep these as 1's but it's easy enough
# with a .astype(bool) (also matches sklearn output) so let's leave it
# for now
assert np.in1d(conn, [0, 1, 2, 3]).all()
assert conn.shape == conn_sk.shape
assert_array_equal(conn, conn_sk)
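# Illustrative sketch: combine_adjacency builds the lattice adjacency of an
# n-dimensional grid as a scipy sparse matrix, with one row/column per grid point.
def _example_combine_adjacency():
    adjacency = combine_adjacency(2, 3)  # a 2 x 3 grid has 6 points
    assert adjacency.shape == (6, 6)
    return adjacency.toarray()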
|
from copy import deepcopy
from distutils.version import LooseVersion
import os.path as op
import shutil
from unittest import SkipTest
import numpy as np
from numpy.testing import (assert_array_equal, assert_array_almost_equal,
assert_equal, assert_allclose)
import pytest
from scipy import io
from mne import write_events, read_epochs_eeglab
from mne.io import read_raw_eeglab
from mne.io.tests.test_raw import _test_raw_reader
from mne.datasets import testing
from mne.utils import check_version
from mne.annotations import events_from_annotations, read_annotations
from mne.io.eeglab.tests._utils import _read_eeglab_montage
base_dir = op.join(testing.data_path(download=False), 'EEGLAB')
raw_fname_mat = op.join(base_dir, 'test_raw.set')
raw_fname_onefile_mat = op.join(base_dir, 'test_raw_onefile.set')
raw_fname_event_duration = op.join(base_dir, 'test_raw_event_duration.set')
epochs_fname_mat = op.join(base_dir, 'test_epochs.set')
epochs_fname_onefile_mat = op.join(base_dir, 'test_epochs_onefile.set')
raw_mat_fnames = [raw_fname_mat, raw_fname_onefile_mat]
epochs_mat_fnames = [epochs_fname_mat, epochs_fname_onefile_mat]
raw_fname_chanloc = op.join(base_dir, 'test_raw_chanloc.set')
raw_fname_h5 = op.join(base_dir, 'test_raw_h5.set')
raw_fname_onefile_h5 = op.join(base_dir, 'test_raw_onefile_h5.set')
epochs_fname_h5 = op.join(base_dir, 'test_epochs_h5.set')
epochs_fname_onefile_h5 = op.join(base_dir, 'test_epochs_onefile_h5.set')
raw_h5_fnames = [raw_fname_h5, raw_fname_onefile_h5]
epochs_h5_fnames = [epochs_fname_h5, epochs_fname_onefile_h5]
montage_path = op.join(base_dir, 'test_chans.locs')
needs_h5 = pytest.mark.skipif(not check_version('h5py'), reason='Needs h5py')
@testing.requires_testing_data
@pytest.mark.parametrize('fname', [
raw_fname_mat,
pytest.param(raw_fname_h5, marks=needs_h5),
raw_fname_chanloc,
], ids=op.basename)
def test_io_set_raw(fname):
"""Test importing EEGLAB .set files."""
montage = _read_eeglab_montage(montage_path)
montage.ch_names = [
'EEG {0:03d}'.format(ii) for ii in range(len(montage.ch_names))
]
kws = dict(reader=read_raw_eeglab, input_fname=fname)
if fname.endswith('test_raw_chanloc.set'):
with pytest.warns(RuntimeWarning,
match="The data contains 'boundary' events"):
raw0 = _test_raw_reader(**kws)
elif '_h5' in fname: # should be safe enough, and much faster
raw0 = read_raw_eeglab(fname, preload=True)
else:
raw0 = _test_raw_reader(**kws)
# test that preloading works
if fname.endswith('test_raw_chanloc.set'):
raw0.set_montage(montage, on_missing='ignore')
# crop to check if the data has been properly preloaded; we cannot
# filter as the snippet of raw data is very short
raw0.crop(0, 1)
else:
raw0.set_montage(montage)
raw0.filter(1, None, l_trans_bandwidth='auto', filter_length='auto',
phase='zero')
# test that using uint16_codec does not break stuff
read_raw_kws = dict(input_fname=fname, preload=False, uint16_codec='ascii')
if fname.endswith('test_raw_chanloc.set'):
with pytest.warns(RuntimeWarning,
match="The data contains 'boundary' events"):
raw0 = read_raw_eeglab(**read_raw_kws)
raw0.set_montage(montage, on_missing='ignore')
else:
raw0 = read_raw_eeglab(**read_raw_kws)
raw0.set_montage(montage)
# Annotations
if fname != raw_fname_chanloc:
assert len(raw0.annotations) == 154
assert set(raw0.annotations.description) == {'rt', 'square'}
assert_array_equal(raw0.annotations.duration, 0.)
@testing.requires_testing_data
def test_io_set_raw_more(tmpdir):
"""Test importing EEGLAB .set files."""
tmpdir = str(tmpdir)
eeg = io.loadmat(raw_fname_mat, struct_as_record=False,
squeeze_me=True)['EEG']
# test reading file with one event (read old version)
negative_latency_fname = op.join(tmpdir, 'test_negative_latency.set')
evnts = deepcopy(eeg.event[0])
evnts.latency = 0
io.savemat(negative_latency_fname,
{'EEG': {'trials': eeg.trials, 'srate': eeg.srate,
'nbchan': eeg.nbchan,
'data': 'test_negative_latency.fdt',
'epoch': eeg.epoch, 'event': evnts,
'chanlocs': eeg.chanlocs, 'pnts': eeg.pnts}},
appendmat=False, oned_as='row')
shutil.copyfile(op.join(base_dir, 'test_raw.fdt'),
negative_latency_fname.replace('.set', '.fdt'))
with pytest.warns(RuntimeWarning, match="has a sample index of -1."):
read_raw_eeglab(input_fname=negative_latency_fname, preload=True)
# test negative event latencies
evnts.latency = -1
io.savemat(negative_latency_fname,
{'EEG': {'trials': eeg.trials, 'srate': eeg.srate,
'nbchan': eeg.nbchan,
'data': 'test_negative_latency.fdt',
'epoch': eeg.epoch, 'event': evnts,
'chanlocs': eeg.chanlocs, 'pnts': eeg.pnts}},
appendmat=False, oned_as='row')
with pytest.raises(ValueError, match='event sample index is negative'):
with pytest.warns(RuntimeWarning, match="has a sample index of -1."):
read_raw_eeglab(input_fname=negative_latency_fname, preload=True)
# test overlapping events
overlap_fname = op.join(tmpdir, 'test_overlap_event.set')
io.savemat(overlap_fname,
{'EEG': {'trials': eeg.trials, 'srate': eeg.srate,
'nbchan': eeg.nbchan, 'data': 'test_overlap_event.fdt',
'epoch': eeg.epoch,
'event': [eeg.event[0], eeg.event[0]],
'chanlocs': eeg.chanlocs, 'pnts': eeg.pnts}},
appendmat=False, oned_as='row')
shutil.copyfile(op.join(base_dir, 'test_raw.fdt'),
overlap_fname.replace('.set', '.fdt'))
read_raw_eeglab(input_fname=overlap_fname, preload=True)
# test reading file with empty event durations
empty_dur_fname = op.join(tmpdir, 'test_empty_durations.set')
evnts = deepcopy(eeg.event)
for ev in evnts:
ev.duration = np.array([], dtype='float')
io.savemat(empty_dur_fname,
{'EEG': {'trials': eeg.trials, 'srate': eeg.srate,
'nbchan': eeg.nbchan,
'data': 'test_negative_latency.fdt',
'epoch': eeg.epoch, 'event': evnts,
'chanlocs': eeg.chanlocs, 'pnts': eeg.pnts}},
appendmat=False, oned_as='row')
shutil.copyfile(op.join(base_dir, 'test_raw.fdt'),
empty_dur_fname.replace('.set', '.fdt'))
raw = read_raw_eeglab(input_fname=empty_dur_fname, preload=True)
assert (raw.annotations.duration == 0).all()
# test reading file when the EEG.data name is wrong
io.savemat(overlap_fname,
{'EEG': {'trials': eeg.trials, 'srate': eeg.srate,
'nbchan': eeg.nbchan, 'data': 'test_overla_event.fdt',
'epoch': eeg.epoch,
'event': [eeg.event[0], eeg.event[0]],
'chanlocs': eeg.chanlocs, 'pnts': eeg.pnts}},
appendmat=False, oned_as='row')
with pytest.warns(RuntimeWarning, match="must have changed on disk"):
read_raw_eeglab(input_fname=overlap_fname, preload=True)
# raise error when both EEG.data and fdt name from set are wrong
overlap_fname = op.join(tmpdir, 'test_ovrlap_event.set')
io.savemat(overlap_fname,
{'EEG': {'trials': eeg.trials, 'srate': eeg.srate,
'nbchan': eeg.nbchan, 'data': 'test_overla_event.fdt',
'epoch': eeg.epoch,
'event': [eeg.event[0], eeg.event[0]],
'chanlocs': eeg.chanlocs, 'pnts': eeg.pnts}},
appendmat=False, oned_as='row')
with pytest.raises(FileNotFoundError, match="not find the .fdt data file"):
read_raw_eeglab(input_fname=overlap_fname, preload=True)
# test reading file with one channel
one_chan_fname = op.join(tmpdir, 'test_one_channel.set')
io.savemat(one_chan_fname,
{'EEG': {'trials': eeg.trials, 'srate': eeg.srate,
'nbchan': 1, 'data': np.random.random((1, 3)),
'epoch': eeg.epoch, 'event': eeg.epoch,
'chanlocs': {'labels': 'E1', 'Y': -6.6069,
'X': 6.3023, 'Z': -2.9423},
'times': eeg.times[:3], 'pnts': 3}},
appendmat=False, oned_as='row')
read_raw_eeglab(input_fname=one_chan_fname, preload=True)
# test reading file with 3 channels - one without position information
# first, create chanlocs structured array
ch_names = ['F3', 'unknown', 'FPz']
x, y, z = [1., 2., np.nan], [4., 5., np.nan], [7., 8., np.nan]
dt = [('labels', 'S10'), ('X', 'f8'), ('Y', 'f8'), ('Z', 'f8')]
nopos_dt = [('labels', 'S10'), ('Z', 'f8')]
chanlocs = np.zeros((3,), dtype=dt)
nopos_chanlocs = np.zeros((3,), dtype=nopos_dt)
for ind, vals in enumerate(zip(ch_names, x, y, z)):
for fld in range(4):
chanlocs[ind][dt[fld][0]] = vals[fld]
if fld in (0, 3):
nopos_chanlocs[ind][dt[fld][0]] = vals[fld]
# In theory this should work and be simpler, but there is an obscure
# SciPy writing bug that pops up sometimes:
# nopos_chanlocs = np.array(chanlocs[['labels', 'Z']])
if LooseVersion(np.__version__) == '1.14.0':
# There is a bug in 1.14.0 (or maybe with SciPy 1.0.0?) that causes
# this write to fail!
raise SkipTest('Need to fix bug in NumPy 1.14.0!')
# test reading channel names but not positions when there is no X (only Z)
# field in the EEG.chanlocs structure
nopos_fname = op.join(tmpdir, 'test_no_chanpos.set')
io.savemat(nopos_fname,
{'EEG': {'trials': eeg.trials, 'srate': eeg.srate, 'nbchan': 3,
'data': np.random.random((3, 2)), 'epoch': eeg.epoch,
'event': eeg.epoch, 'chanlocs': nopos_chanlocs,
'times': eeg.times[:2], 'pnts': 2}},
appendmat=False, oned_as='row')
# load the file
raw = read_raw_eeglab(input_fname=nopos_fname, preload=True)
# test that channel names have been loaded but not channel positions
for i in range(3):
assert_equal(raw.info['chs'][i]['ch_name'], ch_names[i])
assert_array_equal(raw.info['chs'][i]['loc'][:3],
np.array([np.nan, np.nan, np.nan]))
@pytest.mark.timeout(60) # ~60 sec on Travis OSX
@testing.requires_testing_data
@pytest.mark.parametrize('fnames', [
epochs_mat_fnames,
pytest.param(epochs_h5_fnames, marks=[needs_h5, pytest.mark.slowtest]),
])
def test_io_set_epochs(fnames):
"""Test importing EEGLAB .set epochs files."""
epochs_fname, epochs_fname_onefile = fnames
with pytest.warns(RuntimeWarning, match='multiple events'):
epochs = read_epochs_eeglab(epochs_fname)
with pytest.warns(RuntimeWarning, match='multiple events'):
epochs2 = read_epochs_eeglab(epochs_fname_onefile)
# one warning for each read_epochs_eeglab because both files have epochs
# associated with multiple events
assert_array_equal(epochs.get_data(), epochs2.get_data())
@testing.requires_testing_data
def test_io_set_epochs_events(tmpdir):
"""Test different combinations of events and event_ids."""
tmpdir = str(tmpdir)
out_fname = op.join(tmpdir, 'test-eve.fif')
events = np.array([[4, 0, 1], [12, 0, 2], [20, 0, 3], [26, 0, 3]])
write_events(out_fname, events)
event_id = {'S255/S8': 1, 'S8': 2, 'S255/S9': 3}
out_fname = op.join(tmpdir, 'test-eve.fif')
epochs = read_epochs_eeglab(epochs_fname_mat, events, event_id)
assert_equal(len(epochs.events), 4)
assert epochs.preload
assert epochs._bad_dropped
epochs = read_epochs_eeglab(epochs_fname_mat, out_fname, event_id)
pytest.raises(ValueError, read_epochs_eeglab, epochs_fname_mat,
None, event_id)
pytest.raises(ValueError, read_epochs_eeglab, epochs_fname_mat,
epochs.events, None)
@testing.requires_testing_data
def test_degenerate(tmpdir):
"""Test some degenerate conditions."""
# test if .dat file raises an error
tmpdir = str(tmpdir)
eeg = io.loadmat(epochs_fname_mat, struct_as_record=False,
squeeze_me=True)['EEG']
eeg.data = 'epochs_fname.dat'
bad_epochs_fname = op.join(tmpdir, 'test_epochs.set')
io.savemat(bad_epochs_fname,
{'EEG': {'trials': eeg.trials, 'srate': eeg.srate,
'nbchan': eeg.nbchan, 'data': eeg.data,
'epoch': eeg.epoch, 'event': eeg.event,
'chanlocs': eeg.chanlocs, 'pnts': eeg.pnts}},
appendmat=False, oned_as='row')
shutil.copyfile(op.join(base_dir, 'test_epochs.fdt'),
op.join(tmpdir, 'test_epochs.dat'))
with pytest.warns(RuntimeWarning, match='multiple events'):
pytest.raises(NotImplementedError, read_epochs_eeglab,
bad_epochs_fname)
@pytest.mark.parametrize("fname", [
raw_fname_mat,
raw_fname_onefile_mat,
    # We don't test the h5 variants here because they are implicitly tested
# in test_io_set_raw
])
@pytest.mark.filterwarnings('ignore: Complex objects')
@testing.requires_testing_data
def test_eeglab_annotations(fname):
"""Test reading annotations in EEGLAB files."""
annotations = read_annotations(fname)
assert len(annotations) == 154
assert set(annotations.description) == {'rt', 'square'}
assert np.all(annotations.duration == 0.)
@testing.requires_testing_data
def test_eeglab_read_annotations():
"""Test annotations onsets are timestamps (+ validate some)."""
annotations = read_annotations(raw_fname_mat)
validation_samples = [0, 1, 2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31]
expected_onset = np.array([1.00, 1.69, 2.08, 4.70, 7.71, 11.30, 17.18,
20.20, 26.12, 29.14, 35.25, 44.30, 47.15])
assert annotations.orig_time is None
assert_array_almost_equal(annotations.onset[validation_samples],
expected_onset, decimal=2)
# test if event durations are imported correctly
raw = read_raw_eeglab(raw_fname_event_duration, preload=True)
# file contains 3 annotations with 0.5 s (64 samples) duration each
assert_allclose(raw.annotations.duration, np.ones(3) * 0.5)
@testing.requires_testing_data
def test_eeglab_event_from_annot():
"""Test all forms of obtaining annotations."""
base_dir = op.join(testing.data_path(download=False), 'EEGLAB')
raw_fname_mat = op.join(base_dir, 'test_raw.set')
raw_fname = raw_fname_mat
event_id = {'rt': 1, 'square': 2}
raw1 = read_raw_eeglab(input_fname=raw_fname, preload=False)
annotations = read_annotations(raw_fname)
assert len(raw1.annotations) == 154
raw1.set_annotations(annotations)
events_b, _ = events_from_annotations(raw1, event_id=event_id)
assert len(events_b) == 154
def _assert_array_allclose_nan(left, right):
assert_array_equal(np.isnan(left), np.isnan(right))
assert_allclose(left[~np.isnan(left)], right[~np.isnan(left)], atol=1e-8)
@pytest.fixture(scope='session')
def one_chanpos_fname(tmpdir_factory):
"""Test file with 3 channels to exercise EEGLAB reader.
File characteristics
- ch_names: 'F3', 'unknown', 'FPz'
- 'FPz' has no position information.
- the rest is aleatory
Notes from when this code was factorized:
# test reading file with one event (read old version)
"""
fname = str(tmpdir_factory.mktemp('data').join('test_chanpos.set'))
    file_content = dict(EEG={
'trials': 1, 'nbchan': 3, 'pnts': 3, 'epoch': [], 'event': [],
'srate': 128, 'times': np.array([0., 0.1, 0.2]),
'data': np.empty([3, 3]),
'chanlocs': np.array(
[(b'F3', 1., 4., 7.),
(b'unknown', 2., 5., 8.),
(b'FPz', np.nan, np.nan, np.nan)],
dtype=[('labels', 'S10'), ('X', 'f8'), ('Y', 'f8'), ('Z', 'f8')]
)
})
    io.savemat(file_name=fname, mdict=file_content, appendmat=False,
oned_as='row')
return fname
@testing.requires_testing_data
def test_position_information(one_chanpos_fname):
"""Test reading file with 3 channels - one without position information."""
nan = np.nan
EXPECTED_LOCATIONS_FROM_FILE = np.array([
[-4., 1., 7., 0., 0., 0., nan, nan, nan, nan, nan, nan],
[-5., 2., 8., 0., 0., 0., nan, nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan],
])
EXPECTED_LOCATIONS_FROM_MONTAGE = np.array([
[nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan],
[nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan],
])
raw = read_raw_eeglab(input_fname=one_chanpos_fname, preload=True)
assert_array_equal(np.array([ch['loc'] for ch in raw.info['chs']]),
EXPECTED_LOCATIONS_FROM_FILE)
# To accommodate the new behavior so that:
# read_raw_eeglab(.. montage=montage) and raw.set_montage(montage)
# behaves the same we need to flush the montage. otherwise we get
# a mix of what is in montage and in the file
raw = read_raw_eeglab(
input_fname=one_chanpos_fname,
preload=True,
).set_montage(None) # Flush the montage builtin within input_fname
_assert_array_allclose_nan(np.array([ch['loc'] for ch in raw.info['chs']]),
EXPECTED_LOCATIONS_FROM_MONTAGE)
|
import asyncio
import aiohttp
import aiohttp.web
from aiohttp.test_utils import TestClient
async def aiohttp_request(loop, method, url, output="text", encoding="utf-8", content_type=None, **kwargs):
    """Perform an HTTP request and return a ``(response, content)`` pair decoded per ``output``."""
session = aiohttp.ClientSession(loop=loop)
response_ctx = session.request(method, url, **kwargs)
response = await response_ctx.__aenter__()
if output == "text":
content = await response.text()
elif output == "json":
content_type = content_type or "application/json"
content = await response.json(encoding=encoding, content_type=content_type)
elif output == "raw":
content = await response.read()
elif output == "stream":
content = await response.content.read()
response_ctx._resp.close()
await session.close()
return response, content
def aiohttp_app():
    """Return a small aiohttp web application with text and JSON test routes."""
async def hello(request):
return aiohttp.web.Response(text="hello")
async def json(request):
return aiohttp.web.json_response({})
async def json_empty_body(request):
return aiohttp.web.json_response()
app = aiohttp.web.Application()
app.router.add_get("/", hello)
app.router.add_get("/json", json)
app.router.add_get("/json/empty", json_empty_body)
return app
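# Hedged usage sketch (not exercised anywhere): the URL is a placeholder and
# assumes an HTTP server is reachable at that address.
def _example_fetch_text(loop):
    """Fetch a page as text via ``aiohttp_request`` (illustrative only)."""
    return loop.run_until_complete(
        aiohttp_request(loop, "GET", "http://localhost:8080/", output="text")
    )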
|
import asyncio
from ..event import _loop
from .. import config
from . import logger
# There is always a single current server (except initially there is None)
_current_server = None
def create_server(host=None, port=None, loop=None, backend='tornado',
**server_kwargs):
"""
Create a new server object. This is automatically called; users generally
don't need this, unless they want to explicitly specify host/port,
create a fresh server in testing scenarios, or run Flexx in a thread.
Flexx uses the notion of a single current server object. This function
(re)creates that object. If there already was a server object, it is
replaced. It is an error to call this function if the current server
is still running.
Arguments:
host (str): The hostname to serve on. By default
``flexx.config.hostname`` is used. If ``False``, do not listen
(e.g. when integrating with an existing Tornado application).
port (int, str): The port number. If a string is given, it is
hashed to an ephemeral port number. By default
``flexx.config.port`` is used.
loop: A fresh (asyncio) event loop, default None (use current).
backend (str): Stub argument; only Tornado is currently supported.
**server_kwargs: keyword arguments passed to the server constructor.
Returns:
AbstractServer: The server object, see ``current_server()``.
"""
# Lazy load tornado, so that we can use anything we want there without
# preventing other parts of flexx.app from using *this* module.
from ._tornadoserver import TornadoServer # noqa - circular dependency
global _current_server
if backend.lower() != 'tornado':
raise RuntimeError('Flexx server can only run on Tornado (for now).')
# Handle defaults
if host is None:
host = config.hostname
if port is None:
port = config.port
# Stop old server
if _current_server:
_current_server.close()
# Start hosting
_current_server = TornadoServer(host, port, loop, **server_kwargs)
assert isinstance(_current_server, AbstractServer)
return _current_server
def current_server(create=True):
"""
Get the current server object. Creates a server if there is none
and the ``create`` arg is True. Currently, this is always a
TornadoServer object, which has properties:
* serving: a tuple ``(hostname, port)`` specifying the location
being served (or ``None`` if the server is closed).
* protocol: the protocol (e.g. "http") being used.
* app: the ``tornado.web.Application`` instance
* server: the ``tornado.httpserver.HttpServer`` instance
"""
if create and not _current_server:
create_server()
return _current_server
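# Hedged usage sketch: the host and port below are placeholders, not values
# taken from flexx.config; defining this function has no side effects.
def _example_create_and_query_server():
    """Create a server on an explicit host/port and report where it serves."""
    server = create_server(host='localhost', port=0)  # port 0: autoselect
    return server.serving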
## Server class
async def keep_awake():
# This is to wake Python up from time to time to allow interruption
# See #529, and e.g. Hypercorn's run() implementation.
    # Strictly speaking this is only required on Windows.
while True:
await asyncio.sleep(0.2)
class AbstractServer:
""" This is an attempt to generalize the server, so that in the
future we may have e.g. a Flask or Pyramid server.
A server must implement this, and use the manager to instantiate,
    connect and disconnect sessions. The assets object must be used to
    serve assets to the client.
Arguments:
host (str): the hostname to serve at
port (int): the port to serve at. None or 0 mean to autoselect a port.
"""
def __init__(self, host, port, loop=None, **kwargs):
# First off, create new event loop and integrate event.loop
if loop is None:
self._loop = asyncio.get_event_loop()
else:
assert isinstance(loop, asyncio.AbstractEventLoop)
self._loop = loop
asyncio.set_event_loop(self._loop)
_loop.loop.integrate(self._loop, reset=False)
self._serving = None
if host is not False:
self._open(host, port, **kwargs)
assert self._serving # Check that subclass set private variable
@property
def _running(self):
return self._loop.is_running()
def start(self):
""" Start the event loop. """
if not self._serving:
raise RuntimeError('Cannot start a closed or non-serving server!')
if self._running:
raise RuntimeError('Cannot start a running server.')
if asyncio.get_event_loop() is not self._loop:
raise RuntimeError('Can only start server in same thread that created it.')
logger.info('Starting Flexx event loop.')
# Make use of the semi-standard defined by IPython to determine
# if the ioloop is "hijacked" (e.g. in Pyzo).
if not getattr(self._loop, '_in_event_loop', False):
poller = self._loop.create_task(keep_awake())
try:
self._loop.run_forever()
except KeyboardInterrupt:
logger.info('Flexx event loop interrupted.')
except TypeError as err:
if "close() takes 1 positional argument but 3 were given" in str(err):
# This is weird - I looked into this but this does not seem to
# originate from Flexx, could this be a bug in CPython?
logger.info('Interrupted Flexx event loop.')
else:
raise
poller.cancel()
def stop(self):
""" Stop the event loop. This does not close the connection; the server
can be restarted. Thread safe. """
logger.info('Stopping Flexx event loop.')
self._loop.call_soon_threadsafe(self._loop.stop)
def close(self):
""" Close the connection. A closed server cannot be used again. """
if self._running:
raise RuntimeError('Cannot close a running server; need to stop first.')
self._serving = None
self._close()
# self._loop.close()
def _open(self, host, port, **kwargs):
raise NotImplementedError()
def _close(self):
raise NotImplementedError()
@property
def serving(self):
""" Get a tuple (hostname, port) that is being served.
Or None if the server is not serving (anymore).
"""
return self._serving
@property
def protocol(self):
""" Get a string representing served protocol
"""
raise NotImplementedError
|
from xknx import XKNX
from xknx.devices import (
BinarySensor as XknxBinarySensor,
Climate as XknxClimate,
ClimateMode as XknxClimateMode,
Cover as XknxCover,
Device as XknxDevice,
Light as XknxLight,
Notification as XknxNotification,
Scene as XknxScene,
Sensor as XknxSensor,
Switch as XknxSwitch,
Weather as XknxWeather,
)
from homeassistant.const import CONF_ADDRESS, CONF_DEVICE_CLASS, CONF_NAME, CONF_TYPE
from homeassistant.helpers.typing import ConfigType
from .const import ColorTempModes, SupportedPlatforms
from .schema import (
BinarySensorSchema,
ClimateSchema,
CoverSchema,
LightSchema,
SceneSchema,
SensorSchema,
SwitchSchema,
WeatherSchema,
)
def create_knx_device(
platform: SupportedPlatforms,
knx_module: XKNX,
config: ConfigType,
) -> XknxDevice:
"""Return the requested XKNX device."""
if platform is SupportedPlatforms.light:
return _create_light(knx_module, config)
if platform is SupportedPlatforms.cover:
return _create_cover(knx_module, config)
if platform is SupportedPlatforms.climate:
return _create_climate(knx_module, config)
if platform is SupportedPlatforms.switch:
return _create_switch(knx_module, config)
if platform is SupportedPlatforms.sensor:
return _create_sensor(knx_module, config)
if platform is SupportedPlatforms.notify:
return _create_notify(knx_module, config)
if platform is SupportedPlatforms.scene:
return _create_scene(knx_module, config)
if platform is SupportedPlatforms.binary_sensor:
return _create_binary_sensor(knx_module, config)
if platform is SupportedPlatforms.weather:
return _create_weather(knx_module, config)
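# Hedged usage sketch: the group address and name below are placeholders and
# the config dict only carries the keys the switch factory reads.
def _example_create_switch(knx_module: XKNX) -> XknxSwitch:
    """Build a minimal KNX switch via ``create_knx_device`` (illustrative)."""
    config = {CONF_NAME: "Example switch", CONF_ADDRESS: "1/2/3"}
    return create_knx_device(SupportedPlatforms.switch, knx_module, config)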
def _create_cover(knx_module: XKNX, config: ConfigType) -> XknxCover:
"""Return a KNX Cover device to be used within XKNX."""
return XknxCover(
knx_module,
name=config[CONF_NAME],
group_address_long=config.get(CoverSchema.CONF_MOVE_LONG_ADDRESS),
group_address_short=config.get(CoverSchema.CONF_MOVE_SHORT_ADDRESS),
group_address_stop=config.get(CoverSchema.CONF_STOP_ADDRESS),
group_address_position_state=config.get(
CoverSchema.CONF_POSITION_STATE_ADDRESS
),
group_address_angle=config.get(CoverSchema.CONF_ANGLE_ADDRESS),
group_address_angle_state=config.get(CoverSchema.CONF_ANGLE_STATE_ADDRESS),
group_address_position=config.get(CoverSchema.CONF_POSITION_ADDRESS),
travel_time_down=config[CoverSchema.CONF_TRAVELLING_TIME_DOWN],
travel_time_up=config[CoverSchema.CONF_TRAVELLING_TIME_UP],
invert_position=config[CoverSchema.CONF_INVERT_POSITION],
invert_angle=config[CoverSchema.CONF_INVERT_ANGLE],
)
def _create_light(knx_module: XKNX, config: ConfigType) -> XknxLight:
"""Return a KNX Light device to be used within XKNX."""
group_address_tunable_white = None
group_address_tunable_white_state = None
group_address_color_temp = None
group_address_color_temp_state = None
if config[LightSchema.CONF_COLOR_TEMP_MODE] == ColorTempModes.absolute:
group_address_color_temp = config.get(LightSchema.CONF_COLOR_TEMP_ADDRESS)
group_address_color_temp_state = config.get(
LightSchema.CONF_COLOR_TEMP_STATE_ADDRESS
)
elif config[LightSchema.CONF_COLOR_TEMP_MODE] == ColorTempModes.relative:
group_address_tunable_white = config.get(LightSchema.CONF_COLOR_TEMP_ADDRESS)
group_address_tunable_white_state = config.get(
LightSchema.CONF_COLOR_TEMP_STATE_ADDRESS
)
return XknxLight(
knx_module,
name=config[CONF_NAME],
group_address_switch=config[CONF_ADDRESS],
group_address_switch_state=config.get(LightSchema.CONF_STATE_ADDRESS),
group_address_brightness=config.get(LightSchema.CONF_BRIGHTNESS_ADDRESS),
group_address_brightness_state=config.get(
LightSchema.CONF_BRIGHTNESS_STATE_ADDRESS
),
group_address_color=config.get(LightSchema.CONF_COLOR_ADDRESS),
group_address_color_state=config.get(LightSchema.CONF_COLOR_STATE_ADDRESS),
group_address_rgbw=config.get(LightSchema.CONF_RGBW_ADDRESS),
group_address_rgbw_state=config.get(LightSchema.CONF_RGBW_STATE_ADDRESS),
group_address_tunable_white=group_address_tunable_white,
group_address_tunable_white_state=group_address_tunable_white_state,
group_address_color_temperature=group_address_color_temp,
group_address_color_temperature_state=group_address_color_temp_state,
min_kelvin=config[LightSchema.CONF_MIN_KELVIN],
max_kelvin=config[LightSchema.CONF_MAX_KELVIN],
)
def _create_climate(knx_module: XKNX, config: ConfigType) -> XknxClimate:
"""Return a KNX Climate device to be used within XKNX."""
climate_mode = XknxClimateMode(
knx_module,
name=f"{config[CONF_NAME]} Mode",
group_address_operation_mode=config.get(
ClimateSchema.CONF_OPERATION_MODE_ADDRESS
),
group_address_operation_mode_state=config.get(
ClimateSchema.CONF_OPERATION_MODE_STATE_ADDRESS
),
group_address_controller_status=config.get(
ClimateSchema.CONF_CONTROLLER_STATUS_ADDRESS
),
group_address_controller_status_state=config.get(
ClimateSchema.CONF_CONTROLLER_STATUS_STATE_ADDRESS
),
group_address_controller_mode=config.get(
ClimateSchema.CONF_CONTROLLER_MODE_ADDRESS
),
group_address_controller_mode_state=config.get(
ClimateSchema.CONF_CONTROLLER_MODE_STATE_ADDRESS
),
group_address_operation_mode_protection=config.get(
ClimateSchema.CONF_OPERATION_MODE_FROST_PROTECTION_ADDRESS
),
group_address_operation_mode_night=config.get(
ClimateSchema.CONF_OPERATION_MODE_NIGHT_ADDRESS
),
group_address_operation_mode_comfort=config.get(
ClimateSchema.CONF_OPERATION_MODE_COMFORT_ADDRESS
),
group_address_operation_mode_standby=config.get(
ClimateSchema.CONF_OPERATION_MODE_STANDBY_ADDRESS
),
group_address_heat_cool=config.get(ClimateSchema.CONF_HEAT_COOL_ADDRESS),
group_address_heat_cool_state=config.get(
ClimateSchema.CONF_HEAT_COOL_STATE_ADDRESS
),
operation_modes=config.get(ClimateSchema.CONF_OPERATION_MODES),
)
return XknxClimate(
knx_module,
name=config[CONF_NAME],
group_address_temperature=config[ClimateSchema.CONF_TEMPERATURE_ADDRESS],
group_address_target_temperature=config.get(
ClimateSchema.CONF_TARGET_TEMPERATURE_ADDRESS
),
group_address_target_temperature_state=config[
ClimateSchema.CONF_TARGET_TEMPERATURE_STATE_ADDRESS
],
group_address_setpoint_shift=config.get(
ClimateSchema.CONF_SETPOINT_SHIFT_ADDRESS
),
group_address_setpoint_shift_state=config.get(
ClimateSchema.CONF_SETPOINT_SHIFT_STATE_ADDRESS
),
setpoint_shift_mode=config[ClimateSchema.CONF_SETPOINT_SHIFT_MODE],
setpoint_shift_max=config[ClimateSchema.CONF_SETPOINT_SHIFT_MAX],
setpoint_shift_min=config[ClimateSchema.CONF_SETPOINT_SHIFT_MIN],
temperature_step=config[ClimateSchema.CONF_TEMPERATURE_STEP],
group_address_on_off=config.get(ClimateSchema.CONF_ON_OFF_ADDRESS),
group_address_on_off_state=config.get(ClimateSchema.CONF_ON_OFF_STATE_ADDRESS),
min_temp=config.get(ClimateSchema.CONF_MIN_TEMP),
max_temp=config.get(ClimateSchema.CONF_MAX_TEMP),
mode=climate_mode,
on_off_invert=config[ClimateSchema.CONF_ON_OFF_INVERT],
)
def _create_switch(knx_module: XKNX, config: ConfigType) -> XknxSwitch:
"""Return a KNX switch to be used within XKNX."""
return XknxSwitch(
knx_module,
name=config[CONF_NAME],
group_address=config[CONF_ADDRESS],
group_address_state=config.get(SwitchSchema.CONF_STATE_ADDRESS),
)
def _create_sensor(knx_module: XKNX, config: ConfigType) -> XknxSensor:
"""Return a KNX sensor to be used within XKNX."""
return XknxSensor(
knx_module,
name=config[CONF_NAME],
group_address_state=config[SensorSchema.CONF_STATE_ADDRESS],
sync_state=config[SensorSchema.CONF_SYNC_STATE],
value_type=config[CONF_TYPE],
)
def _create_notify(knx_module: XKNX, config: ConfigType) -> XknxNotification:
"""Return a KNX notification to be used within XKNX."""
return XknxNotification(
knx_module,
name=config[CONF_NAME],
group_address=config[CONF_ADDRESS],
)
def _create_scene(knx_module: XKNX, config: ConfigType) -> XknxScene:
"""Return a KNX scene to be used within XKNX."""
return XknxScene(
knx_module,
name=config[CONF_NAME],
group_address=config[CONF_ADDRESS],
scene_number=config[SceneSchema.CONF_SCENE_NUMBER],
)
def _create_binary_sensor(knx_module: XKNX, config: ConfigType) -> XknxBinarySensor:
"""Return a KNX binary sensor to be used within XKNX."""
device_name = config[CONF_NAME]
return XknxBinarySensor(
knx_module,
name=device_name,
group_address_state=config[BinarySensorSchema.CONF_STATE_ADDRESS],
sync_state=config[BinarySensorSchema.CONF_SYNC_STATE],
device_class=config.get(CONF_DEVICE_CLASS),
ignore_internal_state=config[BinarySensorSchema.CONF_IGNORE_INTERNAL_STATE],
context_timeout=config[BinarySensorSchema.CONF_CONTEXT_TIMEOUT],
reset_after=config.get(BinarySensorSchema.CONF_RESET_AFTER),
)
def _create_weather(knx_module: XKNX, config: ConfigType) -> XknxWeather:
"""Return a KNX weather device to be used within XKNX."""
return XknxWeather(
knx_module,
name=config[CONF_NAME],
sync_state=config[WeatherSchema.CONF_SYNC_STATE],
expose_sensors=config[WeatherSchema.CONF_KNX_EXPOSE_SENSORS],
group_address_temperature=config[WeatherSchema.CONF_KNX_TEMPERATURE_ADDRESS],
group_address_brightness_south=config.get(
WeatherSchema.CONF_KNX_BRIGHTNESS_SOUTH_ADDRESS
),
group_address_brightness_east=config.get(
WeatherSchema.CONF_KNX_BRIGHTNESS_EAST_ADDRESS
),
group_address_brightness_west=config.get(
WeatherSchema.CONF_KNX_BRIGHTNESS_WEST_ADDRESS
),
group_address_brightness_north=config.get(
WeatherSchema.CONF_KNX_BRIGHTNESS_NORTH_ADDRESS
),
group_address_wind_speed=config.get(WeatherSchema.CONF_KNX_WIND_SPEED_ADDRESS),
group_address_rain_alarm=config.get(WeatherSchema.CONF_KNX_RAIN_ALARM_ADDRESS),
group_address_frost_alarm=config.get(
WeatherSchema.CONF_KNX_FROST_ALARM_ADDRESS
),
group_address_wind_alarm=config.get(WeatherSchema.CONF_KNX_WIND_ALARM_ADDRESS),
group_address_day_night=config.get(WeatherSchema.CONF_KNX_DAY_NIGHT_ADDRESS),
group_address_air_pressure=config.get(
WeatherSchema.CONF_KNX_AIR_PRESSURE_ADDRESS
),
group_address_humidity=config.get(WeatherSchema.CONF_KNX_HUMIDITY_ADDRESS),
)
|
import ast
from pathlib import Path
from typing import Dict, Set
from homeassistant.requirements import DISCOVERY_INTEGRATIONS
from .model import Integration
class ImportCollector(ast.NodeVisitor):
"""Collect all integrations referenced."""
def __init__(self, integration: Integration):
"""Initialize the import collector."""
self.integration = integration
self.referenced: Dict[Path, Set[str]] = {}
# Current file or dir we're inspecting
self._cur_fil_dir = None
def collect(self) -> None:
"""Collect imports from a source file."""
for fil in self.integration.path.glob("**/*.py"):
if not fil.is_file():
continue
self._cur_fil_dir = fil.relative_to(self.integration.path)
self.referenced[self._cur_fil_dir] = set()
self.visit(ast.parse(fil.read_text()))
self._cur_fil_dir = None
def _add_reference(self, reference_domain: str):
"""Add a reference."""
self.referenced[self._cur_fil_dir].add(reference_domain)
def visit_ImportFrom(self, node):
"""Visit ImportFrom node."""
if node.module is None:
return
if node.module.startswith("homeassistant.components."):
# from homeassistant.components.alexa.smart_home import EVENT_ALEXA_SMART_HOME
# from homeassistant.components.logbook import bla
self._add_reference(node.module.split(".")[2])
elif node.module == "homeassistant.components":
# from homeassistant.components import sun
for name_node in node.names:
self._add_reference(name_node.name)
def visit_Import(self, node):
"""Visit Import node."""
# import homeassistant.components.hue as hue
for name_node in node.names:
if name_node.name.startswith("homeassistant.components."):
self._add_reference(name_node.name.split(".")[2])
def visit_Attribute(self, node):
"""Visit Attribute node."""
# hass.components.hue.async_create()
# Name(id=hass)
# .Attribute(attr=hue)
# .Attribute(attr=async_create)
# self.hass.components.hue.async_create()
# Name(id=self)
# .Attribute(attr=hass) or .Attribute(attr=_hass)
# .Attribute(attr=hue)
# .Attribute(attr=async_create)
if (
isinstance(node.value, ast.Attribute)
and node.value.attr == "components"
and (
(
isinstance(node.value.value, ast.Name)
and node.value.value.id == "hass"
)
or (
isinstance(node.value.value, ast.Attribute)
and node.value.value.attr in ("hass", "_hass")
)
)
):
self._add_reference(node.attr)
else:
# Have it visit other kids
self.generic_visit(node)
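# Hedged sketch of how the collector is typically driven; the Integration
# instance is assumed to point at a real component directory on disk.
def _example_collect_references(integration: Integration) -> Dict[Path, Set[str]]:
    """Run ``ImportCollector`` over one integration and return its references."""
    collector = ImportCollector(integration)
    collector.collect()
    return collector.referenced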
ALLOWED_USED_COMPONENTS = {
# Internal integrations
"alert",
"automation",
"conversation",
"device_automation",
"frontend",
"group",
"hassio",
"homeassistant",
"input_boolean",
"input_datetime",
"input_number",
"input_select",
"input_text",
"onboarding",
"persistent_notification",
"person",
"script",
"shopping_list",
"sun",
"system_health",
"system_log",
"timer",
"webhook",
"websocket_api",
"zone",
# Entity integrations with platforms
"alarm_control_panel",
"binary_sensor",
"climate",
"cover",
"device_tracker",
"fan",
"humidifier",
"image_processing",
"light",
"lock",
"media_player",
"scene",
"sensor",
"switch",
"vacuum",
"water_heater",
# Other
"mjpeg", # base class, has no reqs or component to load.
"stream", # Stream cannot install on all systems, can be imported without reqs.
}
IGNORE_VIOLATIONS = {
# Has same requirement, gets defaults.
("sql", "recorder"),
# Sharing a base class
("openalpr_cloud", "openalpr_local"),
("lutron_caseta", "lutron"),
("ffmpeg_noise", "ffmpeg_motion"),
# Demo
("demo", "manual"),
("demo", "openalpr_local"),
# This should become a helper method that integrations can submit data to
("websocket_api", "lovelace"),
("websocket_api", "shopping_list"),
"logbook",
}
def calc_allowed_references(integration: Integration) -> Set[str]:
"""Return a set of allowed references."""
allowed_references = (
ALLOWED_USED_COMPONENTS
| set(integration.manifest.get("dependencies", []))
| set(integration.manifest.get("after_dependencies", []))
)
# Discovery requirements are ok if referenced in manifest
for check_domain, to_check in DISCOVERY_INTEGRATIONS.items():
if any(check in integration.manifest for check in to_check):
allowed_references.add(check_domain)
return allowed_references
def find_non_referenced_integrations(
integrations: Dict[str, Integration],
integration: Integration,
references: Dict[Path, Set[str]],
):
"""Find intergrations that are not allowed to be referenced."""
allowed_references = calc_allowed_references(integration)
referenced = set()
for path, refs in references.items():
if len(path.parts) == 1:
# climate.py is stored as climate
cur_fil_dir = path.stem
else:
# climate/__init__.py is stored as climate
cur_fil_dir = path.parts[0]
is_platform_other_integration = cur_fil_dir in integrations
for ref in refs:
# We are always allowed to import from ourselves
if ref == integration.domain:
continue
# These references are approved based on the manifest
if ref in allowed_references:
continue
# Some violations are whitelisted
if (integration.domain, ref) in IGNORE_VIOLATIONS:
continue
# If it's a platform for another integration, the other integration is ok
if is_platform_other_integration and cur_fil_dir == ref:
continue
# These have a platform specified in this integration
if not is_platform_other_integration and (
(integration.path / f"{ref}.py").is_file()
# Platform dir
or (integration.path / ref).is_dir()
):
continue
referenced.add(ref)
return referenced
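# Hedged illustration of the path handling above: a single-file platform like
# ``climate.py`` maps to its stem, while a package like ``climate/__init__.py``
# maps to its first path component.
def _example_platform_name(path: Path) -> str:
    """Return the integration name a relative source path is attributed to."""
    return path.stem if len(path.parts) == 1 else path.parts[0]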
def validate_dependencies(
integrations: Dict[str, Integration], integration: Integration
):
"""Validate all dependencies."""
# Some integrations are allowed to have violations.
if integration.domain in IGNORE_VIOLATIONS:
return
# Find usage of hass.components
collector = ImportCollector(integration)
collector.collect()
for domain in sorted(
find_non_referenced_integrations(
integrations, integration, collector.referenced
)
):
integration.add_error(
"dependencies",
f"Using component {domain} but it's not in 'dependencies' "
"or 'after_dependencies'",
)
def validate(integrations: Dict[str, Integration], config):
"""Handle dependencies for integrations."""
# check for non-existing dependencies
for integration in integrations.values():
if not integration.manifest:
continue
validate_dependencies(integrations, integration)
if config.specific_integrations:
continue
# check that all referenced dependencies exist
after_deps = integration.manifest.get("after_dependencies", [])
for dep in integration.manifest.get("dependencies", []):
if dep in after_deps:
integration.add_error(
"dependencies",
f"Dependency {dep} is both in dependencies and after_dependencies",
)
if dep not in integrations:
integration.add_error(
"dependencies", f"Dependency {dep} does not exist"
)
|
import aiohttp
import pytest
import homeassistant.util.location as location_util
from tests.async_mock import Mock, patch
from tests.common import load_fixture
# Paris
COORDINATES_PARIS = (48.864716, 2.349014)
# New York
COORDINATES_NEW_YORK = (40.730610, -73.935242)
# Results for the assertion (vincenty algorithm):
#        Distance [km]   Distance [miles]
# [0]    5846.39         3632.78
# [1]    5851            3635
#
# [0]: http://boulter.com/gps/distance/
# [1]: https://www.wolframalpha.com/input/?i=from+paris+to+new+york
DISTANCE_KM = 5846.39
DISTANCE_MILES = 3632.78
@pytest.fixture
async def session(hass):
"""Return aioclient session."""
return hass.helpers.aiohttp_client.async_get_clientsession()
@pytest.fixture
async def raising_session(loop):
"""Return an aioclient session that only fails."""
return Mock(get=Mock(side_effect=aiohttp.ClientError))
def test_get_distance_to_same_place():
"""Test getting the distance."""
meters = location_util.distance(
COORDINATES_PARIS[0],
COORDINATES_PARIS[1],
COORDINATES_PARIS[0],
COORDINATES_PARIS[1],
)
assert meters == 0
def test_get_distance():
"""Test getting the distance."""
meters = location_util.distance(
COORDINATES_PARIS[0],
COORDINATES_PARIS[1],
COORDINATES_NEW_YORK[0],
COORDINATES_NEW_YORK[1],
)
assert meters / 1000 - DISTANCE_KM < 0.01
def test_get_kilometers():
"""Test getting the distance between given coordinates in km."""
kilometers = location_util.vincenty(COORDINATES_PARIS, COORDINATES_NEW_YORK)
assert round(kilometers, 2) == DISTANCE_KM
def test_get_miles():
"""Test getting the distance between given coordinates in miles."""
miles = location_util.vincenty(COORDINATES_PARIS, COORDINATES_NEW_YORK, miles=True)
assert round(miles, 2) == DISTANCE_MILES
async def test_detect_location_info_ipapi(aioclient_mock, session):
"""Test detect location info using ipapi.co."""
aioclient_mock.get(location_util.IPAPI, text=load_fixture("ipapi.co.json"))
info = await location_util.async_detect_location_info(session, _test_real=True)
assert info is not None
assert info.ip == "1.2.3.4"
assert info.country_code == "CH"
assert info.country_name == "Switzerland"
assert info.region_code == "BE"
assert info.region_name == "Bern"
assert info.city == "Bern"
assert info.zip_code == "3000"
assert info.time_zone == "Europe/Zurich"
assert info.latitude == 46.9480278
assert info.longitude == 7.4490812
assert info.use_metric
async def test_detect_location_info_ipapi_exhaust(aioclient_mock, session):
"""Test detect location info using ipapi.co."""
aioclient_mock.get(location_util.IPAPI, json={"latitude": "Sign up to access"})
aioclient_mock.get(location_util.IP_API, text=load_fixture("ip-api.com.json"))
info = await location_util.async_detect_location_info(session, _test_real=True)
assert info is not None
# ip_api result because ipapi got skipped
assert info.country_code == "US"
assert len(aioclient_mock.mock_calls) == 2
async def test_detect_location_info_ip_api(aioclient_mock, session):
"""Test detect location info using ip-api.com."""
aioclient_mock.get(location_util.IP_API, text=load_fixture("ip-api.com.json"))
with patch("homeassistant.util.location._get_ipapi", return_value=None):
info = await location_util.async_detect_location_info(session, _test_real=True)
assert info is not None
assert info.ip == "1.2.3.4"
assert info.country_code == "US"
assert info.country_name == "United States"
assert info.region_code == "CA"
assert info.region_name == "California"
assert info.city == "San Diego"
assert info.zip_code == "92122"
assert info.time_zone == "America/Los_Angeles"
assert info.latitude == 32.8594
assert info.longitude == -117.2073
assert not info.use_metric
async def test_detect_location_info_both_queries_fail(session):
"""Ensure we return None if both queries fail."""
with patch("homeassistant.util.location._get_ipapi", return_value=None), patch(
"homeassistant.util.location._get_ip_api", return_value=None
):
info = await location_util.async_detect_location_info(session, _test_real=True)
assert info is None
async def test_freegeoip_query_raises(raising_session):
"""Test ipapi.co query when the request to API fails."""
info = await location_util._get_ipapi(raising_session)
assert info is None
async def test_ip_api_query_raises(raising_session):
"""Test ip api query when the request to API fails."""
info = await location_util._get_ip_api(raising_session)
assert info is None
|
from django.db import migrations
FIELDS = (
("term", "source"),
("term", "target"),
)
PG_TRGM = "CREATE INDEX {0}_{1}_fulltext ON glossary_{0} USING GIN ({1} gin_trgm_ops)"
PG_DROP = "DROP INDEX {0}_{1}_fulltext"
MY_FTX = "CREATE FULLTEXT INDEX {0}_{1}_fulltext ON glossary_{0}({1})"
MY_DROP = "ALTER TABLE glossary_{0} DROP INDEX {0}_{1}_fulltext"
def create_index(apps, schema_editor):
vendor = schema_editor.connection.vendor
if vendor == "postgresql":
# Create GIN trigram index on searched fields
for table, field in FIELDS:
schema_editor.execute(PG_TRGM.format(table, field))
elif vendor == "mysql":
for table, field in FIELDS:
schema_editor.execute(MY_FTX.format(table, field))
else:
raise Exception(f"Unsupported database: {vendor}")
def drop_index(apps, schema_editor):
vendor = schema_editor.connection.vendor
if vendor == "postgresql":
for table, field in FIELDS:
schema_editor.execute(PG_DROP.format(table, field))
elif vendor == "mysql":
for table, field in FIELDS:
schema_editor.execute(MY_DROP.format(table, field))
else:
raise Exception(f"Unsupported database: {vendor}")
class Migration(migrations.Migration):
dependencies = [
("glossary", "0002_migrate_dictionary"),
]
operations = [
migrations.RunPython(create_index, drop_index, elidable=False, atomic=False)
]
|
from asyncio import TimeoutError as asyncioTimeoutError
import logging
from aiohttp.client_exceptions import ClientError
from pyControl4.account import C4Account
from pyControl4.director import C4Director
from pyControl4.error_handling import NotFound, Unauthorized
import voluptuous as vol
from homeassistant import config_entries, exceptions
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_SCAN_INTERVAL,
CONF_USERNAME,
)
from homeassistant.core import callback
from homeassistant.helpers import aiohttp_client, config_validation as cv
from homeassistant.helpers.device_registry import format_mac
from .const import CONF_CONTROLLER_UNIQUE_ID, DEFAULT_SCAN_INTERVAL, MIN_SCAN_INTERVAL
from .const import DOMAIN # pylint:disable=unused-import
_LOGGER = logging.getLogger(__name__)
DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST): str,
vol.Required(CONF_USERNAME): str,
vol.Required(CONF_PASSWORD): str,
}
)
class Control4Validator:
"""Validates that config details can be used to authenticate and communicate with Control4."""
def __init__(self, host, username, password, hass):
"""Initialize."""
self.host = host
self.username = username
self.password = password
self.controller_unique_id = None
self.director_bearer_token = None
self.hass = hass
async def authenticate(self) -> bool:
"""Test if we can authenticate with the Control4 account API."""
try:
account_session = aiohttp_client.async_get_clientsession(self.hass)
account = C4Account(self.username, self.password, account_session)
# Authenticate with Control4 account
await account.getAccountBearerToken()
# Get controller name
account_controllers = await account.getAccountControllers()
self.controller_unique_id = account_controllers["controllerCommonName"]
# Get bearer token to communicate with controller locally
self.director_bearer_token = (
await account.getDirectorBearerToken(self.controller_unique_id)
)["token"]
return True
except (Unauthorized, NotFound):
return False
async def connect_to_director(self) -> bool:
"""Test if we can connect to the local Control4 Director."""
try:
director_session = aiohttp_client.async_get_clientsession(
self.hass, verify_ssl=False
)
director = C4Director(
self.host, self.director_bearer_token, director_session
)
await director.getAllItemInfo()
return True
except (Unauthorized, ClientError, asyncioTimeoutError):
_LOGGER.error("Failed to connect to the Control4 controller")
return False
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Control4."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL
async def async_step_user(self, user_input=None):
"""Handle the initial step."""
errors = {}
if user_input is not None:
hub = Control4Validator(
user_input["host"],
user_input["username"],
user_input["password"],
self.hass,
)
try:
if not await hub.authenticate():
raise InvalidAuth
if not await hub.connect_to_director():
raise CannotConnect
except InvalidAuth:
errors["base"] = "invalid_auth"
except CannotConnect:
errors["base"] = "cannot_connect"
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
if not errors:
controller_unique_id = hub.controller_unique_id
mac = (controller_unique_id.split("_", 3))[2]
formatted_mac = format_mac(mac)
await self.async_set_unique_id(formatted_mac)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=controller_unique_id,
data={
CONF_HOST: user_input["host"],
CONF_USERNAME: user_input["username"],
CONF_PASSWORD: user_input["password"],
CONF_CONTROLLER_UNIQUE_ID: controller_unique_id,
},
)
return self.async_show_form(
step_id="user", data_schema=DATA_SCHEMA, errors=errors
)
@staticmethod
@callback
def async_get_options_flow(config_entry):
"""Get the options flow for this handler."""
return OptionsFlowHandler(config_entry)
class OptionsFlowHandler(config_entries.OptionsFlow):
"""Handle a option flow for Control4."""
def __init__(self, config_entry: config_entries.ConfigEntry):
"""Initialize options flow."""
self.config_entry = config_entry
async def async_step_init(self, user_input=None):
"""Handle options flow."""
if user_input is not None:
return self.async_create_entry(title="", data=user_input)
data_schema = vol.Schema(
{
vol.Optional(
CONF_SCAN_INTERVAL,
default=self.config_entry.options.get(
CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL
),
): vol.All(cv.positive_int, vol.Clamp(min=MIN_SCAN_INTERVAL)),
}
)
return self.async_show_form(step_id="init", data_schema=data_schema)
class CannotConnect(exceptions.HomeAssistantError):
"""Error to indicate we cannot connect."""
class InvalidAuth(exceptions.HomeAssistantError):
"""Error to indicate there is invalid auth."""
|
import asyncio
import logging
import pywemo
import requests
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_send
from .const import DOMAIN
# Mapping from Wemo model_name to component.
WEMO_MODEL_DISPATCH = {
"Bridge": "light",
"CoffeeMaker": "switch",
"Dimmer": "light",
"Humidifier": "fan",
"Insight": "switch",
"LightSwitch": "switch",
"Maker": "switch",
"Motion": "binary_sensor",
"Sensor": "binary_sensor",
"Socket": "switch",
}
_LOGGER = logging.getLogger(__name__)
def coerce_host_port(value):
"""Validate that provided value is either just host or host:port.
Returns (host, None) or (host, port) respectively.
"""
host, _, port = value.partition(":")
if not host:
raise vol.Invalid("host cannot be empty")
if port:
port = cv.port(port)
else:
port = None
return host, port
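# Hedged examples of the parsing above (addresses are placeholders):
#   coerce_host_port("192.168.1.5")      -> ("192.168.1.5", None)
#   coerce_host_port("192.168.1.5:8080") -> ("192.168.1.5", 8080)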
CONF_STATIC = "static"
CONF_DISCOVERY = "discovery"
DEFAULT_DISCOVERY = True
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Optional(CONF_STATIC, default=[]): vol.Schema(
[vol.All(cv.string, coerce_host_port)]
),
vol.Optional(CONF_DISCOVERY, default=DEFAULT_DISCOVERY): cv.boolean,
}
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, config):
"""Set up for WeMo devices."""
hass.data[DOMAIN] = {
"config": config.get(DOMAIN, {}),
"registry": None,
"pending": {},
}
if DOMAIN in config:
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_IMPORT}
)
)
return True
async def async_setup_entry(hass, entry):
"""Set up a wemo config entry."""
config = hass.data[DOMAIN].pop("config")
# Keep track of WeMo device subscriptions for push updates
registry = hass.data[DOMAIN]["registry"] = pywemo.SubscriptionRegistry()
await hass.async_add_executor_job(registry.start)
def stop_wemo(event):
"""Shutdown Wemo subscriptions and subscription thread on exit."""
_LOGGER.debug("Shutting down WeMo event subscriptions")
registry.stop()
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, stop_wemo)
devices = {}
static_conf = config.get(CONF_STATIC, [])
if static_conf:
_LOGGER.debug("Adding statically configured WeMo devices...")
for device in await asyncio.gather(
*[
hass.async_add_executor_job(validate_static_config, host, port)
for host, port in static_conf
]
):
if device is None:
continue
devices.setdefault(device.serialnumber, device)
if config.get(CONF_DISCOVERY, DEFAULT_DISCOVERY):
_LOGGER.debug("Scanning network for WeMo devices...")
for device in await hass.async_add_executor_job(pywemo.discover_devices):
devices.setdefault(
device.serialnumber,
device,
)
loaded_components = set()
for device in devices.values():
_LOGGER.debug(
"Adding WeMo device at %s:%i (%s)",
device.host,
device.port,
device.serialnumber,
)
component = WEMO_MODEL_DISPATCH.get(device.model_name, "switch")
# Three cases:
# - First time we see component, we need to load it and initialize the backlog
# - Component is being loaded, add to backlog
# - Component is loaded, backlog is gone, dispatch discovery
if component not in loaded_components:
hass.data[DOMAIN]["pending"][component] = [device]
loaded_components.add(component)
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
elif component in hass.data[DOMAIN]["pending"]:
hass.data[DOMAIN]["pending"][component].append(device)
else:
async_dispatcher_send(
hass,
f"{DOMAIN}.{component}",
device,
)
return True
def validate_static_config(host, port):
"""Handle a static config."""
url = setup_url_for_address(host, port)
if not url:
_LOGGER.error(
"Unable to get description url for WeMo at: %s",
f"{host}:{port}" if port else host,
)
return None
try:
device = pywemo.discovery.device_from_description(url, None)
except (
requests.exceptions.ConnectionError,
requests.exceptions.Timeout,
) as err:
_LOGGER.error("Unable to access WeMo at %s (%s)", url, err)
return None
return device
def setup_url_for_address(host, port):
"""Determine setup.xml url for given host and port pair."""
if not port:
port = pywemo.ouimeaux_device.probe_wemo(host)
if not port:
return None
return f"http://{host}:{port}/setup.xml"
|
import homeassistant.components.ffmpeg as ffmpeg
from homeassistant.components.ffmpeg import (
DOMAIN,
SERVICE_RESTART,
SERVICE_START,
SERVICE_STOP,
)
from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.core import callback
from homeassistant.setup import async_setup_component, setup_component
from tests.async_mock import MagicMock
from tests.common import assert_setup_component, get_test_home_assistant
@callback
def async_start(hass, entity_id=None):
"""Start a FFmpeg process on entity.
This is a legacy helper method. Do not use it for new tests.
"""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
hass.async_add_job(hass.services.async_call(DOMAIN, SERVICE_START, data))
@callback
def async_stop(hass, entity_id=None):
"""Stop a FFmpeg process on entity.
This is a legacy helper method. Do not use it for new tests.
"""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
hass.async_add_job(hass.services.async_call(DOMAIN, SERVICE_STOP, data))
@callback
def async_restart(hass, entity_id=None):
"""Restart a FFmpeg process on entity.
This is a legacy helper method. Do not use it for new tests.
"""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
hass.async_add_job(hass.services.async_call(DOMAIN, SERVICE_RESTART, data))
class MockFFmpegDev(ffmpeg.FFmpegBase):
"""FFmpeg device mock."""
def __init__(self, hass, initial_state=True, entity_id="test.ffmpeg_device"):
"""Initialize mock."""
super().__init__(initial_state)
self.hass = hass
self.entity_id = entity_id
        self.ffmpeg = MagicMock()
self.called_stop = False
self.called_start = False
self.called_restart = False
self.called_entities = None
async def _async_start_ffmpeg(self, entity_ids):
"""Mock start."""
self.called_start = True
self.called_entities = entity_ids
async def _async_stop_ffmpeg(self, entity_ids):
"""Mock stop."""
self.called_stop = True
self.called_entities = entity_ids
class TestFFmpegSetup:
"""Test class for ffmpeg."""
def setup_method(self):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
def teardown_method(self):
"""Stop everything that was started."""
self.hass.stop()
def test_setup_component(self):
"""Set up ffmpeg component."""
with assert_setup_component(1):
setup_component(self.hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}})
assert self.hass.data[ffmpeg.DATA_FFMPEG].binary == "ffmpeg"
def test_setup_component_test_service(self):
"""Set up ffmpeg component test services."""
with assert_setup_component(1):
setup_component(self.hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}})
assert self.hass.services.has_service(ffmpeg.DOMAIN, "start")
assert self.hass.services.has_service(ffmpeg.DOMAIN, "stop")
assert self.hass.services.has_service(ffmpeg.DOMAIN, "restart")
async def test_setup_component_test_register(hass):
"""Set up ffmpeg component test register."""
with assert_setup_component(1):
await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}})
hass.bus.async_listen_once = MagicMock()
ffmpeg_dev = MockFFmpegDev(hass)
await ffmpeg_dev.async_added_to_hass()
assert hass.bus.async_listen_once.called
assert hass.bus.async_listen_once.call_count == 2
async def test_setup_component_test_register_no_startup(hass):
"""Set up ffmpeg component test register without startup."""
with assert_setup_component(1):
await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}})
hass.bus.async_listen_once = MagicMock()
ffmpeg_dev = MockFFmpegDev(hass, False)
await ffmpeg_dev.async_added_to_hass()
assert hass.bus.async_listen_once.called
assert hass.bus.async_listen_once.call_count == 1
async def test_setup_component_test_service_start(hass):
"""Set up ffmpeg component test service start."""
with assert_setup_component(1):
await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}})
ffmpeg_dev = MockFFmpegDev(hass, False)
await ffmpeg_dev.async_added_to_hass()
async_start(hass)
await hass.async_block_till_done()
assert ffmpeg_dev.called_start
async def test_setup_component_test_service_stop(hass):
"""Set up ffmpeg component test service stop."""
with assert_setup_component(1):
await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}})
ffmpeg_dev = MockFFmpegDev(hass, False)
await ffmpeg_dev.async_added_to_hass()
async_stop(hass)
await hass.async_block_till_done()
assert ffmpeg_dev.called_stop
async def test_setup_component_test_service_restart(hass):
"""Set up ffmpeg component test service restart."""
with assert_setup_component(1):
await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}})
ffmpeg_dev = MockFFmpegDev(hass, False)
await ffmpeg_dev.async_added_to_hass()
async_restart(hass)
await hass.async_block_till_done()
assert ffmpeg_dev.called_stop
assert ffmpeg_dev.called_start
async def test_setup_component_test_service_start_with_entity(hass):
"""Set up ffmpeg component test service start."""
with assert_setup_component(1):
await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}})
ffmpeg_dev = MockFFmpegDev(hass, False)
await ffmpeg_dev.async_added_to_hass()
async_start(hass, "test.ffmpeg_device")
await hass.async_block_till_done()
assert ffmpeg_dev.called_start
assert ffmpeg_dev.called_entities == ["test.ffmpeg_device"]
|
import requests_mock
from homeassistant.components.tado import DOMAIN
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant
from tests.common import MockConfigEntry, load_fixture
async def async_init_integration(
hass: HomeAssistant,
skip_setup: bool = False,
):
"""Set up the tado integration in Home Assistant."""
token_fixture = "tado/token.json"
devices_fixture = "tado/devices.json"
me_fixture = "tado/me.json"
zones_fixture = "tado/zones.json"
# Smart AC with Swing
zone_5_state_fixture = "tado/smartac3.with_swing.json"
zone_5_capabilities_fixture = "tado/zone_with_swing_capabilities.json"
# Water Heater 2
zone_4_state_fixture = "tado/tadov2.water_heater.heating.json"
zone_4_capabilities_fixture = "tado/water_heater_zone_capabilities.json"
# Smart AC
zone_3_state_fixture = "tado/smartac3.cool_mode.json"
zone_3_capabilities_fixture = "tado/zone_capabilities.json"
# Water Heater
zone_2_state_fixture = "tado/tadov2.water_heater.auto_mode.json"
zone_2_capabilities_fixture = "tado/water_heater_zone_capabilities.json"
# Tado V2 with manual heating
zone_1_state_fixture = "tado/tadov2.heating.manual_mode.json"
zone_1_capabilities_fixture = "tado/tadov2.zone_capabilities.json"
with requests_mock.mock() as m:
m.post("https://auth.tado.com/oauth/token", text=load_fixture(token_fixture))
m.get(
"https://my.tado.com/api/v2/me",
text=load_fixture(me_fixture),
)
m.get(
"https://my.tado.com/api/v2/homes/1/devices",
text=load_fixture(devices_fixture),
)
m.get(
"https://my.tado.com/api/v2/homes/1/zones",
text=load_fixture(zones_fixture),
)
m.get(
"https://my.tado.com/api/v2/homes/1/zones/5/capabilities",
text=load_fixture(zone_5_capabilities_fixture),
)
m.get(
"https://my.tado.com/api/v2/homes/1/zones/4/capabilities",
text=load_fixture(zone_4_capabilities_fixture),
)
m.get(
"https://my.tado.com/api/v2/homes/1/zones/3/capabilities",
text=load_fixture(zone_3_capabilities_fixture),
)
m.get(
"https://my.tado.com/api/v2/homes/1/zones/2/capabilities",
text=load_fixture(zone_2_capabilities_fixture),
)
m.get(
"https://my.tado.com/api/v2/homes/1/zones/1/capabilities",
text=load_fixture(zone_1_capabilities_fixture),
)
m.get(
"https://my.tado.com/api/v2/homes/1/zones/5/state",
text=load_fixture(zone_5_state_fixture),
)
m.get(
"https://my.tado.com/api/v2/homes/1/zones/4/state",
text=load_fixture(zone_4_state_fixture),
)
m.get(
"https://my.tado.com/api/v2/homes/1/zones/3/state",
text=load_fixture(zone_3_state_fixture),
)
m.get(
"https://my.tado.com/api/v2/homes/1/zones/2/state",
text=load_fixture(zone_2_state_fixture),
)
m.get(
"https://my.tado.com/api/v2/homes/1/zones/1/state",
text=load_fixture(zone_1_state_fixture),
)
entry = MockConfigEntry(
domain=DOMAIN, data={CONF_USERNAME: "mock", CONF_PASSWORD: "mock"}
)
entry.add_to_hass(hass)
if not skip_setup:
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
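# In a test, `await async_init_integration(hass)` sets up a mocked Tado config
# entry; pass skip_setup=True to only register the entry without setting it up.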
|
import pytest
import zigpy.profiles.zha
import zigpy.zcl.clusters.measurement as measurement
import zigpy.zcl.clusters.security as security
from homeassistant.components.binary_sensor import DOMAIN
from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE
from .common import (
async_enable_traffic,
async_test_rejoin,
find_entity_id,
send_attributes_report,
)
DEVICE_IAS = {
1: {
"device_type": zigpy.profiles.zha.DeviceType.IAS_ZONE,
"in_clusters": [security.IasZone.cluster_id],
"out_clusters": [],
}
}
DEVICE_OCCUPANCY = {
1: {
"device_type": zigpy.profiles.zha.DeviceType.OCCUPANCY_SENSOR,
"in_clusters": [measurement.OccupancySensing.cluster_id],
"out_clusters": [],
}
}
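# The signatures above describe minimal zigpy devices: a single endpoint (1)
# exposing only the input cluster relevant to each binary sensor type. They are
# fed to the zigpy_device_mock fixture in the test below.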
async def async_test_binary_sensor_on_off(hass, cluster, entity_id):
"""Test getting on and off messages for binary sensors."""
# binary sensor on
await send_attributes_report(hass, cluster, {1: 0, 0: 1, 2: 2})
assert hass.states.get(entity_id).state == STATE_ON
# binary sensor off
await send_attributes_report(hass, cluster, {1: 1, 0: 0, 2: 2})
assert hass.states.get(entity_id).state == STATE_OFF
async def async_test_iaszone_on_off(hass, cluster, entity_id):
"""Test getting on and off messages for iaszone binary sensors."""
# binary sensor on
cluster.listener_event("cluster_command", 1, 0, [1])
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == STATE_ON
# binary sensor off
cluster.listener_event("cluster_command", 1, 0, [0])
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == STATE_OFF
@pytest.mark.parametrize(
"device, on_off_test, cluster_name, reporting",
[
(DEVICE_IAS, async_test_iaszone_on_off, "ias_zone", (0,)),
# (DEVICE_OCCUPANCY, async_test_binary_sensor_on_off, "occupancy", (1,)),
],
)
async def test_binary_sensor(
hass,
zigpy_device_mock,
zha_device_joined_restored,
device,
on_off_test,
cluster_name,
reporting,
):
"""Test ZHA binary_sensor platform."""
zigpy_device = zigpy_device_mock(device)
zha_device = await zha_device_joined_restored(zigpy_device)
entity_id = await find_entity_id(DOMAIN, zha_device, hass)
assert entity_id is not None
assert hass.states.get(entity_id).state == STATE_OFF
await async_enable_traffic(hass, [zha_device], enabled=False)
# test that the sensors exist and are in the unavailable state
assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
await async_enable_traffic(hass, [zha_device])
# test that the sensors exist and are in the off state
assert hass.states.get(entity_id).state == STATE_OFF
# test getting messages that trigger and reset the sensors
cluster = getattr(zigpy_device.endpoints[1], cluster_name)
await on_off_test(hass, cluster, entity_id)
# test rejoin
await async_test_rejoin(hass, zigpy_device, [cluster], reporting)
assert hass.states.get(entity_id).state == STATE_OFF
|
import asyncio
from typing import Optional, cast
from aiohttp.web import Request, Response
import voluptuous as vol
from withings_api import WithingsAuth
from withings_api.common import NotifyAppli, enum_or_raise
from homeassistant.components import webhook
from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.components.webhook import (
async_unregister as async_unregister_webhook,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET, CONF_WEBHOOK_ID
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.event import async_call_later
from homeassistant.helpers.typing import ConfigType
from . import config_flow, const
from .common import (
_LOGGER,
WithingsLocalOAuth2Implementation,
async_get_data_manager,
async_remove_data_manager,
get_data_manager_by_webhook_id,
json_message_response,
)
DOMAIN = const.DOMAIN
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.All(
cv.deprecated(const.CONF_PROFILES, invalidation_version="0.114"),
vol.Schema(
{
vol.Required(CONF_CLIENT_ID): vol.All(cv.string, vol.Length(min=1)),
vol.Required(CONF_CLIENT_SECRET): vol.All(
cv.string, vol.Length(min=1)
),
vol.Optional(const.CONF_USE_WEBHOOK, default=False): cv.boolean,
vol.Optional(const.CONF_PROFILES): vol.All(
cv.ensure_list,
vol.Unique(),
vol.Length(min=1),
[vol.All(cv.string, vol.Length(min=1))],
),
}
),
)
},
extra=vol.ALLOW_EXTRA,
)
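# Example YAML accepted by CONFIG_SCHEMA above (placeholder values):
#
# withings:
#   client_id: "my-client-id"
#   client_secret: "my-client-secret"
#   use_webhook: true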
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Withings component."""
conf = config.get(DOMAIN, {})
if not conf:
return True
# Make the config available to the oauth2 config flow.
hass.data[DOMAIN] = {const.CONFIG: conf}
# Setup the oauth2 config flow.
config_flow.WithingsFlowHandler.async_register_implementation(
hass,
WithingsLocalOAuth2Implementation(
hass,
const.DOMAIN,
conf[CONF_CLIENT_ID],
conf[CONF_CLIENT_SECRET],
f"{WithingsAuth.URL}/oauth2_user/authorize2",
f"{WithingsAuth.URL}/oauth2/token",
),
)
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Withings from a config entry."""
config_updates = {}
# Add a unique id if it's an older config entry.
if entry.unique_id != entry.data["token"]["userid"] or not isinstance(
entry.unique_id, str
):
config_updates["unique_id"] = str(entry.data["token"]["userid"])
# Add the webhook configuration.
if CONF_WEBHOOK_ID not in entry.data:
webhook_id = webhook.async_generate_id()
config_updates["data"] = {
**entry.data,
**{
const.CONF_USE_WEBHOOK: hass.data[DOMAIN][const.CONFIG][
const.CONF_USE_WEBHOOK
],
CONF_WEBHOOK_ID: webhook_id,
const.CONF_WEBHOOK_URL: entry.data.get(
const.CONF_WEBHOOK_URL,
webhook.async_generate_url(hass, webhook_id),
),
},
}
if config_updates:
hass.config_entries.async_update_entry(entry, **config_updates)
data_manager = await async_get_data_manager(hass, entry)
_LOGGER.debug("Confirming %s is authenticated to withings", data_manager.profile)
await data_manager.poll_data_update_coordinator.async_refresh()
if not data_manager.poll_data_update_coordinator.last_update_success:
raise ConfigEntryNotReady()
webhook.async_register(
hass,
const.DOMAIN,
"Withings notify",
data_manager.webhook_config.id,
async_webhook_handler,
)
# Perform first webhook subscription check.
if data_manager.webhook_config.enabled:
data_manager.async_start_polling_webhook_subscriptions()
@callback
def async_call_later_callback(now) -> None:
hass.async_create_task(
data_manager.subscription_update_coordinator.async_refresh()
)
# Start subscription check in the background, outside this component's setup.
async_call_later(hass, 1, async_call_later_callback)
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, BINARY_SENSOR_DOMAIN)
)
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, SENSOR_DOMAIN)
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload Withings config entry."""
data_manager = await async_get_data_manager(hass, entry)
data_manager.async_stop_polling_webhook_subscriptions()
async_unregister_webhook(hass, data_manager.webhook_config.id)
await asyncio.gather(
data_manager.async_unsubscribe_webhook(),
hass.config_entries.async_forward_entry_unload(entry, BINARY_SENSOR_DOMAIN),
hass.config_entries.async_forward_entry_unload(entry, SENSOR_DOMAIN),
)
async_remove_data_manager(hass, entry)
return True
async def async_webhook_handler(
hass: HomeAssistant, webhook_id: str, request: Request
) -> Optional[Response]:
"""Handle webhooks calls."""
# Handle http head calls to the path.
# When creating a notify subscription, Withings will check that the endpoint is running by sending a HEAD request.
if request.method.upper() == "HEAD":
return Response()
if request.method.upper() != "POST":
return json_message_response("Invalid method", message_code=2)
# Handle http post calls to the path.
if not request.body_exists:
return json_message_response("No request body", message_code=12)
params = await request.post()
if "appli" not in params:
return json_message_response("Parameter appli not provided", message_code=20)
try:
appli = cast(
NotifyAppli, enum_or_raise(int(params.getone("appli")), NotifyAppli)
)
except ValueError:
return json_message_response("Invalid appli provided", message_code=21)
data_manager = get_data_manager_by_webhook_id(hass, webhook_id)
if not data_manager:
_LOGGER.error(
"Webhook id %s not handled by data manager. This is a bug and should be reported",
webhook_id,
)
return json_message_response("User not found", message_code=1)
# Run this in the background and return immediately.
hass.async_create_task(data_manager.async_webhook_data_updated(appli))
return json_message_response("Success", message_code=0)
|
from unittest import TestCase
from weblate.accounts.captcha import MathCaptcha
class CaptchaTest(TestCase):
def test_object(self):
captcha = MathCaptcha("1 * 2")
self.assertFalse(captcha.validate(1))
self.assertTrue(captcha.validate(2))
restored = MathCaptcha.unserialize(captcha.serialize())
self.assertEqual(captcha.question, restored.question)
self.assertTrue(restored.validate(2))
def test_generate(self):
"""Test generating of captcha for every operator."""
captcha = MathCaptcha()
for operator in MathCaptcha.operators:
captcha.operators = (operator,)
self.assertIn(operator, captcha.generate_question())
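# A minimal sketch of the round trip exercised above (hypothetical flow, values
# are illustrative only):
#
#   captcha = MathCaptcha()                      # generated question, e.g. "3 + 4"
#   stored = captcha.serialize()                 # keep in the session or form
#   MathCaptcha.unserialize(stored).validate(7)  # True when the answer matches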
|
from pylatex import Document, Section
from pylatex.figure import Figure
import os
def test():
doc = Document()
section = Section('Multirow Test')
figure = Figure()
image_filename = os.path.join(os.path.dirname(__file__),
'../examples/kitten.jpg')
figure.add_image(image_filename)
    figure.add_caption('Whoooo an image of a pdf')
section.append(figure)
doc.append(section)
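    # Rendering to PDF requires a working LaTeX toolchain (e.g. pdflatex or
    # latexmk) to be available on the PATH.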
doc.generate_pdf()
|
import numpy as np
import pandas as pd
from scattertext.distancemeasures.EuclideanDistance import EuclideanDistance
from scattertext.semioticsquare.SemioticSquare import SemioticSquareBase
class SemioticSquareFromAxes(SemioticSquareBase):
def __init__(self,
term_doc_matrix,
axes,
x_axis_name,
y_axis_name,
labels=None,
distance_measure=EuclideanDistance):
'''
:param term_doc_matrix: TermDocMatrix
:param axes: pd.DataFrame
:param x_axis_name: str
:param y_axis_name: str
:param labels: dict, optional
:param distance_measure: DistanceMeasureBase, EuclideanDistance by default
'''
self.term_doc_matrix = term_doc_matrix
assert type(axes) == pd.DataFrame
assert set(axes.columns) == set(['x', 'y'])
assert set(axes.index) == set(term_doc_matrix.get_terms())
self.axes = axes
self.y_axis_name = y_axis_name
self.x_axis_name = x_axis_name
if labels is None:
self._labels = {}
else:
self._labels = labels
self._distance_measure = distance_measure
def get_labels(self):
'''
:return: dict
'''
default_labels = {'a': 'not-%s; %s' % (self.x_axis_name, self.y_axis_name),
'not_a': '%s; not-%s' % (self.x_axis_name, self.y_axis_name),
'b': '%s; %s' % (self.x_axis_name, self.y_axis_name),
'not_b': 'not-%s; not-%s' % (self.x_axis_name, self.y_axis_name),
'a_and_b': '%s' % (self.y_axis_name),
'not_a_and_not_b': 'not-%s' % (self.y_axis_name),
'a_and_not_b': 'not-%s' % (self.x_axis_name),
'b_and_not_a': '%s' % (self.x_axis_name)}
return {name + '_label': self._labels.get(name, default_labels[name]) for name in default_labels}
def get_axes(self, **kwargs):
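        '''
        :return: pd.DataFrame, the term coordinates passed to the constructor
        '''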
return self.axes
def get_lexicons(self, num_terms=10):
x_max = self.axes['x'].max()
x_med = self.axes['x'].median()
x_min = self.axes['x'].min()
y_max = self.axes['y'].max()
y_med = self.axes['y'].median()
y_min = self.axes['y'].min()
lexicons = {}
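        # Each semiotic square category is anchored to a point on the bounding
        # box of the term coordinates: corners for a, b, not_a and not_b, and
        # median-centered edge midpoints for the combined categories. The
        # num_terms terms closest to each anchor, under the configured distance
        # measure, form that category's lexicon.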
for label, [x_coord, y_coord] in [
['a', [x_min, y_max]],
['not_a', [x_max, y_min]],
['b', [x_max, y_max]],
['not_b', [x_min, y_min]],
['a_and_b', [x_med, y_max]],
['not_a_and_not_b', [x_med, y_min]],
['b_and_not_a', [x_max, y_med]],
['a_and_not_b', [x_min, y_med]],
]:
        # With the default EuclideanDistance measure this is equivalent to:
        # np.linalg.norm(np.array([self.axes['x'] - x_coord, self.axes['y'] - y_coord]), 2, axis=0)
scores = self._distance_measure.distances(x_coord, y_coord, self.axes['x'], self.axes['y'])
lexicons[label] = list(self.axes.index[np.argsort(scores)])[:num_terms]
return lexicons
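# A minimal usage sketch (hypothetical variable names; assumes a fitted
# TermDocMatrix and per-term x/y coordinates indexed by term):
#
#   axes = pd.DataFrame({'x': x_coords, 'y': y_coords}, index=term_doc_matrix.get_terms())
#   square = SemioticSquareFromAxes(term_doc_matrix, axes, 'positive', 'subjective')
#   square.get_labels()      # category labels derived from the axis names
#   square.get_lexicons(10)  # ten closest terms per category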
|
import logging
import secrets
from aiohttp import hdrs
from aiohttp.web import middleware
import jwt
from homeassistant.core import callback
from homeassistant.util import dt as dt_util
from .const import KEY_AUTHENTICATED, KEY_HASS_USER
# mypy: allow-untyped-defs, no-check-untyped-defs
_LOGGER = logging.getLogger(__name__)
DATA_API_PASSWORD = "api_password"
DATA_SIGN_SECRET = "http.auth.sign_secret"
SIGN_QUERY_PARAM = "authSig"
@callback
def async_sign_path(hass, refresh_token_id, path, expiration):
"""Sign a path for temporary access without auth header."""
secret = hass.data.get(DATA_SIGN_SECRET)
if secret is None:
secret = hass.data[DATA_SIGN_SECRET] = secrets.token_hex()
now = dt_util.utcnow()
encoded = jwt.encode(
{"iss": refresh_token_id, "path": path, "iat": now, "exp": now + expiration},
secret,
algorithm="HS256",
)
return f"{path}?{SIGN_QUERY_PARAM}=" f"{encoded.decode()}"
@callback
def setup_auth(hass, app):
"""Create auth middleware for the app."""
async def async_validate_auth_header(request):
"""
Test authorization header against access token.
Basic auth_type is legacy code, should be removed with api_password.
"""
try:
auth_type, auth_val = request.headers.get(hdrs.AUTHORIZATION).split(" ", 1)
except ValueError:
# If no space in authorization header
return False
if auth_type != "Bearer":
return False
refresh_token = await hass.auth.async_validate_access_token(auth_val)
if refresh_token is None:
return False
request[KEY_HASS_USER] = refresh_token.user
return True
async def async_validate_signed_request(request):
"""Validate a signed request."""
secret = hass.data.get(DATA_SIGN_SECRET)
if secret is None:
return False
signature = request.query.get(SIGN_QUERY_PARAM)
if signature is None:
return False
try:
claims = jwt.decode(
signature, secret, algorithms=["HS256"], options={"verify_iss": False}
)
except jwt.InvalidTokenError:
return False
if claims["path"] != request.path:
return False
refresh_token = await hass.auth.async_get_refresh_token(claims["iss"])
if refresh_token is None:
return False
request[KEY_HASS_USER] = refresh_token.user
return True
@middleware
async def auth_middleware(request, handler):
"""Authenticate as middleware."""
authenticated = False
if hdrs.AUTHORIZATION in request.headers and await async_validate_auth_header(
request
):
authenticated = True
auth_type = "bearer token"
# We first start with a string check to avoid parsing query params
# for every request.
elif (
request.method == "GET"
and SIGN_QUERY_PARAM in request.query
and await async_validate_signed_request(request)
):
authenticated = True
auth_type = "signed request"
if authenticated:
_LOGGER.debug(
"Authenticated %s for %s using %s",
request.remote,
request.path,
auth_type,
)
request[KEY_AUTHENTICATED] = authenticated
return await handler(request)
app.middlewares.append(auth_middleware)
|