text
stringlengths 213
32.3k
|
---|
import os
import pytest
import homeassistant.components.notify as notify
from homeassistant.components.notify import ATTR_TITLE_DEFAULT
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.async_mock import call, mock_open, patch
from tests.common import assert_setup_component
async def test_bad_config(hass):
    """Verify the file notify platform fails setup on bad/missing config."""
    # Config lacks the required "filename" option, so zero platforms load.
    broken_config = {notify.DOMAIN: {"name": "test", "platform": "file"}}
    with assert_setup_component(0) as loaded_config:
        assert await async_setup_component(hass, notify.DOMAIN, broken_config)
    assert not loaded_config[notify.DOMAIN]
@pytest.mark.parametrize(
    "timestamp",
    [
        False,
        True,
    ],
)
async def test_notify_file(hass, timestamp):
    """Test the notify file output."""
    filename = "mock_file"
    message = "one, two, testing, testing"
    with assert_setup_component(1) as handle_config:
        assert await async_setup_component(
            hass,
            notify.DOMAIN,
            {
                "notify": {
                    "name": "test",
                    "platform": "file",
                    "filename": filename,
                    "timestamp": timestamp,
                }
            },
        )
    assert handle_config[notify.DOMAIN]
    m_open = mock_open()
    # Freeze utcnow to a single value so the timestamp the platform writes
    # matches the one used to build the expected strings below.
    with patch("homeassistant.components.file.notify.open", m_open, create=True), patch(
        "homeassistant.components.file.notify.os.stat"
    ) as mock_st, patch("homeassistant.util.dt.utcnow", return_value=dt_util.utcnow()):
        # st_size == 0 means an empty file, which triggers the header write.
        mock_st.return_value.st_size = 0
        title = (
            f"{ATTR_TITLE_DEFAULT} notifications "
            f"(Log started: {dt_util.utcnow().isoformat()})\n{'-' * 80}\n"
        )
        await hass.services.async_call(
            "notify", "test", {"message": message}, blocking=True
        )
        full_filename = os.path.join(hass.config.path(), filename)
        assert m_open.call_count == 1
        assert m_open.call_args == call(full_filename, "a")
        # Exactly two writes: the header (title) line plus the message line.
        assert m_open.return_value.write.call_count == 2
        if not timestamp:
            assert m_open.return_value.write.call_args_list == [
                call(title),
                call(f"{message}\n"),
            ]
        else:
            assert m_open.return_value.write.call_args_list == [
                call(title),
                call(f"{dt_util.utcnow().isoformat()} {message}\n"),
            ]
|
from homeassistant.components import sensor
from homeassistant.setup import async_setup_component
# Monitored conditions exercised by the smoke test below.
CONDITIONS = ["stationname", "temperature"]
# Minimal buienradar sensor platform configuration (coordinates of Volkel, NL).
BASE_CONFIG = {
    "sensor": [
        {
            "platform": "buienradar",
            "name": "volkel",
            "latitude": 51.65,
            "longitude": 5.7,
            "monitored_conditions": CONDITIONS,
        }
    ]
}
async def test_smoke_test_setup_component(hass):
    """Smoke test: platform sets up with the default config."""
    assert await async_setup_component(hass, sensor.DOMAIN, BASE_CONFIG)
    await hass.async_block_till_done()
    # Every monitored condition gets an entity; no data yet -> "unknown".
    states = [hass.states.get(f"sensor.volkel_{cond}") for cond in CONDITIONS]
    for state in states:
        assert state.state == "unknown"
|
from typing import Any
import pywink
from homeassistant.components.scene import Scene
from . import DOMAIN, WinkDevice
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the Wink platform."""
    known_ids = hass.data[DOMAIN]["unique_ids"]
    for scene in pywink.get_scenes():
        # Skip scenes already registered under their object_id+name key.
        scene_id = scene.object_id() + scene.name()
        if scene_id in known_ids:
            continue
        add_entities([WinkScene(scene, hass)])
class WinkScene(WinkDevice, Scene):
    """Representation of a Wink shortcut/scene."""

    def __init__(self, wink, hass):
        """Initialize the Wink device."""
        super().__init__(wink, hass)
        # NOTE(review): this same list is appended again in
        # async_added_to_hass below, so the entity appears twice in
        # hass.data[DOMAIN]["entities"]["scene"] — confirm this is intended.
        hass.data[DOMAIN]["entities"]["scene"].append(self)

    async def async_added_to_hass(self):
        """Call when entity is added to hass."""
        self.hass.data[DOMAIN]["entities"]["scene"].append(self)

    def activate(self, **kwargs: Any) -> None:
        """Activate the scene."""
        # Delegate to the underlying pywink scene object.
        self.wink.activate()
|
from simplipy.errors import SimplipyError
from simplipy.lock import LockStates
from simplipy.websocket import EVENT_LOCK_LOCKED, EVENT_LOCK_UNLOCKED
from homeassistant.components.lock import LockEntity
from homeassistant.core import callback
from . import SimpliSafeEntity
from .const import DATA_CLIENT, DOMAIN, LOGGER
ATTR_LOCK_LOW_BATTERY = "lock_low_battery"
ATTR_JAMMED = "jammed"
ATTR_PIN_PAD_LOW_BATTERY = "pin_pad_low_battery"
async def async_setup_entry(hass, entry, async_add_entities):
    """Set up SimpliSafe locks based on a config entry."""
    simplisafe = hass.data[DOMAIN][DATA_CLIENT][entry.entry_id]
    # Collect one lock entity per lock across every known system.
    entities = []
    for system in simplisafe.systems.values():
        for lock in system.locks.values():
            entities.append(SimpliSafeLock(simplisafe, system, lock))
    async_add_entities(entities)
class SimpliSafeLock(SimpliSafeEntity, LockEntity):
    """Define a SimpliSafe lock."""

    def __init__(self, simplisafe, system, lock):
        """Initialize."""
        super().__init__(simplisafe, system, lock.name, serial=lock.serial)
        self._lock = lock
        # Unknown until the first REST API or websocket update arrives.
        self._is_locked = None

        # Subscribe to lock-related websocket events so
        # async_update_from_websocket_event gets called for them.
        for event_type in (EVENT_LOCK_LOCKED, EVENT_LOCK_UNLOCKED):
            self.websocket_events_to_listen_for.append(event_type)

    @property
    def is_locked(self):
        """Return true if the lock is locked."""
        return self._is_locked

    async def async_lock(self, **kwargs):
        """Lock the lock."""
        try:
            await self._lock.lock()
        except SimplipyError as err:
            # Best-effort: log the failure and leave the state unchanged.
            LOGGER.error('Error while locking "%s": %s', self._lock.name, err)
            return

    async def async_unlock(self, **kwargs):
        """Unlock the lock."""
        try:
            await self._lock.unlock()
        except SimplipyError as err:
            # Best-effort: log the failure and leave the state unchanged.
            LOGGER.error('Error while unlocking "%s": %s', self._lock.name, err)
            return

    @callback
    def async_update_from_rest_api(self):
        """Update the entity with the provided REST API data."""
        self._attrs.update(
            {
                ATTR_LOCK_LOW_BATTERY: self._lock.lock_low_battery,
                ATTR_JAMMED: self._lock.state == LockStates.jammed,
                ATTR_PIN_PAD_LOW_BATTERY: self._lock.pin_pad_low_battery,
            }
        )
        self._is_locked = self._lock.state == LockStates.locked

    @callback
    def async_update_from_websocket_event(self, event):
        """Update the entity with the provided websocket event data."""
        # Only the two lock events are subscribed to above, so anything
        # other than "locked" is the "unlocked" event.
        if event.event_type == EVENT_LOCK_LOCKED:
            self._is_locked = True
        else:
            self._is_locked = False
|
class EntryCacheMixin(object):
"""
Mixin implementing cache on ``get_object`` method.
"""
_cached_object = None
def get_object(self, queryset=None):
"""
Implement cache on ``get_object`` method to
avoid repetitive calls, in POST.
"""
if self._cached_object is None:
self._cached_object = super(EntryCacheMixin, self).get_object(
queryset)
return self._cached_object
|
import logging
from pyskyqhub.skyq_hub import SkyQHub
import voluptuous as vol
from homeassistant.components.device_tracker import (
DOMAIN,
PLATFORM_SCHEMA,
DeviceScanner,
)
from homeassistant.const import CONF_HOST
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({vol.Optional(CONF_HOST): cv.string})
async def async_get_scanner(hass, config):
    """Return a Sky Hub scanner if successful."""
    # Default to the router's standard LAN address when no host configured.
    host = config[DOMAIN].get(CONF_HOST, "192.168.1.254")
    session = async_get_clientsession(hass)
    hub = SkyQHub(session, host)

    _LOGGER.debug("Initialising Sky Hub")
    await hub.async_connect()
    if not hub.success_init:
        return None
    return SkyHubDeviceScanner(hub)
class SkyHubDeviceScanner(DeviceScanner):
    """Scanner that queries a Sky Hub router for connected devices."""

    def __init__(self, hub):
        """Initialise the scanner."""
        self._hub = hub
        self.last_results = {}

    async def async_scan_devices(self):
        """Scan for new devices and return a list with found device IDs."""
        await self._async_update_info()
        macs = []
        for entry in self.last_results:
            macs.append(entry.mac)
        return macs

    async def async_get_device_name(self, device):
        """Return the name of the given device."""
        for entry in self.last_results:
            if entry.mac == device:
                return entry.name
        return None

    async def async_get_extra_attributes(self, device):
        """Get extra attributes of a device."""
        match = next(
            (entry for entry in self.last_results if entry.mac == device), None
        )
        return {} if match is None else match.asdict()

    async def _async_update_info(self):
        """Ensure the information from the Sky Hub is up to date."""
        _LOGGER.debug("Scanning")
        data = await self._hub.async_get_skyhub_data()
        # Keep the previous results when the hub returns nothing.
        if data:
            self.last_results = data
|
import os.path as op
import re
import shutil
import zipfile
import numpy as np
import pytest
from mne.io.constants import (FIFF, FWD, _coord_frame_named, _ch_kind_named,
_ch_unit_named, _ch_unit_mul_named,
_ch_coil_type_named, _dig_kind_named,
_dig_cardinal_named)
from mne.forward._make_forward import _read_coil_defs
from mne.utils import _fetch_file, requires_good_network
# https://github.com/mne-tools/fiff-constants/commits/master
# Pinned fiff-constants commit that the MNE constants are validated against.
commit = '198d943d0ff92ecdfb947b84af6289a0e79ad060'

# These are oddities that we won't address:
iod_dups = (355, 359)  # these are in both MEGIN and MNE files
tag_dups = (3501, 3507)  # in both MEGIN and MNE files

# Attribute names to skip when scanning dir(FIFF) (dict API / Py2 leftovers).
_dir_ignore_names = ('clear', 'copy', 'fromkeys', 'get', 'items', 'keys',
                     'pop', 'popitem', 'setdefault', 'update', 'values',
                     'has_key', 'iteritems', 'iterkeys', 'itervalues',  # Py2
                     'viewitems', 'viewkeys', 'viewvalues',  # Py2
                     )
_tag_ignore_names = (
)  # for fiff-constants pending updates
_ignore_incomplete_enums = (  # XXX eventually we could complete these
    'bem_surf_id', 'cardinal_point_cardiac', 'cond_model', 'coord',
    'dacq_system', 'diffusion_param', 'gantry_type', 'map_surf',
    'mne_lin_proj', 'mne_ori', 'mri_format', 'mri_pixel', 'proj_by',
    'tags', 'type', 'iod', 'volume_type', 'vol_type',
)
# not in coil_def.dat but in DictionaryTypes:enum(coil)
_missing_coil_def = (
    0,     # The location info contains no data
    1,     # EEG electrode position in r0
    3,     # Old 24 channel system in HUT
    4,     # The axial devices in the HUCS MCG system
    5,     # Bipolar EEG electrode position
    6,     # CSD-transformed EEG electrodes
    200,   # Time-varying dipole definition
    300,   # fNIRS oxyhemoglobin
    301,   # fNIRS deoxyhemoglobin
    302,   # fNIRS continuous wave
    303,   # fNIRS optical density
    304,   # fNIRS frequency domain AC amplitude
    305,   # fNIRS frequency domain phase
    1000,  # For testing the MCG software
    2001,  # Generic axial gradiometer
    3011,  # VV prototype wirewound planar sensor
    3014,  # Vectorview SQ20950N planar gradiometer
    3021,  # VV prototype wirewound magnetometer
)
# explicit aliases in constants.py
_aliases = dict(
    FIFFV_COIL_MAGNES_R_MAG='FIFFV_COIL_MAGNES_REF_MAG',
    FIFFV_COIL_MAGNES_R_GRAD='FIFFV_COIL_MAGNES_REF_GRAD',
    FIFFV_COIL_MAGNES_R_GRAD_OFF='FIFFV_COIL_MAGNES_OFFDIAG_REF_GRAD',
    FIFFV_COIL_FNIRS_RAW='FIFFV_COIL_FNIRS_CW_AMPLITUDE',
    FIFFV_MNE_COORD_CTF_HEAD='FIFFV_MNE_COORD_4D_HEAD',
    FIFFV_MNE_COORD_KIT_HEAD='FIFFV_MNE_COORD_4D_HEAD',
    FIFFV_MNE_COORD_DIGITIZER='FIFFV_COORD_ISOTRAK',
    FIFFV_MNE_COORD_SURFACE_RAS='FIFFV_COORD_MRI',
    FIFFV_MNE_SENSOR_COV='FIFFV_MNE_NOISE_COV',
    FIFFV_POINT_EEG='FIFFV_POINT_ECG',
    FIFF_DESCRIPTION='FIFF_COMMENT',
    FIFF_REF_PATH='FIFF_MRI_SOURCE_PATH',
)
@requires_good_network
def test_constants(tmpdir):
    """Validate MNE's FIFF constants against the fiff-constants repository."""
    tmpdir = str(tmpdir)  # old pytest...
    dest = op.join(tmpdir, 'fiff.zip')
    # Download the pinned fiff-constants commit and extract the dictionaries.
    _fetch_file('https://codeload.github.com/mne-tools/fiff-constants/zip/' +
                commit, dest)
    names = list()
    with zipfile.ZipFile(dest, 'r') as ff:
        for name in ff.namelist():
            if 'Dictionary' in name:
                ff.extract(name, tmpdir)
                names.append(op.basename(name))
                shutil.move(op.join(tmpdir, name), op.join(tmpdir, names[-1]))
    names = sorted(names)
    assert names == ['DictionaryIOD.txt', 'DictionaryIOD_MNE.txt',
                     'DictionaryStructures.txt',
                     'DictionaryTags.txt', 'DictionaryTags_MNE.txt',
                     'DictionaryTypes.txt', 'DictionaryTypes_MNE.txt']
    # IOD (MEGIN and MNE)
    # fif holds everything parsed out of the dictionary files; con holds what
    # MNE's FIFF namespace defines, to compare against fif later.
    fif = dict(iod=dict(), tags=dict(), types=dict(), defines=dict())
    con = dict(iod=dict(), tags=dict(), types=dict(), defines=dict())
    fiff_version = None
    for name in ['DictionaryIOD.txt', 'DictionaryIOD_MNE.txt']:
        with open(op.join(tmpdir, name), 'rb') as fid:
            for line in fid:
                line = line.decode('latin1').strip()
                if line.startswith('# Packing revision'):
                    # The packing revision must appear exactly once.
                    assert fiff_version is None
                    fiff_version = line.split()[-1]
                if (line.startswith('#') or line.startswith('alias') or
                        len(line) == 0):
                    continue
                # Split off the quoted description, then the id fields.
                line = line.split('"')
                assert len(line) in (1, 2, 3)
                desc = '' if len(line) == 1 else line[1]
                line = line[0].split()
                assert len(line) in (2, 3)
                if len(line) == 2:
                    kind, id_ = line
                else:
                    kind, id_, tagged = line
                    assert tagged in ('tagged',)
                id_ = int(id_)
                if id_ not in iod_dups:
                    assert id_ not in fif['iod']
                fif['iod'][id_] = [kind, desc]
    # Tags (MEGIN)
    with open(op.join(tmpdir, 'DictionaryTags.txt'), 'rb') as fid:
        for line in fid:
            line = line.decode('ISO-8859-1').strip()
            if (line.startswith('#') or line.startswith('alias') or
                    line.startswith(':') or len(line) == 0):
                continue
            line = line.split('"')
            assert len(line) in (1, 2, 3), line
            desc = '' if len(line) == 1 else line[1]
            line = line[0].split()
            assert len(line) == 4, line
            kind, id_, dtype, unit = line
            id_ = int(id_)
            val = [kind, dtype, unit]
            assert id_ not in fif['tags'], (fif['tags'].get(id_), val)
            fif['tags'][id_] = val
    # Tags (MNE)
    with open(op.join(tmpdir, 'DictionaryTags_MNE.txt'), 'rb') as fid:
        for li, line in enumerate(fid):
            line = line.decode('ISO-8859-1').strip()
            # ignore continuation lines (*)
            if (line.startswith('#') or line.startswith('alias') or
                    line.startswith(':') or line.startswith('*') or
                    len(line) == 0):
                continue
            # weird syntax around line 80:
            if line in ('/*', '"'):
                continue
            line = line.split('"')
            assert len(line) in (1, 2, 3), line
            if len(line) == 3 and len(line[2]) > 0:
                # trailing text after the description must be a C comment
                l2 = line[2].strip()
                assert l2.startswith('/*') and l2.endswith('*/'), l2
            desc = '' if len(line) == 1 else line[1]
            line = line[0].split()
            assert len(line) == 3, (li + 1, line)
            kind, id_, dtype = line
            unit = '-'  # the MNE tag file carries no unit column
            id_ = int(id_)
            val = [kind, dtype, unit]
            if id_ not in tag_dups:
                assert id_ not in fif['tags'], (fif['tags'].get(id_), val)
            fif['tags'][id_] = val
    # Types and enums
    in_ = None
    re_prim = re.compile(r'^primitive\((.*)\)\s*(\S*)\s*"(.*)"$')
    re_enum = re.compile(r'^enum\((\S*)\)\s*".*"$')
    re_enum_entry = re.compile(r'\s*(\S*)\s*(\S*)\s*"(.*)"$')
    re_defi = re.compile(r'#define\s*(\S*)\s*(\S*)\s*"(.*)"$')
    used_enums = list()
    for extra in ('', '_MNE'):
        with open(op.join(tmpdir, 'DictionaryTypes%s.txt'
                          % (extra,)), 'rb') as fid:
            for li, line in enumerate(fid):
                line = line.decode('ISO-8859-1').strip()
                if in_ is None:
                    # Top-level: primitive, enum header, or #define.
                    p = re_prim.match(line)
                    e = re_enum.match(line)
                    d = re_defi.match(line)
                    if p is not None:
                        t, s, d = p.groups()
                        s = int(s)
                        assert s not in fif['types']
                        fif['types'][s] = [t, d]
                    elif e is not None:
                        # entering an enum
                        this_enum = e.group(1)
                        if this_enum not in fif:
                            used_enums.append(this_enum)
                            fif[this_enum] = dict()
                            con[this_enum] = dict()
                        in_ = fif[this_enum]
                    elif d is not None:
                        t, s, d = d.groups()
                        s = int(s)
                        fif['defines'][t] = [s, d]
                    else:
                        assert not line.startswith('enum(')
                else:  # in an enum
                    if line == '{':
                        continue
                    elif line == '}':
                        in_ = None
                        continue
                    t, s, d = re_enum_entry.match(line).groups()
                    s = int(s)
                    if t != 'ecg' and s != 3:  # ecg defined the same way
                        assert s not in in_
                    in_[s] = [t, d]
    #
    # Assertions
    #
    # Version
    mne_version = '%d.%d' % (FIFF.FIFFC_MAJOR_VERSION,
                             FIFF.FIFFC_MINOR_VERSION)
    assert fiff_version == mne_version
    unknowns = list()
    # Assert that all our constants are in the FIF def
    assert 'FIFFV_SSS_JOB_NOTHING' in dir(FIFF)
    for name in sorted(dir(FIFF)):
        if name.startswith('_') or name in _dir_ignore_names:
            continue
        check = None
        val = getattr(FIFF, name)
        if name in fif['defines']:
            assert fif['defines'][name][0] == val
        elif name.startswith('FIFFC_'):
            # Checked above
            assert name in ('FIFFC_MAJOR_VERSION', 'FIFFC_MINOR_VERSION',
                            'FIFFC_VERSION')
        elif name.startswith('FIFFB_'):
            check = 'iod'
        elif name.startswith('FIFFT_'):
            check = 'types'
        elif name.startswith('FIFFV_'):
            # Map the constant's name onto the dictionary enum it belongs to.
            if name.startswith('FIFFV_MNE_') and name.endswith('_ORI'):
                check = 'mne_ori'
            elif name.startswith('FIFFV_MNE_') and name.endswith('_COV'):
                check = 'covariance_type'
            elif name.startswith('FIFFV_MNE_COORD'):
                check = 'coord'  # weird wrapper
            elif name.endswith('_CH') or '_QUAT_' in name or name in \
                    ('FIFFV_DIPOLE_WAVE', 'FIFFV_GOODNESS_FIT',
                     'FIFFV_HPI_ERR', 'FIFFV_HPI_G', 'FIFFV_HPI_MOV'):
                check = 'ch_type'
            elif name.startswith('FIFFV_SUBJ_'):
                check = name.split('_')[2].lower()
            elif name in ('FIFFV_POINT_LPA', 'FIFFV_POINT_NASION',
                          'FIFFV_POINT_RPA', 'FIFFV_POINT_INION'):
                check = 'cardinal_point'
            else:
                for check in used_enums:
                    if name.startswith('FIFFV_' + check.upper()):
                        break
                else:
                    if name not in _tag_ignore_names:
                        raise RuntimeError('Could not find %s' % (name,))
            assert check in used_enums, name
            if 'SSS' in check:
                raise RuntimeError
        elif name.startswith('FIFF_UNIT'):  # units and multipliers
            check = name.split('_')[1].lower()
        elif name.startswith('FIFF_'):
            check = 'tags'
        else:
            unknowns.append((name, val))
        if check is not None and name not in _tag_ignore_names:
            assert val in fif[check], '%s: %s, %s' % (check, val, name)
            if val in con[check]:
                # Same value registered twice: only allowed for declared
                # aliases.
                msg = "%s='%s' ?" % (name, con[check][val])
                assert _aliases.get(name) == con[check][val], msg
            else:
                con[check][val] = name
    unknowns = '\n\t'.join('%s (%s)' % u for u in unknowns)
    assert len(unknowns) == 0, 'Unknown types\n\t%s' % unknowns
    # Assert that all the FIF defs are in our constants
    assert set(fif.keys()) == set(con.keys())
    for key in sorted(set(fif.keys()) - {'defines'}):
        this_fif, this_con = fif[key], con[key]
        assert len(set(this_fif.keys())) == len(this_fif)
        assert len(set(this_con.keys())) == len(this_con)
        missing_from_con = sorted(set(this_con.keys()) - set(this_fif.keys()))
        assert missing_from_con == [], key
        if key not in _ignore_incomplete_enums:
            missing_from_fif = sorted(set(this_fif.keys()) -
                                      set(this_con.keys()))
            assert missing_from_fif == [], key
    # Assert that `coil_def.dat` has accurate descriptions of all enum(coil)
    coil_def = _read_coil_defs()
    coil_desc = np.array([c['desc'] for c in coil_def])
    coil_def = np.array([(c['coil_type'], c['accuracy'])
                         for c in coil_def], int)
    # Only "accurate"-accuracy entries are required to be complete.
    mask = (coil_def[:, 1] == FWD.COIL_ACCURACY_ACCURATE)
    coil_def = coil_def[mask, 0]
    coil_desc = coil_desc[mask]
    bad_list = []
    for key in fif['coil']:
        if key not in _missing_coil_def and key not in coil_def:
            bad_list.append((' %s,' % key).ljust(10) +
                            ' # ' + fif['coil'][key][1])
    assert len(bad_list) == 0, \
        '\nIn fiff-constants, missing from coil_def:\n' + '\n'.join(bad_list)
    # Assert that enum(coil) has all `coil_def.dat` entries
    for key, desc in zip(coil_def, coil_desc):
        if key not in fif['coil']:
            bad_list.append((' %s,' % key).ljust(10) + ' # ' + desc)
    assert len(bad_list) == 0, \
        'In coil_def, missing from fiff-constants:\n' + '\n'.join(bad_list)
@pytest.mark.parametrize('dict_, match, extras', [
    ({**_dig_kind_named, **_dig_cardinal_named}, 'FIFFV_POINT_', ()),
    (_ch_kind_named, '^FIFFV_.*_CH$',
     (FIFF.FIFFV_DIPOLE_WAVE, FIFF.FIFFV_GOODNESS_FIT)),
    (_coord_frame_named, 'FIFFV_COORD_', ()),
    (_ch_unit_named, 'FIFF_UNIT_', ()),
    (_ch_unit_mul_named, 'FIFF_UNITM_', ()),
    (_ch_coil_type_named, 'FIFF_COIL_'.replace('FIFF_', 'FIFFV_'), ()),
])
def test_dict_completion(dict_, match, extras):
    """Test readable dict completions."""
    # Every FIFF constant whose name matches the pattern, plus the declared
    # extras, must be exactly the keys of the readable-name dict.
    pattern = re.compile(match)
    found = {FIFF[key] for key in FIFF if pattern.search(key) is not None}
    found.update(extras)
    assert found == set(dict_)
|
import inspect
import os
import sys
from os.path import dirname, relpath
import alagitpull
import tmuxp
# Get the project root dir, which is the parent dir of this
cwd = os.getcwd()
project_root = os.path.dirname(cwd)
sys.path.insert(0, project_root)
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "_ext")))

# package data: execute __about__.py and collect its metadata into `about`.
about = {}
with open("../tmuxp/__about__.py") as fp:
    exec(fp.read(), about)

extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.intersphinx',
    'sphinx.ext.todo',
    'sphinx.ext.napoleon',
    'sphinx.ext.linkcode',
    'aafig',
    'alagitpull',
    'sphinx_issues',
]

releases_unstable_prehistory = True
releases_document_name = ["history"]
releases_issue_uri = "https://github.com/tmux-python/tmuxp/issues/%s"
releases_release_uri = "https://github.com/tmux-python/tmuxp/tree/v%s"

issues_github_path = about['__github__'].replace('https://github.com/', '')

templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'

project = about['__title__']
copyright = about['__copyright__']

# The short X.Y version.
# BUG FIX: the slice must apply to the split component list, not the
# re-joined string — '.'.join("1.13.2".split('.'))[:2] yielded "1.",
# whereas '.'.join("1.13.2".split('.')[:2]) yields "1.13".
version = '%s' % ('.'.join(about['__version__'].split('.')[:2]))
# The full version, including alpha/beta/rc tags.
release = '%s' % (about['__version__'])

exclude_patterns = ['_build']
pygments_style = 'sphinx'

html_theme_path = [alagitpull.get_path()]
html_favicon = '_static/favicon.ico'
html_theme = 'alagitpull'
html_static_path = ['_static']
html_sidebars = {
    '**': [
        'about.html',
        'navigation.html',
        'relations.html',
        'more.html',
        'book.html',
        'searchbox.html',
    ]
}
html_theme_options = {
    'logo': 'img/tmuxp.svg',
    'github_user': 'tmux-python',
    'github_repo': 'tmuxp',
    'github_type': 'star',
    'github_banner': True,
    'projects': alagitpull.projects,
    'project_name': about['__title__'],
}
alagitpull_internal_hosts = ['tmuxp.git-pull.com', '0.0.0.0']
alagitpull_external_hosts_new_window = True

htmlhelp_basename = '%sdoc' % about['__title__']

latex_documents = [
    (
        'index',
        '{0}.tex'.format(about['__package_name__']),
        '{0} Documentation'.format(about['__title__']),
        about['__author__'],
        'manual',
    )
]

man_pages = [
    (
        'index',
        about['__package_name__'],
        '{0} Documentation'.format(about['__title__']),
        about['__author__'],
        1,
    )
]

texinfo_documents = [
    (
        'index',
        '{0}'.format(about['__package_name__']),
        '{0} Documentation'.format(about['__title__']),
        about['__author__'],
        about['__package_name__'],
        about['__description__'],
        'Miscellaneous',
    )
]

intersphinx_mapping = {
    'python': ('https://docs.python.org/', None),
    'libtmux': ('https://libtmux.readthedocs.io/en/latest', None),
    'click': ('http://click.pocoo.org/5', None),
}

# aafig format, try to get working with pdf
aafig_format = dict(latex='pdf', html='gif')
aafig_default_options = dict(scale=0.75, aspect=0.5, proportional=True)
def linkcode_resolve(domain, info):  # NOQA: C901
    """
    Determine the URL corresponding to Python object.

    Notes
    -----
    From https://github.com/numpy/numpy/blob/v1.15.1/doc/source/conf.py, 7c49cfa
    on Jul 31. License BSD-3. https://github.com/numpy/numpy/blob/v1.15.1/LICENSE.txt
    """
    if domain != 'py':
        return None

    submod = sys.modules.get(info['module'])
    if submod is None:
        return None

    # Walk the dotted path down from the module to the target object.
    obj = submod
    for part in info['fullname'].split('.'):
        try:
            obj = getattr(obj, part)
        except Exception:
            return None

    # strip decorators, which would resolve to the source of the decorator
    # possibly an upstream bug in getsourcefile, bpo-1764286
    try:
        unwrap = inspect.unwrap
    except AttributeError:
        pass
    else:
        obj = unwrap(obj)

    try:
        fn = inspect.getsourcefile(obj)
    except Exception:
        fn = None
    if not fn:
        return None

    try:
        source, lineno = inspect.getsourcelines(obj)
    except Exception:
        lineno = None
    linespec = "#L%d-L%d" % (lineno, lineno + len(source) - 1) if lineno else ""

    fn = relpath(fn, start=dirname(tmuxp.__file__))

    if 'dev' in about['__version__']:
        return "%s/blob/master/%s/%s%s" % (
            about['__github__'],
            about['__package_name__'],
            fn,
            linespec,
        )
    return "%s/blob/v%s/%s/%s%s" % (
        about['__github__'],
        about['__version__'],
        about['__package_name__'],
        fn,
        linespec,
    )
|
import os
import unittest
from perfkitbenchmarker import sample
from perfkitbenchmarker import test_util
from perfkitbenchmarker.linux_benchmarks import unixbench_benchmark
class UnixBenchBenchmarkTestCase(unittest.TestCase, test_util.SamplesTestMixin):
    """Test parsing of a canned UnixBench result file into samples."""

    # Show full diffs on assertion failure (the expected list is large).
    maxDiff = None

    def setUp(self):
        """Load the sample UnixBench output fixture."""
        path = os.path.join(os.path.dirname(__file__), '..', 'data',
                            'unix-bench-sample-result.txt')
        with open(path) as fp:
            self.contents = fp.read()

    def tearDown(self):
        """No per-test cleanup needed."""
        pass

    def testParseUnixBench(self):
        """ParseResults must yield raw metrics, per-test scores, and the
        overall index for both the 1-copy and 8-copy runs."""
        result = unixbench_benchmark.ParseResults(self.contents)
        expected_result = [
            # 1-copy run: raw throughput metrics.
            ['Dhrystone 2 using register variables', 34872897.7, 'lps',
             {'num_parallel_copies': 1, 'samples': 7, 'time': '10.0s'}],
            ['Double-Precision Whetstone', 4022.0, 'MWIPS',
             {'num_parallel_copies': 1, 'samples': 7, 'time': '9.9s'}],
            ['Execl Throughput', 4735.8, 'lps',
             {'num_parallel_copies': 1, 'samples': 2, 'time': '29.8s'}],
            ['File Copy 1024 bufsize 2000 maxblocks', 1294367.0, 'KBps',
             {'num_parallel_copies': 1, 'samples': 2, 'time': '30.0s'}],
            ['File Copy 256 bufsize 500 maxblocks', 396912.9, 'KBps',
             {'num_parallel_copies': 1, 'samples': 2, 'time': '30.0s'}],
            ['File Copy 4096 bufsize 8000 maxblocks', 2513158.7, 'KBps',
             {'num_parallel_copies': 1, 'samples': 2, 'time': '30.0s'}],
            ['Pipe Throughput', 2221775.6, 'lps',
             {'num_parallel_copies': 1, 'samples': 7, 'time': '10.0s'}],
            ['Pipe-based Context Switching', 369000.7, 'lps',
             {'num_parallel_copies': 1, 'samples': 7, 'time': '10.0s'}],
            ['Process Creation', 12587.7, 'lps',
             {'num_parallel_copies': 1, 'samples': 2, 'time': '30.0s'}],
            ['Shell Scripts (1 concurrent)', 8234.3, 'lpm',
             {'num_parallel_copies': 1, 'samples': 2, 'time': '60.0s'}],
            ['Shell Scripts (8 concurrent)', 1064.5, 'lpm',
             {'num_parallel_copies': 1, 'samples': 2, 'time': '60.0s'}],
            ['System Call Overhead', 4439274.5, 'lps',
             {'num_parallel_copies': 1, 'samples': 7, 'time': '10.0s'}],
            # 1-copy run: normalized scores (value vs. baseline -> index).
            ['Dhrystone 2 using register variables:score', 34872897.7, '',
             {'index': 2988.3, 'baseline': 116700.0, 'num_parallel_copies': 1}],
            ['Double-Precision Whetstone:score', 4022.0, '',
             {'index': 731.3, 'baseline': 55.0, 'num_parallel_copies': 1}],
            ['Execl Throughput:score', 4735.8, '',
             {'index': 1101.4, 'baseline': 43.0, 'num_parallel_copies': 1}],
            ['File Copy 1024 bufsize 2000 maxblocks:score', 1294367.0, '',
             {'index': 3268.6, 'baseline': 3960.0, 'num_parallel_copies': 1}],
            ['File Copy 256 bufsize 500 maxblocks:score', 396912.9, '',
             {'index': 2398.3, 'baseline': 1655.0, 'num_parallel_copies': 1}],
            ['File Copy 4096 bufsize 8000 maxblocks:score', 2513158.7, '',
             {'index': 4333.0, 'baseline': 5800.0, 'num_parallel_copies': 1}],
            ['Pipe Throughput:score', 2221775.6, '',
             {'index': 1786.0, 'baseline': 12440.0, 'num_parallel_copies': 1}],
            ['Pipe-based Context Switching:score', 369000.7, '',
             {'index': 922.5, 'baseline': 4000.0, 'num_parallel_copies': 1}],
            ['Process Creation:score', 12587.7, '',
             {'index': 999.0, 'baseline': 126.0, 'num_parallel_copies': 1}],
            ['Shell Scripts (1 concurrent):score', 8234.3, '',
             {'index': 1942.1, 'baseline': 42.4, 'num_parallel_copies': 1}],
            ['Shell Scripts (8 concurrent):score', 1064.5, '',
             {'index': 1774.2, 'baseline': 6.0, 'num_parallel_copies': 1}],
            ['System Call Overhead:score', 4439274.5, '',
             {'index': 2959.5, 'baseline': 15000.0, 'num_parallel_copies': 1}],
            ['System Benchmarks Index Score', 1825.8, '',
             {'num_parallel_copies': 1}],
            # 8-copy run: raw throughput metrics.
            ['Dhrystone 2 using register variables', 155391896.7, 'lps',
             {'num_parallel_copies': 8, 'samples': 7, 'time': '10.0s'}],
            ['Double-Precision Whetstone', 28632.5, 'MWIPS',
             {'num_parallel_copies': 8, 'samples': 7, 'time': '9.8s'}],
            ['Execl Throughput', 15184.0, 'lps',
             {'num_parallel_copies': 8, 'samples': 2, 'time': '30.0s'}],
            ['File Copy 1024 bufsize 2000 maxblocks', 985484.8, 'KBps',
             {'num_parallel_copies': 8, 'samples': 2, 'time': '30.0s'}],
            ['File Copy 256 bufsize 500 maxblocks', 269732.2, 'KBps',
             {'num_parallel_copies': 8, 'samples': 2, 'time': '30.0s'}],
            ['File Copy 4096 bufsize 8000 maxblocks', 2706156.4, 'KBps',
             {'num_parallel_copies': 8, 'samples': 2, 'time': '30.0s'}],
            ['Pipe Throughput', 8525928.8, 'lps',
             {'num_parallel_copies': 8, 'samples': 7, 'time': '10.0s'}],
            ['Pipe-based Context Switching', 1017270.4, 'lps',
             {'num_parallel_copies': 8, 'samples': 7, 'time': '10.0s'}],
            ['Process Creation', 31563.7, 'lps',
             {'num_parallel_copies': 8, 'samples': 2, 'time': '30.0s'}],
            ['Shell Scripts (1 concurrent)', 32516.3, 'lpm',
             {'num_parallel_copies': 8, 'samples': 2, 'time': '60.0s'}],
            ['Shell Scripts (8 concurrent)', 5012.2, 'lpm',
             {'num_parallel_copies': 8, 'samples': 2, 'time': '60.0s'}],
            ['System Call Overhead', 10288762.3, 'lps',
             {'num_parallel_copies': 8, 'samples': 7, 'time': '10.0s'}],
            # 8-copy run: normalized scores.
            ['Dhrystone 2 using register variables:score', 155391896.7, '',
             {'index': 13315.5, 'baseline': 116700.0, 'num_parallel_copies': 8}],
            ['Double-Precision Whetstone:score', 28632.5, '',
             {'index': 5205.9, 'baseline': 55.0, 'num_parallel_copies': 8}],
            ['Execl Throughput:score', 15184.0, '',
             {'index': 3531.2, 'baseline': 43.0, 'num_parallel_copies': 8}],
            ['File Copy 1024 bufsize 2000 maxblocks:score', 985484.8, '',
             {'index': 2488.6, 'baseline': 3960.0, 'num_parallel_copies': 8}],
            ['File Copy 256 bufsize 500 maxblocks:score', 269732.2, '',
             {'index': 1629.8, 'baseline': 1655.0, 'num_parallel_copies': 8}],
            ['File Copy 4096 bufsize 8000 maxblocks:score', 2706156.4, '',
             {'index': 4665.8, 'baseline': 5800.0, 'num_parallel_copies': 8}],
            ['Pipe Throughput:score', 8525928.8, '',
             {'index': 6853.6, 'baseline': 12440.0, 'num_parallel_copies': 8}],
            ['Pipe-based Context Switching:score', 1017270.4, '',
             {'index': 2543.2, 'baseline': 4000.0, 'num_parallel_copies': 8}],
            ['Process Creation:score', 31563.7, '',
             {'index': 2505.1, 'baseline': 126.0, 'num_parallel_copies': 8}],
            ['Shell Scripts (1 concurrent):score', 32516.3, '',
             {'index': 7668.9, 'baseline': 42.4, 'num_parallel_copies': 8}],
            ['Shell Scripts (8 concurrent):score', 5012.2, '',
             {'index': 8353.6, 'baseline': 6.0, 'num_parallel_copies': 8}],
            ['System Call Overhead:score', 10288762.3, '',
             {'index': 6859.2, 'baseline': 15000.0, 'num_parallel_copies': 8}],
            ['System Benchmarks Index Score', 4596.2, '',
             {'num_parallel_copies': 8}]]
        expected_result = [sample.Sample(*exp) for exp in expected_result]
        # Timestamps are generated at parse time, so compare modulo timestamp.
        self.assertSampleListsEqualUpToTimestamp(result, expected_result)
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
|
import logging
import threading
from time import monotonic, sleep
import bme680 # pylint: disable=import-error
from smbus import SMBus # pylint: disable=import-error
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_MONITORED_CONDITIONS,
CONF_NAME,
PERCENTAGE,
TEMP_FAHRENHEIT,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util.temperature import celsius_to_fahrenheit
_LOGGER = logging.getLogger(__name__)

# Configuration option keys.
CONF_I2C_ADDRESS = "i2c_address"
CONF_I2C_BUS = "i2c_bus"
CONF_OVERSAMPLING_TEMP = "oversampling_temperature"
CONF_OVERSAMPLING_PRES = "oversampling_pressure"
CONF_OVERSAMPLING_HUM = "oversampling_humidity"
CONF_FILTER_SIZE = "filter_size"
CONF_GAS_HEATER_TEMP = "gas_heater_temperature"
CONF_GAS_HEATER_DURATION = "gas_heater_duration"
CONF_AQ_BURN_IN_TIME = "aq_burn_in_time"
CONF_AQ_HUM_BASELINE = "aq_humidity_baseline"
CONF_AQ_HUM_WEIGHTING = "aq_humidity_bias"
CONF_TEMP_OFFSET = "temp_offset"

# Defaults used when options are omitted from the configuration.
DEFAULT_NAME = "BME680 Sensor"
DEFAULT_I2C_ADDRESS = 0x77
DEFAULT_I2C_BUS = 1
DEFAULT_OVERSAMPLING_TEMP = 8  # Temperature oversampling x 8
DEFAULT_OVERSAMPLING_PRES = 4  # Pressure oversampling x 4
DEFAULT_OVERSAMPLING_HUM = 2  # Humidity oversampling x 2
DEFAULT_FILTER_SIZE = 3  # IIR Filter Size
DEFAULT_GAS_HEATER_TEMP = 320  # Temperature in celsius 200 - 400
DEFAULT_GAS_HEATER_DURATION = 150  # Heater duration in ms 1 - 4032
DEFAULT_AQ_BURN_IN_TIME = 300  # 300 second burn in time for AQ gas measurement
DEFAULT_AQ_HUM_BASELINE = 40  # 40%, an optimal indoor humidity.
DEFAULT_AQ_HUM_WEIGHTING = 25  # 25% Weighting of humidity to gas in AQ score
DEFAULT_TEMP_OFFSET = 0  # No calibration out of the box.

# Sensor type identifiers.
SENSOR_TEMP = "temperature"
SENSOR_HUMID = "humidity"
SENSOR_PRESS = "pressure"
SENSOR_GAS = "gas"
SENSOR_AQ = "airquality"
# [friendly name, unit]; the temperature unit (None here) is filled in at
# setup time from the Home Assistant unit system.
SENSOR_TYPES = {
    SENSOR_TEMP: ["Temperature", None],
    SENSOR_HUMID: ["Humidity", PERCENTAGE],
    SENSOR_PRESS: ["Pressure", "mb"],
    SENSOR_GAS: ["Gas Resistance", "Ohms"],
    SENSOR_AQ: ["Air Quality", PERCENTAGE],
}
DEFAULT_MONITORED = [SENSOR_TEMP, SENSOR_HUMID, SENSOR_PRESS, SENSOR_AQ]
# Valid values accepted by the bme680 driver's oversampling/filter setters.
OVERSAMPLING_VALUES = {0, 1, 2, 4, 8, 16}
FILTER_VALUES = {0, 1, 3, 7, 15, 31, 63, 127}

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
        vol.Optional(CONF_I2C_ADDRESS, default=DEFAULT_I2C_ADDRESS): cv.positive_int,
        vol.Optional(CONF_MONITORED_CONDITIONS, default=DEFAULT_MONITORED): vol.All(
            cv.ensure_list, [vol.In(SENSOR_TYPES)]
        ),
        vol.Optional(CONF_I2C_BUS, default=DEFAULT_I2C_BUS): cv.positive_int,
        vol.Optional(
            CONF_OVERSAMPLING_TEMP, default=DEFAULT_OVERSAMPLING_TEMP
        ): vol.All(vol.Coerce(int), vol.In(OVERSAMPLING_VALUES)),
        vol.Optional(
            CONF_OVERSAMPLING_PRES, default=DEFAULT_OVERSAMPLING_PRES
        ): vol.All(vol.Coerce(int), vol.In(OVERSAMPLING_VALUES)),
        vol.Optional(CONF_OVERSAMPLING_HUM, default=DEFAULT_OVERSAMPLING_HUM): vol.All(
            vol.Coerce(int), vol.In(OVERSAMPLING_VALUES)
        ),
        vol.Optional(CONF_FILTER_SIZE, default=DEFAULT_FILTER_SIZE): vol.All(
            vol.Coerce(int), vol.In(FILTER_VALUES)
        ),
        vol.Optional(CONF_GAS_HEATER_TEMP, default=DEFAULT_GAS_HEATER_TEMP): vol.All(
            vol.Coerce(int), vol.Range(200, 400)
        ),
        vol.Optional(
            CONF_GAS_HEATER_DURATION, default=DEFAULT_GAS_HEATER_DURATION
        ): vol.All(vol.Coerce(int), vol.Range(1, 4032)),
        vol.Optional(
            CONF_AQ_BURN_IN_TIME, default=DEFAULT_AQ_BURN_IN_TIME
        ): cv.positive_int,
        vol.Optional(CONF_AQ_HUM_BASELINE, default=DEFAULT_AQ_HUM_BASELINE): vol.All(
            vol.Coerce(int), vol.Range(1, 100)
        ),
        vol.Optional(CONF_AQ_HUM_WEIGHTING, default=DEFAULT_AQ_HUM_WEIGHTING): vol.All(
            vol.Coerce(int), vol.Range(1, 100)
        ),
        vol.Optional(CONF_TEMP_OFFSET, default=DEFAULT_TEMP_OFFSET): vol.All(
            vol.Coerce(float), vol.Range(-100.0, 100.0)
        ),
    }
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the BME680 sensor.

    Resolves the temperature unit from the configured HA unit system,
    initializes the sensor handler in an executor (setup does blocking I2C
    I/O), then adds one entity per monitored condition.
    """
    # The temperature unit depends on the HA unit system, so it is filled in
    # here rather than hard-coded in the SENSOR_TYPES table.
    SENSOR_TYPES[SENSOR_TEMP][1] = hass.config.units.temperature_unit
    name = config[CONF_NAME]
    sensor_handler = await hass.async_add_executor_job(_setup_bme680, config)
    if sensor_handler is None:
        # Sensor not detected / failed to initialize; _setup_bme680 logged it.
        return
    # Build all entities in one pass instead of append-in-a-loop, and drop
    # the redundant trailing ``return``.
    async_add_entities(
        [
            BME680Sensor(sensor_handler, variable, SENSOR_TYPES[variable][1], name)
            for variable in config[CONF_MONITORED_CONDITIONS]
        ]
    )
def _setup_bme680(config):
    """Set up and configure the BME680 sensor.

    Runs in an executor thread (blocking I2C I/O).  Returns a configured
    BME680Handler, or None when the device is absent or fails to produce an
    initial temperature reading.
    """
    sensor_handler = None
    sensor = None
    try:
        # pylint: disable=no-member
        i2c_address = config[CONF_I2C_ADDRESS]
        bus = SMBus(config[CONF_I2C_BUS])
        sensor = bme680.BME680(i2c_address, bus)

        # Configure Oversampling
        # Map the validated integer config values to driver constants.
        os_lookup = {
            0: bme680.OS_NONE,
            1: bme680.OS_1X,
            2: bme680.OS_2X,
            4: bme680.OS_4X,
            8: bme680.OS_8X,
            16: bme680.OS_16X,
        }
        sensor.set_temperature_oversample(os_lookup[config[CONF_OVERSAMPLING_TEMP]])
        sensor.set_temp_offset(config[CONF_TEMP_OFFSET])
        sensor.set_humidity_oversample(os_lookup[config[CONF_OVERSAMPLING_HUM]])
        sensor.set_pressure_oversample(os_lookup[config[CONF_OVERSAMPLING_PRES]])

        # Configure IIR Filter
        filter_lookup = {
            0: bme680.FILTER_SIZE_0,
            1: bme680.FILTER_SIZE_1,
            3: bme680.FILTER_SIZE_3,
            7: bme680.FILTER_SIZE_7,
            15: bme680.FILTER_SIZE_15,
            31: bme680.FILTER_SIZE_31,
            63: bme680.FILTER_SIZE_63,
            127: bme680.FILTER_SIZE_127,
        }
        sensor.set_filter(filter_lookup[config[CONF_FILTER_SIZE]])

        # Configure the Gas Heater
        # The heater is only enabled when a gas-based condition is monitored.
        if (
            SENSOR_GAS in config[CONF_MONITORED_CONDITIONS]
            or SENSOR_AQ in config[CONF_MONITORED_CONDITIONS]
        ):
            sensor.set_gas_status(bme680.ENABLE_GAS_MEAS)
            sensor.set_gas_heater_duration(config[CONF_GAS_HEATER_DURATION])
            sensor.set_gas_heater_temperature(config[CONF_GAS_HEATER_TEMP])
            sensor.select_gas_heater_profile(0)
        else:
            sensor.set_gas_status(bme680.DISABLE_GAS_MEAS)
    except (RuntimeError, OSError):
        # RuntimeError: driver-level failure; OSError: I2C bus/address issue.
        _LOGGER.error("BME680 sensor not detected at 0x%02x", i2c_address)
        return None

    sensor_handler = BME680Handler(
        sensor,
        (
            SENSOR_GAS in config[CONF_MONITORED_CONDITIONS]
            or SENSOR_AQ in config[CONF_MONITORED_CONDITIONS]
        ),
        config[CONF_AQ_BURN_IN_TIME],
        config[CONF_AQ_HUM_BASELINE],
        config[CONF_AQ_HUM_WEIGHTING],
    )
    sleep(0.5)  # Wait for device to stabilize
    # A missing first temperature reading is treated as an init failure.
    if not sensor_handler.sensor_data.temperature:
        _LOGGER.error("BME680 sensor failed to Initialize")
        return None

    return sensor_handler
class BME680Handler:
    """BME680 sensor working in i2C bus.

    Owns the latest readings in ``sensor_data``.  When gas measurement is
    enabled, a daemon-style background thread performs the air-quality
    burn-in and then continuously refreshes gas/AQ values.
    """

    class SensorData:
        """Sensor data representation (latest readings; None until read)."""

        def __init__(self):
            """Initialize the sensor data object."""
            self.temperature = None
            self.humidity = None
            self.pressure = None
            self.gas_resistance = None
            self.air_quality = None

    def __init__(
        self,
        sensor,
        gas_measurement=False,
        burn_in_time=300,
        hum_baseline=40,
        hum_weighting=25,
    ):
        """Initialize the sensor handler.

        sensor: configured bme680 driver instance.
        gas_measurement: start the background gas/AQ thread when True.
        burn_in_time: seconds of gas baseline calibration.
        hum_baseline / hum_weighting: AQ score parameters (percent).
        """
        self.sensor_data = BME680Handler.SensorData()
        self._sensor = sensor
        self._gas_sensor_running = False
        self._hum_baseline = hum_baseline
        self._hum_weighting = hum_weighting
        self._gas_baseline = None
        if gas_measurement:
            threading.Thread(
                target=self._run_gas_sensor,
                kwargs={"burn_in_time": burn_in_time},
                name="BME680Handler_run_gas_sensor",
            ).start()
        # Prime temperature/humidity/pressure so setup can validate the device.
        self.update(first_read=True)

    def _run_gas_sensor(self, burn_in_time):
        """Calibrate the Air Quality Gas Baseline.

        Runs on the background thread: collects burn-in readings for
        ``burn_in_time`` seconds, derives the gas baseline, then loops
        forever updating gas resistance and the AQ score once per second.
        """
        if self._gas_sensor_running:
            return
        self._gas_sensor_running = True

        # Pause to allow initial data read for device validation.
        sleep(1)

        start_time = monotonic()
        curr_time = monotonic()
        burn_in_data = []
        _LOGGER.info(
            "Beginning %d second gas sensor burn in for Air Quality", burn_in_time
        )
        while curr_time - start_time < burn_in_time:
            curr_time = monotonic()
            # Only readings taken with a heat-stable sensor are usable.
            if self._sensor.get_sensor_data() and self._sensor.data.heat_stable:
                gas_resistance = self._sensor.data.gas_resistance
                burn_in_data.append(gas_resistance)
                self.sensor_data.gas_resistance = gas_resistance
                _LOGGER.debug(
                    "AQ Gas Resistance Baseline reading %2f Ohms", gas_resistance
                )
            sleep(1)

        _LOGGER.debug(
            "AQ Gas Resistance Burn In Data (Size: %d): \n\t%s",
            len(burn_in_data),
            burn_in_data,
        )
        # Baseline = mean of the last 50 burn-in samples.
        self._gas_baseline = sum(burn_in_data[-50:]) / 50.0
        _LOGGER.info("Completed gas sensor burn in for Air Quality")
        _LOGGER.info("AQ Gas Resistance Baseline: %f", self._gas_baseline)
        while True:
            if self._sensor.get_sensor_data() and self._sensor.data.heat_stable:
                self.sensor_data.gas_resistance = self._sensor.data.gas_resistance
                self.sensor_data.air_quality = self._calculate_aq_score()
            sleep(1)

    def update(self, first_read=False):
        """Read sensor data (temperature, humidity, pressure)."""
        if first_read:
            # Attempt first read, it almost always fails first attempt
            self._sensor.get_sensor_data()
        if self._sensor.get_sensor_data():
            self.sensor_data.temperature = self._sensor.data.temperature
            self.sensor_data.humidity = self._sensor.data.humidity
            self.sensor_data.pressure = self._sensor.data.pressure

    def _calculate_aq_score(self):
        """Calculate the Air Quality Score.

        Combines a humidity score (weighted ``hum_weighting`` percent) and a
        gas score (the remaining percent) into a 0-100 value.
        """
        hum_baseline = self._hum_baseline
        hum_weighting = self._hum_weighting
        gas_baseline = self._gas_baseline

        gas_resistance = self.sensor_data.gas_resistance
        gas_offset = gas_baseline - gas_resistance

        hum = self.sensor_data.humidity
        hum_offset = hum - hum_baseline

        # Calculate hum_score as the distance from the hum_baseline.
        if hum_offset > 0:
            hum_score = (
                (100 - hum_baseline - hum_offset) / (100 - hum_baseline) * hum_weighting
            )
        else:
            hum_score = (hum_baseline + hum_offset) / hum_baseline * hum_weighting

        # Calculate gas_score as the distance from the gas_baseline.
        if gas_offset > 0:
            gas_score = (gas_resistance / gas_baseline) * (100 - hum_weighting)
        else:
            # At or above baseline resistance the gas component is maximal.
            gas_score = 100 - hum_weighting

        # Calculate air quality score.
        return hum_score + gas_score
class BME680Sensor(Entity):
    """One reported condition (temperature, humidity, ...) of a BME680."""

    def __init__(self, bme680_client, sensor_type, temp_unit, name):
        """Initialize the sensor."""
        self.client_name = name
        self._name = SENSOR_TYPES[sensor_type][0]
        self.bme680_client = bme680_client
        self.temp_unit = temp_unit
        self.type = sensor_type
        self._state = None
        self._unit_of_measurement = SENSOR_TYPES[sensor_type][1]

    @property
    def name(self):
        """Return the sensor name, prefixed with the device name."""
        return f"{self.client_name} {self._name}"

    @property
    def state(self):
        """Return the current state of the sensor."""
        return self._state

    @property
    def unit_of_measurement(self):
        """Return the unit this sensor reports in."""
        return self._unit_of_measurement

    async def async_update(self):
        """Refresh the handler data and pick out this sensor's value."""
        await self.hass.async_add_executor_job(self.bme680_client.update)
        data = self.bme680_client.sensor_data
        if self.type == SENSOR_TEMP:
            temperature = round(data.temperature, 1)
            if self.temp_unit == TEMP_FAHRENHEIT:
                temperature = round(celsius_to_fahrenheit(temperature), 1)
            self._state = temperature
        elif self.type == SENSOR_HUMID:
            self._state = round(data.humidity, 1)
        elif self.type == SENSOR_PRESS:
            self._state = round(data.pressure, 1)
        elif self.type == SENSOR_GAS:
            self._state = int(round(data.gas_resistance, 0))
        elif self.type == SENSOR_AQ:
            aq_score = data.air_quality
            if aq_score is not None:
                self._state = round(aq_score, 1)
|
import logging
import unittest
from gensim.topic_coherence import aggregation
class TestAggregation(unittest.TestCase):
    """Tests for the topic-coherence aggregation functions."""

    def setUp(self):
        # Fixed confirmation measures whose arithmetic mean is exactly 2.75.
        self.confirmed_measures = [1.1, 2.2, 3.3, 4.4]

    def testArithmeticMean(self):
        """Test arithmetic_mean()"""
        self.assertEqual(
            aggregation.arithmetic_mean(self.confirmed_measures), 2.75
        )
if __name__ == '__main__':
    # Show only warnings and above when the test module is run directly.
    logging.root.setLevel(logging.WARNING)
    unittest.main()
|
from homeassistant.components.switch import SwitchEntity
from . import (
CONF_HOST,
CONF_NAME,
CONF_SWITCHES,
DATA_IP_WEBCAM,
ICON_MAP,
KEY_MAP,
AndroidIPCamEntity,
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the IP Webcam switch platform.

    This platform is configured exclusively via discovery from the parent
    component; without discovery_info there is nothing to set up.
    """
    if discovery_info is None:
        return

    host = discovery_info[CONF_HOST]
    name = discovery_info[CONF_NAME]
    ipcam = hass.data[DATA_IP_WEBCAM][host]

    # Build the entity list in one pass instead of append-in-a-loop.
    async_add_entities(
        [
            IPWebcamSettingsSwitch(name, host, ipcam, setting)
            for setting in discovery_info[CONF_SWITCHES]
        ],
        True,
    )
class IPWebcamSettingsSwitch(AndroidIPCamEntity, SwitchEntity):
    """An abstract class for an IP Webcam setting."""

    def __init__(self, name, host, ipcam, setting):
        """Initialize the settings switch.

        name: device name prefix; host/ipcam: passed to the base entity;
        setting: the camera setting key this switch controls.
        """
        super().__init__(host, ipcam)
        self._setting = setting
        self._mapped_name = KEY_MAP.get(self._setting, self._setting)
        self._name = f"{name} {self._mapped_name}"
        self._state = False

    @property
    def name(self):
        """Return the name of the node."""
        return self._name

    async def async_update(self):
        """Get the updated status of the switch."""
        self._state = bool(self._ipcam.current_settings.get(self._setting))

    @property
    def is_on(self):
        """Return the boolean response if the node is on."""
        return self._state

    async def _async_set_setting(self, activate):
        """Apply an on/off state to the camera and record it locally.

        Shared by async_turn_on/async_turn_off (the original duplicated the
        branch chain in both).  A few settings use dedicated API endpoints
        rather than the generic change_setting call.
        """
        if self._setting == "torch":
            await self._ipcam.torch(activate=activate)
        elif self._setting == "focus":
            await self._ipcam.focus(activate=activate)
        elif self._setting == "video_recording":
            await self._ipcam.record(record=activate)
        else:
            await self._ipcam.change_setting(self._setting, activate)
        self._state = activate
        self.async_write_ha_state()

    async def async_turn_on(self, **kwargs):
        """Turn device on."""
        await self._async_set_setting(True)

    async def async_turn_off(self, **kwargs):
        """Turn device off."""
        await self._async_set_setting(False)

    @property
    def icon(self):
        """Return the icon for the switch."""
        return ICON_MAP.get(self._setting, "mdi:flash")
|
import asyncio
import logging
from hole import Hole
from hole.exceptions import HoleError
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import (
CONF_API_KEY,
CONF_HOST,
CONF_NAME,
CONF_SSL,
CONF_VERIFY_SSL,
)
from homeassistant.core import callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
UpdateFailed,
)
from .const import (
CONF_LOCATION,
DATA_KEY_API,
DATA_KEY_COORDINATOR,
DEFAULT_LOCATION,
DEFAULT_NAME,
DEFAULT_SSL,
DEFAULT_VERIFY_SSL,
DOMAIN,
MIN_TIME_BETWEEN_UPDATES,
)
_LOGGER = logging.getLogger(__name__)

# Schema for one Pi-hole host entry in YAML configuration.
PI_HOLE_SCHEMA = vol.Schema(
    vol.All(
        {
            vol.Required(CONF_HOST): cv.string,
            vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
            vol.Optional(CONF_API_KEY): cv.string,
            vol.Optional(CONF_SSL, default=DEFAULT_SSL): cv.boolean,
            vol.Optional(CONF_LOCATION, default=DEFAULT_LOCATION): cv.string,
            vol.Optional(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): cv.boolean,
        },
    )
)
# Top-level schema: a list of Pi-hole host entries under the integration key.
CONFIG_SCHEMA = vol.Schema(
    {DOMAIN: vol.Schema(vol.All(cv.ensure_list, [PI_HOLE_SCHEMA]))},
    extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, config):
    """Set up the Pi-hole integration."""
    hass.data[DOMAIN] = {}

    # Forward any YAML-configured hosts into the config-entry import flow.
    for conf in config.get(DOMAIN, []):
        hass.async_create_task(
            hass.config_entries.flow.async_init(
                DOMAIN, context={"source": SOURCE_IMPORT}, data=conf
            )
        )

    return True
async def async_setup_entry(hass, entry):
    """Set up Pi-hole entry.

    Validates connectivity with an initial fetch, registers the API client
    and its DataUpdateCoordinator under the entry id, then forwards setup to
    the per-entry platforms.  Raises ConfigEntryNotReady on connect failure
    so HA retries later.
    """
    name = entry.data[CONF_NAME]
    host = entry.data[CONF_HOST]
    use_tls = entry.data[CONF_SSL]
    verify_tls = entry.data[CONF_VERIFY_SSL]
    location = entry.data[CONF_LOCATION]
    api_key = entry.data.get(CONF_API_KEY)

    _LOGGER.debug("Setting up %s integration with host %s", DOMAIN, host)

    try:
        session = async_get_clientsession(hass, verify_tls)
        api = Hole(
            host,
            hass.loop,
            session,
            location=location,
            tls=use_tls,
            api_token=api_key,
        )
        # Initial fetch validates the connection before anything is registered.
        await api.get_data()
    except HoleError as ex:
        _LOGGER.warning("Failed to connect: %s", ex)
        raise ConfigEntryNotReady from ex

    async def async_update_data():
        """Fetch data from API endpoint."""
        try:
            await api.get_data()
        except HoleError as err:
            # Fixed message grammar ("communicating" -> "communicate").
            raise UpdateFailed(f"Failed to communicate with API: {err}") from err

    coordinator = DataUpdateCoordinator(
        hass,
        _LOGGER,
        name=name,
        update_method=async_update_data,
        update_interval=MIN_TIME_BETWEEN_UPDATES,
    )
    hass.data[DOMAIN][entry.entry_id] = {
        DATA_KEY_API: api,
        DATA_KEY_COORDINATOR: coordinator,
    }

    for platform in _async_platforms(entry):
        hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(entry, platform)
        )

    return True
async def async_unload_entry(hass, entry):
    """Unload Pi-hole entry."""
    # Unload every platform concurrently; success requires all to unload.
    results = await asyncio.gather(
        *(
            hass.config_entries.async_forward_entry_unload(entry, platform)
            for platform in _async_platforms(entry)
        )
    )
    unload_ok = all(results)
    if unload_ok:
        hass.data[DOMAIN].pop(entry.entry_id)
    return unload_ok
@callback
def _async_platforms(entry):
    """Return platforms to be loaded / unloaded."""
    # With an API key the disable-switch is available; otherwise only the
    # status binary sensor can be offered.
    extra = "switch" if entry.data.get(CONF_API_KEY) else "binary_sensor"
    return ["sensor", extra]
class PiHoleEntity(CoordinatorEntity):
    """Representation of a Pi-hole entity."""

    def __init__(self, api, coordinator, name, server_unique_id):
        """Store the API client and identifying data for this server."""
        super().__init__(coordinator)
        self.api = api
        self._name = name
        self._server_unique_id = server_unique_id

    @property
    def icon(self):
        """Return the Pi-hole brand icon for the frontend."""
        return "mdi:pi-hole"

    @property
    def device_info(self):
        """Describe the Pi-hole device this entity belongs to."""
        identifiers = {(DOMAIN, self._server_unique_id)}
        return {
            "identifiers": identifiers,
            "name": self._name,
            "manufacturer": "Pi-hole",
        }
|
import os
from paasta_tools.cli.utils import get_jenkins_build_output_url
from paasta_tools.cli.utils import lazy_choices_completer
from paasta_tools.cli.utils import validate_service_name
from paasta_tools.utils import _log
from paasta_tools.utils import _run
from paasta_tools.utils import build_docker_tag
from paasta_tools.utils import check_docker_image
from paasta_tools.utils import DEFAULT_SOA_DIR
from paasta_tools.utils import list_services
def add_subparser(subparsers):
    """Register the 'paasta itest' subcommand on the shared subparser set."""
    itest_parser = subparsers.add_parser(
        "itest",
        help="Runs 'make itest' as part of the PaaSTA contract.",
        description=(
            "'paasta itest' runs 'make itest' in the root of a service directory. "
            "It is designed to be used in conjunction with the 'Jenkins' workflow: "
            "http://paasta.readthedocs.io/en/latest/about/contract.html#jenkins-pipeline-recommended"
        ),
    )
    itest_parser.add_argument(
        "-s",
        "--service",
        required=True,
        help="Test and build docker image for this service. Leading "
        '"services-", as included in a Jenkins job name, '
        "will be stripped.",
    )
    itest_parser.add_argument(
        "-c",
        "--commit",
        required=True,
        help="Git sha used to construct tag for built image",
    )
    soa_dir_arg = itest_parser.add_argument(
        "-d",
        "--soa-dir",
        dest="soa_dir",
        default=DEFAULT_SOA_DIR,
        help="A directory from which soa-configs should be read from",
    )
    soa_dir_arg.completer = lazy_choices_completer(list_services)
    itest_parser.add_argument(
        "--timeout",
        dest="timeout",
        type=float,
        default=3600,
        help="How many seconds before this command times out",
    )
    itest_parser.set_defaults(command=paasta_itest)
def paasta_itest(args):
    """Build and test a docker image"""
    service = args.service
    soa_dir = args.soa_dir
    if service and service.startswith("services-"):
        # Jenkins job names prefix the service with "services-"; strip it.
        service = service.split("services-", 1)[1]
    validate_service_name(service, soa_dir=soa_dir)

    tag = build_docker_tag(service, args.commit)
    run_env = dict(os.environ, DOCKER_TAG=tag)

    _log(
        service=service,
        line=f"starting itest for {args.commit}.",
        component="build",
        level="event",
    )
    returncode, output = _run(
        "make itest",
        env=run_env,
        timeout=args.timeout,
        log=True,
        component="build",
        service=service,
        loglevel="debug",
    )

    loglines = []
    if returncode == 0:
        loglines.append(f"itest passed for {args.commit}.")
        if not check_docker_image(service, args.commit):
            loglines.append(f"ERROR: itest has not created {tag}")
            returncode = 1
    else:
        loglines.append(f"ERROR: itest failed for {args.commit}.")
        jenkins_url = get_jenkins_build_output_url()
        if jenkins_url:
            loglines.append(f"See output: {jenkins_url}")

    for logline in loglines:
        _log(service=service, line=logline, component="build", level="event")
    return returncode
|
import hangups
from common import run_example
async def send_message(client, args):
    """Create a new group conversation with the given participants."""
    request = hangups.hangouts_pb2.CreateConversationRequest(
        request_header=client.get_request_header(),
        type=hangups.hangouts_pb2.CONVERSATION_TYPE_GROUP,
        client_generated_id=client.get_client_generated_id(),
        invitee_id=[
            # One InviteeID per comma-separated gaia id on the command line.
            hangups.hangouts_pb2.InviteeID(gaia_id=gaia_id)
            for gaia_id in args.gaia_ids.split(",")
        ],
        name=args.conversation_name,
    )
    response = await client.create_conversation(request)
    print(response)
# --gaia-ids: list of participant gaia_id, separated by comma (excluding self)
# --conversation-name: the group conversation name to specify/customize
if __name__ == '__main__':
    # Entry point: delegate to the shared example runner with the extra flags.
    run_example(send_message, '--gaia-ids', '--conversation-name')
|
from typing import List, Optional
import voluptuous as vol
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_SUPPORTED_FEATURES,
CONF_DEVICE_ID,
CONF_DOMAIN,
CONF_ENTITY_ID,
CONF_TYPE,
SERVICE_LOCK,
SERVICE_OPEN,
SERVICE_UNLOCK,
)
from homeassistant.core import Context, HomeAssistant
from homeassistant.helpers import entity_registry
import homeassistant.helpers.config_validation as cv
from . import DOMAIN, SUPPORT_OPEN
# Action types offered for lock entities; "open" is only advertised when the
# entity reports SUPPORT_OPEN (see async_get_actions).
ACTION_TYPES = {"lock", "unlock", "open"}

ACTION_SCHEMA = cv.DEVICE_ACTION_BASE_SCHEMA.extend(
    {
        vol.Required(CONF_TYPE): vol.In(ACTION_TYPES),
        vol.Required(CONF_ENTITY_ID): cv.entity_domain(DOMAIN),
    }
)
async def async_get_actions(hass: HomeAssistant, device_id: str) -> List[dict]:
    """List device actions for Lock devices.

    Every lock entity on the device gets "lock" and "unlock" actions; an
    "open" action is added only when the entity's state reports the
    SUPPORT_OPEN feature.
    """
    registry = await entity_registry.async_get_registry(hass)
    actions = []

    # Get all the integrations entities for this device
    for entry in entity_registry.async_entries_for_device(registry, device_id):
        if entry.domain != DOMAIN:
            continue

        # Collapse the three near-identical dict literals of the original
        # into a single loop over the supported action types.
        supported_types = ["lock", "unlock"]
        state = hass.states.get(entry.entity_id)
        if state:
            features = state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
            if features & SUPPORT_OPEN:
                supported_types.append("open")

        actions.extend(
            {
                CONF_DEVICE_ID: device_id,
                CONF_DOMAIN: DOMAIN,
                CONF_ENTITY_ID: entry.entity_id,
                CONF_TYPE: action_type,
            }
            for action_type in supported_types
        )

    return actions
async def async_call_action_from_config(
    hass: HomeAssistant, config: dict, variables: dict, context: Optional[Context]
) -> None:
    """Execute a device action."""
    config = ACTION_SCHEMA(config)

    # Schema validation above guarantees the type is one of ACTION_TYPES.
    action_type = config[CONF_TYPE]
    if action_type == "lock":
        service = SERVICE_LOCK
    elif action_type == "unlock":
        service = SERVICE_UNLOCK
    elif action_type == "open":
        service = SERVICE_OPEN

    await hass.services.async_call(
        DOMAIN,
        service,
        {ATTR_ENTITY_ID: config[CONF_ENTITY_ID]},
        blocking=True,
        context=context,
    )
|
import datetime
import time
import numpy as np
from .egimff import _read_raw_egi_mff
from .events import _combine_triggers
from ..base import BaseRaw
from ..utils import _read_segments_file, _create_chs
from ..meas_info import _empty_info
from ..constants import FIFF
from ...utils import verbose, logger, warn
def _read_header(fid):
    """Read EGI binary header.

    Returns a dict of header fields plus derived entries:
    ``unsegmented`` (continuous vs segmented flag), ``dtype`` (numpy dtype
    of the sample data) and ``orig_format``.
    """
    # First 4 bytes: format version (read little-endian; the remaining
    # header fields below are read big-endian).
    version = np.fromfile(fid, '<i4', 1)[0]

    # NOTE(review): '&' binds tighter than '>', so this evaluates as
    # ``version > (6 & ~np.bitwise_and(version, 6))`` -- confirm that this
    # matches the intended sanity check before touching it.
    if version > 6 & ~np.bitwise_and(version, 6):
        version = version.byteswap().astype(np.uint32)
    else:
        raise ValueError('Watchout. This does not seem to be a simple '
                         'binary EGI file.')

    def my_fread(*x, **y):
        # Scalar convenience wrapper around np.fromfile.
        return np.fromfile(*x, **y)[0]

    info = dict(
        version=version,
        year=my_fread(fid, '>i2', 1),
        month=my_fread(fid, '>i2', 1),
        day=my_fread(fid, '>i2', 1),
        hour=my_fread(fid, '>i2', 1),
        minute=my_fread(fid, '>i2', 1),
        second=my_fread(fid, '>i2', 1),
        millisecond=my_fread(fid, '>i4', 1),
        samp_rate=my_fread(fid, '>i2', 1),
        n_channels=my_fread(fid, '>i2', 1),
        gain=my_fread(fid, '>i2', 1),
        bits=my_fread(fid, '>i2', 1),
        value_range=my_fread(fid, '>i2', 1)
    )

    # Version bit 0 clear -> unsegmented (continuous); bits 1-2 encode the
    # sample precision (2=short, 4=float, 6=double, see the map below).
    unsegmented = 1 if np.bitwise_and(version, 1) == 0 else 0
    precision = np.bitwise_and(version, 6)
    if precision == 0:
        raise RuntimeError('Floating point precision is undefined.')

    if unsegmented:
        info.update(dict(n_categories=0,
                         n_segments=1,
                         n_samples=np.fromfile(fid, '>i4', 1)[0],
                         n_events=np.fromfile(fid, '>i2', 1)[0],
                         event_codes=[],
                         category_names=[],
                         category_lengths=[],
                         pre_baseline=0))
        # Each event code is stored as 4 raw characters.
        for event in range(info['n_events']):
            event_codes = ''.join(np.fromfile(fid, 'S1', 4).astype('U1'))
            info['event_codes'].append(event_codes)
    else:
        raise NotImplementedError('Only continuous files are supported')
    info['unsegmented'] = unsegmented
    info['dtype'], info['orig_format'] = {2: ('>i2', 'short'),
                                          4: ('>f4', 'float'),
                                          6: ('>f8', 'double')}[precision]
    info['dtype'] = np.dtype(info['dtype'])
    return info
def _read_events(fid, info):
"""Read events."""
events = np.zeros([info['n_events'],
info['n_segments'] * info['n_samples']])
fid.seek(36 + info['n_events'] * 4, 0) # skip header
for si in range(info['n_samples']):
# skip data channels
fid.seek(info['n_channels'] * info['dtype'].itemsize, 1)
# read event channels
events[:, si] = np.fromfile(fid, info['dtype'], info['n_events'])
return events
@verbose
def read_raw_egi(input_fname, eog=None, misc=None,
                 include=None, exclude=None, preload=False,
                 channel_naming='E%d', verbose=None):
    """Read EGI simple binary as raw object.

    Parameters
    ----------
    input_fname : str
        Path to the raw file. Files with an extension .mff are automatically
        considered to be EGI's native MFF format files.
    eog : list or tuple
        Names of channels or list of indices that should be designated
        EOG channels. Default is None.
    misc : list or tuple
        Names of channels or list of indices that should be designated
        MISC channels. Default is None.
    include : None | list
        The event channels to be ignored when creating the synthetic
        trigger. Defaults to None.
        Note. Overrides ``exclude`` parameter.
    exclude : None | list
        The event channels to be ignored when creating the synthetic
        trigger. Defaults to None. If None, channels that have more than
        one event and the ``sync`` and ``TREV`` channels will be
        ignored.
    %(preload)s

        .. versionadded:: 0.11
    channel_naming : str
        Channel naming convention for the data channels. Defaults to 'E%%d'
        (resulting in channel names 'E1', 'E2', 'E3'...). The effective default
        prior to 0.14.0 was 'EEG %%03d'.

        .. versionadded:: 0.14.0
    %(verbose)s

    Returns
    -------
    raw : instance of RawEGI
        A Raw object containing EGI data.

    See Also
    --------
    mne.io.Raw : Documentation of attribute and methods.

    Notes
    -----
    The trigger channel names are based on the arbitrary user dependent event
    codes used. However this function will attempt to generate a synthetic
    trigger channel named ``STI 014`` in accordance with the general
    Neuromag / MNE naming pattern.

    The event_id assignment equals ``np.arange(n_events) + 1``. The resulting
    ``event_id`` mapping is stored as attribute to the resulting raw object but
    will be ignored when saving to a fiff. Note. The trigger channel is
    artificially constructed based on timestamps received by the Netstation.
    As a consequence, triggers have only short durations.

    This step will fail if events are not mutually exclusive.
    """
    # Dispatch on the filename extension: .mff files use the native MFF
    # reader, everything else the simple-binary RawEGI class below.
    if input_fname.endswith('.mff'):
        return _read_raw_egi_mff(input_fname, eog, misc, include,
                                 exclude, preload, channel_naming, verbose)
    return RawEGI(input_fname, eog, misc, include, exclude, preload,
                  channel_naming, verbose)
class RawEGI(BaseRaw):
    """Raw object from EGI simple binary file."""

    @verbose
    def __init__(self, input_fname, eog=None, misc=None,
                 include=None, exclude=None, preload=False,
                 channel_naming='E%d', verbose=None):  # noqa: D102
        if eog is None:
            eog = []
        if misc is None:
            misc = []
        with open(input_fname, 'rb') as fid:  # 'rb' important for py3k
            logger.info('Reading EGI header from %s...' % input_fname)
            egi_info = _read_header(fid)
            logger.info(' Reading events ...')
            egi_events = _read_events(fid, egi_info)  # update info + jump
        # Calibration factor: scale raw counts by range/bit-depth when the
        # header provides them, otherwise fall back to 1e-6.
        if egi_info['value_range'] != 0 and egi_info['bits'] != 0:
            cal = egi_info['value_range'] / 2. ** egi_info['bits']
        else:
            cal = 1e-6

        logger.info(' Assembling measurement info ...')

        event_codes = []
        if egi_info['n_events'] > 0:
            event_codes = list(egi_info['event_codes'])
            if include is None:
                # Default exclusions: explicit `exclude` list, or the
                # 'sync'/'TREV' channels plus codes with at most one event.
                exclude_list = ['sync', 'TREV'] if exclude is None else exclude
                exclude_inds = [i for i, k in enumerate(event_codes) if k in
                                exclude_list]
                more_excludes = []
                if exclude is None:
                    for ii, event in enumerate(egi_events):
                        if event.sum() <= 1 and event_codes[ii]:
                            more_excludes.append(ii)
                if len(exclude_inds) + len(more_excludes) == len(event_codes):
                    warn('Did not find any event code with more than one '
                         'event.', RuntimeWarning)
                else:
                    exclude_inds.extend(more_excludes)
                exclude_inds.sort()
                include_ = [i for i in np.arange(egi_info['n_events']) if
                            i not in exclude_inds]
                include_names = [k for i, k in enumerate(event_codes)
                                 if i in include_]
            else:
                include_ = [i for i, k in enumerate(event_codes)
                            if k in include]
                include_names = include

            # Validate any user-supplied include/exclude event names.
            for kk, v in [('include', include_names), ('exclude', exclude)]:
                if isinstance(v, list):
                    for k in v:
                        if k not in event_codes:
                            raise ValueError('Could find event named "%s"' % k)
                elif v is not None:
                    raise ValueError('`%s` must be None or of type list' % kk)

            # Event ids are simply 1..len(include_), in include order.
            event_ids = np.arange(len(include_)) + 1
            logger.info(' Synthesizing trigger channel "STI 014" ...')
            logger.info(' Excluding events {%s} ...' %
                        ", ".join([k for i, k in enumerate(event_codes)
                                   if i not in include_]))
            egi_info['new_trigger'] = _combine_triggers(
                egi_events[include_], remapping=event_ids)
            self.event_id = dict(zip([e for e in event_codes if e in
                                      include_names], event_ids))
        else:
            # No events
            self.event_id = None
            egi_info['new_trigger'] = None
        info = _empty_info(egi_info['samp_rate'])
        # Measurement date from the header's date/time fields (local time).
        my_time = datetime.datetime(
            egi_info['year'], egi_info['month'], egi_info['day'],
            egi_info['hour'], egi_info['minute'], egi_info['second'])
        my_timestamp = time.mktime(my_time.timetuple())
        info['meas_date'] = (my_timestamp, 0)
        ch_names = [channel_naming % (i + 1) for i in
                    range(egi_info['n_channels'])]
        ch_names.extend(list(egi_info['event_codes']))
        if egi_info['new_trigger'] is not None:
            ch_names.append('STI 014')  # our new_trigger
        nchan = len(ch_names)
        cals = np.repeat(cal, nchan)
        ch_coil = FIFF.FIFFV_COIL_EEG
        ch_kind = FIFF.FIFFV_EEG_CH
        chs = _create_chs(ch_names, cals, ch_coil, ch_kind, eog, (), (), misc)
        # Event/trigger channels get stim-channel metadata and unit gain.
        sti_ch_idx = [i for i, name in enumerate(ch_names) if
                      name.startswith('STI') or name in event_codes]
        for idx in sti_ch_idx:
            chs[idx].update({'unit_mul': FIFF.FIFF_UNITM_NONE, 'cal': 1.,
                             'kind': FIFF.FIFFV_STIM_CH,
                             'coil_type': FIFF.FIFFV_COIL_NONE,
                             'unit': FIFF.FIFF_UNIT_NONE})
        info['chs'] = chs
        info._update_redundant()
        super(RawEGI, self).__init__(
            info, preload, orig_format=egi_info['orig_format'],
            filenames=[input_fname], last_samps=[egi_info['n_samples'] - 1],
            raw_extras=[egi_info], verbose=verbose)

    def _read_segment_file(self, data, idx, fi, start, stop, cals, mult):
        """Read a segment of data from a file."""
        egi_info = self._raw_extras[fi]
        dtype = egi_info['dtype']
        # Each stored sample interleaves all data channels plus all event
        # channels; skip the 36-byte header and the 4-byte event codes.
        n_chan_read = egi_info['n_channels'] + egi_info['n_events']
        offset = 36 + egi_info['n_events'] * 4
        trigger_ch = egi_info['new_trigger']
        _read_segments_file(self, data, idx, fi, start, stop, cals, mult,
                            dtype=dtype, n_channels=n_chan_read, offset=offset,
                            trigger_ch=trigger_ch)
|
from __future__ import print_function
import os
import re
import sys
from collections import Counter
#from pkg_resources import resource_filename
def logmsg(s):
    # TODO: switch to the logging module instead of raw stderr prints.
    print(f"[phrasemachine] {s}", file=sys.stderr)
# Python 2/3 compatibility: alias ``xrange`` to ``range`` on py3 so the
# py2-style loops below work unchanged.
if sys.version_info[0] >= 3:
    xrange = range

############## SimpleNP
## Uses a five-tag coarse grammar.
## tagset: A D P N O
# Requires conversion from PTB or Petrov/Gimpel tags to our system.
# "Coarse*" indicates petrov/gimpel
# Grammar change from the FST version: can't repeat NUM in both adj and noun.

# Coarse tag -> the fine-grained (PTB / Petrov-Gimpel) tags it covers.
coarsemap = {
    'A': "JJ JJR JJS CoarseADJ CD CoarseNUM".split(),
    'D': "DT CoarseDET".split(),
    'P': "IN TO CoarseADP".split(),
    'N': "NN NNS NNP NNPS FW CoarseNOUN".split(),
    # all other tags get O
}

## OLDER ATTEMPT: tried to use direct tags as port from foma.
## but this was annoying. have to map back to token positions at the end.
## probably slower too since the python regex compiler is not as smart as foma
# def regex_or(items):
#     return '|'.join(re.escape(x) for x in items)
# Adj = regex_or("JJ JJR JJS CD CoarseADJ CoarseNUM".split())
# Det = regex_or("DT CoarseDET".split())
# Prep= regex_or("IN TO CoarseADP".split())
# Noun= regex_or("NN NNS NNP NNPS FW CD CoarseNOUN CoarseNUM".split())
# ## convention: SPACES separate tags.
# BaseNP = "(({Adj}|{Noun}) )*({Noun} )+".format(**globals())
# PP = "{Prep} ({Det} )*{BaseNP}".format(**globals())
# NP = "{BaseNP}({PP} )*".format(**globals())

# Invert coarsemap into fine-tag -> coarse-tag; the assert guards against a
# fine tag appearing under two coarse tags.
tag2coarse = {}
for coarsetag, inputtags in coarsemap.items():
    for intag in inputtags:
        assert intag not in tag2coarse
        tag2coarse[intag] = coarsetag

## The grammar!
SimpleNP = "(A|N)*N(PD*(A|N)*N)*"
def coarse_tag_str(pos_seq):
    """Convert POS sequence to our coarse system, formatted as a string.

    Tags with no entry in ``tag2coarse`` collapse to 'O'.
    """
    # Reading a module-level name needs no ``global`` declaration; the
    # original ``global tag2coarse`` statement was a no-op.
    return ''.join(tag2coarse.get(tag, 'O') for tag in pos_seq)
# POS extraction assuming list of POS tags as input.
# >>> pyre.extract_finditer(["VB","JJ","NN","NN","QQ","QQ",])
# [(1, 4)]
# >>> pyre.extract_ngram_filter(["VB","JJ","NN","NN","QQ","QQ",])
# [(1, 3), (1, 4), (2, 3), (2, 4), (3, 4)]
def extract_finditer(pos_seq, regex=SimpleNP):
    """The "GreedyFSA" method in Handler et al. 2016.
    Returns token position spans of valid ngrams."""
    coarse = coarse_tag_str(pos_seq)
    return [(m.start(), m.end()) for m in re.finditer(regex, coarse)]
def extract_ngram_filter(pos_seq, regex=SimpleNP, minlen=1, maxlen=8):
    """The "FilterFSA" method in Handler et al. 2016.
    Returns token position spans of valid ngrams.

    Every substring of coarse tags with length in [minlen, maxlen] is tested
    against the anchored grammar.
    """
    ss = coarse_tag_str(pos_seq)
    # Compile the anchored pattern once instead of rebuilding and re-matching
    # ``regex + "$"`` for every candidate substring (there are O(n*maxlen)
    # of them per call).
    anchored = re.compile(regex + "$")

    def gen():
        for s in xrange(len(ss)):
            for n in xrange(minlen, 1 + min(maxlen, len(ss) - s)):
                e = s + n
                if anchored.match(ss[s:e]):
                    yield (s, e)

    return list(gen())
def extract_JK(pos_seq):
    """The 'JK' method in Handler et al. 2016.
    Returns token positions of valid ngrams."""

    def find_ngrams(input_list, num_):
        '''get ngrams of len n from input list'''
        return zip(*[input_list[i:] for i in range(num_)])

    # copied from M and S chp 5'''
    patterns = {'AN', 'NN', 'AAN', 'ANN', 'NAN', 'NNN', 'NPN'}
    # Pair each coarse tag with its token position so spans can be recovered.
    indexed = list(enumerate(tag2coarse.get(tag, 'O') for tag in pos_seq))
    spans = []
    for size in (1, 2, 3):
        for gram in find_ngrams(indexed, size):
            shape = "".join(tag for _, tag in gram)
            if shape in patterns:
                spans.append({idx for idx, _ in gram})
    return spans
########
def unicodify(s, encoding='utf8', errors='ignore'):
    """Force conversion to (unicode) text.

    Bug fix: the Python 3 branch previously ignored the ``encoding`` and
    ``errors`` arguments and decoded with strict UTF-8, so bad bytes raised
    UnicodeDecodeError despite the default ``errors='ignore'``.
    """
    if sys.version_info[0] < 3:
        if isinstance(s, unicode): return s
        if isinstance(s, str): return s.decode(encoding, errors)
        return unicode(s)
    if isinstance(s, bytes):
        return s.decode(encoding, errors)
    return s
def safejoin(list_of_str_or_unicode):
    ## can accept a list of str objects, or a list of unicodes.
    ## safely joins them, returning the same type.
    # Bug fix: the bytes branch joined bytes items with a *str* separator,
    # which raises TypeError on Python 3; use a bytes separator instead.
    xx = list_of_str_or_unicode
    if not xx:
        return u""
    if isinstance(xx[0], str):
        return ' '.join(xx)
    if isinstance(xx[0], bytes):
        return b' '.join(xx)
    if sys.version_info[0] < 3:
        if isinstance(xx[0], unicode):
            return u' '.join(xx)
    raise Exception("Bad input to safejoin:", list_of_str_or_unicode)
#########
class NLTKTagger:
    '''
    class that supplies part of speech tags using NLTK
    note: avoids the NLTK downloader (see __init__ method)
    '''

    def __init__(self):
        # Imports live here so the module loads even without nltk installed;
        # get_stdeng_nltk_tagger catches the resulting ImportError.
        import nltk
        from nltk.tag import PerceptronTagger
        from nltk.tokenize import TreebankWordTokenizer
        #return pkgutil.get_data('scattertext',
        #                        'data/viz/semiotic_new.html').decode('utf-8')
        # Pickled models are bundled inside the installed scattertext
        # package, bypassing the NLTK downloader entirely.
        path = os.path.dirname(sys.modules['scattertext'].__file__)+'/data/'
        tokenizer_fn = path + 'punkt.english.pickle'
        tagger_fn = path + 'averaged_perceptron_tagger.pickle'
        #tokenizer_fn = os.path.abspath(resource_filename('scattertext.data', 'punkt.english.pickle'))
        #tagger_fn = os.path.abspath(resource_filename('scattertext.data', 'averaged_perceptron_tagger.pickle'))
        # Load the tagger
        self.tagger = PerceptronTagger(load=False)
        self.tagger.load(tagger_fn)
        # note: nltk.word_tokenize calls the TreebankWordTokenizer, but uses the downloader.
        # Calling the TreebankWordTokenizer like this allows skipping the downloader.
        # It seems the TreebankWordTokenizer uses PTB tokenization = regexes. i.e. no downloads
        # https://github.com/nltk/nltk/blob/develop/nltk/tokenize/treebank.py#L25
        self.tokenize = TreebankWordTokenizer().tokenize
        self.sent_detector = nltk.data.load(tokenizer_fn)

    # http://www.nltk.org/book/ch05.html
    def tag_text(self, text):
        '''take input text and return tokens w/ part of speech tags using NLTK'''
        # putting import here instead of top of file b.c. not all will have nltk installed
        sents = self.sent_detector.tokenize(text)  # TODO: this will fail on some unicode chars. I think assumes ascii
        word_pos_pairs = []
        all_tokens = []
        for sent in sents:
            tokens = self.tokenize(sent)
            all_tokens = all_tokens + tokens
            word_pos_pairs = word_pos_pairs + self.tagger.tag(tokens)
        return {'tokens': all_tokens, 'pos': [tag for (w, tag) in word_pos_pairs]}

    def tag_tokens(self, tokens):
        # Tag pre-tokenized input, returning the same shape as tag_text.
        word_pos_pairs = self.tagger.tag(tokens)
        return {'tokens': tokens, 'pos': [tag for (w, tag) in word_pos_pairs]}
def get_stdeng_nltk_tagger(suppress_errors=False):
    """Build (and smoke-test) the NLTK-based standard-English tagger.

    :param bool suppress_errors: when True, return None instead of raising
        if NLTK or its data files are unavailable.
    :return: a working NLTKTagger, or None on suppressed failure.
    """
    try:
        tagger = NLTKTagger()
        # Exercise the full pipeline once so missing data files fail here.
        tagger.tag_text("The red cat sat down.")
        # Bug fix: previously returned a second, freshly built NLTKTagger(),
        # which reloaded the large pickled models; return the instance we
        # just validated instead.
        return tagger
    except ImportError:
        if not suppress_errors: raise
    except LookupError:
        if not suppress_errors: raise
    return None
# Module-level cache: holds the singleton SpacyTagger built by
# get_stdeng_spacy_tagger() so the spacy model is only loaded once.
SPACY_WRAPPER = None
class SpacyTagger:
    """POS tagger backed by spaCy (https://spacy.io/).

    The caller is responsible for assigning a loaded spacy pipeline to
    ``spacy_object`` before tagging (see get_stdeng_spacy_tagger).
    """

    def __init__(self):
        # Filled in externally with a loaded spacy model.
        self.spacy_object = None

    def tag_text(self, text):
        """Run spacy over *text*; return parallel 'tokens'/'pos' lists."""
        doc = self.spacy_object(unicodify(text))
        tags = [tok.tag_ for tok in doc]
        words = [tok.text for tok in doc]
        return {'pos': tags, 'tokens': words}

    def tag_tokens(self, tokens):
        """Tag a pre-tokenized list by re-joining it and re-running spacy.

        TODO: would be better to force spacy to use the given tokenization.
        """
        # spacy wants unicode objects only; guard against bytes from the caller.
        joined = unicodify(safejoin(tokens))
        return self.tag_text(joined)
def get_stdeng_spacy_tagger(suppress_errors=False):
    """Return a cached SpacyTagger for standard English, building it once.

    :param bool suppress_errors: when True, return None instead of raising
        if spacy or its 'en' model is unavailable.
    :return: the shared SpacyTagger, or None on suppressed failure.
    """
    global SPACY_WRAPPER
    if SPACY_WRAPPER is not None:
        return SPACY_WRAPPER
    try:
        import spacy
        # Bug fix: previously the wrapper was stored in SPACY_WRAPPER before
        # spacy.load() ran, so a load failure left a broken, half-initialized
        # tagger in the cache and later calls returned it. Only publish the
        # wrapper once the model has loaded successfully.
        wrapper = SpacyTagger()
        wrapper.spacy_object = spacy.load('en', parser=False, entity=False)
        SPACY_WRAPPER = wrapper
        return SPACY_WRAPPER
    except ImportError:
        if not suppress_errors: raise
    except RuntimeError:
        ## this seems to happen if the 'en' model is not installed. it might
        ## look like this:
        # RuntimeError: Model 'en' not installed. Please run 'python -m spacy.en.download' to install latest compatible model.
        if not suppress_errors: raise
    return None
# Registry of tagger factories, keyed by the string names accepted by
# get_phrases(tagger=...). Each value is a zero-arg callable that returns a
# tagger wrapper (or raises if the backend is unavailable).
TAGGER_NAMES = {
    'nltk': get_stdeng_nltk_tagger,
    'spacy': get_stdeng_spacy_tagger,
    # 'twitter': None,
}
def get_phrases(text=None, tokens=None, postags=None, tagger='nltk', grammar='SimpleNP', regex=None, minlen=2, maxlen=8,
                output='counts'):
    """Give a text (or POS tag sequence), return the phrases matching the given
    grammar.  Works on documents or sentences.
    Returns a dict with one or more keys with the phrase information.

    text: the text of the document.  If supplied, we will try to POS tag it.
    You can also do your own tokenization and/or tagging and supply them as
    'tokens' and/or 'postags', which are lists of strings (of the same length).
     - Must supply both to get phrase counts back.
     - With only postags, can get phrase token spans back.
     - With only tokens, we will try to POS-tag them if possible.

    output: a string, or list of strings, of information to return. Options include:
     - counts: a Counter with phrase frequencies.  (default)
     - token_spans: a list of the token spans of each matched phrase.  This is
       a list of (start,end) pairs of integers, which refer to token positions.
     - pos, tokens can be returned too.

    tagger: if you're passing in raw text, can supply your own tagger, from one
    of the get_*_tagger() functions.  If this is not supplied, we will try to load one.

    grammar: the grammar to use.  Only one option right now...

    regex: a custom regex to use, instead of a premade grammar.  Currently,
    this must work on the 5-tag system described near the top of this file.
    """
    ## try to get values for both 'postags' and 'tokens', parallel lists of strings
    if postags is None:
        # Bug fix: only look up *names* in the registry. The old code wrapped
        # the lookup in a bare except, which (a) mislabeled real construction
        # errors (e.g. ImportError) as "unsupported tagger" and (b) rejected
        # caller-supplied tagger objects despite the docstring allowing them.
        if isinstance(tagger, str):
            if tagger not in TAGGER_NAMES:
                raise Exception("We don't support tagger %s" % tagger)
            tagger = TAGGER_NAMES[tagger]()
        # otherwise, assume it's one of our wrapper *Tagger objects
        if tokens is not None:
            d = tagger.tag_tokens(tokens)
        elif text is not None:
            d = tagger.tag_text(text)
        else:
            raise Exception("Need to supply text or tokens.")
        postags = d['pos']
        tokens = d['tokens']
    if regex is None:
        if grammar == 'SimpleNP':
            regex = SimpleNP
        else:
            assert False, "Don't know grammar %s" % grammar

    # Bug fix: the selected grammar/regex was computed but never handed to the
    # matcher, so custom grammars and regexes were silently ignored.
    phrase_tokspans = extract_ngram_filter(postags, regex=regex, minlen=minlen, maxlen=maxlen)

    ## Handle multiple possible return info outputs
    if isinstance(output, str):
        output = [output]
    our_options = set()
    def retopt(x):
        our_options.add(x)
        return x in output
    ret = {}
    ret['num_tokens'] = len(postags)
    if retopt('token_spans'):
        ret['token_spans'] = phrase_tokspans
    if retopt('counts'):
        counts = Counter()
        for (start, end) in phrase_tokspans:
            # range() instead of py2-only xrange so this runs on Python 3.
            phrase = safejoin([tokens[i] for i in range(start, end)])
            phrase = phrase.lower()
            counts[phrase] += 1
        ret['counts'] = counts
    if retopt('pos'):
        ret['pos'] = postags
    if retopt('tokens'):
        ret['tokens'] = tokens

    xx = set(output) - our_options
    if xx:
        raise Exception("Don't know how to handle output options: %s" % list(xx))
    return ret
|
import logging
from datetime import datetime
import hashlib
import os
import re
import socket
import struct
import requests
from requests.exceptions import SSLError
from six.moves.xmlrpc_client import ProtocolError
from .exceptions import ServiceUnavailable
logger = logging.getLogger(__name__)
def hash_opensubtitles(video_path):
    """Compute a hash using OpenSubtitles' algorithm.

    The hash is the file size plus the sums of the first and last 64 KiB of
    the file read as little-endian 64-bit integers, kept to 64 bits.

    :param str video_path: path of the video.
    :return: the hash, or None if the file is smaller than 128 KiB.
    :rtype: str
    """
    chunk_size = 65536
    longlong_size = struct.calcsize(b'<q')
    with open(video_path, 'rb') as f:
        filesize = os.path.getsize(video_path)
        if filesize < chunk_size * 2:
            return
        filehash = filesize

        def accumulate(acc):
            # Sum one 64 KiB chunk as little-endian int64s, modulo 2**64.
            for _ in range(chunk_size // longlong_size):
                (value,) = struct.unpack(b'<q', f.read(longlong_size))
                acc = (acc + value) & 0xFFFFFFFFFFFFFFFF
            return acc

        filehash = accumulate(filehash)
        f.seek(max(0, filesize - chunk_size), 0)
        filehash = accumulate(filehash)
    return '%016x' % filehash
def hash_thesubdb(video_path):
    """Compute a hash using TheSubDB's algorithm.

    MD5 of the first 64 KiB concatenated with the last 64 KiB of the file.

    :param str video_path: path of the video.
    :return: the hash, or None if the file is smaller than 64 KiB.
    :rtype: str
    """
    readsize = 64 * 1024
    if os.path.getsize(video_path) < readsize:
        return
    with open(video_path, 'rb') as f:
        head = f.read(readsize)
        f.seek(-readsize, os.SEEK_END)
        tail = f.read(readsize)
    return hashlib.md5(head + tail).hexdigest()
def hash_napiprojekt(video_path):
    """Compute a hash using NapiProjekt's algorithm.

    MD5 of the first 10 MiB of the file.

    :param str video_path: path of the video.
    :return: the hash.
    :rtype: str
    """
    chunk = 1024 * 1024 * 10
    with open(video_path, 'rb') as f:
        return hashlib.md5(f.read(chunk)).hexdigest()
def hash_shooter(video_path):
    """Compute a hash using Shooter's algorithm.

    Joins the MD5 digests of four 4 KiB probes: near the start, at 2/3, at
    1/3, and near the end of the file.

    :param string video_path: path of the video.
    :return: the semicolon-joined hash, or None for files under 8 KiB.
    :rtype: string
    """
    filesize = os.path.getsize(video_path)
    readsize = 4096
    if filesize < readsize * 2:
        return None
    offsets = (readsize, filesize // 3 * 2, filesize // 3, filesize - readsize * 2)
    with open(video_path, 'rb') as f:
        digests = []
        for offset in offsets:
            f.seek(offset)
            digests.append(hashlib.md5(f.read(readsize)).hexdigest())
    return ';'.join(digests)
def sanitize(string, ignore_characters=None):
    """Sanitize a string to strip special characters.

    :param str string: the string to sanitize.
    :param set ignore_characters: characters to ignore.
    :return: the sanitized string (lower-cased, whitespace-collapsed), or
        None when given None.
    :rtype: str
    """
    # only deal with strings
    if string is None:
        return

    ignored = ignore_characters or set()
    # punctuation that becomes a single space
    to_space = {'-', ':', '(', ')', '.', ','} - ignored
    if to_space:
        string = re.sub(r'[%s]' % re.escape(''.join(to_space)), ' ', string)
    # punctuation that is dropped outright
    to_drop = {'\''} - ignored
    if to_drop:
        string = re.sub(r'[%s]' % re.escape(''.join(to_drop)), '', string)
    # collapse whitespace runs, then strip and lower case
    return re.sub(r'\s+', ' ', string).strip().lower()
def sanitize_release_group(string):
    """Sanitize a `release_group` string to remove content in square brackets.

    :param str string: the release group to sanitize.
    :return: the sanitized release group (upper-cased), or None for None.
    :rtype: str
    """
    # only deal with strings
    if string is None:
        return
    # drop bracketed tags like "[480p]" before normalizing
    without_tags = re.sub(r'\[\w+\]', '', string)
    return without_tags.strip().upper()
def timestamp(date):
    """Get the timestamp of the `date`, python2/3 compatible.

    :param datetime.datetime date: the utc date.
    :return: the timestamp of the date (seconds since the Unix epoch).
    :rtype: float
    """
    epoch = datetime(1970, 1, 1)
    delta = date - epoch
    return delta.total_seconds()
def matches_title(actual, title, alternative_titles):
    """Whether `actual` matches the `title` or `alternative_titles`.

    :param str actual: the actual title to check.
    :param str title: the expected title.
    :param list alternative_titles: the expected alternative_titles.
    :return: whether the actual title matches the title or alternative_titles.
    :rtype: bool
    """
    actual = sanitize(actual)
    title = sanitize(title)
    if actual == title:
        return True
    # fall back to sanitized alternatives: either an exact match, or the
    # remainder after `title` is itself a known alternative
    alternatives = {sanitize(alt) for alt in alternative_titles}
    if actual in alternatives:
        return True
    if not actual.startswith(title):
        return False
    return actual[len(title):].strip() in alternatives
def handle_exception(e, msg):
    """Handle exception, logging the proper error message followed by `msg`.

    Exception traceback is only logged for specific cases.

    :param exception e: The exception to handle.
    :param str msg: The message to log.
    """
    # Ordered dispatch: the first matching isinstance branch wins.
    if isinstance(e, (requests.Timeout, socket.timeout)):
        logger.error('Request timed out. %s', msg)
    elif isinstance(e, (ServiceUnavailable, ProtocolError)):
        # OpenSubtitles raises xmlrpclib.ProtocolError when unavailable
        logger.error('Service unavailable. %s', msg)
    elif isinstance(e, requests.exceptions.HTTPError):
        # 5xx responses are treated as routine transient server failures, so
        # the traceback is only attached for non-5xx status codes.
        logger.error('HTTP error %r. %s', e.response.status_code, msg,
                     exc_info=e.response.status_code not in range(500, 600))
    elif isinstance(e, SSLError):
        # SSL read timeouts are routine; any other SSL error gets a traceback.
        logger.error('SSL error %r. %s', e.args[0], msg,
                     exc_info=e.args[0] != 'The read operation timed out')
    else:
        # Unknown failure: log at error level with the full traceback.
        logger.exception('Unexpected error. %s', msg)
|
from django.core.exceptions import PermissionDenied
from django.shortcuts import redirect
from django.urls import reverse
from django.utils.translation import gettext as _
from django.views.generic import ListView, UpdateView
from weblate.addons.models import ADDONS, Addon
from weblate.utils import messages
from weblate.utils.views import ComponentViewMixin
class AddonViewMixin(ComponentViewMixin):
    """Shared behavior for addon views: permission-checked querysets and
    redirects back to the component's addon listing."""

    def get_queryset(self):
        """Return addons of the current component, enforcing edit rights."""
        component = self.get_component()
        if not self.request.user.has_perm("component.edit", component):
            raise PermissionDenied("Can not edit component")
        # Stash the component so templates and other methods can reuse it.
        self.kwargs["component_obj"] = component
        return Addon.objects.filter_component(component)

    def get_success_url(self):
        """URL of the addon list for the current component."""
        component = self.get_component()
        url_kwargs = {
            "project": component.project.slug,
            "component": component.slug,
        }
        return reverse("addons", kwargs=url_kwargs)

    def redirect_list(self, message=None):
        """Redirect to the addon list, optionally flashing an error message."""
        if message:
            messages.error(self.request, message)
        return redirect(self.get_success_url())
class AddonList(AddonViewMixin, ListView):
    """List installed addons for a component and handle new installations."""

    # No pagination: the addon list per component is small.
    paginate_by = None
    model = Addon

    def get_context_data(self, **kwargs):
        """Add the component and the installable addons to the context."""
        result = super().get_context_data(**kwargs)
        component = self.kwargs["component_obj"]
        result["object"] = component
        # Names of addons already installed on this component.
        installed = {x.addon.name for x in result["object_list"]}
        # Offer only addons the user may install and that are not already
        # present, unless the addon allows multiple instances.
        result["available"] = sorted(
            (
                x
                for x in ADDONS.values()
                if x.can_install(component, self.request.user)
                and (x.multiple or x.name not in installed)
            ),
            key=lambda x: x.name,
        )
        return result

    def post(self, request, **kwargs):
        """Install the addon named in the POST data.

        Flow: validate name/permissions, then either install directly (no
        settings form), process a submitted settings form, or render the
        settings form for the user to fill in.
        """
        component = self.get_component()
        name = request.POST.get("name")
        addon = ADDONS.get(name)
        installed = {x.addon.name for x in self.get_queryset()}
        # Reject unknown addons, missing permissions, and duplicates of
        # single-instance addons.
        if (
            not name
            or addon is None
            or not addon.can_install(component, request.user)
            or (name in installed and not addon.multiple)
        ):
            return self.redirect_list(_("Invalid addon name specified!"))
        form = None
        if addon.settings_form is None:
            # No configuration needed; install immediately.
            addon.create(component)
            return self.redirect_list()
        if "form" in request.POST:
            # The settings form was submitted; validate and save it.
            form = addon.get_add_form(request.user, component, data=request.POST)
            if form.is_valid():
                instance = form.save()
                if addon.stay_on_create:
                    # Some addons need follow-up steps; keep the user on the
                    # addon's detail page after installing.
                    messages.info(
                        self.request,
                        _("Addon installed, please review integration instructions."),
                    )
                    return redirect(instance)
                return self.redirect_list()
        else:
            form = addon.get_add_form(request.user, component)
        # First visit (or invalid form submission): show the settings form.
        addon.pre_install(component, request)
        return self.response_class(
            request=self.request,
            template=["addons/addon_detail.html"],
            context={
                "addon": addon,
                "form": form,
                "object": self.kwargs["component_obj"],
            },
        )
class AddonDetail(AddonViewMixin, UpdateView):
    """Configure or remove a single installed addon."""

    model = Addon
    template_name_suffix = "_detail"

    def get_form(self, form_class=None):
        """Use the addon's own settings form instead of a plain ModelForm."""
        form_kwargs = self.get_form_kwargs()
        return self.object.addon.get_settings_form(self.request.user, **form_kwargs)

    def get_context_data(self, **kwargs):
        """Expose the component, the addon model instance, and its wrapper."""
        context = super().get_context_data(**kwargs)
        context["object"] = self.object.component
        context["instance"] = self.object
        context["addon"] = self.object.addon
        return context

    def post(self, request, *args, **kwargs):
        """Handle deletion specially; defer everything else to UpdateView."""
        obj = self.get_object()
        if "delete" not in request.POST:
            return super().post(request, *args, **kwargs)
        obj.delete()
        return self.redirect_list()
|
from absl import flags
FLAGS = flags.FLAGS  # global absl flag registry for this benchmark run
# Upstream repository cloned by Install().
MXNET_GIT = 'https://github.com/apache/incubator-mxnet.git'
# Pin the benchmark to a specific MXNet revision for reproducibility.
flags.DEFINE_string('mxnet_commit_hash',
                    '2700ddbbeef212879802f7f0c0812192ec5c2b77',
                    'git commit hash of desired mxnet commit.')
def Install(vm):
    """Installs MXNet on the VM."""
    vm.InstallPackages('git')
    # Clone upstream, then pin the working tree to the configured commit.
    clone_cmd = 'git clone %s' % MXNET_GIT
    vm.RemoteCommand(clone_cmd, should_log=True)
    checkout_cmd = ('cd incubator-mxnet && git checkout %s' %
                    FLAGS.mxnet_commit_hash)
    vm.RemoteCommand(checkout_cmd)
def Uninstall(vm):
    """Uninstalls MXNet on the VM."""
    # Bug fix: this previously removed 'tpu-demos' (copy-paste from another
    # package); Install() clones into 'incubator-mxnet', so remove that.
    vm.RemoteCommand('rm -rf incubator-mxnet', should_log=True)
def GetCommit(vm):
    """Returns the MXNet commit hash currently checked out on the VM."""
    command = 'cd incubator-mxnet && git rev-parse HEAD'
    stdout, _ = vm.RemoteCommand(command, should_log=True)
    return stdout
|
from collections import OrderedDict
import math
from auto_ml import utils
import pandas as pd
from sklearn.ensemble import GradientBoostingRegressor, GradientBoostingClassifier
from sklearn.metrics import mean_squared_error, make_scorer, brier_score_loss, accuracy_score, explained_variance_score, mean_absolute_error, median_absolute_error, r2_score, log_loss, roc_auc_score
import numpy as np
from tabulate import tabulate
# String forms of values treated as missing/invalid when scoring: NaN/inf in
# several spellings, None/null variants, and the empty string. Comparing via
# str() lets one set cover floats, numpy scalars, and plain strings alike.
bad_vals_as_strings = set([str(float('nan')), str(float('inf')), str(float('-inf')), 'None', 'none', 'NaN', 'NAN', 'nan', 'NULL', 'null', '', 'inf', '-inf', 'np.nan', 'numpy.nan'])
def advanced_scoring_classifiers(probas, actuals, name=None):
    """Print a diagnostic report for a classifier's probability predictions.

    Reports brier score loss, accuracy at a 0.5 threshold, a confusion
    matrix, per-class predictive value, and (best-effort) calibration
    buckets of predicted vs. actual probability.

    :param probas: predicted probabilities; either flat (P(positive)) or
        nested per-class lists — both shapes are handled below.
    :param actuals: true binary labels, parallel to probas.
    :param name: optional model name printed with the report.
    :return: the brier score loss (float; lower is better).
    """
    # pandas Series don't play nice here. Make sure our actuals list is indeed a list
    actuals = list(actuals)
    # NOTE(review): 'predictions' is assigned but unused below; the predicted
    # labels are derived from 'probas' instead.
    predictions = list(probas)
    print('Here is our brier-score-loss, which is the default value we optimized for while training, and is the value returned from .score() unless you requested a custom scoring metric')
    print('It is a measure of how close the PROBABILITY predictions are.')
    if name != None:
        print(name)
    # Sometimes we will be given "flattened" probabilities (only the probability of our positive label), while other times we might be given "nested" probabilities (probabilities of both positive and negative, in a list, for each item).
    try:
        probas = [proba[1] for proba in probas]
    except:
        # Best-effort: flat probabilities raise here (indexing a scalar);
        # keep them as-is.
        pass
    brier_score = brier_score_loss(actuals, probas)
    print(format(brier_score, '.4f'))
    print('\nHere is the trained estimator\'s overall accuracy (when it predicts a label, how frequently is that the correct label?)')
    # Threshold at 0.5 to turn probabilities into hard 0/1 labels.
    predicted_labels = []
    for pred in probas:
        if pred >= 0.5:
            predicted_labels.append(1)
        else:
            predicted_labels.append(0)
    print(format(accuracy_score(y_true=actuals, y_pred=predicted_labels) * 100, '.1f') + '%')
    print('\nHere is a confusion matrix showing predictions vs. actuals by label:')
    #it would make sense to use sklearn's confusion_matrix here but it apparently has no labels
    #took this idea instead from: http://stats.stackexchange.com/a/109015
    conf = pd.crosstab(pd.Series(actuals), pd.Series(predicted_labels), rownames=['v Actual v'], colnames=['Predicted >'], margins=True)
    print(conf)
    #I like knowing the per class accuracy to see if the model is mishandling imbalanced data.
    #For example, if it is predicting 100% of observations to one class just because it is the majority
    #Wikipedia seems to call that Positive/negative predictive value
    print('\nHere is predictive value by class:')
    df = pd.concat([pd.Series(actuals,name='actuals'),pd.Series(predicted_labels,name='predicted')],axis=1)
    targets = list(df.predicted.unique())
    for i in range(0,len(targets)):
        tot_count = len(df[df.predicted==targets[i]])
        true_count = len(df[(df.predicted==targets[i]) & (df.actuals == targets[i])])
        print('Class: ',targets[i],'=',float(true_count)/tot_count)
    # qcut is super fickle. so, try to use 10 buckets first, then 5 if that fails, then nothing
    try:
        try:
            bucket_results = pd.qcut(probas, q=10, duplicates='drop')
        except:
            bucket_results = pd.qcut(probas, q=5, duplicates='drop')
        df_probas = pd.DataFrame(probas, columns=['Predicted Probability Of Bucket'])
        df_probas['Actual Probability of Bucket'] = actuals
        df_probas['Bucket Edges'] = bucket_results
        df_buckets = df_probas.groupby(df_probas['Bucket Edges'])
        try:
            print(tabulate(df_buckets.mean(), headers='keys', floatfmt='.4f', tablefmt='psql', showindex='always'))
        except TypeError:
            # Older tabulate versions don't accept showindex.
            print(tabulate(df_buckets.mean(), headers='keys', floatfmt='.4f', tablefmt='psql'))
        print('\nHere is the accuracy of our trained estimator at each level of predicted probabilities')
        print('For a verbose description of what this means, please visit the docs:')
        print('http://auto-ml.readthedocs.io/en/latest/analytics.html#interpreting-predicted-probability-buckets-for-classifiers')
    except:
        # The calibration table is optional; skip it if qcut/grouping fails.
        pass
    print('\n\n')
    return brier_score
def calculate_and_print_differences(predictions, actuals, name=None):
    """Print counts and averages of over- and under-predictions.

    :param predictions: list of predicted numeric values.
    :param actuals: list of true numeric values, parallel to predictions.
    :param name: optional label printed before the stats.
    :return: None; the report goes to stdout.
    """
    pos_differences = []
    neg_differences = []
    # Technically, we're ignoring cases where we are spot on
    for idx, pred in enumerate(predictions):
        difference = pred - actuals[idx]
        if difference > 0:
            pos_differences.append(difference)
        elif difference < 0:
            neg_differences.append(difference)

    # Idiom fix: identity check with None instead of `!= None`.
    if name is not None:
        print(name)
    print('Count of positive differences (prediction > actual):')
    print(len(pos_differences))
    print('Count of negative differences:')
    print(len(neg_differences))
    # Idiom fix: truthiness instead of `len(...) > 0`.
    if pos_differences:
        print('Average positive difference:')
        print(sum(pos_differences) * 1.0 / len(pos_differences))
    if neg_differences:
        print('Average negative difference:')
        print(sum(neg_differences) * 1.0 / len(neg_differences))
def advanced_scoring_regressors(predictions, actuals, verbose=2, name=None):
    """Print a diagnostic report for a regressor's predictions.

    Reports RMSE, mean/median of predictions and actuals, MAE, median
    absolute error, explained variance, R-squared, the over/under-prediction
    breakdown, and (when verbose > 2) per-decile stats sorted by predicted
    value.

    :param predictions: list of predicted numeric values.
    :param actuals: list of true numeric values, parallel to predictions.
    :param verbose: decile breakdown is printed only when > 2.
    :param name: optional model name printed with the report.
    :return: the overall RMSE (float).
    """
    # pandas Series don't play nice here. Make sure our actuals list is indeed a list
    actuals = list(actuals)
    predictions = list(predictions)
    print('\n\n***********************************************')
    if name != None:
        print(name)
    print('Advanced scoring metrics for the trained regression model on this particular dataset:\n')
    # 1. overall RMSE
    print('Here is the overall RMSE for these predictions:')
    rmse = mean_squared_error(actuals, predictions)**0.5
    print(rmse)
    # 2. overall avg predictions
    print('\nHere is the average of the predictions:')
    print(sum(predictions) * 1.0 / len(predictions))
    # 3. overall avg actuals
    print('\nHere is the average actual value on this validation set:')
    print(sum(actuals) * 1.0 / len(actuals))
    # 2(a). median predictions
    print('\nHere is the median prediction:')
    print(np.median(predictions))
    # 3(a). median actuals
    print('\nHere is the median actual value:')
    print(np.median(actuals))
    # 4. avg differences (not RMSE)
    print('\nHere is the mean absolute error:')
    print(mean_absolute_error(actuals, predictions))
    print('\nHere is the median absolute error (robust to outliers):')
    print(median_absolute_error(actuals, predictions))
    print('\nHere is the explained variance:')
    print(explained_variance_score(actuals, predictions))
    print('\nHere is the R-squared value:')
    print(r2_score(actuals, predictions))
    # 5. pos and neg differences
    calculate_and_print_differences(predictions=predictions, actuals=actuals, name=name)
    actuals_preds = list(zip(actuals, predictions))
    # Sort by PREDICTED value, since this is what what we will know at the time we make a prediction
    actuals_preds.sort(key=lambda pair: pair[1])
    actuals_sorted = [act for act, pred in actuals_preds]
    predictions_sorted = [pred for act, pred in actuals_preds]
    if verbose > 2:
        print('Here\'s how the trained predictor did on each successive decile (ten percent chunk) of the predictions:')
        for i in range(1,11):
            print('\n**************')
            print('Bucket number:')
            print(i)
            # There's probably some fenceposting error here
            min_idx = int((i - 1) / 10.0 * len(actuals_sorted))
            max_idx = int(i / 10.0 * len(actuals_sorted))
            actuals_for_this_decile = actuals_sorted[min_idx:max_idx]
            predictions_for_this_decile = predictions_sorted[min_idx:max_idx]
            print('Avg predicted val in this bucket')
            print(sum(predictions_for_this_decile) * 1.0 / len(predictions_for_this_decile))
            print('Avg actual val in this bucket')
            print(sum(actuals_for_this_decile) * 1.0 / len(actuals_for_this_decile))
            print('RMSE for this bucket')
            print(mean_squared_error(actuals_for_this_decile, predictions_for_this_decile)**0.5)
            calculate_and_print_differences(predictions_for_this_decile, actuals_for_this_decile)
    print('')
    print('\n***********************************************\n\n')
    return rmse
def rmse_func(y, predictions):
    """Root-mean-squared error: sqrt of sklearn's mean_squared_error."""
    mse = mean_squared_error(y, predictions)
    return mse ** 0.5
# Maps user-facing metric names to scoring callables, all taking
# (y_true, y_pred). Shared by RegressionScorer and ClassificationScorer.
scoring_name_function_map = {
    'rmse': rmse_func
    , 'median_absolute_error': median_absolute_error
    , 'r2': r2_score
    , 'r-squared': r2_score
    , 'mean_absolute_error': mean_absolute_error
    , 'accuracy': accuracy_score
    , 'accuracy_score': accuracy_score
    , 'log_loss': log_loss
    , 'roc_auc': roc_auc_score
    , 'brier_score_loss': brier_score_loss
}
class RegressionScorer(object):
    """Scorer wrapper for regression estimators.

    Wraps either a named metric from scoring_name_function_map or a custom
    callable, and returns negated scores so "higher is better" for
    grid-search style maximization.
    """

    def __init__(self, scoring_method=None):
        """:param scoring_method: metric name (default 'rmse') or a
        callable(y, predictions) returning a float."""
        if scoring_method is None:
            scoring_method = 'rmse'
        # Fix: scoring_method was previously assigned twice (before the
        # branch and again inside the else); once is enough.
        self.scoring_method = scoring_method
        if callable(scoring_method):
            self.scoring_func = scoring_method
        else:
            self.scoring_func = scoring_name_function_map[scoring_method]

    def get(self, prop_name, default=None):
        """Dict-style attribute access with a default (idiom: getattr's
        three-argument form replaces the try/except AttributeError)."""
        return getattr(self, prop_name, default)

    def score(self, estimator, X, y, took_log_of_y=False, advanced_scoring=False, verbose=2, name=None):
        """Score `estimator` on (X, y) and return the negated metric.

        If the metric raises ValueError on the raw values, rows whose y or
        prediction is NaN/inf/None are dropped (with a warning) and the
        metric is retried on the remainder.
        """
        X, y = utils.drop_missing_y_vals(X, y, output_column=None)
        if isinstance(estimator, GradientBoostingRegressor):
            # sklearn's GBM needs a dense array, not a sparse matrix.
            X = X.toarray()
        predictions = estimator.predict(X)

        if took_log_of_y:
            # Undo the log-transform that was applied to y during training.
            for idx, val in enumerate(predictions):
                predictions[idx] = math.exp(val)

        try:
            score = self.scoring_func(y, predictions)
        except ValueError:
            # Retry after filtering rows with bad values on either side.
            # A set gives O(1) membership checks in the filters below.
            bad_val_indices = set()
            for idx, val in enumerate(y):
                if str(val) in bad_vals_as_strings or str(predictions[idx]) in bad_vals_as_strings:
                    bad_val_indices.add(idx)
            predictions = [val for idx, val in enumerate(predictions) if idx not in bad_val_indices]
            y = [val for idx, val in enumerate(y) if idx not in bad_val_indices]
            print('Found ' + str(len(bad_val_indices)) + ' null or infinity values in the predicted or y values. We will ignore these, and report the score on the rest of the dataset')
            score = self.scoring_func(y, predictions)

        # Idiom fix: truthiness instead of `== True`.
        if advanced_scoring:
            if hasattr(estimator, 'name'):
                print(estimator.name)
            advanced_scoring_regressors(predictions, y, verbose=verbose, name=name)
        # Negate so callers can maximize regardless of the metric's direction.
        return -1 * score
class ClassificationScorer(object):
    """Scorer wrapper for classifiers that predict probabilities.

    Wraps a named metric from scoring_name_function_map or a custom
    callable, sanitizes out-of-range/invalid probabilities, and returns
    negated scores so "higher is better" for maximization.
    """

    def __init__(self, scoring_method=None):
        # :param scoring_method: metric name (default 'brier_score_loss')
        # or a callable(y, predictions) returning a float.
        if scoring_method is None:
            scoring_method = 'brier_score_loss'
        self.scoring_method = scoring_method
        if callable(scoring_method):
            self.scoring_func = scoring_method
        else:
            self.scoring_func = scoring_name_function_map[scoring_method]

    def get(self, prop_name, default=None):
        """Dict-style attribute access with a default."""
        try:
            return getattr(self, prop_name)
        except AttributeError:
            return default

    def clean_probas(self, probas):
        """Warn about, then clamp, probabilities outside [0, 1].

        Handles both flat lists of floats and nested per-class lists;
        NaN/inf/None values become 0 before clamping.
        """
        print('Warning: We have found some values in the predicted probabilities that fall outside the range {0, 1}')
        print('This is likely the result of a model being trained on too little data, or with a bad set of hyperparameters. If you get this warning while doing a hyperparameter search, for instance, you can probably safely ignore it')
        print('We will cap those values at 0 or 1 for the purposes of scoring, but you should be careful to have similar safeguards in place in prod if you use this model')
        if not isinstance(probas[0], list):
            # Flat probabilities: replace bad values with 0, then clamp.
            probas = [val if str(val) not in bad_vals_as_strings else 0 for val in probas]
            probas = [min(max(pred, 0), 1) for pred in probas]
            return probas
        else:
            # Nested per-class probabilities: clean each inner value.
            cleaned_probas = []
            for proba_tuple in probas:
                cleaned_tuple = []
                for item in proba_tuple:
                    if str(item) in bad_vals_as_strings:
                        item = 0
                    cleaned_tuple.append(max(min(item, 1), 0))
                cleaned_probas.append(cleaned_tuple)
            return cleaned_probas

    def score(self, estimator, X, y, advanced_scoring=False):
        """Score `estimator` on (X, y) and return the negated metric.

        Falls back twice on ValueError: first dropping rows whose y is
        NaN/inf/None, then clamping out-of-range probabilities.
        """
        X, y = utils.drop_missing_y_vals(X, y, output_column=None)
        if isinstance(estimator, GradientBoostingClassifier):
            # sklearn's GBM needs a dense array, not a sparse matrix.
            X = X.toarray()
        predictions = estimator.predict_proba(X)
        if self.scoring_method == 'brier_score_loss':
            # At the moment, Microsoft's LightGBM returns probabilities > 1 and < 0, which can break some scoring functions. So we have to take the max of 1 and the pred, and the min of 0 and the pred.
            probas = [max(min(row[1], 1), 0) for row in predictions]
            predictions = probas
        try:
            score = self.scoring_func(y, predictions)
        except ValueError as e:
            # First fallback: drop rows whose y value is invalid.
            bad_val_indices = []
            for idx, val in enumerate(y):
                if str(val) in bad_vals_as_strings:
                    bad_val_indices.append(idx)
            predictions = [val for idx, val in enumerate(predictions) if idx not in bad_val_indices]
            y = [val for idx, val in enumerate(y) if idx not in bad_val_indices]
            print('Found ' + str(len(bad_val_indices)) + ' null or infinity values in the y values. We will ignore these, and report the score on the rest of the dataset')
            try:
                score = self.scoring_func(y, predictions)
            except ValueError:
                # Sometimes, particularly for a badly fit model using either too little data, or a really bad set of hyperparameters during a grid search, we can predict probas that are > 1 or < 0. We'll cap those here, while warning the user about them, because they're unlikely to occur in a model that's properly trained with enough data and reasonable params
                predictions = self.clean_probas(predictions)
                score = self.scoring_func(y, predictions)
        if advanced_scoring:
            # Callers doing advanced scoring also want the (cleaned) probas.
            return (-1 * score, predictions)
        else:
            return -1 * score
|
import logging
import re
from pyialarm import IAlarm
import voluptuous as vol
import homeassistant.components.alarm_control_panel as alarm
from homeassistant.components.alarm_control_panel import PLATFORM_SCHEMA
from homeassistant.components.alarm_control_panel.const import (
SUPPORT_ALARM_ARM_AWAY,
SUPPORT_ALARM_ARM_HOME,
)
from homeassistant.const import (
CONF_CODE,
CONF_HOST,
CONF_NAME,
CONF_PASSWORD,
CONF_USERNAME,
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_DISARMED,
STATE_ALARM_TRIGGERED,
)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "iAlarm"
def no_application_protocol(value):
    """Validate that value is without the application protocol.

    Raises vol.Invalid for empty hosts or hosts containing '://'.
    """
    separator = "://"
    if value and separator not in value:
        return value
    raise vol.Invalid(f"Invalid host, {separator} is not allowed")
# Extend the base alarm platform schema: a host without an application
# protocol prefix, panel credentials, an optional numeric arming code, and
# an optional display name.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_HOST): vol.All(cv.string, no_application_protocol),
        vol.Required(CONF_PASSWORD): cv.string,
        vol.Required(CONF_USERNAME): cv.string,
        vol.Optional(CONF_CODE): cv.positive_int,
        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    }
)
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up an iAlarm control panel."""
    host = config.get(CONF_HOST)
    panel = IAlarmPanel(
        config.get(CONF_NAME),
        config.get(CONF_CODE),
        config.get(CONF_USERNAME),
        config.get(CONF_PASSWORD),
        f"http://{host}",
    )
    # Request an immediate state refresh after adding the entity.
    add_entities([panel], True)
class IAlarmPanel(alarm.AlarmControlPanelEntity):
    """Representation of an iAlarm status."""

    def __init__(self, name, code, username, password, url):
        """Initialize the iAlarm status."""
        self._name = name
        # Normalize the configured code to a string (or None when unset).
        self._code = str(code) if code else None
        self._username = username
        self._password = password
        self._url = url
        self._state = None
        self._client = IAlarm(username, password, url)

    @property
    def name(self):
        """Return the name of the device."""
        return self._name

    @property
    def code_format(self):
        """Return one or more digits/characters."""
        if self._code is None:
            return None
        digits_only = isinstance(self._code, str) and re.search("^\\d+$", self._code)
        return alarm.FORMAT_NUMBER if digits_only else alarm.FORMAT_TEXT

    @property
    def state(self):
        """Return the state of the device."""
        return self._state

    @property
    def supported_features(self) -> int:
        """Return the list of supported features."""
        return SUPPORT_ALARM_ARM_HOME | SUPPORT_ALARM_ARM_AWAY

    def update(self):
        """Refresh the cached alarm state from the panel."""
        status = self._client.get_status()
        _LOGGER.debug("iAlarm status: %s", status)
        if not status:
            # Keep the previous state when the panel returns nothing.
            return
        status = int(status)
        if status == self._client.DISARMED:
            self._state = STATE_ALARM_DISARMED
        elif status == self._client.ARMED_AWAY:
            self._state = STATE_ALARM_ARMED_AWAY
        elif status == self._client.ARMED_STAY:
            self._state = STATE_ALARM_ARMED_HOME
        elif status == self._client.TRIGGERED:
            self._state = STATE_ALARM_TRIGGERED
        else:
            self._state = None

    def alarm_disarm(self, code=None):
        """Send disarm command."""
        if self._validate_code(code):
            self._client.disarm()

    def alarm_arm_away(self, code=None):
        """Send arm away command."""
        if self._validate_code(code):
            self._client.arm_away()

    def alarm_arm_home(self, code=None):
        """Send arm home command."""
        if self._validate_code(code):
            self._client.arm_stay()

    def _validate_code(self, code):
        """Validate given code."""
        if self._code is None or code == self._code:
            return True
        _LOGGER.warning("Wrong code entered")
        return False
|
from scattertext.termranking.TermRanker import TermRanker
class AbsoluteFrequencyRanker(TermRanker):
    '''Ranks terms by the number of times they occur in each category.
    '''

    def get_ranks(self, label_append=' freq'):
        '''Return raw per-category frequencies from the term-document matrix.

        Returns
        -------
        pd.DataFrame
            Frequency counts for terms (or metadata features, when
            non-text features are enabled), with `label_append` appended
            to the column names.
        '''
        tdm = self._term_doc_matrix
        if self._use_non_text_features:
            return tdm.get_metadata_freq_df(label_append=label_append)
        return tdm.get_term_freq_df(label_append=label_append)
|
import os
import shutil
import tempfile
import pytest
from jinja2 import ChainableUndefined
from jinja2 import DebugUndefined
from jinja2 import DictLoader
from jinja2 import Environment
from jinja2 import is_undefined
from jinja2 import make_logging_undefined
from jinja2 import meta
from jinja2 import StrictUndefined
from jinja2 import Template
from jinja2 import TemplatesNotFound
from jinja2 import Undefined
from jinja2 import UndefinedError
from jinja2.compiler import CodeGenerator
from jinja2.runtime import Context
from jinja2.utils import contextfunction
from jinja2.utils import Cycler
from jinja2.utils import environmentfunction
from jinja2.utils import evalcontextfunction
class TestExtendedAPI:
    """Tests for extended Environment/Template APIs (finalize hooks,
    compiled expressions, template pass-through, autoescape selection)."""

    def test_item_and_attribute(self, env):
        """Attribute call, attr() filter, and [] subscript all reach dict data."""
        from jinja2.sandbox import SandboxedEnvironment

        for env in Environment(), SandboxedEnvironment():
            tmpl = env.from_string("{{ foo.items()|list }}")
            assert tmpl.render(foo={"items": 42}) == "[('items', 42)]"
            tmpl = env.from_string('{{ foo|attr("items")()|list }}')
            assert tmpl.render(foo={"items": 42}) == "[('items', 42)]"
            tmpl = env.from_string('{{ foo["items"] }}')
            assert tmpl.render(foo={"items": 42}) == "42"

    def test_finalize(self):
        """finalize is applied to every output expression."""
        e = Environment(finalize=lambda v: "" if v is None else v)
        t = e.from_string("{% for item in seq %}|{{ item }}{% endfor %}")
        assert t.render(seq=(None, 1, "foo")) == "||1|foo"

    def test_finalize_constant_expression(self):
        """finalize also applies to constant expressions."""
        e = Environment(finalize=lambda v: "" if v is None else v)
        t = e.from_string("<{{ none }}>")
        assert t.render() == "<>"

    def test_no_finalize_template_data(self):
        """finalize must not be applied to literal template data."""
        e = Environment(finalize=lambda v: type(v).__name__)
        t = e.from_string("<{{ value }}>")
        # If template data was finalized, it would print "strintstr".
        assert t.render(value=123) == "<int>"

    def test_context_finalize(self):
        """A @contextfunction finalize receives the render context."""
        @contextfunction
        def finalize(context, value):
            return value * context["scale"]

        e = Environment(finalize=finalize)
        t = e.from_string("{{ value }}")
        assert t.render(value=5, scale=3) == "15"

    def test_eval_finalize(self):
        """An @evalcontextfunction finalize receives the eval context."""
        @evalcontextfunction
        def finalize(eval_ctx, value):
            return str(eval_ctx.autoescape) + value

        e = Environment(finalize=finalize, autoescape=True)
        t = e.from_string("{{ value }}")
        assert t.render(value="<script>") == "True<script>"

    def test_env_autoescape(self):
        """An @environmentfunction finalize receives the environment."""
        @environmentfunction
        def finalize(env, value):
            return " ".join(
                (env.variable_start_string, repr(value), env.variable_end_string)
            )

        e = Environment(finalize=finalize)
        t = e.from_string("{{ value }}")
        assert t.render(value="hello") == "{{ 'hello' }}"

    def test_cycler(self, env):
        """Cycler tracks current, advances with next(), and resets."""
        items = 1, 2, 3
        c = Cycler(*items)
        for item in items + items:
            assert c.current == item
            assert next(c) == item
        next(c)
        assert c.current == 2
        c.reset()
        assert c.current == 1

    def test_expressions(self, env):
        """compile_expression evaluates standalone expressions."""
        expr = env.compile_expression("foo")
        assert expr() is None
        assert expr(foo=42) == 42
        expr2 = env.compile_expression("foo", undefined_to_none=False)
        assert is_undefined(expr2())
        expr = env.compile_expression("42 + foo")
        assert expr(foo=42) == 84

    def test_template_passthrough(self, env):
        """Template objects pass through get/select_template unchanged."""
        t = Template("Content")
        assert env.get_template(t) is t
        assert env.select_template([t]) is t
        assert env.get_or_select_template([t]) is t
        assert env.get_or_select_template(t) is t

    def test_get_template_undefined(self, env):
        """Passing Undefined to get/select_template raises an
        UndefinedError or shows the undefined message in the list.
        """
        env.loader = DictLoader({})
        t = Undefined(name="no_name_1")

        with pytest.raises(UndefinedError):
            env.get_template(t)

        with pytest.raises(UndefinedError):
            env.get_or_select_template(t)

        with pytest.raises(UndefinedError):
            env.select_template(t)

        with pytest.raises(TemplatesNotFound) as exc_info:
            env.select_template([t, "no_name_2"])

        exc_message = str(exc_info.value)
        assert "'no_name_1' is undefined" in exc_message
        assert "no_name_2" in exc_message

    def test_autoescape_autoselect(self, env):
        """autoescape may be a callable keyed on the template name."""
        def select_autoescape(name):
            if name is None or "." not in name:
                return False
            return name.endswith(".html")

        env = Environment(
            autoescape=select_autoescape,
            loader=DictLoader({"test.txt": "{{ foo }}", "test.html": "{{ foo }}"}),
        )
        t = env.get_template("test.txt")
        assert t.render(foo="<foo>") == "<foo>"
        t = env.get_template("test.html")
        # FIX: select_autoescape returns True for *.html, so the output must
        # be HTML-escaped here (the previous expectation of "<foo>" would
        # fail against jinja2's actual autoescape behavior).
        assert t.render(foo="<foo>") == "&lt;foo&gt;"
        t = env.from_string("{{ foo }}")
        assert t.render(foo="<foo>") == "<foo>"

    def test_sandbox_max_range(self, env):
        """SandboxedEnvironment refuses range() larger than MAX_RANGE."""
        from jinja2.sandbox import SandboxedEnvironment, MAX_RANGE

        env = SandboxedEnvironment()
        t = env.from_string("{% for item in range(total) %}{{ item }}{% endfor %}")

        with pytest.raises(OverflowError):
            t.render(total=MAX_RANGE + 1)
class TestMeta:
    """Tests for the jinja2.meta introspection helpers."""

    def test_find_undeclared_variables(self, env):
        """find_undeclared_variables reports names not set in the template."""
        tree = env.parse("{% set foo = 42 %}{{ bar + foo }}")
        undeclared = meta.find_undeclared_variables(tree)
        assert undeclared == {"bar"}

        tree = env.parse(
            "{% set foo = 42 %}{{ bar + foo }}"
            "{% macro meh(x) %}{{ x }}{% endmacro %}"
            "{% for item in seq %}{{ muh(item) + meh(seq) }}"
            "{% endfor %}"
        )
        undeclared = meta.find_undeclared_variables(tree)
        assert undeclared == {"bar", "seq", "muh"}

        tree = env.parse("{% for x in range(5) %}{{ x }}{% endfor %}{{ foo }}")
        undeclared = meta.find_undeclared_variables(tree)
        assert undeclared == {"foo"}

    def test_find_refererenced_templates(self, env):
        """find_referenced_templates yields names, or None for dynamic refs."""
        refs = meta.find_referenced_templates(
            env.parse('{% extends "layout.html" %}{% include helper %}')
        )
        assert next(refs) == "layout.html"
        assert next(refs) is None
        assert list(refs) == []

        tree = env.parse(
            '{% extends "layout.html" %}'
            '{% from "test.html" import a, b as c %}'
            '{% import "meh.html" as meh %}'
            '{% include "muh.html" %}'
        )
        refs = meta.find_referenced_templates(tree)
        assert list(refs) == ["layout.html", "test.html", "meh.html", "muh.html"]

    def test_find_included_templates(self, env):
        """List/tuple include targets expand; dynamic entries become None."""
        cases = [
            ('{% include ["foo.html", "bar.html"] %}', ["foo.html", "bar.html"]),
            ('{% include ("foo.html", "bar.html") %}', ["foo.html", "bar.html"]),
            (
                '{% include ["foo.html", "bar.html", foo] %}',
                ["foo.html", "bar.html", None],
            ),
            (
                '{% include ("foo.html", "bar.html", foo) %}',
                ["foo.html", "bar.html", None],
            ),
        ]
        for source, expected in cases:
            refs = meta.find_referenced_templates(env.parse(source))
            assert list(refs) == expected
class TestStreaming:
    """Tests for TemplateStream behavior."""

    def test_basic_streaming(self, env):
        """An unbuffered stream yields one event per template node."""
        template = env.from_string(
            "<ul>{% for item in seq %}<li>{{ loop.index }} - {{ item }}</li>"
            "{%- endfor %}</ul>"
        )
        stream = template.stream(seq=list(range(3)))
        assert next(stream) == "<ul>"
        assert "".join(stream) == "<li>1 - 0</li><li>2 - 1</li><li>3 - 2</li></ul>"

    def test_buffered_streaming(self, env):
        """With buffering enabled, events are grouped into chunks of `size`."""
        template = env.from_string(
            "<ul>{% for item in seq %}<li>{{ loop.index }} - {{ item }}</li>"
            "{%- endfor %}</ul>"
        )
        stream = template.stream(seq=list(range(3)))
        stream.enable_buffering(size=3)
        assert next(stream) == "<ul><li>1"
        assert next(stream) == " - 0</li>"

    def test_streaming_behavior(self, env):
        """Buffering can be toggled on and off at runtime."""
        stream = env.from_string("").stream()
        assert not stream.buffered
        stream.enable_buffering(20)
        assert stream.buffered
        stream.disable_buffering()
        assert not stream.buffered

    def test_dump_stream(self, env):
        """stream.dump writes rendered output to a file in the given encoding."""
        workdir = tempfile.mkdtemp()
        try:
            dump_path = os.path.join(workdir, "dump.txt")
            env.from_string("\u2713").stream().dump(dump_path, "utf-8")
            with open(dump_path, "rb") as handle:
                assert handle.read() == b"\xe2\x9c\x93"
        finally:
            shutil.rmtree(workdir)
class TestUndefined:
    """Tests for the Undefined family of types (default, chainable,
    debug, strict, and logging variants)."""

    def test_stopiteration_is_undefined(self):
        """A StopIteration leaking out of a call renders as undefined."""
        def test():
            raise StopIteration()

        t = Template("A{{ test() }}B")
        assert t.render(test=test) == "AB"
        t = Template("A{{ test().missingattribute }}B")
        pytest.raises(UndefinedError, t.render, test=test)

    def test_undefined_and_special_attributes(self):
        """Dunder attributes are not proxied by Undefined."""
        with pytest.raises(AttributeError):
            Undefined("Foo").__dict__

    def test_undefined_attribute_error(self):
        # Django's LazyObject turns the __class__ attribute into a
        # property that resolves the wrapped function. If that wrapped
        # function raises an AttributeError, printing the repr of the
        # object in the undefined message would cause a RecursionError.
        class Error:
            @property
            def __class__(self):
                raise AttributeError()

        u = Undefined(obj=Error(), name="hello")

        with pytest.raises(UndefinedError):
            getattr(u, "recursion", None)

    def test_logging_undefined(self):
        """make_logging_undefined forwards warnings/errors to the logger
        while keeping the base Undefined rendering behavior."""
        _messages = []

        class DebugLogger:
            def warning(self, msg, *args):
                _messages.append("W:" + msg % args)

            def error(self, msg, *args):
                _messages.append("E:" + msg % args)

        logging_undefined = make_logging_undefined(DebugLogger())
        env = Environment(undefined=logging_undefined)
        assert env.from_string("{{ missing }}").render() == ""
        pytest.raises(UndefinedError, env.from_string("{{ missing.attribute }}").render)
        assert env.from_string("{{ missing|list }}").render() == "[]"
        assert env.from_string("{{ missing is not defined }}").render() == "True"
        assert env.from_string("{{ foo.missing }}").render(foo=42) == ""
        assert env.from_string("{{ not missing }}").render() == "True"
        # One message per undefined use above; the defined-test emits none.
        assert _messages == [
            "W:Template variable warning: 'missing' is undefined",
            "E:Template variable error: 'missing' is undefined",
            "W:Template variable warning: 'missing' is undefined",
            "W:Template variable warning: 'int object' has no attribute 'missing'",
            "W:Template variable warning: 'missing' is undefined",
        ]

    def test_default_undefined(self):
        """Default Undefined is silent in output but loud on attribute
        access and arithmetic; instances compare equal regardless of name."""
        env = Environment(undefined=Undefined)
        assert env.from_string("{{ missing }}").render() == ""
        pytest.raises(UndefinedError, env.from_string("{{ missing.attribute }}").render)
        assert env.from_string("{{ missing|list }}").render() == "[]"
        assert env.from_string("{{ missing is not defined }}").render() == "True"
        assert env.from_string("{{ foo.missing }}").render(foo=42) == ""
        assert env.from_string("{{ not missing }}").render() == "True"
        pytest.raises(UndefinedError, env.from_string("{{ missing - 1}}").render)
        und1 = Undefined(name="x")
        und2 = Undefined(name="y")
        assert und1 == und2
        assert und1 != 42
        assert hash(und1) == hash(und2) == hash(Undefined())
        with pytest.raises(AttributeError):
            getattr(Undefined, "__slots__")  # noqa: B009

    def test_chainable_undefined(self):
        """ChainableUndefined additionally allows chained attribute/item
        access, remembering the deepest defined name."""
        env = Environment(undefined=ChainableUndefined)
        # The following tests are copied from test_default_undefined
        assert env.from_string("{{ missing }}").render() == ""
        assert env.from_string("{{ missing|list }}").render() == "[]"
        assert env.from_string("{{ missing is not defined }}").render() == "True"
        assert env.from_string("{{ foo.missing }}").render(foo=42) == ""
        assert env.from_string("{{ not missing }}").render() == "True"
        pytest.raises(UndefinedError, env.from_string("{{ missing - 1}}").render)
        with pytest.raises(AttributeError):
            getattr(ChainableUndefined, "__slots__")  # noqa: B009

        # The following tests ensure subclass functionality works as expected
        assert env.from_string('{{ missing.bar["baz"] }}').render() == ""
        assert env.from_string('{{ foo.bar["baz"]._undefined_name }}').render() == "foo"
        assert (
            env.from_string('{{ foo.bar["baz"]._undefined_name }}').render(foo=42)
            == "bar"
        )
        assert (
            env.from_string('{{ foo.bar["baz"]._undefined_name }}').render(
                foo={"bar": 42}
            )
            == "baz"
        )

    def test_debug_undefined(self):
        """DebugUndefined echoes the original expression in the output."""
        env = Environment(undefined=DebugUndefined)
        assert env.from_string("{{ missing }}").render() == "{{ missing }}"
        pytest.raises(UndefinedError, env.from_string("{{ missing.attribute }}").render)
        assert env.from_string("{{ missing|list }}").render() == "[]"
        assert env.from_string("{{ missing is not defined }}").render() == "True"
        assert (
            env.from_string("{{ foo.missing }}").render(foo=42)
            == "{{ no such element: int object['missing'] }}"
        )
        assert env.from_string("{{ not missing }}").render() == "True"
        undefined_hint = "this is testing undefined hint of DebugUndefined"
        assert (
            str(DebugUndefined(hint=undefined_hint))
            == f"{{{{ undefined value printed: {undefined_hint} }}}}"
        )
        with pytest.raises(AttributeError):
            getattr(DebugUndefined, "__slots__")  # noqa: B009

    def test_strict_undefined(self):
        """StrictUndefined raises on any use except defined-tests and
        default(..., true)."""
        env = Environment(undefined=StrictUndefined)
        pytest.raises(UndefinedError, env.from_string("{{ missing }}").render)
        pytest.raises(UndefinedError, env.from_string("{{ missing.attribute }}").render)
        pytest.raises(UndefinedError, env.from_string("{{ missing|list }}").render)
        assert env.from_string("{{ missing is not defined }}").render() == "True"
        pytest.raises(
            UndefinedError, env.from_string("{{ foo.missing }}").render, foo=42
        )
        pytest.raises(UndefinedError, env.from_string("{{ not missing }}").render)
        assert (
            env.from_string('{{ missing|default("default", true) }}').render()
            == "default"
        )
        with pytest.raises(AttributeError):
            getattr(StrictUndefined, "__slots__")  # noqa: B009
        assert env.from_string('{{ "foo" if false }}').render() == ""

    def test_indexing_gives_undefined(self):
        """Indexing a non-subscriptable value yields Undefined."""
        t = Template("{{ var[42].foo }}")
        pytest.raises(UndefinedError, t.render, var=0)

    def test_none_gives_proper_error(self):
        """Attribute access on None produces a descriptive message."""
        with pytest.raises(UndefinedError, match="'None' has no attribute 'split'"):
            Environment().getattr(None, "split")()

    def test_object_repr(self):
        """The undefined message names the type of the source object."""
        with pytest.raises(
            UndefinedError, match="'int object' has no attribute 'upper'"
        ):
            Undefined(obj=42, name="upper")()
class TestLowLevel:
    """Tests for customizing code generation and context resolution."""

    def test_custom_code_generator(self):
        """A custom code_generator_class can rewrite constants at compile time."""
        class CustomCodeGenerator(CodeGenerator):
            def visit_Const(self, node, frame=None):
                # This method is pure nonsense, but works fine for testing...
                if node.value == "foo":
                    self.write(repr("bar"))
                else:
                    super().visit_Const(node, frame)

        class CustomEnvironment(Environment):
            code_generator_class = CustomCodeGenerator

        template = CustomEnvironment().from_string('{% set foo = "foo" %}{{ foo }}')
        assert template.render() == "bar"

    def test_custom_context(self):
        """A custom context_class controls how template variables resolve."""
        class CustomContext(Context):
            def resolve_or_missing(self, key):
                return "resolve-" + key

        class CustomEnvironment(Environment):
            context_class = CustomContext

        template = CustomEnvironment().from_string("{{ foo }}")
        assert template.render() == "resolve-foo"
|
from __future__ import print_function
from scattertext import CorpusFromParsedDocuments, produce_scattertext_explorer
from scattertext import FeatsFromOnlyEmpath
from scattertext import SampleCorpora
def main():
    """Build an Empath-topic scattertext explorer for the 2012 convention
    corpus and write it to an HTML file in the current directory."""
    convention_df = SampleCorpora.ConventionData2012.get_data()
    feat_builder = FeatsFromOnlyEmpath()
    corpus = CorpusFromParsedDocuments(convention_df,
                                       category_col='party',
                                       parsed_col='text',
                                       feats_from_spacy_doc=feat_builder).build()
    html = produce_scattertext_explorer(corpus,
                                        category='democrat',
                                        category_name='Democratic',
                                        not_category_name='Republican',
                                        width_in_pixels=1000,
                                        metadata=convention_df['speaker'],
                                        use_non_text_features=True,
                                        use_full_doc=True,
                                        topic_model_term_lists=feat_builder.get_top_model_term_lists())
    # Use a context manager so the handle is flushed and closed deterministically
    # (the previous open(...).write(...) leaked the file handle).
    with open('./Convention-Visualization-Empath.html', 'wb') as out_file:
        out_file.write(html.encode('utf-8'))
    print('Open ./Convention-Visualization-Empath.html in Chrome or Firefox.')


if __name__ == '__main__':
    main()
|
from datetime import datetime, timedelta
import logging
from typing import Any
import holidays
import voluptuous as vol
from homeassistant.components.binary_sensor import PLATFORM_SCHEMA, BinarySensorEntity
from homeassistant.const import CONF_NAME, WEEKDAYS
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
ALLOWED_DAYS = WEEKDAYS + ["holiday"]
CONF_COUNTRY = "country"
CONF_PROVINCE = "province"
CONF_WORKDAYS = "workdays"
CONF_EXCLUDES = "excludes"
CONF_OFFSET = "days_offset"
CONF_ADD_HOLIDAYS = "add_holidays"
# By default, Monday - Friday are workdays
DEFAULT_WORKDAYS = ["mon", "tue", "wed", "thu", "fri"]
# By default, public holidays, Saturdays and Sundays are excluded from workdays
DEFAULT_EXCLUDES = ["sat", "sun", "holiday"]
DEFAULT_NAME = "Workday Sensor"
DEFAULT_OFFSET = 0
def valid_country(value: Any) -> str:
    """Validate that the given country is supported."""
    value = cv.string(value)
    supported = holidays.list_supported_countries()

    # Reject values that cannot be encoded (or encode to nothing).
    try:
        encoded = value.encode("utf-8")
    except UnicodeError as err:
        raise vol.Invalid(
            "The country name or the abbreviation must be a valid UTF-8 string."
        ) from err
    if not encoded:
        raise vol.Invalid("Country name or the abbreviation must not be empty.")

    if value not in supported:
        raise vol.Invalid("Country is not supported.")
    return value
# Extend the shared binary_sensor platform schema with the workday options.
# Workday/exclude lists are restricted to ALLOWED_DAYS (weekday names plus
# the special "holiday" entry).
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_COUNTRY): valid_country,
        vol.Optional(CONF_EXCLUDES, default=DEFAULT_EXCLUDES): vol.All(
            cv.ensure_list, [vol.In(ALLOWED_DAYS)]
        ),
        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
        vol.Optional(CONF_OFFSET, default=DEFAULT_OFFSET): vol.Coerce(int),
        vol.Optional(CONF_PROVINCE): cv.string,
        vol.Optional(CONF_WORKDAYS, default=DEFAULT_WORKDAYS): vol.All(
            cv.ensure_list, [vol.In(ALLOWED_DAYS)]
        ),
        vol.Optional(CONF_ADD_HOLIDAYS): vol.All(cv.ensure_list, [cv.string]),
    }
)
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the Workday sensor."""
    add_holidays = config.get(CONF_ADD_HOLIDAYS)
    country = config[CONF_COUNTRY]
    days_offset = config[CONF_OFFSET]
    excludes = config[CONF_EXCLUDES]
    province = config.get(CONF_PROVINCE)
    sensor_name = config[CONF_NAME]
    workdays = config[CONF_WORKDAYS]

    # Resolve the holiday calendar for the (offset-adjusted) current year.
    year = (get_date(datetime.today()) + timedelta(days=days_offset)).year
    obj_holidays = getattr(holidays, country)(years=year)

    if province:
        # 'state' and 'prov' are not interchangeable, so need to make
        # sure we use the right one
        if hasattr(obj_holidays, "PROVINCES") and province in obj_holidays.PROVINCES:
            obj_holidays = getattr(holidays, country)(prov=province, years=year)
        elif hasattr(obj_holidays, "STATES") and province in obj_holidays.STATES:
            obj_holidays = getattr(holidays, country)(state=province, years=year)
        else:
            # Unknown subdivision: abort setup (no entity added).
            _LOGGER.error(
                "There is no province/state %s in country %s", province, country
            )
            return

    # Add custom holidays
    try:
        obj_holidays.append(add_holidays)
    except TypeError:
        # add_holidays is None (option absent) or malformed; treat as
        # "no custom holidays" rather than failing setup.
        _LOGGER.debug("No custom holidays or invalid holidays")

    _LOGGER.debug("Found the following holidays for your configuration:")
    for date, name in sorted(obj_holidays.items()):
        _LOGGER.debug("%s %s", date, name)

    add_entities(
        [IsWorkdaySensor(obj_holidays, workdays, excludes, days_offset, sensor_name)],
        True,
    )
def day_to_string(day):
    """Convert day index 0 - 7 to string.

    Returns None for any out-of-range index.  The explicit lower-bound
    check matters: with plain indexing a negative `day` would silently
    wrap around to the end of ALLOWED_DAYS instead of being rejected.
    """
    if 0 <= day < len(ALLOWED_DAYS):
        return ALLOWED_DAYS[day]
    return None
def get_date(date):
    """Return date. Needed for testing.

    Identity passthrough: an indirection point so tests can patch this
    function to pin "today" to a fixed date.
    """
    return date
class IsWorkdaySensor(BinarySensorEntity):
    """Implementation of a Workday sensor."""

    def __init__(self, obj_holidays, workdays, excludes, days_offset, name):
        """Initialize the Workday sensor."""
        self._name = name
        self._obj_holidays = obj_holidays
        self._workdays = workdays
        self._excludes = excludes
        self._days_offset = days_offset
        self._state = None

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def is_on(self):
        """Return the state of the device."""
        return self._state

    def is_include(self, day, now):
        """Check if given day is in the includes list."""
        return day in self._workdays or (
            "holiday" in self._workdays and now in self._obj_holidays
        )

    def is_exclude(self, day, now):
        """Check if given day is in the excludes list."""
        return day in self._excludes or (
            "holiday" in self._excludes and now in self._obj_holidays
        )

    @property
    def state_attributes(self):
        """Return the attributes of the entity."""
        return {
            CONF_WORKDAYS: self._workdays,
            CONF_EXCLUDES: self._excludes,
            CONF_OFFSET: self._days_offset,
        }

    async def async_update(self):
        """Get date and look whether it is a holiday."""
        # Day of week as index 0 (Monday) .. 6 (Sunday) after the offset.
        date = get_date(datetime.today()) + timedelta(days=self._days_offset)
        day_of_week = day_to_string(date.isoweekday() - 1)

        # Workday only when included and not explicitly excluded.
        included = self.is_include(day_of_week, date)
        excluded = self.is_exclude(day_of_week, date)
        self._state = included and not excluded
|
import os
from homeassistant import setup
from homeassistant.components.profiler import (
CONF_SECONDS,
SERVICE_MEMORY,
SERVICE_START,
)
from homeassistant.components.profiler.const import DOMAIN
from tests.async_mock import patch
from tests.common import MockConfigEntry
async def test_basic_usage(hass, tmpdir):
    """Test we can setup and the service is registered."""
    test_dir = tmpdir.mkdir("profiles")

    await setup.async_setup_component(hass, "persistent_notification", {})
    entry = MockConfigEntry(domain=DOMAIN)
    entry.add_to_hass(hass)

    assert await hass.config_entries.async_setup(entry.entry_id)
    await hass.async_block_till_done()

    assert hass.services.has_service(DOMAIN, SERVICE_START)

    last_filename = None

    def _mock_path(filename):
        nonlocal last_filename
        # Honor the requested filename — it was hard-coded to a literal
        # placeholder, which made the os.path.exists check below test the
        # wrong path rather than the profiler's actual output file.
        last_filename = f"{test_dir}/{filename}"
        return last_filename

    with patch("homeassistant.components.profiler.cProfile.Profile"), patch.object(
        hass.config, "path", _mock_path
    ):
        await hass.services.async_call(DOMAIN, SERVICE_START, {CONF_SECONDS: 0.000001})
        await hass.async_block_till_done()

    assert os.path.exists(last_filename)

    assert await hass.config_entries.async_unload(entry.entry_id)
    await hass.async_block_till_done()
async def test_memory_usage(hass, tmpdir):
    """Test we can setup and the service is registered."""
    test_dir = tmpdir.mkdir("profiles")

    await setup.async_setup_component(hass, "persistent_notification", {})
    entry = MockConfigEntry(domain=DOMAIN)
    entry.add_to_hass(hass)

    assert await hass.config_entries.async_setup(entry.entry_id)
    await hass.async_block_till_done()

    assert hass.services.has_service(DOMAIN, SERVICE_MEMORY)

    last_filename = None

    def _mock_path(filename):
        nonlocal last_filename
        # Honor the requested filename instead of a hard-coded placeholder
        # so the heap dump lands in the per-test profiles directory.
        last_filename = f"{test_dir}/{filename}"
        return last_filename

    with patch("homeassistant.components.profiler.hpy") as mock_hpy, patch.object(
        hass.config, "path", _mock_path
    ):
        await hass.services.async_call(DOMAIN, SERVICE_MEMORY, {CONF_SECONDS: 0.000001})
        await hass.async_block_till_done()

        mock_hpy.assert_called_once()

    assert await hass.config_entries.async_unload(entry.entry_id)
    await hass.async_block_till_done()
|
import unittest
import subprocess
class TestJupyterNbconvert(unittest.TestCase):
    """Smoke test: nbconvert can execute the sample notebook end to end."""

    def test_nbconvert(self):
        cmd = [
            'jupyter',
            'nbconvert',
            '--to',
            'notebook',
            '--template',
            '/opt/kaggle/nbconvert-extensions.tpl',
            '--execute',
            '--stdout',
            '/input/tests/data/notebook.ipynb',
        ]
        result = subprocess.run(cmd, stdout=subprocess.PIPE)

        self.assertEqual(0, result.returncode)
        # The notebook's expected cell output must appear in the converted stream.
        self.assertTrue(b'999' in result.stdout)
|
import subprocess
import mock
import pytest
import yaml
from paasta_tools import firewall
from paasta_tools import firewall_update
from paasta_tools.utils import TimeoutError
def test_parse_args_daemon():
    """All daemon-mode flags land in the parsed namespace."""
    argv = [
        "-d",
        "mysoadir",
        "-v",
        "--synapse-service-dir",
        "myservicedir",
        "daemon",
        "-u",
        "123",
    ]
    args = firewall_update.parse_args(argv)

    assert args.mode == "daemon"
    assert args.synapse_service_dir == "myservicedir"
    assert args.soa_dir == "mysoadir"
    assert args.update_secs == 123
    assert args.verbose
def test_parse_args_default_daemon():
    """Daemon mode with no flags falls back to the module-level defaults."""
    args = firewall_update.parse_args(["daemon"])
    assert args.mode == "daemon"
    assert args.synapse_service_dir == firewall.DEFAULT_SYNAPSE_SERVICE_DIR
    assert args.soa_dir == firewall_update.DEFAULT_SOA_DIR
    assert args.update_secs == firewall_update.DEFAULT_UPDATE_SECS
    assert not args.verbose
def test_parse_args_cron():
    """Cron-mode flags land in the parsed namespace."""
    argv = ["-d", "mysoadir", "-v", "cron"]
    args = firewall_update.parse_args(argv)

    assert args.mode == "cron"
    assert args.soa_dir == "mysoadir"
    assert args.verbose
def test_parse_args_default_cron():
    """Cron mode with no flags falls back to the module-level defaults."""
    args = firewall_update.parse_args(["cron"])
    assert args.mode == "cron"
    assert args.soa_dir == firewall_update.DEFAULT_SOA_DIR
    assert not args.verbose
@mock.patch(
    "paasta_tools.utils.load_system_paasta_config", autospec=True,
)
@mock.patch.object(
    firewall_update,
    "load_system_paasta_config",
    autospec=True,
    return_value=mock.Mock(**{"get_cluster.return_value": "mycluster"}),
)
@mock.patch.object(
    firewall,
    "services_running_here",
    autospec=True,
    return_value=(
        ("myservice", "hassecurityinbound", "02:42:a9:fe:00:0a", "1.1.1.1"),
        ("myservice", "hassecurityoutbound", "02:42:a9:fe:00:0a", "1.1.1.1"),
    ),
)
def test_smartstack_dependencies_of_running_firewalled_services(_, __, ___, tmpdir):
    """Only firewalled instances contribute to the smartstack dependency map."""
    # Build a throwaway yelpsoa-configs tree: three instances sharing one
    # dependencies reference, two of which declare firewall settings.
    soa_dir = tmpdir.mkdir("yelpsoa")
    myservice_dir = soa_dir.mkdir("myservice")
    marathon_config = {
        "hassecurityinbound": {
            "dependencies_reference": "my_ref",
            "security": {"inbound_firewall": "reject"},
        },
        "hassecurityoutbound": {
            "dependencies_reference": "my_ref",
            "security": {"outbound_firewall": "block"},
        },
        "nosecurity": {"dependencies_reference": "my_ref"},
    }
    myservice_dir.join("marathon-mycluster.yaml").write(yaml.safe_dump(marathon_config))
    dependencies_config = {
        "my_ref": [
            {"well-known": "internet"},
            {"smartstack": "mydependency.depinstance"},
            {"smartstack": "another.one"},
        ]
    }
    myservice_dir.join("dependencies.yaml").write(yaml.safe_dump(dependencies_config))
    result = firewall_update.smartstack_dependencies_of_running_firewalled_services(
        soa_dir=str(soa_dir)
    )
    # "nosecurity" (no security config) and the "well-known" dependency
    # (not a smartstack service) are absent from the result.
    assert dict(result) == {
        "mydependency.depinstance": {
            ("myservice", "hassecurityinbound"),
            ("myservice", "hassecurityoutbound"),
        },
        "another.one": {
            ("myservice", "hassecurityinbound"),
            ("myservice", "hassecurityoutbound"),
        },
    }
@mock.patch.object(
    firewall_update,
    "smartstack_dependencies_of_running_firewalled_services",
    autospec=True,
)
@mock.patch.object(
    firewall_update, "process_inotify_event", side_effect=StopIteration, autospec=True
)
def test_run_daemon(process_inotify_mock, smartstack_deps_mock, mock_daemon_args):
    """The daemon notices a synapse file change and processes the event."""
    class kill_after_too_long:
        # Fail-safe: the deps mock is polled by the daemon loop, so bound
        # the number of iterations instead of letting the test hang.
        def __init__(self):
            self.count = 0

        def __call__(self, *args, **kwargs):
            self.count += 1
            assert self.count <= 5, "Took too long to detect file change"
            return {}

    smartstack_deps_mock.side_effect = kill_after_too_long()
    # Touch a synapse registration file shortly after the daemon starts so
    # there is an inotify event to report.
    subprocess.Popen(
        [
            "bash",
            "-c",
            "sleep 0.2; echo > %s/mydep.depinstance.json"
            % mock_daemon_args.synapse_service_dir,
        ]
    )
    # process_inotify_event raises StopIteration (side_effect above) to
    # break out of the otherwise infinite daemon loop.
    with pytest.raises(StopIteration):
        firewall_update.run_daemon(mock_daemon_args)
    assert smartstack_deps_mock.call_count > 0
    assert process_inotify_mock.call_args[0][0][3] == b"mydep.depinstance.json"
    assert process_inotify_mock.call_args[0][1] == {}
@mock.patch.object(firewall, "firewall_flock", autospec=True)
@mock.patch.object(firewall, "general_update", autospec=True)
def test_run_cron(mock_general_update, mock_firewall_flock, mock_cron_args):
    """run_cron performs a general update while holding the firewall flock."""
    firewall_update.run_cron(mock_cron_args)

    assert mock_general_update.called is True
    assert mock_firewall_flock.return_value.__enter__.called is True
@mock.patch.object(
    firewall, "firewall_flock", autospec=True, side_effect=TimeoutError("Oh noes")
)
@mock.patch.object(firewall, "general_update", autospec=True)
def test_run_cron_flock_error(mock_general_update, mock_firewall_flock, mock_cron_args):
    """A timeout acquiring the firewall flock propagates out of run_cron."""
    with pytest.raises(TimeoutError):
        firewall_update.run_cron(mock_cron_args)
@mock.patch.object(firewall_update, "log", autospec=True)
@mock.patch.object(firewall_update.firewall, "ensure_service_chains", autospec=True)
@mock.patch.object(firewall_update.firewall, "active_service_groups", autospec=True)
@mock.patch.object(firewall, "firewall_flock", autospec=True)
def test_process_inotify_event(
    firewall_flock_mock,
    active_service_groups_mock,
    ensure_service_chains_mock,
    log_mock,
):
    """A *.json event refreshes chains only for dependent service groups."""
    active_service_groups_mock.return_value = {
        firewall.ServiceGroup("myservice", "myinstance"): {"00:00:00:00:00:00"},
        firewall.ServiceGroup("anotherservice", "instance"): {"11:11:11:11:11:11"},
        firewall.ServiceGroup("thirdservice", "instance"): {"22:22:22:22:22:22"},
    }
    services_by_dependencies = {
        "mydep.depinstance": {
            ("myservice", "myinstance"),
            ("anotherservice", "instance"),
        }
    }
    soa_dir = mock.Mock()
    synapse_service_dir = mock.Mock()
    firewall_update.process_inotify_event(
        (None, None, None, b"mydep.depinstance.json"),
        services_by_dependencies,
        soa_dir,
        synapse_service_dir,
    )
    assert log_mock.debug.call_count == 3
    log_mock.debug.assert_any_call("Updated ('myservice', 'myinstance')")
    log_mock.debug.assert_any_call("Updated ('anotherservice', 'instance')")
    # Only the two groups that depend on mydep.depinstance are refreshed;
    # thirdservice is filtered out.
    assert ensure_service_chains_mock.mock_calls == [
        mock.call(
            {
                firewall.ServiceGroup("myservice", "myinstance"): {"00:00:00:00:00:00"},
                firewall.ServiceGroup("anotherservice", "instance"): {
                    "11:11:11:11:11:11"
                },
            },
            soa_dir,
            synapse_service_dir,
        )
    ]

    assert firewall_flock_mock.return_value.__enter__.called is True

    # Verify that tmp writes do not apply
    log_mock.reset_mock()
    ensure_service_chains_mock.reset_mock()
    firewall_update.process_inotify_event(
        (None, None, None, b"mydep.depinstance.tmp"),
        services_by_dependencies,
        soa_dir,
        synapse_service_dir,
    )
    assert log_mock.debug.call_count == 1
    assert ensure_service_chains_mock.call_count == 0
@mock.patch.object(firewall_update, "log", autospec=True)
@mock.patch.object(firewall_update.firewall, "ensure_service_chains", autospec=True)
@mock.patch.object(firewall_update.firewall, "active_service_groups", autospec=True)
@mock.patch.object(
    firewall, "firewall_flock", autospec=True, side_effect=TimeoutError("Oh noes")
)
def test_process_inotify_event_flock_error(
    firewall_flock_mock,
    active_service_groups_mock,
    ensure_service_chains_mock,
    log_mock,
):
    """A flock timeout while processing an event is logged, not raised."""
    active_service_groups_mock.return_value = {
        firewall.ServiceGroup("myservice", "myinstance"): {"00:00:00:00:00:00"},
        firewall.ServiceGroup("anotherservice", "instance"): {"11:11:11:11:11:11"},
        firewall.ServiceGroup("thirdservice", "instance"): {"22:22:22:22:22:22"},
    }
    services_by_dependencies = {
        "mydep.depinstance": {
            ("myservice", "myinstance"),
            ("anotherservice", "instance"),
        }
    }
    soa_dir = mock.Mock()
    synapse_service_dir = mock.Mock()
    firewall_update.process_inotify_event(
        (None, None, None, b"mydep.depinstance.json"),
        services_by_dependencies,
        soa_dir,
        synapse_service_dir,
    )
    # One debug line for the event plus one error for the timeout; the
    # exception does not propagate out of process_inotify_event.
    assert log_mock.debug.call_count == 1
    assert log_mock.error.call_count == 1
@pytest.fixture
def mock_daemon_args(tmpdir):
    """Parsed daemon-mode args pointing at throwaway soa/synapse dirs."""
    soa_dir = tmpdir.mkdir("yelpsoa")
    synapse_dir = tmpdir.mkdir("synapse")
    return firewall_update.parse_args(
        ["-d", str(soa_dir), "--synapse-service-dir", str(synapse_dir), "daemon"]
    )
@pytest.fixture
def mock_cron_args(tmpdir):
    """Parsed cron-mode args pointing at throwaway soa/synapse dirs."""
    soa_dir = tmpdir.mkdir("yelpsoa")
    synapse_dir = tmpdir.mkdir("synapse")
    return firewall_update.parse_args(
        ["-d", str(soa_dir), "--synapse-service-dir", str(synapse_dir), "cron"]
    )
|
from datetime import timedelta
import pytest
import homeassistant.components.automation as automation
from homeassistant.components.binary_sensor import DEVICE_CLASSES, DOMAIN
from homeassistant.components.binary_sensor.device_trigger import ENTITY_TRIGGERS
from homeassistant.const import CONF_PLATFORM, STATE_OFF, STATE_ON
from homeassistant.helpers import device_registry
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.common import (
MockConfigEntry,
async_fire_time_changed,
async_get_device_automation_capabilities,
async_get_device_automations,
async_mock_service,
mock_device_registry,
mock_registry,
)
@pytest.fixture
def device_reg(hass):
    """Return an empty, loaded, device registry."""
    registry = mock_device_registry(hass)
    return registry
@pytest.fixture
def entity_reg(hass):
    """Return an empty, loaded, entity registry."""
    registry = mock_registry(hass)
    return registry
@pytest.fixture
def calls(hass):
    """Track calls to a mock service."""
    service_calls = async_mock_service(hass, "test", "automation")
    return service_calls
async def test_get_triggers(hass, device_reg, entity_reg):
    """Test we get the expected triggers from a binary_sensor."""
    platform = getattr(hass.components, f"test.{DOMAIN}")
    platform.init()
    config_entry = MockConfigEntry(domain="test", data={})
    config_entry.add_to_hass(hass)
    device_entry = device_reg.async_get_or_create(
        config_entry_id=config_entry.entry_id,
        connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
    )
    # Register one entity per device class on the same device so every
    # device-class-specific trigger set is represented.
    for device_class in DEVICE_CLASSES:
        entity_reg.async_get_or_create(
            DOMAIN,
            "test",
            platform.ENTITIES[device_class].unique_id,
            device_id=device_entry.id,
        )
    assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
    await hass.async_block_till_done()
    # ENTITY_TRIGGERS maps each device class to its trigger "type"s.
    expected_triggers = [
        {
            "platform": "device",
            "domain": DOMAIN,
            "type": trigger["type"],
            "device_id": device_entry.id,
            "entity_id": platform.ENTITIES[device_class].entity_id,
        }
        for device_class in DEVICE_CLASSES
        for trigger in ENTITY_TRIGGERS[device_class]
    ]
    triggers = await async_get_device_automations(hass, "trigger", device_entry.id)
    assert triggers == expected_triggers
async def test_get_trigger_capabilities(hass, device_reg, entity_reg):
    """Test we get the expected capabilities from a binary_sensor trigger."""
    config_entry = MockConfigEntry(domain="test", data={})
    config_entry.add_to_hass(hass)
    device_entry = device_reg.async_get_or_create(
        config_entry_id=config_entry.entry_id,
        connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
    )
    entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device_entry.id)
    # Every binary_sensor trigger advertises the same optional "for" field.
    expected_capabilities = {
        "extra_fields": [
            {"name": "for", "optional": True, "type": "positive_time_period_dict"}
        ]
    }
    triggers = await async_get_device_automations(hass, "trigger", device_entry.id)
    for trigger in triggers:
        capabilities = await async_get_device_automation_capabilities(
            hass, "trigger", trigger
        )
        assert capabilities == expected_capabilities
async def test_if_fires_on_state_change(hass, calls):
    """Test for on and off triggers firing."""
    platform = getattr(hass.components, f"test.{DOMAIN}")
    platform.init()
    assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
    await hass.async_block_till_done()
    sensor1 = platform.ENTITIES["battery"]
    # Two automations: one for each trigger direction (bat_low / not_bat_low).
    # The "some" template renders the trigger context into a single string so a
    # single equality assert can verify all trigger fields at once.
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: [
                {
                    "trigger": {
                        "platform": "device",
                        "domain": DOMAIN,
                        "device_id": "",
                        "entity_id": sensor1.entity_id,
                        "type": "bat_low",
                    },
                    "action": {
                        "service": "test.automation",
                        "data_template": {
                            "some": "bat_low {{ trigger.%s }}"
                            % "}} - {{ trigger.".join(
                                (
                                    "platform",
                                    "entity_id",
                                    "from_state.state",
                                    "to_state.state",
                                    "for",
                                )
                            )
                        },
                    },
                },
                {
                    "trigger": {
                        "platform": "device",
                        "domain": DOMAIN,
                        "device_id": "",
                        "entity_id": sensor1.entity_id,
                        "type": "not_bat_low",
                    },
                    "action": {
                        "service": "test.automation",
                        "data_template": {
                            "some": "not_bat_low {{ trigger.%s }}"
                            % "}} - {{ trigger.".join(
                                (
                                    "platform",
                                    "entity_id",
                                    "from_state.state",
                                    "to_state.state",
                                    "for",
                                )
                            )
                        },
                    },
                },
            ]
        },
    )
    await hass.async_block_till_done()
    assert hass.states.get(sensor1.entity_id).state == STATE_ON
    assert len(calls) == 0
    # on -> off fires the "not_bat_low" automation.
    hass.states.async_set(sensor1.entity_id, STATE_OFF)
    await hass.async_block_till_done()
    assert len(calls) == 1
    assert calls[0].data["some"] == "not_bat_low device - {} - on - off - None".format(
        sensor1.entity_id
    )
    # off -> on fires the "bat_low" automation.
    hass.states.async_set(sensor1.entity_id, STATE_ON)
    await hass.async_block_till_done()
    assert len(calls) == 2
    assert calls[1].data["some"] == "bat_low device - {} - off - on - None".format(
        sensor1.entity_id
    )
async def test_if_fires_on_state_change_with_for(hass, calls):
    """Test for triggers firing with delay."""
    platform = getattr(hass.components, f"test.{DOMAIN}")
    platform.init()
    assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
    await hass.async_block_till_done()
    sensor1 = platform.ENTITIES["battery"]
    # One automation that only fires after the state has held for 5 seconds.
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: [
                {
                    "trigger": {
                        "platform": "device",
                        "domain": DOMAIN,
                        "device_id": "",
                        "entity_id": sensor1.entity_id,
                        "type": "turned_off",
                        "for": {"seconds": 5},
                    },
                    "action": {
                        "service": "test.automation",
                        "data_template": {
                            "some": "turn_off {{ trigger.%s }}"
                            % "}} - {{ trigger.".join(
                                (
                                    "platform",
                                    "entity_id",
                                    "from_state.state",
                                    "to_state.state",
                                    "for",
                                )
                            )
                        },
                    },
                }
            ]
        },
    )
    await hass.async_block_till_done()
    assert hass.states.get(sensor1.entity_id).state == STATE_ON
    assert len(calls) == 0
    # The state change alone must not fire the trigger ...
    hass.states.async_set(sensor1.entity_id, STATE_OFF)
    await hass.async_block_till_done()
    assert len(calls) == 0
    # ... only after the "for" delay has elapsed.
    async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10))
    await hass.async_block_till_done()
    assert len(calls) == 1
    await hass.async_block_till_done()
    assert calls[0].data["some"] == "turn_off device - {} - on - off - 0:00:05".format(
        sensor1.entity_id
    )
|
import numpy as np
import xarray as xr
from . import requires_dask
class Reindex:
    """ASV benchmarks for `Dataset.reindex` on an in-memory dataset."""

    def setup(self):
        """Create a (1000, 100, 100) dataset with integer coordinates."""
        # Seeded RandomState keeps the benchmark data deterministic across runs.
        data = np.random.RandomState(0).randn(1000, 100, 100)
        self.ds = xr.Dataset(
            {"temperature": (("time", "x", "y"), data)},
            coords={"time": np.arange(1000), "x": np.arange(100), "y": np.arange(100)},
        )

    def time_1d_coarse(self):
        """Benchmark 1D reindex onto a coarser grid (every 5th time step)."""
        self.ds.reindex(time=np.arange(0, 1000, 5)).load()

    def time_1d_fine_all_found(self):
        """Benchmark 1D reindex onto a finer grid where every point has a nearest match."""
        self.ds.reindex(time=np.arange(0, 1000, 0.5), method="nearest").load()

    def time_1d_fine_some_missing(self):
        """Benchmark 1D nearest reindex where the tolerance makes half-points miss."""
        self.ds.reindex(
            time=np.arange(0, 1000, 0.5), method="nearest", tolerance=0.1
        ).load()

    def time_2d_coarse(self):
        """Benchmark 2D reindex onto a coarser grid."""
        self.ds.reindex(x=np.arange(0, 100, 2), y=np.arange(0, 100, 2)).load()

    def time_2d_fine_all_found(self):
        """Benchmark 2D nearest reindex onto a finer grid."""
        self.ds.reindex(
            x=np.arange(0, 100, 0.5), y=np.arange(0, 100, 0.5), method="nearest"
        ).load()

    def time_2d_fine_some_missing(self):
        """Benchmark 2D nearest reindex with a tolerance causing misses."""
        self.ds.reindex(
            x=np.arange(0, 100, 0.5),
            y=np.arange(0, 100, 0.5),
            method="nearest",
            tolerance=0.1,
        ).load()
class ReindexDask(Reindex):
    """Same reindex benchmarks as `Reindex`, but on a dask-chunked dataset."""

    def setup(self):
        # Project helper; presumably skips the benchmark when dask is not
        # installed — confirm against the shared benchmark utilities.
        requires_dask()
        super().setup()
        self.ds = self.ds.chunk({"time": 100})
|
import logging
from mficlient.client import FailedToLogin, MFiClient
import requests
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_PORT,
CONF_SSL,
CONF_USERNAME,
CONF_VERIFY_SSL,
STATE_OFF,
STATE_ON,
TEMP_CELSIUS,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
DEFAULT_SSL = True
DEFAULT_VERIFY_SSL = True
DIGITS = {"volts": 1, "amps": 1, "active_power": 0, "temperature": 1}
SENSOR_MODELS = [
"Ubiquiti mFi-THS",
"Ubiquiti mFi-CS",
"Ubiquiti mFi-DS",
"Outlet",
"Input Analog",
"Input Digital",
]
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_PORT): cv.port,
vol.Optional(CONF_SSL, default=DEFAULT_SSL): cv.boolean,
vol.Optional(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): cv.boolean,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up mFi sensors.

    Connects to the mFi controller and adds one MfiSensor per device port
    whose model is one of SENSOR_MODELS. Returns False when the controller
    is unreachable or the login fails.
    """
    use_tls = config.get(CONF_SSL)
    # mFi controllers listen on 6443 for TLS and 6080 for plain HTTP.
    port = int(config.get(CONF_PORT, 6443 if use_tls else 6080))
    try:
        client = MFiClient(
            config.get(CONF_HOST),
            config.get(CONF_USERNAME),
            config.get(CONF_PASSWORD),
            port=port,
            use_tls=use_tls,
            verify=config.get(CONF_VERIFY_SSL),
        )
    except (FailedToLogin, requests.exceptions.ConnectionError) as ex:
        _LOGGER.error("Unable to connect to mFi: %s", str(ex))
        return False
    sensors = []
    for device in client.get_devices():
        for device_port in device.ports.values():
            if device_port.model in SENSOR_MODELS:
                sensors.append(MfiSensor(device_port, hass))
    add_entities(sensors)
class MfiSensor(Entity):
    """Representation of a mFi sensor."""

    def __init__(self, port, hass):
        """Initialize the sensor."""
        self._port = port
        self._hass = hass

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._port.label

    @property
    def state(self):
        """Return the state of the sensor."""
        try:
            tag = self._port.tag
        except ValueError:
            # mficlient raises ValueError when the port has no tag yet;
            # treat that as "no reading".
            tag = None
        if tag is None:
            return STATE_OFF
        if self._port.model == "Input Digital":
            return STATE_ON if self._port.value > 0 else STATE_OFF
        # NOTE(review): DIGITS is keyed by "active_power" while the unit branch
        # below matches tag "active_pwr" — one of the two spellings likely never
        # matches; confirm the tag names actually emitted by mficlient.
        digits = DIGITS.get(self._port.tag, 0)
        return round(self._port.value, digits)

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement of this entity, if any."""
        try:
            tag = self._port.tag
        except ValueError:
            # No tag available: the value is treated as a plain state.
            return "State"
        if tag == "temperature":
            return TEMP_CELSIUS
        if tag == "active_pwr":
            return "Watts"
        if self._port.model == "Input Digital":
            return "State"
        return tag

    def update(self):
        """Get the latest data."""
        self._port.refresh()
|
import asyncio
from datetime import timedelta
import logging
from advantage_air import ApiError, advantage_air
from homeassistant.const import CONF_IP_ADDRESS, CONF_PORT
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import ADVANTAGE_AIR_RETRY, DOMAIN
ADVANTAGE_AIR_SYNC_INTERVAL = 15
ADVANTAGE_AIR_PLATFORMS = ["climate", "cover", "binary_sensor", "sensor", "switch"]
_LOGGER = logging.getLogger(__name__)
async def async_setup(hass, config):
    """Set up Advantage Air integration."""
    # YAML setup only prepares the per-entry storage; devices are added in
    # async_setup_entry() via config entries.
    hass.data[DOMAIN] = {}
    return True
async def async_setup_entry(hass, entry):
    """Set up Advantage Air config."""
    ip_address = entry.data[CONF_IP_ADDRESS]
    port = entry.data[CONF_PORT]
    api = advantage_air(
        ip_address,
        port=port,
        session=async_get_clientsession(hass),
        retry=ADVANTAGE_AIR_RETRY,
    )

    async def async_get():
        # Update method for the coordinator; wraps API errors so the
        # coordinator can mark the data as failed.
        try:
            return await api.async_get()
        except ApiError as err:
            raise UpdateFailed(err) from err

    coordinator = DataUpdateCoordinator(
        hass,
        _LOGGER,
        name="Advantage Air",
        update_method=async_get,
        update_interval=timedelta(seconds=ADVANTAGE_AIR_SYNC_INTERVAL),
    )

    async def async_change(change):
        # Shared helper for entities: push a change to the unit and refresh
        # state on success; API errors are logged, not raised.
        try:
            if await api.async_change(change):
                await coordinator.async_refresh()
        except ApiError as err:
            _LOGGER.warning(err)

    # Initial fetch; without data the entry setup is retried later.
    await coordinator.async_refresh()
    if not coordinator.data:
        raise ConfigEntryNotReady
    hass.data[DOMAIN][entry.entry_id] = {
        "coordinator": coordinator,
        "async_change": async_change,
    }
    for platform in ADVANTAGE_AIR_PLATFORMS:
        hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(entry, platform)
        )
    return True
async def async_unload_entry(hass, entry):
    """Unload Advantage Air Config.

    Unloads every forwarded platform; the entry's stored data is only
    removed when all platforms unloaded successfully.
    """
    results = await asyncio.gather(
        *(
            hass.config_entries.async_forward_entry_unload(entry, platform)
            for platform in ADVANTAGE_AIR_PLATFORMS
        )
    )
    if not all(results):
        return False
    hass.data[DOMAIN].pop(entry.entry_id)
    return True
|
import numpy as np
import functools
from tensornetwork.network_components import contract, contract_between
from tensornetwork.backends import backend_factory
from typing import Any, List, Optional, Text, Type, Union, Dict, Sequence
from tensornetwork.matrixproductstates.base_mps import BaseMPS
from tensornetwork.ncon_interface import ncon
from tensornetwork.backends.abstract_backend import AbstractBackend
Tensor = Any
#TODO (mganahl): add jit
class InfiniteMPS(BaseMPS):
    """An MPS class for infinite systems.

    MPS tensors are stored as a list.
    `InfiniteMPS` has a central site, also called orthogonality center.
    The position of this central site is stored in `InfiniteMPS.center_position`,
    and it can be shifted using the `InfiniteMPS.position` method.
    `InfiniteMPS.position` uses QR and RQ methods to shift `center_position`.
    `InfiniteMPS` can be initialized either from a `list` of tensors, or
    by calling the classmethod `InfiniteMPS.random`.
    """

    def __init__(self,
                 tensors: List[Tensor],
                 center_position: Optional[int] = None,
                 connector_matrix: Optional[Tensor] = None,
                 backend: Optional[Union[AbstractBackend, Text]] = None) -> None:
        """Initialize an InfiniteMPS.

        Args:
          tensors: A list of `Tensor` objects.
          center_position: The initial position of the center site.
          connector_matrix: A `Tensor` of rank 2 connecting
            different unitcells. A value `None` is equivalent to an identity
            `connector_matrix`.
          backend: The name of the backend that should be used to perform
            contractions. Available backends are currently 'numpy', 'tensorflow',
            'pytorch', 'jax'
        """
        super().__init__(
            tensors=tensors,
            center_position=center_position,
            connector_matrix=connector_matrix,
            backend=backend)

    @classmethod
    def random(
        cls,
        d: List[int],
        D: List[int],
        dtype: Type[np.number],
        backend: Optional[Union[AbstractBackend, Text]] = None) -> "InfiniteMPS":
        """Initialize a random `InfiniteMPS`. The resulting state is normalized.

        Its center-position is at 0.

        Args:
          d: A list of physical dimensions.
          D: A list of bond dimensions.
          dtype: A numpy dtype.
          backend: An optional backend.
        Returns:
          `InfiniteMPS`
        Raises:
          ValueError: If `len(D) != len(d) + 1` or if the two boundary bond
            dimensions differ (the state must close onto itself).
        """
        # use numpy backend for tensor initialization
        be = backend_factory.get_backend('numpy')
        if len(D) != len(d) + 1:
            raise ValueError('len(D) = {} is different from len(d) + 1= {}'.format(
                len(D),
                len(d) + 1))
        if D[-1] != D[0]:
            raise ValueError('D[0]={} != D[-1]={}.'.format(D[0], D[-1]))
        tensors = [
            be.randn((D[n], d[n], D[n + 1]), dtype=dtype) for n in range(len(d))
        ]
        return cls(tensors=tensors, center_position=0, backend=backend)

    def unit_cell_transfer_operator(self, direction: Union[Text, int],
                                    matrix: Tensor) -> Tensor:
        """Apply the transfer operator of the whole unit cell to `matrix`.

        Args:
          direction: `1`/`'l'`/`'left'` applies site operators left-to-right;
            `-1`/`'r'`/`'right'` applies them right-to-left.
          matrix: A rank-2 environment `Tensor`.
        Returns:
          Tensor: `matrix` after application of all site transfer operators.
        """
        sites = range(len(self))
        if direction in (-1, 'r', 'right'):
            sites = reversed(sites)
        for site in sites:
            matrix = self.apply_transfer_operator(site, direction, matrix)
        return matrix

    def transfer_matrix_eigs(self,
                             direction: Union[Text, int],
                             initial_state: Optional[Tensor] = None,
                             precision: Optional[float] = 1E-10,
                             num_krylov_vecs: Optional[int] = 30,
                             maxiter: Optional[int] = None) -> Tensor:
        """Compute the dominant eigenvector of the MPS transfer matrix.

        Args:
          direction:
            * If `'1','l' or 'left'`: return the left dominant eigenvalue
              and eigenvector
            * If `'-1','r' or 'right'`: return the right dominant eigenvalue
              and eigenvector
          initial_state: An optional initial state.
          num_krylov_vecs: Number of Krylov vectors to be used in `eigs`.
          precision: The desired precision of the eigen values.
          maxiter: The maximum number of iterations.
        Returns:
          `float` or `complex`: The dominant eigenvalue.
          Tensor: The dominant eigenvector.
        """
        D = self.bond_dimensions[0]

        def mv(vector):
            # Matrix-vector product for `eigs`: reshape the flat vector to a
            # (D, D) matrix, apply the unit-cell transfer operator, flatten.
            result = self.unit_cell_transfer_operator(
                direction, self.backend.reshape(vector, (D, D)))
            return self.backend.reshape(result, (D * D,))

        # Bug fix: `if not initial_state:` raises "truth value of an array is
        # ambiguous" for array-valued initial states; compare against None.
        if initial_state is None:
            initial_state = self.backend.randn((self.bond_dimensions[0]**2,),
                                               dtype=self.dtype)
        else:
            initial_state = self.backend.reshape(initial_state,
                                                 (self.bond_dimensions[0]**2,))
        # note: for real dtype eta and dens are real.
        # but scipy.linalg.eigs returns complex dtypes in any case
        # since we know that for an MPS transfer matrix the largest
        # eigenvalue and corresponding eigenvector are real
        # we cast them.
        eta, dens = self.backend.eigs(
            A=mv,
            initial_state=initial_state,
            num_krylov_vecs=num_krylov_vecs,
            numeig=1,
            tol=precision,
            which='LR',
            maxiter=maxiter,
            dtype=self.dtype)
        result = self.backend.reshape(
            dens[0], (self.bond_dimensions[0], self.bond_dimensions[0]))
        return eta[0], result

    def right_envs(self, sites: Sequence[int]) -> Dict:
        """Right environments are not defined for an infinite MPS."""
        raise NotImplementedError()

    def left_envs(self, sites: Sequence[int]) -> Dict:
        """Left environments are not defined for an infinite MPS."""
        raise NotImplementedError()

    def save(self, path: str):
        """Saving an infinite MPS is not implemented."""
        raise NotImplementedError()

    # pylint: disable=arguments-differ
    def canonicalize(self,
                     left_initial_state: Optional[Tensor] = None,
                     right_initial_state: Optional[Tensor] = None,
                     precision: Optional[float] = 1E-10,
                     truncation_threshold: Optional[float] = 1E-15,
                     D: Optional[int] = None,
                     num_krylov_vecs: Optional[int] = 50,
                     maxiter: Optional[int] = 1000,
                     pseudo_inverse_cutoff: Optional[float] = None) -> None:
        """Canonicalize an InfiniteMPS (i.e. bring it into Schmidt-canonical form).

        Args:
          left_initial_state: An initial guess for the left eigenvector of
            the unit-cell mps transfer matrix
          right_initial_state: An initial guess for the right eigenvector of
            the unit-cell transfer matrix
          precision: The desired precision of the dominant eigenvalues (passed
            to InfiniteMPS.transfer_matrix_eigs)
          truncation_threshold: Truncation threshold for Schmidt-values at the
            boundaries of the mps.
          D: The maximum number of Schmidt values to be kept at the boundaries
            of the mps.
          num_krylov_vecs: Number of Krylov vectors to diagonalize transfer_matrix
          maxiter: Maximum number of iterations in `eigs`
          pseudo_inverse_cutoff: A cutoff for taking the Moore-Penrose
            pseudo-inverse of a matrix. Given the SVD of a matrix :math:`M=U S V`,
            the inverse is computed as :math:`V^* S^{-1}_+ U^*`,
            where :math:`S^{-1}_+` equals `S^{-1}` for all values in `S` which
            are larger than `pseudo_inverse_cutoff`, and is 0 for all others.
        Returns:
          The norm of the state (`lam_norm`); note the `-> None` annotation is
          historical — the method does return a value.
        """
        if self.center_position is None:
            self.center_position = 0
        # bring center_position to 0
        self.position(0)
        # dtype of eta is the same as InfiniteMPS.dtype
        # this is assured in the backend.
        eta, l = self.transfer_matrix_eigs(
            direction='left',
            initial_state=left_initial_state,
            precision=precision,
            num_krylov_vecs=num_krylov_vecs,
            maxiter=maxiter)
        sqrteta = self.backend.sqrt(self.backend.abs(eta))
        self.tensors[0] /= sqrteta
        # TODO: would be nice to do the algebra directly on the nodes here
        l /= self.backend.trace(l)
        # symmetrize: the environment is hermitian up to numerical noise
        l = (l + self.backend.transpose(self.backend.conj(l), (1, 0))) / 2.0
        # eigvals_left and u_left are both `Tensor` objects
        eigvals_left, u_left = self.backend.eigh(l)
        eigvals_left /= self.backend.norm(eigvals_left)
        # Consolidated the previously duplicated `if pseudo_inverse_cutoff`
        # blocks; behavior is unchanged.
        inveigvals_left = 1.0 / eigvals_left
        if pseudo_inverse_cutoff:
            mask = eigvals_left <= pseudo_inverse_cutoff
            inveigvals_left = self.backend.index_update(inveigvals_left, mask, 0.0)
        sqrtl = ncon(
            [u_left, self.backend.diagflat(self.backend.sqrt(eigvals_left))],
            [[-2, 1], [1, -1]],
            backend=self.backend.name)
        inv_sqrtl = ncon([
            self.backend.diagflat(self.backend.sqrt(inveigvals_left)),
            self.backend.conj(u_left)
        ], [[-2, 1], [-1, 1]],
                         backend=self.backend.name)
        eta, r = self.transfer_matrix_eigs(
            direction='right',
            initial_state=right_initial_state,
            precision=precision,
            num_krylov_vecs=num_krylov_vecs,
            maxiter=maxiter)
        r /= self.backend.trace(r)
        r = (r + self.backend.transpose(self.backend.conj(r), (1, 0))) / 2.0
        # eigvals_right and u_right are both `Tensor` objects
        eigvals_right, u_right = self.backend.eigh(r)
        eigvals_right /= self.backend.norm(eigvals_right)
        inveigvals_right = 1.0 / eigvals_right
        if pseudo_inverse_cutoff:
            mask = eigvals_right <= pseudo_inverse_cutoff
            inveigvals_right = self.backend.index_update(inveigvals_right, mask, 0.0)
        sqrtr = ncon(
            [u_right, self.backend.diagflat(self.backend.sqrt(eigvals_right))],
            [[-1, 1], [1, -2]],
            backend=self.backend.name)
        inv_sqrtr = ncon([
            self.backend.diagflat(self.backend.sqrt(inveigvals_right)),
            self.backend.conj(u_right)
        ], [[-1, 1], [-2, 1]],
                         backend=self.backend.name)
        tmp = ncon([sqrtl, sqrtr], [[-1, 1], [1, -2]], backend=self.backend.name)
        U, singvals, V, _ = self.backend.svd(
            tmp,
            pivot_axis=1,
            max_singular_values=D,
            max_truncation_error=truncation_threshold,
            relative=True)
        lam = self.backend.diagflat(singvals)
        self.tensors[0] = ncon([lam, V, inv_sqrtr, self.tensors[0]],
                               [[-1, 1], [1, 2], [2, 3], [3, -2, -3]],
                               backend=self.backend.name)
        # absorb connector * inv_sqrtl * U * lam into the right-most tensor
        # Note that lam is absorbed here, which means that the state
        # is in the parallel decomposition
        # Note that we absorb connector_matrix here
        self.tensors[-1] = ncon([self.get_tensor(len(self) - 1), inv_sqrtl, U, lam],
                                [[-1, -2, 1], [1, 2], [2, 3], [3, -3]],
                                backend=self.backend.name)
        # now do a sweep of QR decompositions to bring the mps tensors into
        # left canonical form (except the last one)
        self.position(len(self) - 1)
        lam_norm = self.backend.norm(singvals)
        lam /= lam_norm
        self.center_position = len(self) - 1
        self.connector_matrix = self.backend.inv(lam)
        return lam_norm
|
import gzip
import os
import shlex
import subprocess
from nikola.plugin_categories import TaskMultiplier
class GzipFiles(TaskMultiplier):
    """If appropriate, create tasks to create gzipped versions of files."""

    name = "gzip"
    is_default = True

    def process(self, task, prefix):
        """Process tasks.

        For each eligible target of `task` (matching a GZIP_EXTENSIONS suffix
        inside OUTPUT_FOLDER), emit one companion task that writes a `.gz`
        copy next to it. Returns an empty list when gzipping is disabled or
        nothing is eligible.
        """
        if not self.site.config['GZIP_FILES']:
            return []
        task_name = task.get('name')
        if task_name is None:
            return []
        extensions = self.site.config['GZIP_EXTENSIONS']
        output_folder = self.site.config['OUTPUT_FOLDER']
        compressible = [
            target
            for target in task.get('targets', [])
            if os.path.splitext(target)[1].lower() in extensions
            and target.startswith(output_folder)
        ]
        if not compressible:
            return []
        command = self.site.config['GZIP_COMMAND']
        gzip_task = {
            'file_dep': list(compressible),
            'targets': [target + '.gz' for target in compressible],
            'actions': [
                (create_gzipped_copy, (target, target + '.gz', command))
                for target in compressible
            ],
            'basename': '{0}_gzip'.format(prefix),
            'name': task_name.split(":", 1)[-1] + '.gz',
            'clean': True,
        }
        return [gzip_task]
def create_gzipped_copy(in_path, out_path, command=None):
    """Create gzipped copy of in_path and save it as out_path.

    When `command` is given it is treated as a shell template with a
    `{filename}` placeholder and executed instead of using Python's gzip.
    """
    if command:
        subprocess.check_call(shlex.split(command.format(filename=in_path)))
        return
    with open(in_path, 'rb') as source, gzip.GzipFile(out_path, 'wb+') as archive:
        archive.write(source.read())
|
from homeassistant.components.alexa import state_report
from . import DEFAULT_CONFIG, TEST_URL
async def test_report_state(hass, aioclient_mock):
    """Test proactive state reports."""
    aioclient_mock.post(TEST_URL, text="", status=202)
    hass.states.async_set(
        "binary_sensor.test_contact",
        "on",
        {"friendly_name": "Test Contact Sensor", "device_class": "door"},
    )
    await state_report.async_enable_proactive_mode(hass, DEFAULT_CONFIG)
    # Change the state after proactive mode is on to trigger a ChangeReport.
    hass.states.async_set(
        "binary_sensor.test_contact",
        "off",
        {"friendly_name": "Test Contact Sensor", "device_class": "door"},
    )
    # To trigger event listener
    await hass.async_block_till_done()
    assert len(aioclient_mock.mock_calls) == 1
    call = aioclient_mock.mock_calls
    # mock_calls entries are (method, url, data, headers); index 2 is the body.
    call_json = call[0][2]
    assert call_json["event"]["header"]["namespace"] == "Alexa"
    assert call_json["event"]["header"]["name"] == "ChangeReport"
    assert (
        call_json["event"]["payload"]["change"]["properties"][0]["value"]
        == "NOT_DETECTED"
    )
    assert call_json["event"]["endpoint"]["endpointId"] == "binary_sensor#test_contact"
async def test_report_state_instance(hass, aioclient_mock):
    """Test proactive state reports with instance."""
    aioclient_mock.post(TEST_URL, text="", status=202)
    hass.states.async_set(
        "fan.test_fan",
        "off",
        {
            "friendly_name": "Test fan",
            "supported_features": 3,
            "speed": "off",
            "speed_list": ["off", "low", "high"],
            "oscillating": False,
        },
    )
    await state_report.async_enable_proactive_mode(hass, DEFAULT_CONFIG)
    # Turning the fan on with oscillation should produce a ChangeReport whose
    # toggleState property carries the ToggleController instance name.
    hass.states.async_set(
        "fan.test_fan",
        "on",
        {
            "friendly_name": "Test fan",
            "supported_features": 3,
            "speed": "high",
            "speed_list": ["off", "low", "high"],
            "oscillating": True,
        },
    )
    # To trigger event listener
    await hass.async_block_till_done()
    assert len(aioclient_mock.mock_calls) == 1
    call = aioclient_mock.mock_calls
    call_json = call[0][2]
    assert call_json["event"]["header"]["namespace"] == "Alexa"
    assert call_json["event"]["header"]["name"] == "ChangeReport"
    change_reports = call_json["event"]["payload"]["change"]["properties"]
    for report in change_reports:
        if report["name"] == "toggleState":
            assert report["value"] == "ON"
            assert report["instance"] == "fan.oscillating"
            assert report["namespace"] == "Alexa.ToggleController"
    assert call_json["event"]["endpoint"]["endpointId"] == "fan#test_fan"
async def test_send_add_or_update_message(hass, aioclient_mock):
    """Test sending an AddOrUpdateReport message."""
    aioclient_mock.post(TEST_URL, text="")
    hass.states.async_set(
        "binary_sensor.test_contact",
        "on",
        {"friendly_name": "Test Contact Sensor", "device_class": "door"},
    )
    # "zwave.bla" has no Alexa mapping, so only one endpoint should be sent.
    await state_report.async_send_add_or_update_message(
        hass, DEFAULT_CONFIG, ["binary_sensor.test_contact", "zwave.bla"]
    )
    assert len(aioclient_mock.mock_calls) == 1
    call = aioclient_mock.mock_calls
    call_json = call[0][2]
    assert call_json["event"]["header"]["namespace"] == "Alexa.Discovery"
    assert call_json["event"]["header"]["name"] == "AddOrUpdateReport"
    assert len(call_json["event"]["payload"]["endpoints"]) == 1
    assert (
        call_json["event"]["payload"]["endpoints"][0]["endpointId"]
        == "binary_sensor#test_contact"
    )
async def test_send_delete_message(hass, aioclient_mock):
    """Test sending a DeleteReport message."""
    aioclient_mock.post(TEST_URL, json={"data": "is irrelevant"})
    hass.states.async_set(
        "binary_sensor.test_contact",
        "on",
        {"friendly_name": "Test Contact Sensor", "device_class": "door"},
    )
    # "zwave.bla" has no Alexa mapping, so only one endpoint should be sent.
    await state_report.async_send_delete_message(
        hass, DEFAULT_CONFIG, ["binary_sensor.test_contact", "zwave.bla"]
    )
    assert len(aioclient_mock.mock_calls) == 1
    call = aioclient_mock.mock_calls
    call_json = call[0][2]
    assert call_json["event"]["header"]["namespace"] == "Alexa.Discovery"
    assert call_json["event"]["header"]["name"] == "DeleteReport"
    assert len(call_json["event"]["payload"]["endpoints"]) == 1
    assert (
        call_json["event"]["payload"]["endpoints"][0]["endpointId"]
        == "binary_sensor#test_contact"
    )
async def test_doorbell_event(hass, aioclient_mock):
    """Test doorbell press reports."""
    aioclient_mock.post(TEST_URL, text="", status=202)
    hass.states.async_set(
        "binary_sensor.test_doorbell",
        "off",
        {"friendly_name": "Test Doorbell Sensor", "device_class": "occupancy"},
    )
    await state_report.async_enable_proactive_mode(hass, DEFAULT_CONFIG)
    # An off -> on transition of a doorbell-mapped sensor should be reported as
    # a DoorbellPress event rather than a generic ChangeReport.
    hass.states.async_set(
        "binary_sensor.test_doorbell",
        "on",
        {"friendly_name": "Test Doorbell Sensor", "device_class": "occupancy"},
    )
    # To trigger event listener
    await hass.async_block_till_done()
    assert len(aioclient_mock.mock_calls) == 1
    call = aioclient_mock.mock_calls
    call_json = call[0][2]
    assert call_json["event"]["header"]["namespace"] == "Alexa.DoorbellEventSource"
    assert call_json["event"]["header"]["name"] == "DoorbellPress"
    assert call_json["event"]["payload"]["cause"]["type"] == "PHYSICAL_INTERACTION"
    assert call_json["event"]["endpoint"]["endpointId"] == "binary_sensor#test_doorbell"
|
from adafruit_mcp230xx.mcp23017 import MCP23017 # pylint: disable=import-error
import board # pylint: disable=import-error
import busio # pylint: disable=import-error
import digitalio # pylint: disable=import-error
import voluptuous as vol
from homeassistant.components.binary_sensor import PLATFORM_SCHEMA, BinarySensorEntity
from homeassistant.const import DEVICE_DEFAULT_NAME
import homeassistant.helpers.config_validation as cv
CONF_INVERT_LOGIC = "invert_logic"
CONF_I2C_ADDRESS = "i2c_address"
CONF_PINS = "pins"
CONF_PULL_MODE = "pull_mode"
MODE_UP = "UP"
MODE_DOWN = "DOWN"
DEFAULT_INVERT_LOGIC = False
DEFAULT_I2C_ADDRESS = 0x20
DEFAULT_PULL_MODE = MODE_UP
_SENSORS_SCHEMA = vol.Schema({cv.positive_int: cv.string})
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_PINS): _SENSORS_SCHEMA,
vol.Optional(CONF_INVERT_LOGIC, default=DEFAULT_INVERT_LOGIC): cv.boolean,
vol.Optional(CONF_PULL_MODE, default=DEFAULT_PULL_MODE): vol.All(
vol.Upper, vol.In([MODE_UP, MODE_DOWN])
),
vol.Optional(CONF_I2C_ADDRESS, default=DEFAULT_I2C_ADDRESS): vol.Coerce(int),
}
)
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up the MCP23017 binary sensors.

    Opens the I2C bus, attaches the MCP23017 expander at the configured
    address and registers one binary sensor per configured pin.
    """
    i2c = busio.I2C(board.SCL, board.SDA)
    mcp = MCP23017(i2c, address=config[CONF_I2C_ADDRESS])
    pull_mode = config[CONF_PULL_MODE]
    invert_logic = config[CONF_INVERT_LOGIC]
    binary_sensors = [
        MCP23017BinarySensor(pin_name, mcp.get_pin(pin_num), pull_mode, invert_logic)
        for pin_num, pin_name in config[CONF_PINS].items()
    ]
    add_devices(binary_sensors, True)
class MCP23017BinarySensor(BinarySensorEntity):
    """Represent a binary sensor that uses MCP23017."""

    def __init__(self, name, pin, pull_mode, invert_logic):
        """Initialize the MCP23017 binary sensor.

        Args:
            name: Friendly name for the sensor (falls back to the default).
            pin: digitalio pin object obtained from MCP23017.get_pin().
            pull_mode: "UP" or "DOWN" (validated by PLATFORM_SCHEMA).
            invert_logic: Invert the reported on/off state.
        """
        self._name = name or DEVICE_DEFAULT_NAME
        self._pin = pin
        self._pull_mode = pull_mode
        self._invert_logic = invert_logic
        self._state = None
        self._pin.direction = digitalio.Direction.INPUT
        # Bug fix: the configured pull mode was previously ignored and the
        # internal resistor was always set to pull-up.
        # NOTE(review): some MCP23017 silicon only provides pull-up resistors;
        # digitalio raises ValueError for Pull.DOWN on such chips — confirm
        # against the target hardware.
        if self._pull_mode == MODE_UP:
            self._pin.pull = digitalio.Pull.UP
        else:
            self._pin.pull = digitalio.Pull.DOWN

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def is_on(self):
        """Return the state of the entity, honoring invert_logic."""
        return self._state != self._invert_logic

    def update(self):
        """Update the GPIO state."""
        self._state = self._pin.value
|
import asyncio
from datetime import timedelta
import errno
import logging
from wiffi import WiffiTcpServer
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_PORT, CONF_TIMEOUT
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import device_registry
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.util.dt import utcnow
from .const import (
CHECK_ENTITIES_SIGNAL,
CREATE_ENTITY_SIGNAL,
DEFAULT_TIMEOUT,
DOMAIN,
UPDATE_ENTITY_SIGNAL,
)
_LOGGER = logging.getLogger(__name__)
PLATFORMS = ["sensor", "binary_sensor"]
async def async_setup(hass: HomeAssistant, config: dict):
    """Set up the wiffi component. config contains data from configuration.yaml."""
    # Nothing to do for YAML setup; everything happens in async_setup_entry.
    return True
async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry):
    """Set up wiffi from a config entry, config_entry contains data from config entry database."""
    if not config_entry.update_listeners:
        config_entry.add_update_listener(async_update_options)
    # create api object
    api = WiffiIntegrationApi(hass)
    api.async_setup(config_entry)
    # store api object
    hass.data.setdefault(DOMAIN, {})[config_entry.entry_id] = api
    try:
        await api.server.start_server()
    except OSError as exc:
        # Port already bound: let HA retry the entry later. Any other OSError
        # is treated as a permanent setup failure.
        if exc.errno != errno.EADDRINUSE:
            _LOGGER.error("Start_server failed, errno: %d", exc.errno)
            return False
        _LOGGER.error("Port %s already in use", config_entry.data[CONF_PORT])
        raise ConfigEntryNotReady from exc
    for component in PLATFORMS:
        hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(config_entry, component)
        )
    return True
async def async_update_options(hass: HomeAssistant, config_entry: ConfigEntry):
    """Update options."""
    # Reload the entry so changed options (e.g. timeout) take effect.
    await hass.config_entries.async_reload(config_entry.entry_id)
async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry):
    """Unload a config entry.

    Stops the TCP server, unloads all platforms and, when every platform
    unloaded successfully, removes and shuts down the stored api object.
    """
    api: "WiffiIntegrationApi" = hass.data[DOMAIN][config_entry.entry_id]
    await api.server.close_server()
    results = await asyncio.gather(
        *(
            hass.config_entries.async_forward_entry_unload(config_entry, platform)
            for platform in PLATFORMS
        )
    )
    if not all(results):
        return False
    hass.data[DOMAIN].pop(config_entry.entry_id).shutdown()
    return True
def generate_unique_id(device, metric):
    """Generate a unique string for the entity.

    The id is the device MAC address with the colons stripped, joined to the
    metric name with a dash.
    """
    normalized_mac = device.mac_address.replace(":", "")
    return "{}-{}".format(normalized_mac, metric.name)
class WiffiIntegrationApi:
    """API object for wiffi handling. Stored in hass.data."""

    def __init__(self, hass):
        """Initialize the instance."""
        self._hass = hass
        self._server = None  # WiffiTcpServer, created in async_setup()
        self._known_devices = {}  # mac address -> set of metric ids seen so far
        self._periodic_callback = None  # remove-listener handle for the timer

    def async_setup(self, config_entry):
        """Set up api instance."""
        self._server = WiffiTcpServer(config_entry.data[CONF_PORT], self)
        # Tick every 10s so entities can check their expiration dates.
        self._periodic_callback = async_track_time_interval(
            self._hass, self._periodic_tick, timedelta(seconds=10)
        )

    def shutdown(self):
        """Shutdown wiffi api.

        Remove listener for periodic callbacks.
        """
        remove_listener = self._periodic_callback
        if remove_listener is not None:
            remove_listener()

    async def __call__(self, device, metrics):
        """Process callback from TCP server if new data arrives from a device."""
        if device.mac_address not in self._known_devices:
            # add empty set for new device
            self._known_devices[device.mac_address] = set()
        for metric in metrics:
            if metric.id not in self._known_devices[device.mac_address]:
                # First time this metric is seen: ask platforms to create
                # a matching entity.
                self._known_devices[device.mac_address].add(metric.id)
                async_dispatcher_send(self._hass, CREATE_ENTITY_SIGNAL, device, metric)
            else:
                # Known metric: route the new value to the existing entity.
                async_dispatcher_send(
                    self._hass,
                    f"{UPDATE_ENTITY_SIGNAL}-{generate_unique_id(device, metric)}",
                    device,
                    metric,
                )

    @property
    def server(self):
        """Return TCP server instance for start + close."""
        return self._server

    @callback
    def _periodic_tick(self, now=None):
        """Check if any entity has timed out because it has not been updated."""
        async_dispatcher_send(self._hass, CHECK_ENTITIES_SIGNAL)
class WiffiEntity(Entity):
    """Common functionality for all wiffi entities."""

    def __init__(self, device, metric, options):
        """Initialize the base elements of a wiffi entity."""
        self._id = generate_unique_id(device, metric)
        # Device info is shared between all entities of one wiffi device.
        self._device_info = {
            "connections": {
                (device_registry.CONNECTION_NETWORK_MAC, device.mac_address)
            },
            "identifiers": {(DOMAIN, device.mac_address)},
            "manufacturer": "stall.biz",
            "name": f"{device.moduletype} {device.mac_address}",
            "model": device.moduletype,
            "sw_version": device.sw_version,
        }
        self._name = metric.description
        self._expiration_date = None  # set by reset_expiration_date()
        self._value = None  # None marks the entity as unavailable
        self._timeout = options.get(CONF_TIMEOUT, DEFAULT_TIMEOUT)

    async def async_added_to_hass(self):
        """Entity has been added to hass."""
        # Receive value updates addressed to this entity's unique id.
        self.async_on_remove(
            async_dispatcher_connect(
                self.hass,
                f"{UPDATE_ENTITY_SIGNAL}-{self._id}",
                self._update_value_callback,
            )
        )
        # Periodic expiration check broadcast by WiffiIntegrationApi.
        self.async_on_remove(
            async_dispatcher_connect(
                self.hass, CHECK_ENTITIES_SIGNAL, self._check_expiration_date
            )
        )

    @property
    def should_poll(self):
        """Disable polling because data driven ."""
        return False

    @property
    def device_info(self):
        """Return wiffi device info which is shared between all entities of a device."""
        return self._device_info

    @property
    def unique_id(self):
        """Return unique id for entity."""
        return self._id

    @property
    def name(self):
        """Return entity name."""
        return self._name

    @property
    def available(self):
        """Return true if value is valid."""
        return self._value is not None

    def reset_expiration_date(self):
        """Reset value expiration date.

        Will be called by derived classes after a value update has been received.
        """
        self._expiration_date = utcnow() + timedelta(minutes=self._timeout)

    @callback
    def _update_value_callback(self, device, metric):
        """Update the value of the entity."""
        # Intentionally empty here; presumably overridden by the sensor /
        # binary_sensor subclasses — confirm against the platform modules.

    @callback
    def _check_expiration_date(self):
        """Periodically check if entity value has been updated.

        If there are no more updates from the wiffi device, the value will be
        set to unavailable.
        """
        if (
            self._value is not None
            and self._expiration_date is not None
            and utcnow() > self._expiration_date
        ):
            self._value = None
            self.async_write_ha_state()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import argparse
import logging
import os
import re

from pyspark.sql import SparkSession
from pyspark.sql.utils import AnalysisException
def main():
    """Register Parquet-backed tables in the Hive metastore via Spark.

    Expects two CLI arguments: the root directory holding one
    sub-directory per table, and a comma-separated list of table names.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('root_dir')
    parser.add_argument('tables', type=lambda csv: csv.split(','))
    args = parser.parse_args()

    spark = (SparkSession.builder
             .appName('Setup Spark tables')
             .enableHiveSupport()
             .getOrCreate())

    for table in args.tables:
        # Table names are interpolated into SQL statements below; allow
        # only plain identifiers so a malformed CLI argument cannot break
        # (or inject into) the statements.
        if not re.match(r'\w+\Z', table):
            raise ValueError('invalid table name: {!r}'.format(table))
        logging.info('Creating table %s', table)
        table_dir = os.path.join(args.root_dir, table)
        # Clean up any previous version of the table.
        spark.sql('DROP TABLE IF EXISTS ' + table)
        # Register the new table against the on-disk Parquet data.
        spark.catalog.createTable(table, table_dir, source='parquet')
        try:
            # This loads the partitions under the table if table is
            # partitioned.
            spark.sql('MSCK REPAIR TABLE ' + table)
        except AnalysisException:
            # The table was not partitioned, which was presumably expected
            pass
        # Compute column statistics. Spark persists them in the TBL_PARAMS
        # table of the Hive Metastore. I do not believe this interoperates
        # with Hive's own statistics. See
        # https://jaceklaskowski.gitbooks.io/mastering-spark-sql/content/spark-sql-LogicalPlan-AnalyzeColumnCommand.html
        columns = ','.join(spark.table(table).columns)
        spark.sql(
            'ANALYZE TABLE {} COMPUTE STATISTICS FOR COLUMNS {}'.format(
                table, columns))


if __name__ == '__main__':
    main()
|
import datetime
import sys
import threading
import time
import cherrypy
from cherrypy.lib import cptools, httputil
class Cache(object):
    """Base class for Cache implementations.

    Defines the interface shared by all caches; every method must be
    overridden by a concrete subclass (e.g. MemoryCache).
    """

    def get(self):
        """Return the current variant if in the cache, else None."""
        raise NotImplementedError

    def put(self, obj, size):
        """Store the current variant (of the given size in bytes) in the cache."""
        raise NotImplementedError

    def delete(self):
        """Remove ALL cached variants of the current resource."""
        raise NotImplementedError

    def clear(self):
        """Reset the cache to its initial, empty state."""
        raise NotImplementedError
# ------------------------------ Memory Cache ------------------------------- #
class AntiStampedeCache(dict):
    """A storage system for cached items which reduces stampede collisions.

    A "stampede" happens when many threads miss the same key at once and
    all recompute the value. This dict stores a threading.Event sentinel
    for a key that is being computed; other threads wait on it instead of
    recomputing.
    """

    def wait(self, key, timeout=5, debug=False):
        """Return the cached value for the given key, or None.

        If timeout is not None, and the value is already
        being calculated by another thread, wait until the given timeout has
        elapsed. If the value is available before the timeout expires, it is
        returned. If not, None is returned, and a sentinel placed in the cache
        to signal other threads to wait.

        If timeout is None, no waiting is performed nor sentinels used.
        """
        value = self.get(key)
        if isinstance(value, threading.Event):
            # Another thread is (or was) computing this value.
            if timeout is None:
                # Ignore the other thread and recalc it ourselves.
                if debug:
                    cherrypy.log('No timeout', 'TOOLS.CACHING')
                return None

            # Wait until it's done or times out.
            if debug:
                cherrypy.log('Waiting up to %s seconds' %
                             timeout, 'TOOLS.CACHING')
            value.wait(timeout)
            if value.result is not None:
                # The other thread finished its calculation. Use it.
                if debug:
                    cherrypy.log('Result!', 'TOOLS.CACHING')
                return value.result

            # Timed out. Stick an Event in the slot so other threads wait
            # on this one to finish calculating the value.
            if debug:
                cherrypy.log('Timed out', 'TOOLS.CACHING')
            e = threading.Event()
            e.result = None
            dict.__setitem__(self, key, e)

            return None
        elif value is None:
            # First miss for this key: stick an Event in the slot so other
            # threads wait on this one to finish calculating the value.
            # NOTE(review): the debug message says 'Timed out' although no
            # timeout happened on this path — kept unchanged because it is
            # runtime output.
            if debug:
                cherrypy.log('Timed out', 'TOOLS.CACHING')
            e = threading.Event()
            e.result = None
            dict.__setitem__(self, key, e)
        # Either a real cached value or None (first miss).
        return value

    def __setitem__(self, key, value):
        """Set the cached value for the given key.

        If a sentinel Event currently occupies the slot, publish the new
        value through it so threads blocked in wait() get it immediately.
        """
        existing = self.get(key)
        dict.__setitem__(self, key, value)
        if isinstance(existing, threading.Event):
            # Set Event.result so other threads waiting on it have
            # immediate access without needing to poll the cache again.
            existing.result = value
            existing.set()
class MemoryCache(Cache):
    """An in-memory cache for varying response content.

    Each key in self.store is a URI, and each value is an AntiStampedeCache.
    The response for any given URI may vary based on the values of
    "selecting request headers"; that is, those named in the Vary
    response header. We assume the list of header names to be constant
    for each URI throughout the lifetime of the application, and store
    that list in ``self.store[uri].selecting_headers``.

    The items contained in ``self.store[uri]`` have keys which are tuples of
    request header values (in the same order as the names in its
    selecting_headers), and values which are the actual responses.
    """

    maxobjects = 1000
    """The maximum number of cached objects; defaults to 1000."""

    maxobj_size = 100000
    """The maximum size of each cached object in bytes; defaults to 100 KB."""

    maxsize = 10000000
    """The maximum size of the entire cache in bytes; defaults to 10 MB."""

    delay = 600
    """Seconds until the cached content expires; defaults to 600 (10 minutes).
    """

    antistampede_timeout = 5
    """Seconds to wait for other threads to release a cache lock."""

    expire_freq = 0.1
    """Seconds to sleep between cache expiration sweeps."""

    # When True, cache decisions are logged via cherrypy.log.
    debug = False

    def __init__(self):
        """Initialize an empty cache and start the expiration thread."""
        self.clear()

        # Run self.expire_cache in a separate daemon thread.
        t = threading.Thread(target=self.expire_cache, name='expire_cache')
        self.expiration_thread = t
        t.daemon = True
        t.start()

    def clear(self):
        """Reset the cache to its initial, empty state."""
        self.store = {}        # uri -> AntiStampedeCache of variants
        self.expirations = {}  # expiration time -> [(size, uri, header values)]
        # Hit/miss and size accounting, exposed for monitoring.
        self.tot_puts = 0
        self.tot_gets = 0
        self.tot_hist = 0
        self.tot_expires = 0
        self.tot_non_modified = 0
        self.cursize = 0

    def expire_cache(self):
        """Continuously examine cached objects, expiring stale ones.

        This function is designed to be run in its own daemon thread,
        referenced at ``self.expiration_thread``.
        """
        # It's possible that "time" will be set to None
        # arbitrarily, so we check "while time" to avoid exceptions.
        # See tickets #99 and #180 for more information.
        while time:
            now = time.time()
            # Must make a copy of expirations so it doesn't change size
            # during iteration
            for expiration_time, objects in self.expirations.copy().items():
                if expiration_time <= now:
                    for obj_size, uri, sel_header_values in objects:
                        try:
                            del self.store[uri][tuple(sel_header_values)]
                            self.tot_expires += 1
                            self.cursize -= obj_size
                        except KeyError:
                            # the key may have been deleted elsewhere
                            pass
                    del self.expirations[expiration_time]
            time.sleep(self.expire_freq)

    def get(self):
        """Return the current variant if in the cache, else None."""
        request = cherrypy.serving.request
        self.tot_gets += 1

        uri = cherrypy.url(qs=request.query_string)
        uricache = self.store.get(uri)
        if uricache is None:
            return None

        # Select the variant by the values of the Vary headers; wait() may
        # block briefly if another thread is computing the same variant.
        header_values = [request.headers.get(h, '')
                         for h in uricache.selecting_headers]
        variant = uricache.wait(key=tuple(sorted(header_values)),
                                timeout=self.antistampede_timeout,
                                debug=self.debug)
        if variant is not None:
            self.tot_hist += 1
        return variant

    def put(self, variant, size):
        """Store the current variant in the cache."""
        request = cherrypy.serving.request
        response = cherrypy.serving.response

        uri = cherrypy.url(qs=request.query_string)
        uricache = self.store.get(uri)
        if uricache is None:
            uricache = AntiStampedeCache()
            # The response's Vary headers name the request headers that
            # select a variant; assumed constant per URI (class docstring).
            uricache.selecting_headers = [
                e.value for e in response.headers.elements('Vary')]
            self.store[uri] = uricache

        if len(self.store) < self.maxobjects:
            total_size = self.cursize + size

            # checks if there's space for the object
            if (size < self.maxobj_size and total_size < self.maxsize):
                # add to the expirations list
                expiration_time = response.time + self.delay
                bucket = self.expirations.setdefault(expiration_time, [])
                bucket.append((size, uri, uricache.selecting_headers))

                # add to the cache
                header_values = [request.headers.get(h, '')
                                 for h in uricache.selecting_headers]
                uricache[tuple(sorted(header_values))] = variant
                self.tot_puts += 1
                self.cursize = total_size

    def delete(self):
        """Remove ALL cached variants of the current resource."""
        uri = cherrypy.url(qs=cherrypy.serving.request.query_string)
        self.store.pop(uri, None)
def get(invalid_methods=('POST', 'PUT', 'DELETE'), debug=False, **kwargs):
    """Try to obtain cached output. If fresh enough, raise HTTPError(304).

    If POST, PUT, or DELETE:
        * invalidates (deletes) any cached response for this resource
        * sets request.cached = False
        * sets request.cacheable = False

    else if a cached copy exists:
        * sets request.cached = True
        * sets request.cacheable = False
        * sets response.headers to the cached values
        * checks the cached Last-Modified response header against the
          current If-(Un)Modified-Since request headers; raises 304
          if necessary.
        * sets response.status and response.body to the cached values
        * returns True

    otherwise:
        * sets request.cached = False
        * sets request.cacheable = True
        * returns False

    Remaining **kwargs are applied as attributes on the process-wide cache
    object the first time it is created.
    """
    request = cherrypy.serving.request
    response = cherrypy.serving.response

    if not hasattr(cherrypy, '_cache'):
        # Make a process-wide Cache object.
        cherrypy._cache = kwargs.pop('cache_class', MemoryCache)()

        # Take all remaining kwargs and set them on the Cache object.
        for k, v in kwargs.items():
            setattr(cherrypy._cache, k, v)
    cherrypy._cache.debug = debug

    # POST, PUT, DELETE should invalidate (delete) the cached copy.
    # See http://www.w3.org/Protocols/rfc2616/rfc2616-sec13.html#sec13.10.
    if request.method in invalid_methods:
        if debug:
            cherrypy.log('request.method %r in invalid_methods %r' %
                         (request.method, invalid_methods), 'TOOLS.CACHING')
        cherrypy._cache.delete()
        request.cached = False
        request.cacheable = False
        return False

    # Honor a client-sent "Pragma: no-cache" by skipping the cache lookup.
    if 'no-cache' in [e.value for e in request.headers.elements('Pragma')]:
        request.cached = False
        request.cacheable = True
        return False

    cache_data = cherrypy._cache.get()
    request.cached = bool(cache_data)
    request.cacheable = not request.cached
    if request.cached:
        # Serve the cached copy.
        max_age = cherrypy._cache.delay
        # The client may further restrict freshness via Cache-Control.
        for v in [e.value for e in request.headers.elements('Cache-Control')]:
            atoms = v.split('=', 1)
            directive = atoms.pop(0)
            if directive == 'max-age':
                if len(atoms) != 1 or not atoms[0].isdigit():
                    raise cherrypy.HTTPError(
                        400, 'Invalid Cache-Control header')
                max_age = int(atoms[0])
                break
            elif directive == 'no-cache':
                if debug:
                    cherrypy.log(
                        'Ignoring cache due to Cache-Control: no-cache',
                        'TOOLS.CACHING')
                request.cached = False
                request.cacheable = True
                return False

        if debug:
            cherrypy.log('Reading response from cache', 'TOOLS.CACHING')
        # Cached tuple layout is (status, headers, body, creation time);
        # see tee_output() which stores it in this order.
        s, h, b, create_time = cache_data
        age = int(response.time - create_time)
        if (age > max_age):
            if debug:
                cherrypy.log('Ignoring cache due to age > %d' % max_age,
                             'TOOLS.CACHING')
            request.cached = False
            request.cacheable = True
            return False

        # Copy the response headers. See
        # https://github.com/cherrypy/cherrypy/issues/721.
        response.headers = rh = httputil.HeaderMap()
        for k in h:
            dict.__setitem__(rh, k, dict.__getitem__(h, k))

        # Add the required Age header
        response.headers['Age'] = str(age)

        try:
            # Note that validate_since depends on a Last-Modified header;
            # this was put into the cached copy, and should have been
            # resurrected just above (response.headers = cache_data[1]).
            cptools.validate_since()
        except cherrypy.HTTPRedirect:
            x = sys.exc_info()[1]
            if x.status == 304:
                cherrypy._cache.tot_non_modified += 1
            raise

        # serve it & get out from the request
        response.status = s
        response.body = b
    else:
        if debug:
            cherrypy.log('request is not cached', 'TOOLS.CACHING')
    return request.cached
def tee_output():
    """Tee response output to cache storage. Internal."""
    # Used by CachingTool by attaching to request.hooks
    request = cherrypy.serving.request
    if 'no-store' in request.headers.values('Cache-Control'):
        # The client explicitly forbids storing this response.
        return

    def tee(body):
        """Tee response.body into a list.

        ``response`` here is the enclosing function's local, bound below
        before this generator is first iterated.
        """
        if ('no-cache' in response.headers.values('Pragma') or
                'no-store' in response.headers.values('Cache-Control')):
            # Response says don't cache: pass chunks through untouched.
            for chunk in body:
                yield chunk
            return

        output = []
        for chunk in body:
            output.append(chunk)
            yield chunk

        # Save the cache data, but only if the body isn't empty.
        # e.g. a 304 Not Modified on a static file response will
        # have an empty body.
        # If the body is empty, delete the cache because it
        # contains a stale Threading._Event object that will
        # stall all consecutive requests until the _Event times
        # out
        body = b''.join(output)
        if not body:
            cherrypy._cache.delete()
        else:
            cherrypy._cache.put((response.status, response.headers or {},
                                 body, response.time), len(body))

    response = cherrypy.serving.response
    response.body = tee(response.body)
def expires(secs=0, force=False, debug=False):
    """Tool for influencing cache mechanisms using the 'Expires' header.

    secs
        Must be either an int or a datetime.timedelta, and indicates the
        number of seconds between response.time and when the response should
        expire. The 'Expires' header will be set to response.time + secs.
        If secs is zero, the 'Expires' header is set one year in the past, and
        the following "cache prevention" headers are also set:

            * Pragma: no-cache
            * Cache-Control': no-cache, must-revalidate

    force
        If False, the following headers are checked:

            * Etag
            * Last-Modified
            * Age
            * Expires

        If any are already present, none of the above response headers are set.
    """
    response = cherrypy.serving.response
    headers = response.headers

    # Unless forced, the response counts as cacheable only if one of
    # these headers is already present.
    cacheable = False
    if not force:
        cacheable = any(
            indicator in headers
            for indicator in ('Etag', 'Last-Modified', 'Age', 'Expires')
        )

    if not (cacheable or force):
        if debug:
            cherrypy.log('request is not cacheable', 'TOOLS.EXPIRES')
        return

    if debug:
        cherrypy.log('request is cacheable', 'TOOLS.EXPIRES')

    if isinstance(secs, datetime.timedelta):
        secs = (86400 * secs.days) + secs.seconds

    if secs == 0:
        # Cache prevention: Pragma/Cache-Control plus an Expires in the past.
        if force or ('Pragma' not in headers):
            headers['Pragma'] = 'no-cache'
        if cherrypy.serving.request.protocol >= (1, 1):
            if force or 'Cache-Control' not in headers:
                headers['Cache-Control'] = 'no-cache, must-revalidate'
        # Set an explicit Expires date in the past.
        expiry = httputil.HTTPDate(1169942400.0)
    else:
        expiry = httputil.HTTPDate(response.time + secs)

    if force or 'Expires' not in headers:
        headers['Expires'] = expiry
|
import abc
import numpy as np
class BaseMetric(abc.ABC):
    """Metric base class.

    Subclasses implement ``__call__`` to evaluate predictions against
    ground truth, and ``__repr__`` for a readable representation.
    """

    # Canonical name of the metric.
    ALIAS = 'base_metric'

    @abc.abstractmethod
    def __call__(self, y_true: np.array, y_pred: np.array) -> float:
        """
        Call to compute the metric.

        :param y_true: An array of ground truth labels.
        :param y_pred: An array of predicted values.
        :return: Evaluation of the metric.
        """

    @abc.abstractmethod
    def __repr__(self):
        """:return: Formatted string representation of the metric."""

    def __eq__(self, other):
        """:return: ``True`` if two metrics are equal, ``False`` otherwise."""
        same_type = type(self) is type(other)
        return same_type and vars(self) == vars(other)

    def __hash__(self):
        """:return: Hashing value using the metric as `str`."""
        return hash(str(self))
def sort_and_couple(labels: np.array, scores: np.array) -> np.array:
    """Zip the `labels` with `scores` into a single list.

    Pairs are ordered by score, highest first.
    """
    pairs = sorted(zip(labels, scores), key=lambda pair: pair[1], reverse=True)
    return np.array(pairs)
|
import asyncio
from datetime import timedelta
import logging
from typing import Dict
from Plugwise_Smile.Smile import Smile
import async_timeout
import voluptuous as vol
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_SCAN_INTERVAL
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
UpdateFailed,
)
from .const import (
ALL_PLATFORMS,
COORDINATOR,
DEFAULT_PORT,
DEFAULT_SCAN_INTERVAL,
DOMAIN,
SENSOR_PLATFORMS,
UNDO_UPDATE_LISTENER,
)
# This integration is set up via config entries; any YAML under the domain
# is accepted (and ignored) rather than rejected.
CONFIG_SCHEMA = vol.Schema({DOMAIN: vol.Schema({})}, extra=vol.ALLOW_EXTRA)

_LOGGER = logging.getLogger(__name__)
async def async_setup_entry_gw(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up Plugwise Smiles from a config entry.

    Returns False for permanent auth failures; raises ConfigEntryNotReady
    for transient errors so Home Assistant retries the setup.
    """
    # verify_ssl=False: connection parameters come from the config entry.
    # NOTE(review): presumably the Smile serves a self-signed certificate —
    # confirm before tightening.
    websession = async_get_clientsession(hass, verify_ssl=False)

    api = Smile(
        host=entry.data[CONF_HOST],
        password=entry.data[CONF_PASSWORD],
        port=entry.data.get(CONF_PORT, DEFAULT_PORT),
        timeout=30,
        websession=websession,
    )

    try:
        connected = await api.connect()

        if not connected:
            _LOGGER.error("Unable to connect to Smile")
            raise ConfigEntryNotReady

    except Smile.InvalidAuthentication:
        # Bad credentials are permanent: abort setup instead of retrying.
        _LOGGER.error("Invalid Smile ID")
        return False

    except Smile.PlugwiseError as err:
        _LOGGER.error("Error while communicating to device")
        raise ConfigEntryNotReady from err

    except asyncio.TimeoutError as err:
        _LOGGER.error("Timeout while connecting to Smile")
        raise ConfigEntryNotReady from err

    # Poll interval is user-configurable; the default depends on the type
    # of Smile device.
    update_interval = timedelta(
        seconds=entry.options.get(
            CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL[api.smile_type]
        )
    )

    async def async_update_data():
        """Update data via API endpoint."""
        try:
            async with async_timeout.timeout(10):
                await api.full_update_device()
                return True
        except Smile.XMLDataMissingError as err:
            raise UpdateFailed("Smile update failed") from err

    coordinator = DataUpdateCoordinator(
        hass,
        _LOGGER,
        name="Smile",
        update_method=async_update_data,
        update_interval=update_interval,
    )

    # Fetch initial data so entities have state as soon as they are added.
    await coordinator.async_refresh()

    if not coordinator.last_update_success:
        raise ConfigEntryNotReady

    api.get_all_devices()

    if entry.unique_id is None:
        # NOTE(review): version "1.8.0" is deliberately excluded from
        # getting a hostname-based unique id — presumably that firmware has
        # no usable hostname; confirm before changing.
        if api.smile_version[0] != "1.8.0":
            hass.config_entries.async_update_entry(entry, unique_id=api.smile_hostname)

    undo_listener = entry.add_update_listener(_update_listener)

    hass.data.setdefault(DOMAIN, {})[entry.entry_id] = {
        "api": api,
        COORDINATOR: coordinator,
        UNDO_UPDATE_LISTENER: undo_listener,
    }

    # Register the gateway itself in the device registry.
    device_registry = await dr.async_get_registry(hass)
    device_registry.async_get_or_create(
        config_entry_id=entry.entry_id,
        identifiers={(DOMAIN, api.gateway_id)},
        manufacturer="Plugwise",
        name=entry.title,
        model=f"Smile {api.smile_name}",
        sw_version=api.smile_version[0],
    )

    single_master_thermostat = api.single_master_thermostat()

    # Without any thermostat, only the sensor platforms apply.
    platforms = ALL_PLATFORMS
    if single_master_thermostat is None:
        platforms = SENSOR_PLATFORMS

    for component in platforms:
        hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(entry, component)
        )

    return True
async def _update_listener(hass: HomeAssistant, entry: ConfigEntry):
    """Handle options update.

    Re-applies the polling interval when the user changes the
    scan-interval option on the config entry.
    """
    data = hass.data[DOMAIN][entry.entry_id]
    coordinator = data[COORDINATOR]
    # Fall back to the smile-type default (as the setup path does) so a
    # missing option does not crash timedelta() with seconds=None.
    seconds = entry.options.get(
        CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL[data["api"].smile_type]
    )
    coordinator.update_interval = timedelta(seconds=seconds)
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
    """Unload a config entry and its platforms."""
    # Unload every platform concurrently; success only if all succeed.
    results = await asyncio.gather(
        *(
            hass.config_entries.async_forward_entry_unload(entry, component)
            for component in ALL_PLATFORMS
        )
    )
    unload_ok = all(results)

    # Detach the options-update listener regardless of the outcome.
    hass.data[DOMAIN][entry.entry_id][UNDO_UPDATE_LISTENER]()

    if unload_ok:
        hass.data[DOMAIN].pop(entry.entry_id)

    return unload_ok
class SmileGateway(CoordinatorEntity):
    """Represent Smile Gateway."""

    def __init__(self, api, coordinator, name, dev_id):
        """Initialise the gateway entity."""
        super().__init__(coordinator)

        self._api = api
        self._name = name
        self._dev_id = dev_id
        self._unique_id = None
        self._model = None
        self._entity_name = self._name

    @property
    def unique_id(self):
        """Return a unique ID."""
        return self._unique_id

    @property
    def name(self):
        """Return the name of the entity, if any."""
        return self._name

    @property
    def device_info(self) -> Dict[str, any]:
        """Return the device information."""
        info = {
            "identifiers": {(DOMAIN, self._dev_id)},
            "name": self._entity_name,
            "manufacturer": "Plugwise",
        }
        if self._model is not None:
            # e.g. "smart_meter" -> "Smart Meter"
            info["model"] = self._model.replace("_", " ").title()
        if self._dev_id != self._api.gateway_id:
            # Every non-gateway device hangs off the gateway.
            info["via_device"] = (DOMAIN, self._api.gateway_id)
        return info

    async def async_added_to_hass(self):
        """Subscribe to coordinator updates."""
        # Process the data already fetched, then listen for refreshes.
        self._async_process_data()
        self.async_on_remove(
            self.coordinator.async_add_listener(self._async_process_data)
        )

    @callback
    def _async_process_data(self):
        """Interpret and process API data; implemented by subclasses."""
        raise NotImplementedError
|
from datetime import timedelta
import logging
from nx584 import client
import requests
import voluptuous as vol
import homeassistant.components.alarm_control_panel as alarm
from homeassistant.components.alarm_control_panel import PLATFORM_SCHEMA
from homeassistant.components.alarm_control_panel.const import (
SUPPORT_ALARM_ARM_AWAY,
SUPPORT_ALARM_ARM_HOME,
)
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PORT,
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_DISARMED,
STATE_ALARM_TRIGGERED,
)
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers import config_validation as cv, entity_platform
_LOGGER = logging.getLogger(__name__)

# Poll the panel every 10 seconds.
SCAN_INTERVAL = timedelta(seconds=10)

DEFAULT_HOST = "localhost"
DEFAULT_NAME = "NX584"
DEFAULT_PORT = 5007

# Entity services for (un)bypassing individual zones.
SERVICE_BYPASS_ZONE = "bypass_zone"
SERVICE_UNBYPASS_ZONE = "unbypass_zone"
ATTR_ZONE = "zone"

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
        vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
    }
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the NX584 platform."""
    name = config.get(CONF_NAME)
    url = f"http://{config.get(CONF_HOST)}:{config.get(CONF_PORT)}"

    try:
        alarm_client = client.Client(url)
        # Probe the connection once so setup can be retried on failure.
        await hass.async_add_executor_job(alarm_client.list_zones)
    except requests.exceptions.ConnectionError as ex:
        _LOGGER.error(
            "Unable to connect to %(host)s: %(reason)s",
            {"host": url, "reason": ex},
        )
        raise PlatformNotReady from ex

    async_add_entities([NX584Alarm(name, alarm_client, url)])

    # Register the bypass/unbypass entity services.
    platform = entity_platform.current_platform.get()
    platform.async_register_entity_service(
        SERVICE_BYPASS_ZONE,
        {vol.Required(ATTR_ZONE): cv.positive_int},
        "alarm_bypass",
    )
    platform.async_register_entity_service(
        SERVICE_UNBYPASS_ZONE,
        {vol.Required(ATTR_ZONE): cv.positive_int},
        "alarm_unbypass",
    )
class NX584Alarm(alarm.AlarmControlPanelEntity):
    """Representation of a NX584-based alarm panel."""

    def __init__(self, name, alarm_client, url):
        """Init the nx584 alarm panel."""
        self._name = name
        self._state = None
        self._alarm = alarm_client
        self._url = url

    @property
    def name(self):
        """Return the name of the device."""
        return self._name

    @property
    def code_format(self):
        """Return one or more digits/characters."""
        return alarm.FORMAT_NUMBER

    @property
    def state(self):
        """Return the state of the device."""
        return self._state

    @property
    def supported_features(self) -> int:
        """Return the list of supported features."""
        return SUPPORT_ALARM_ARM_HOME | SUPPORT_ALARM_ARM_AWAY

    def update(self):
        """Process new events from panel."""
        try:
            part = self._alarm.list_partitions()[0]
            zones = self._alarm.list_zones()
        except requests.exceptions.ConnectionError as ex:
            _LOGGER.error(
                "Unable to connect to %(host)s: %(reason)s",
                {"host": self._url, "reason": ex},
            )
            self._state = None
            # Bail out: without partition data the checks below would
            # raise NameError on the unbound `part`.
            return
        except IndexError:
            _LOGGER.error("NX584 reports no partitions")
            self._state = None
            return

        # Any bypassed zone means the panel was armed in "stay" mode.
        bypassed = False
        for zone in zones:
            if zone["bypassed"]:
                _LOGGER.debug(
                    "Zone %(zone)s is bypassed, assuming HOME",
                    {"zone": zone["number"]},
                )
                bypassed = True
                break

        if not part["armed"]:
            self._state = STATE_ALARM_DISARMED
        elif bypassed:
            self._state = STATE_ALARM_ARMED_HOME
        else:
            self._state = STATE_ALARM_ARMED_AWAY

        # An active siren overrides any armed state.
        for flag in part["condition_flags"]:
            if flag == "Siren on":
                self._state = STATE_ALARM_TRIGGERED

    def alarm_disarm(self, code=None):
        """Send disarm command."""
        self._alarm.disarm(code)

    def alarm_arm_home(self, code=None):
        """Send arm home command."""
        self._alarm.arm("stay")

    def alarm_arm_away(self, code=None):
        """Send arm away command."""
        self._alarm.arm("exit")

    def alarm_bypass(self, zone):
        """Send bypass command for the given zone."""
        self._alarm.set_bypass(zone, True)

    def alarm_unbypass(self, zone):
        """Send unbypass command for the given zone."""
        self._alarm.set_bypass(zone, False)
|
import typing
import keras
import matchzoo
from matchzoo.engine.base_model import BaseModel
from matchzoo.engine.param import Param
from matchzoo.engine.param_table import ParamTable
from matchzoo.engine import hyper_spaces
class MatchPyramid(BaseModel):
    """
    MatchPyramid Model.

    Examples:
        >>> model = MatchPyramid()
        >>> model.params['embedding_output_dim'] = 300
        >>> model.params['num_blocks'] = 2
        >>> model.params['kernel_count'] = [16, 32]
        >>> model.params['kernel_size'] = [[3, 3], [3, 3]]
        >>> model.params['dpool_size'] = [3, 10]
        >>> model.guess_and_fill_missing_params(verbose=0)
        >>> model.build()

    """

    @classmethod
    def get_default_params(cls) -> ParamTable:
        """:return: model default parameters."""
        params = super().get_default_params(with_embedding=True)
        params.add(Param(name='num_blocks', value=1,
                         desc="Number of convolution blocks."))
        # kernel_count and kernel_size are per-block lists; their length
        # must match num_blocks (see the loop in build()).
        params.add(Param(name='kernel_count', value=[32],
                         desc="The kernel count of the 2D convolution "
                              "of each block."))
        params.add(Param(name='kernel_size', value=[[3, 3]],
                         desc="The kernel size of the 2D convolution "
                              "of each block."))
        params.add(Param(name='activation', value='relu',
                         desc="The activation function."))
        params.add(Param(name='dpool_size', value=[3, 10],
                         desc="The max-pooling size of each block."))
        params.add(Param(
            name='padding', value='same',
            desc="The padding mode in the convolution layer."
        ))
        params.add(Param(
            name='dropout_rate', value=0.0,
            hyper_space=hyper_spaces.quniform(low=0.0, high=0.8,
                                              q=0.01),
            desc="The dropout rate."
        ))
        return params

    def build(self):
        """
        Build model structure.

        MatchPyramid text matching as image recognition.
        """
        input_left, input_right = self._make_inputs()
        # Pre-computed dynamic-pooling indices, consumed by
        # DynamicPoolingLayer below.
        input_dpool_index = keras.layers.Input(
            name='dpool_index',
            shape=[self._params['input_shapes'][0][0],
                   self._params['input_shapes'][1][0],
                   2],
            dtype='int32')

        embedding = self._make_embedding_layer()
        embed_left = embedding(input_left)
        embed_right = embedding(input_right)

        # Interaction: build the word-level matching "image".
        matching_layer = matchzoo.layers.MatchingLayer(matching_type='dot')
        embed_cross = matching_layer([embed_left, embed_right])

        # Stacked convolution blocks over the matching image.
        for i in range(self._params['num_blocks']):
            embed_cross = self._conv_block(
                embed_cross,
                self._params['kernel_count'][i],
                self._params['kernel_size'][i],
                self._params['padding'],
                self._params['activation']
            )

        # Dynamic Pooling
        dpool_layer = matchzoo.layers.DynamicPoolingLayer(
            *self._params['dpool_size'])
        embed_pool = dpool_layer([embed_cross, input_dpool_index])

        embed_flat = keras.layers.Flatten()(embed_pool)
        x = keras.layers.Dropout(rate=self._params['dropout_rate'])(embed_flat)

        inputs = [input_left, input_right, input_dpool_index]
        x_out = self._make_output_layer()(x)
        self._backend = keras.Model(inputs=inputs, outputs=x_out)

    @classmethod
    def _conv_block(
        cls, x,
        kernel_count: int,
        kernel_size: typing.Union[int, typing.List[int]],
        padding: str,
        activation: str
    ) -> typing.Any:
        """Apply a single Conv2D block with the given hyper-parameters."""
        output = keras.layers.Conv2D(kernel_count,
                                     kernel_size,
                                     padding=padding,
                                     activation=activation)(x)
        return output
|
import json
import dbus as _dbus
from openrazer.client.fx import RazerFX as _RazerFX
from xml.etree import ElementTree as _ET
from openrazer.client.macro import RazerMacro as _RazerMacro
class RazerDevice(object):
"""
Raw razer base device
"""
_FX = _RazerFX
_MACRO_CLASS = _RazerMacro
def __init__(self, serial, vid_pid=None, daemon_dbus=None):
# Load up the DBus
if daemon_dbus is None:
session_bus = _dbus.SessionBus()
daemon_dbus = session_bus.get_object("org.razer", "/org/razer/device/{0}".format(serial))
self._dbus = daemon_dbus
self._available_features = self._get_available_features()
self._dbus_interfaces = {
'device': _dbus.Interface(self._dbus, "razer.device.misc"),
'brightness': _dbus.Interface(self._dbus, "razer.device.lighting.brightness")
}
self._name = str(self._dbus_interfaces['device'].getDeviceName())
self._type = str(self._dbus_interfaces['device'].getDeviceType())
self._fw = str(self._dbus_interfaces['device'].getFirmware())
self._drv_version = str(self._dbus_interfaces['device'].getDriverVersion())
self._has_dedicated_macro = None
self._device_image = None
# Deprecated API, but kept for backwards compatibility
self._urls = None
if vid_pid is None:
self._vid, self._pid = self._dbus_interfaces['device'].getVidPid()
else:
self._vid, self._pid = vid_pid
self._serial = serial
self._capabilities = {
'name': True,
'type': True,
'firmware_version': True,
'serial': True,
'brightness': self._has_feature('razer.device.lighting.brightness'),
'macro_logic': self._has_feature('razer.device.macro'),
'keyboard_layout': self._has_feature('razer.device.misc', 'getKeyboardLayout'),
# Default device is a chroma so lighting capabilities
'lighting': self._has_feature('razer.device.lighting.chroma'),
'lighting_breath_single': self._has_feature('razer.device.lighting.chroma', 'setBreathSingle'),
'lighting_breath_dual': self._has_feature('razer.device.lighting.chroma', 'setBreathDual'),
'lighting_breath_triple': self._has_feature('razer.device.lighting.chroma', 'setBreathTriple'),
'lighting_breath_random': self._has_feature('razer.device.lighting.chroma', 'setBreathRandom'),
'lighting_wave': self._has_feature('razer.device.lighting.chroma', 'setWave'),
'lighting_reactive': self._has_feature('razer.device.lighting.chroma', 'setReactive'),
'lighting_none': self._has_feature('razer.device.lighting.chroma', 'setNone'),
'lighting_spectrum': self._has_feature('razer.device.lighting.chroma', 'setSpectrum'),
'lighting_static': self._has_feature('razer.device.lighting.chroma', 'setStatic'),
'lighting_starlight_single': self._has_feature('razer.device.lighting.chroma', 'setStarlightSingle'),
'lighting_starlight_dual': self._has_feature('razer.device.lighting.chroma', 'setStarlightDual'),
'lighting_starlight_random': self._has_feature('razer.device.lighting.chroma', 'setStarlightRandom'),
'lighting_ripple': self._has_feature('razer.device.lighting.custom', 'setRipple'), # Thinking of extending custom to do more hence the key check
'lighting_ripple_random': self._has_feature('razer.device.lighting.custom', 'setRippleRandomColour'),
'lighting_pulsate': self._has_feature('razer.device.lighting.bw2013', 'setPulsate'),
# Get if the device has an LED Matrix, == True as its a DBus boolean otherwise, so for consistency sake we coerce it into a native bool
'lighting_led_matrix': self._dbus_interfaces['device'].hasMatrix() == True,
'lighting_led_single': self._has_feature('razer.device.lighting.chroma', 'setKey'),
# Mouse lighting attrs
'lighting_logo': self._has_feature('razer.device.lighting.logo'),
'lighting_logo_active': self._has_feature('razer.device.lighting.logo', 'setLogoActive'),
'lighting_logo_blinking': self._has_feature('razer.device.lighting.logo', 'setLogoBlinking'),
'lighting_logo_brightness': self._has_feature('razer.device.lighting.logo', 'setLogoBrightness'),
'lighting_logo_pulsate': self._has_feature('razer.device.lighting.logo', 'setLogoPulsate'),
'lighting_logo_spectrum': self._has_feature('razer.device.lighting.logo', 'setLogoSpectrum'),
'lighting_logo_static': self._has_feature('razer.device.lighting.logo', 'setLogoStatic'),
'lighting_logo_none': self._has_feature('razer.device.lighting.logo', 'setLogoNone'),
'lighting_logo_reactive': self._has_feature('razer.device.lighting.logo', 'setLogoReactive'),
'lighting_logo_wave': self._has_feature('razer.device.lighting.logo', 'setLogoWave'),
'lighting_logo_breath_single': self._has_feature('razer.device.lighting.logo', 'setLogoBreathSingle'),
'lighting_logo_breath_dual': self._has_feature('razer.device.lighting.logo', 'setLogoBreathDual'),
'lighting_logo_breath_random': self._has_feature('razer.device.lighting.logo', 'setLogoBreathRandom'),
'lighting_scroll': self._has_feature('razer.device.lighting.scroll'),
'lighting_scroll_active': self._has_feature('razer.device.lighting.scroll', 'setScrollActive'),
'lighting_scroll_blinking': self._has_feature('razer.device.lighting.scroll', 'setScrollBlinking'),
'lighting_scroll_brightness': self._has_feature('razer.device.lighting.scroll', 'setScrollBrightness'),
'lighting_scroll_pulsate': self._has_feature('razer.device.lighting.scroll', 'setScrollPulsate'),
'lighting_scroll_spectrum': self._has_feature('razer.device.lighting.scroll', 'setScrollSpectrum'),
'lighting_scroll_static': self._has_feature('razer.device.lighting.scroll', 'setScrollStatic'),
'lighting_scroll_none': self._has_feature('razer.device.lighting.scroll', 'setScrollNone'),
'lighting_scroll_reactive': self._has_feature('razer.device.lighting.scroll', 'setScrollReactive'),
'lighting_scroll_wave': self._has_feature('razer.device.lighting.scroll', 'setScrollWave'),
'lighting_scroll_breath_single': self._has_feature('razer.device.lighting.scroll', 'setScrollBreathSingle'),
'lighting_scroll_breath_dual': self._has_feature('razer.device.lighting.scroll', 'setScrollBreathDual'),
'lighting_scroll_breath_random': self._has_feature('razer.device.lighting.scroll', 'setScrollBreathRandom'),
'lighting_left': self._has_feature('razer.device.lighting.left'),
'lighting_left_active': self._has_feature('razer.device.lighting.left', 'setLeftActive'),
'lighting_left_brightness': self._has_feature('razer.device.lighting.left', 'setLeftBrightness'),
'lighting_left_spectrum': self._has_feature('razer.device.lighting.left', 'setLeftSpectrum'),
'lighting_left_static': self._has_feature('razer.device.lighting.left', 'setLeftStatic'),
'lighting_left_none': self._has_feature('razer.device.lighting.left', 'setLeftNone'),
'lighting_left_reactive': self._has_feature('razer.device.lighting.left', 'setLeftReactive'),
'lighting_left_wave': self._has_feature('razer.device.lighting.left', 'setLeftWave'),
'lighting_left_breath_single': self._has_feature('razer.device.lighting.left', 'setLeftBreathSingle'),
'lighting_left_breath_dual': self._has_feature('razer.device.lighting.left', 'setLeftBreathDual'),
'lighting_left_breath_random': self._has_feature('razer.device.lighting.left', 'setLeftBreathRandom'),
'lighting_right': self._has_feature('razer.device.lighting.right'),
'lighting_right_active': self._has_feature('razer.device.lighting.right', 'setRightActive'),
'lighting_right_brightness': self._has_feature('razer.device.lighting.right', 'setRightBrightness'),
'lighting_right_spectrum': self._has_feature('razer.device.lighting.right', 'setRightSpectrum'),
'lighting_right_static': self._has_feature('razer.device.lighting.right', 'setRightStatic'),
'lighting_right_none': self._has_feature('razer.device.lighting.right', 'setRightNone'),
'lighting_right_reactive': self._has_feature('razer.device.lighting.right', 'setRightReactive'),
'lighting_right_wave': self._has_feature('razer.device.lighting.right', 'setRightWave'),
'lighting_right_breath_single': self._has_feature('razer.device.lighting.right', 'setRightBreathSingle'),
'lighting_right_breath_dual': self._has_feature('razer.device.lighting.right', 'setRightBreathDual'),
'lighting_right_breath_random': self._has_feature('razer.device.lighting.right', 'setRightBreathRandom'),
'lighting_backlight': self._has_feature('razer.device.lighting.backlight'),
'lighting_backlight_active': self._has_feature('razer.device.lighting.backlight', 'setBacklightActive'),
'lighting_profile_led_red': self._has_feature('razer.device.lighting.profile_led', 'setRedLED'),
'lighting_profile_led_green': self._has_feature('razer.device.lighting.profile_led', 'setGreenLED'),
'lighting_profile_led_blue': self._has_feature('razer.device.lighting.profile_led', 'setBlueLED'),
}
# Nasty hack to convert dbus.Int32 into native
self._matrix_dimensions = tuple([int(dim) for dim in self._dbus_interfaces['device'].getMatrixDimensions()])
if self.has('keyboard_layout'):
self._kbd_layout = str(self._dbus_interfaces['device'].getKeyboardLayout())
else:
self._kbd_layout = None
# Setup FX
if self._FX is None:
self.fx = None
else:
self.fx = self._FX(serial, capabilities=self._capabilities, daemon_dbus=daemon_dbus, matrix_dims=self._matrix_dimensions)
# Setup Macro
if self.has('macro_logic'):
if self._MACRO_CLASS is not None:
self.macro = self._MACRO_CLASS(serial, self.name, daemon_dbus=daemon_dbus, capabilities=self._capabilities)
else:
self._capabilities['macro_logic'] = False
self.macro = None
else:
self.macro = None
def _get_available_features(self):
introspect_interface = _dbus.Interface(self._dbus, 'org.freedesktop.DBus.Introspectable')
xml_spec = introspect_interface.Introspect()
root = _ET.fromstring(xml_spec)
interfaces = {}
for child in root:
if child.tag != 'interface' or child.attrib.get('name') == 'org.freedesktop.DBus.Introspectable':
continue
current_interface = child.attrib['name']
current_interface_methods = []
for method in child:
if method.tag != 'method':
continue
current_interface_methods.append(method.attrib.get('name'))
interfaces[current_interface] = current_interface_methods
return interfaces
def _has_feature(self, object_path: str, method_name=None) -> bool:
"""
Checks to see if the device has said DBus method
:param object_path: Object path
:type object_path: str
:param method_name: Method name, or list of methods
:type method_name: str or list or tuple
:return: True if method/s exist
:rtype: bool
"""
if method_name is None:
return object_path in self._available_features
elif isinstance(method_name, str):
return object_path in self._available_features and method_name in self._available_features[object_path]
elif isinstance(method_name, (list, tuple)):
result = True
for method in method_name:
result &= object_path in self._available_features and method in self._available_features[object_path]
return result
else:
return False
def has(self, capability: str) -> bool:
"""
Convenience function to check capability
:param capability: Device capability
:type capability: str
:return: True or False
:rtype: bool
"""
# Could do capability in self._capabilitys but they might be explicitly disabled
return self._capabilities.get(capability, False)
    @property
    def name(self) -> str:
        """
        Device name (cached at construction time)
        :return: Device Name
        :rtype: str
        """
        return self._name
    @property
    def type(self) -> str:
        """
        Get device type, e.g. keyboard/mouse (cached at construction time)
        :return: Device Type
        :rtype: str
        """
        return self._type
    @property
    def firmware_version(self) -> str:
        """
        Device's firmware version (cached at construction time)
        :return: FW Version
        :rtype: str
        """
        return self._fw
    @property
    def driver_version(self) -> str:
        """
        Device's driver version (cached at construction time)
        :return: Driver Version
        :rtype: str
        """
        return self._drv_version
    @property
    def serial(self) -> str:
        """
        Device's serial (cached at construction time)
        :return: Device Serial
        :rtype: str
        """
        return self._serial
    @property
    def keyboard_layout(self) -> str:
        """
        Device's keyboard layout, or None when the device has no
        'keyboard_layout' capability (see __init__)
        :return: Keyboard layout
        :rtype: str
        """
        return self._kbd_layout
@property
def brightness(self) -> float:
"""
Get device brightness
:return: Device brightness
:rtype: float
"""
return self._dbus_interfaces['brightness'].getBrightness()
@brightness.setter
def brightness(self, value: float):
"""
Set device brightness
:param value: Device brightness
:type value: float
:raises ValueError: When brightness is not a float or not in range 0.0->100.0
"""
if isinstance(value, int):
value = float(value)
if not isinstance(value, float):
raise ValueError("Brightness must be a float")
if value < 0.0 or value > 100.0:
raise ValueError("Brightness must be between 0 and 100")
self._dbus_interfaces['brightness'].setBrightness(value)
    @property
    def capabilities(self) -> dict:
        """
        Device capabilities mapping (capability name -> bool)
        :return: Device capabilities
        :rtype: dict
        """
        return self._capabilities
    @property
    def dedicated_macro(self) -> bool:
        """
        Device has dedicated macro keys
        :return: If the device has macro keys
        :rtype: bool
        """
        # Lazily queried over DBus once, then cached for the object lifetime.
        if self._has_dedicated_macro is None:
            self._has_dedicated_macro = self._dbus_interfaces['device'].hasDedicatedMacroKeys()
        return self._has_dedicated_macro
    @property
    def device_image(self) -> str:
        """URL of the device's image, lazily fetched over DBus and cached."""
        if self._device_image is None:
            self._device_image = str(self._dbus_interfaces['device'].getDeviceImage())
        return self._device_image
    @property
    def razer_urls(self) -> dict:
        """Deprecated image-URL dict; all views map to device_image."""
        # Deprecated API, but kept for backwards compatibility
        return {
            "DEPRECATED": True,
            "top_img": self.device_image,
            "side_img": self.device_image,
            "perspective_img": self.device_image
        }
def __str__(self):
return self._name
def __repr__(self):
return '<{0} {1}>'.format(self.__class__.__name__, self._serial)
class BaseDeviceFactory(object):
    """Abstract factory for creating device objects from a serial number."""
    @staticmethod
    def get_device(serial: str, daemon_dbus=None) -> RazerDevice:
        """
        Create a device object for the given serial.

        :param serial: Device serial number
        :param daemon_dbus: Existing DBus connection to reuse (optional)
        :raises NotImplementedError: Always; subclasses must override.
        """
        raise NotImplementedError()
|
import os
import os.path as op
import numpy as np
from traits.api import (Any, HasTraits, HasPrivateTraits, cached_property,
on_trait_change, Array, Bool, Button, DelegatesTo,
Directory, Enum, Event, File, Instance, Int, List,
Property, Str, ArrayOrNone)
from traitsui.api import View, Item, VGroup
from pyface.api import DirectoryDialog, OK, ProgressDialog, error, information
from ._viewer import _DIG_SOURCE_WIDTH
from ..bem import read_bem_surfaces
from ..io.constants import FIFF
from ..io import read_info, read_fiducials
from ..io.meas_info import _empty_info
from ..io.open import fiff_open, dir_tree_find
from ..surface import read_surface, complete_surface_info
from ..coreg import (_is_mri_subject, _mri_subject_has_bem,
create_default_subject)
from ..utils import get_config, set_config
from ..viz._3d import _fiducial_coords
from ..channels import read_dig_fif, DigMontage
fid_wildcard = "*.fif"
trans_wildcard = "*.fif"
# for wx backend:
# fid_wildcard = "Fiducials FIFF file (*.fif)|*.fif"
# trans_wildcard = "Trans File (*.fif)|*.fif"
def _expand_path(p):
    """Return *p* with ``~``, environment variables, and relative components expanded."""
    expanded = op.expanduser(p)
    expanded = op.expandvars(expanded)
    return op.abspath(expanded)
def get_fs_home():
    """Get the FREESURFER_HOME directory.
    Returns
    -------
    fs_home : None | str
        The FREESURFER_HOME path or None if the user cancels.
    Notes
    -----
    If FREESURFER_HOME can't be found, the user is prompted with a file dialog.
    If specified successfully, the resulting path is stored with
    mne.set_config().
    """
    # Delegates the prompt-until-valid loop to the generic helper.
    return _get_root_home('FREESURFER_HOME', 'freesurfer', _fs_home_problem)
def _get_root_home(cfg, name, check_fun):
    """Prompt the user until *cfg* points at a valid *name* installation.

    Parameters
    ----------
    cfg : str
        Config key (e.g. 'FREESURFER_HOME').
    name : str
        Human-readable installation name used in dialog text.
    check_fun : callable
        Takes a path; returns a problem description string or None if OK.

    Returns
    -------
    root : str | None
        The validated root path, or None if the user cancels the dialog.
    """
    root = get_config(cfg)
    problem = check_fun(root)
    while problem:
        info = ("Please select the %s directory. This is the root "
                "directory of the %s installation." % (cfg, name))
        msg = '\n\n'.join((problem, info))
        information(None, msg, "Select the %s Directory" % cfg)
        msg = "Please select the %s Directory" % cfg
        dlg = DirectoryDialog(message=msg, new_directory=False)
        if dlg.open() == OK:
            root = dlg.path
            problem = check_fun(root)
            if problem is None:
                # Persist the validated choice for future sessions.
                set_config(cfg, root, set_env=False)
        else:
            # User cancelled the directory dialog.
            return None
    return root
def set_fs_home():
    """Set the FREESURFER_HOME environment variable.
    Returns
    -------
    success : bool
        True if the environment variable could be set, False if
        FREESURFER_HOME could not be found.
    Notes
    -----
    If FREESURFER_HOME can't be found, the user is prompted with a file
    dialog. If specified successfully, the resulting path is stored with
    mne.set_config().
    """
    fs_home = get_fs_home()
    if fs_home is None:
        return False
    os.environ['FREESURFER_HOME'] = fs_home
    return True
def _fs_home_problem(fs_home):
    """Check FREESURFER_HOME path.
    Return str describing problem or None if the path is okay.
    """
    # Guard clauses: unset, missing on disk, or missing fsaverage subject.
    if fs_home is None:
        return "FREESURFER_HOME is not set."
    if not op.exists(fs_home):
        return "FREESURFER_HOME (%s) does not exist." % fs_home
    fsaverage_dir = op.join(fs_home, 'subjects', 'fsaverage')
    if not op.exists(fsaverage_dir):
        return ("FREESURFER_HOME (%s) does not contain the fsaverage "
                "subject." % fs_home)
    return None
def _mne_root_problem(mne_root):
    """Check MNE_ROOT path.
    Return str describing problem or None if the path is okay.
    """
    # Guard clauses: unset, missing on disk, or missing mne_analyze files.
    if mne_root is None:
        return "MNE_ROOT is not set."
    if not op.exists(mne_root):
        return "MNE_ROOT (%s) does not exist." % mne_root
    analyze_dir = op.join(mne_root, 'share', 'mne', 'mne_analyze')
    if not op.exists(analyze_dir):
        return ("MNE_ROOT (%s) is missing files. If this is your MNE "
                "installation, consider reinstalling." % mne_root)
    return None
class Surf(HasTraits):
    """Expose a surface similar to the ones used elsewhere in MNE."""
    # Vertex coordinates, shape (n_vert, 3).
    rr = Array(shape=(None, 3), value=np.empty((0, 3)))
    # Per-vertex normals, shape (n_vert, 3).
    nn = Array(shape=(None, 3), value=np.empty((0, 3)))
    # Triangle vertex indices, shape (n_tri, 3). Use an integer default to be
    # consistent with SurfaceSource._surf_default, which constructs
    # np.empty((0, 3), int) for tris — indices should never be float.
    tris = Array(shape=(None, 3), value=np.empty((0, 3), int))
class SurfaceSource(HasTraits):
    """Expose points and tris of a file storing a surface.
    Parameters
    ----------
    file : File
        Path to a *-bem.fif file or a surface containing a Freesurfer surface.
    Attributes
    ----------
    pts : Array, shape = (n_pts, 3)
        Point coordinates.
    tris : Array, shape = (n_tri, 3)
        Triangles.
    Notes
    -----
    tri is always updated after pts, so in case downstream objects depend on
    both, they should sync to a change in tris.
    """
    file = File(exists=True, filter=['*.fif', '*.*'])
    surf = Instance(Surf)
    @on_trait_change('file')
    def read_file(self):
        """Read the file, updating ``surf`` (resets to empty if file is gone)."""
        if op.exists(self.file):
            if self.file.endswith('.fif'):
                bem = read_bem_surfaces(self.file, verbose=False)[0]
            else:
                try:
                    bem = read_surface(self.file, return_dict=True)[2]
                    bem['rr'] *= 1e-3  # FreeSurfer surfaces are in mm; use m
                    complete_surface_info(bem, copy=False)
                except Exception:
                    error(parent=None,
                          message="Error loading surface from %s (see "
                                  "Terminal for details)." % self.file,
                          title="Error Loading Surface")
                    self.reset_traits(['file'])
                    raise
            self.surf = Surf(rr=bem['rr'], tris=bem['tris'], nn=bem['nn'])
        else:
            # BUG FIX: this previously called self._default_surf(), which does
            # not exist (the traits default method is _surf_default), raising
            # AttributeError whenever the file stopped existing.
            self.surf = self._surf_default()
    def _surf_default(self):
        # Traits default initializer for ``surf``: an empty surface.
        return Surf(rr=np.empty((0, 3)),
                    tris=np.empty((0, 3), int), nn=np.empty((0, 3)))
class FiducialsSource(HasTraits):
    """Expose points of a given fiducials fif file.
    Parameters
    ----------
    file : File
        Path to a fif file with fiducials (*.fif).
    Attributes
    ----------
    points : Array, shape = (n_points, 3)
        Fiducials file points.
    """
    file = File(filter=[fid_wildcard])
    fname = Property(depends_on='file')
    points = Property(ArrayOrNone, depends_on='file')
    # Fallback coordinates used when the file does not exist; may be None.
    mni_points = ArrayOrNone(float, shape=(3, 3))
    def _get_fname(self):
        # Basename of the selected file, for display/error messages.
        return op.basename(self.file)
    @cached_property
    def _get_points(self):
        if not op.exists(self.file):
            return self.mni_points  # can be None
        try:
            return _fiducial_coords(*read_fiducials(self.file))
        except Exception as err:
            # Surface the failure to the user, clear the bad selection, and
            # re-raise so callers see the original error.
            error(None, "Error reading fiducials from %s: %s (See terminal "
                  "for more information)" % (self.fname, str(err)),
                  "Error Reading Fiducials")
            self.reset_traits(['file'])
            raise
class DigSource(HasPrivateTraits):
    """Expose digitization information from a file.
    Parameters
    ----------
    file : File
        Path to the BEM file (*.fif).
    Attributes
    ----------
    fid : Array, shape = (3, 3)
        Each row contains the coordinates for one fiducial point, in the order
        Nasion, RAP, LAP. If no file is set all values are 0.
    """
    file = File(exists=True, filter=['*.fif'])
    inst_fname = Property(Str, depends_on='file')
    inst_dir = Property(depends_on='file')
    # Parsed measurement info (or None); everything below derives from it.
    _info = Property(depends_on='file')
    points_filter = Any(desc="Index to select a subset of the head shape "
                        "points")
    n_omitted = Property(Int, depends_on=['points_filter'])
    # head shape
    _hsp_points = Property(depends_on='_info',
                           desc="Head shape points in the file (n x 3 array)")
    points = Property(depends_on=['_hsp_points', 'points_filter'],
                      desc="Head shape points selected by the filter (n x 3 "
                      "array)")
    # fiducials
    lpa = Property(depends_on='_info',
                   desc="LPA coordinates (1 x 3 array)")
    nasion = Property(depends_on='_info',
                      desc="Nasion coordinates (1 x 3 array)")
    rpa = Property(depends_on='_info',
                   desc="RPA coordinates (1 x 3 array)")
    # EEG
    eeg_points = Property(depends_on='_info',
                          desc="EEG sensor coordinates (N x 3 array)")
    hpi_points = Property(depends_on='_info',
                          desc='HPI coil coordinates (N x 3 array)')
    view = View(Item('file', width=_DIG_SOURCE_WIDTH, tooltip='FIF file '
                     '(Raw, Epochs, Evoked, or DigMontage)', show_label=False))
    @cached_property
    def _get_n_omitted(self):
        # Number of head-shape points excluded by the boolean filter.
        if self.points_filter is None:
            return 0
        else:
            return np.sum(self.points_filter == False)  # noqa: E712
    @cached_property
    def _get__info(self):
        """Read dig info from the file; returns None (and resets file) on error."""
        if self.file:
            info = None
            fid, tree, _ = fiff_open(self.file)
            fid.close()
            # Prefer full measurement info; fall back to a bare isotrak block.
            if len(dir_tree_find(tree, FIFF.FIFFB_MEAS_INFO)) > 0:
                info = read_info(self.file, verbose=False)
            elif len(dir_tree_find(tree, FIFF.FIFFB_ISOTRAK)) > 0:
                info = read_dig_fif(fname=self.file)
            if isinstance(info, DigMontage):
                # Wrap a bare montage in a minimal Info object.
                dig = info.dig
                info = _empty_info(1)
                info['dig'] = dig
            elif info is None or info['dig'] is None:
                error(None, "The selected FIFF file does not contain "
                      "digitizer information. Please select a different "
                      "file.", "Error Reading FIFF File")
                self.reset_traits(['file'])
                return
            else:
                # check that all fiducial points are present
                has_point = {FIFF.FIFFV_POINT_LPA: False,
                             FIFF.FIFFV_POINT_NASION: False,
                             FIFF.FIFFV_POINT_RPA: False}
                for d in info['dig']:
                    if d['kind'] == FIFF.FIFFV_POINT_CARDINAL:
                        has_point[d['ident']] = True
                if not all(has_point.values()):
                    # Try to recover fiducials from the dig points themselves.
                    points = _fiducial_coords(info['dig'])
                    if len(points) == 3:
                        _append_fiducials(info['dig'], *points.T)
                    else:
                        missing = []
                        if not has_point[FIFF.FIFFV_POINT_LPA]:
                            missing.append('LPA')
                        if not has_point[FIFF.FIFFV_POINT_NASION]:
                            missing.append('Nasion')
                        if not has_point[FIFF.FIFFV_POINT_RPA]:
                            missing.append('RPA')
                        error(None, "The selected FIFF file does not contain "
                              "all cardinal points (missing: %s). Please "
                              "select a different file." % ', '.join(missing),
                              "Error Reading FIFF File")
                        self.reset_traits(['file'])
                        return
            return info
    @cached_property
    def _get_inst_dir(self):
        return op.dirname(self.file)
    @cached_property
    def _get_inst_fname(self):
        if self.file:
            return op.basename(self.file)
        else:
            return '-'
    @cached_property
    def _get__hsp_points(self):
        if not self._info:
            return np.zeros((0, 3))
        points = np.array([d['r'] for d in self._info['dig']
                           if d['kind'] == FIFF.FIFFV_POINT_EXTRA])
        # Normalize an empty result to shape (0, 3).
        points = np.empty((0, 3)) if len(points) == 0 else points
        return points
    @cached_property
    def _get_points(self):
        if self.points_filter is None:
            return self._hsp_points
        else:
            return self._hsp_points[self.points_filter]
    def _cardinal_point(self, ident):
        """Coordinates for a cardinal point."""
        if self._info:
            for d in self._info['dig']:
                if (d['kind'] == FIFF.FIFFV_POINT_CARDINAL and
                        d['ident'] == ident):
                    return d['r'][None, :]
        # Fall back to the origin when no info or point is available.
        return np.zeros((1, 3))
    @cached_property
    def _get_nasion(self):
        return self._cardinal_point(FIFF.FIFFV_POINT_NASION)
    @cached_property
    def _get_lpa(self):
        return self._cardinal_point(FIFF.FIFFV_POINT_LPA)
    @cached_property
    def _get_rpa(self):
        return self._cardinal_point(FIFF.FIFFV_POINT_RPA)
    @cached_property
    def _get_eeg_points(self):
        if self._info:
            out = [d['r'] for d in self._info['dig'] if
                   d['kind'] == FIFF.FIFFV_POINT_EEG and
                   d['coord_frame'] == FIFF.FIFFV_COORD_HEAD]
            out = np.empty((0, 3)) if len(out) == 0 else np.array(out)
            return out
        else:
            return np.empty((0, 3))
    @cached_property
    def _get_hpi_points(self):
        if self._info:
            out = [d['r'] for d in self._info['dig'] if
                   d['kind'] == FIFF.FIFFV_POINT_HPI and
                   d['coord_frame'] == FIFF.FIFFV_COORD_HEAD]
            out = np.empty((0, 3)) if len(out) == 0 else np.array(out)
            return out
        else:
            return np.empty((0, 3))
    def _file_changed(self):
        # Any previously-applied head shape filter is invalid for a new file.
        self.reset_traits(('points_filter',))
def _append_fiducials(dig, lpa, nasion, rpa):
    """Append LPA, Nasion, and RPA cardinal points (head coords) to *dig*."""
    cardinals = ((FIFF.FIFFV_POINT_LPA, lpa),
                 (FIFF.FIFFV_POINT_NASION, nasion),
                 (FIFF.FIFFV_POINT_RPA, rpa))
    for ident, r in cardinals:
        dig.append({'coord_frame': FIFF.FIFFV_COORD_HEAD,
                    'ident': ident,
                    'kind': FIFF.FIFFV_POINT_CARDINAL,
                    'r': r})
class MRISubjectSource(HasPrivateTraits):
    """Find subjects in SUBJECTS_DIR and select one.
    Parameters
    ----------
    subjects_dir : directory
        SUBJECTS_DIR.
    subject : str
        Subject, corresponding to a folder in SUBJECTS_DIR.
    """
    refresh = Event(desc="Refresh the subject list based on the directory "
                    "structure of subjects_dir.")
    # settings
    subjects_dir = Directory(exists=True)
    subjects = Property(List(Str), depends_on=['subjects_dir', 'refresh'])
    subject = Enum(values='subjects')
    # info
    can_create_fsaverage = Property(Bool, depends_on=['subjects_dir',
                                                      'subjects'])
    subject_has_bem = Property(Bool, depends_on=['subjects_dir', 'subject'],
                               desc="whether the subject has a file matching "
                               "the bem file name pattern")
    # NOTE(review): bem_pattern has no _get_bem_pattern getter in this class,
    # and 'mri_dir' has a getter (_get_mri_dir) but no declared Property
    # trait — accessing bem_pattern would fail. Confirm against upstream.
    bem_pattern = Property(depends_on='mri_dir')
    @cached_property
    def _get_can_create_fsaverage(self):
        # Only offer to create fsaverage when it is not already present.
        if not op.exists(self.subjects_dir) or 'fsaverage' in self.subjects:
            return False
        return True
    @cached_property
    def _get_mri_dir(self):
        # Path of the selected subject's directory, or None if unset.
        if not self.subject:
            return
        elif not self.subjects_dir:
            return
        else:
            return op.join(self.subjects_dir, self.subject)
    @cached_property
    def _get_subjects(self):
        sdir = self.subjects_dir
        is_dir = sdir and op.isdir(sdir)
        if is_dir:
            dir_content = os.listdir(sdir)
            subjects = [s for s in dir_content if _is_mri_subject(s, sdir)]
            # Always keep at least one (empty) entry so the Enum trait works.
            if len(subjects) == 0:
                subjects.append('')
        else:
            subjects = ['']
        return sorted(subjects)
    @cached_property
    def _get_subject_has_bem(self):
        if not self.subject:
            return False
        return _mri_subject_has_bem(self.subject, self.subjects_dir)
    def create_fsaverage(self):  # noqa: D102
        if not self.subjects_dir:
            raise RuntimeError(
                "No subjects directory is selected. Please specify "
                "subjects_dir first.")
        fs_home = get_fs_home()
        if fs_home is None:
            raise RuntimeError(
                "FreeSurfer contains files that are needed for copying the "
                "fsaverage brain. Please install FreeSurfer and try again.")
        create_default_subject(fs_home=fs_home, update=True,
                               subjects_dir=self.subjects_dir)
        # Trigger a subject list refresh, then select the new subject.
        self.refresh = True
        self.subject = 'fsaverage'
    @on_trait_change('subjects_dir')
    def _emit_subject(self):
        # This silliness is the only way I could figure out to get the
        # on_trait_change('subject_panel.subject') in CoregFrame to work!
        self.subject = self.subject
class SubjectSelectorPanel(HasPrivateTraits):
    """Subject selector panel."""
    model = Instance(MRISubjectSource)
    # All state is delegated to the model; this class is view/controller only.
    can_create_fsaverage = DelegatesTo('model')
    subjects_dir = DelegatesTo('model')
    subject = DelegatesTo('model')
    subjects = DelegatesTo('model')
    create_fsaverage = Button(
        u"fsaverage⇨SUBJECTS_DIR",
        desc="whether to copy the files for the fsaverage subject to the "
        "subjects directory. This button is disabled if "
        "fsaverage already exists in the selected subjects directory.")
    view = View(VGroup(Item('subjects_dir', width=_DIG_SOURCE_WIDTH,
                            tooltip='Subject MRI structurals (SUBJECTS_DIR)'),
                       Item('subject', width=_DIG_SOURCE_WIDTH,
                            tooltip='Subject to use within SUBJECTS_DIR'),
                       Item('create_fsaverage',
                            enabled_when='can_create_fsaverage',
                            width=_DIG_SOURCE_WIDTH),
                       show_labels=False))
    def _create_fsaverage_fired(self):
        # progress dialog with indefinite progress bar
        title = "Creating FsAverage ..."
        message = "Copying fsaverage files ..."
        prog = ProgressDialog(title=title, message=message)
        prog.open()
        prog.update(0)
        try:
            self.model.create_fsaverage()
        except Exception as err:
            # Show the error to the user, then re-raise for the caller/logs.
            error(None, str(err), "Error Creating FsAverage")
            raise
        finally:
            prog.close()
    def _subjects_dir_changed(self, old, new):
        # Warn when the chosen directory contains no usable MRI subjects.
        if new and self.subjects == ['']:
            information(None, "The directory selected as subjects-directory "
                        "(%s) does not contain any valid MRI subjects. If "
                        "this is not expected make sure all MRI subjects have "
                        "head surface model files which "
                        "can be created by running:\n\n $ mne "
                        "make_scalp_surfaces" % self.subjects_dir,
                        "No Subjects Found")
|
import binascii
import logging
import struct
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_HS_COLOR,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
LightEntity,
)
import homeassistant.util.color as color_util
from . import XiaomiDevice
from .const import DOMAIN, GATEWAYS_KEY
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Perform the setup for Xiaomi devices."""
    gateway = hass.data[DOMAIN][GATEWAYS_KEY][config_entry.entry_id]
    # Only the gateway models expose the built-in light.
    entities = [
        XiaomiGatewayLight(device, "Gateway Light", gateway, config_entry)
        for device in gateway.devices["light"]
        if device["model"] in ("gateway", "gateway.v3")
    ]
    async_add_entities(entities)
class XiaomiGatewayLight(XiaomiDevice, LightEntity):
    """Representation of a XiaomiGatewayLight."""
    def __init__(self, device, name, xiaomi_hub, config_entry):
        """Initialize the XiaomiGatewayLight."""
        # Defaults used until the first state report arrives from the hub.
        self._data_key = "rgb"
        self._hs = (0, 0)
        self._brightness = 100
        super().__init__(device, name, xiaomi_hub, config_entry)
    @property
    def is_on(self):
        """Return true if it is on."""
        return self._state
    def parse_data(self, data, raw_data):
        """Parse data sent by gateway."""
        value = data.get(self._data_key)
        if value is None:
            return False
        if value == 0:
            # Explicit "off" report; only flip the flag if it was set.
            if self._state:
                self._state = False
            return True
        hexstr = "%x" % value
        if len(hexstr) > 8:
            _LOGGER.error(
                "Light RGB data error."
                " Can't be more than 8 characters. Received: %s",
                hexstr,
            )
            return False
        # The value packs brightness in the high byte, followed by R, G, B.
        white, red, green, blue = struct.unpack(
            "BBBB", bytes.fromhex(hexstr.zfill(8)))
        self._brightness = white
        self._hs = color_util.color_RGB_to_hs(red, green, blue)
        self._state = True
        return True
    @property
    def brightness(self):
        """Return the brightness of this light between 0..255."""
        # Internally stored as a 0..100 percentage.
        return int(self._brightness * 255 / 100)
    @property
    def hs_color(self):
        """Return the hs color value."""
        return self._hs
    @property
    def supported_features(self):
        """Return the supported features."""
        return SUPPORT_BRIGHTNESS | SUPPORT_COLOR
    def turn_on(self, **kwargs):
        """Turn the light on."""
        if ATTR_HS_COLOR in kwargs:
            self._hs = kwargs[ATTR_HS_COLOR]
        if ATTR_BRIGHTNESS in kwargs:
            # Convert HA's 0..255 brightness scale to the hub's 0..100 scale.
            self._brightness = int(100 * kwargs[ATTR_BRIGHTNESS] / 255)
        red, green, blue = color_util.color_hs_to_RGB(*self._hs)
        packed = struct.pack("BBBB", self._brightness, red, green, blue)
        value = int(binascii.hexlify(packed).decode("ASCII"), 16)
        if self._write_to_hub(self._sid, **{self._data_key: value}):
            self._state = True
            self.schedule_update_ha_state()
    def turn_off(self, **kwargs):
        """Turn the light off."""
        # Writing zero to the rgb key switches the gateway light off.
        if self._write_to_hub(self._sid, **{self._data_key: 0}):
            self._state = False
            self.schedule_update_ha_state()
|
import sys
import io
import os
import os.path
import urllib.request
sys.path.insert(0, os.path.join(os.path.dirname(__file__), os.pardir))
from qutebrowser.components import adblock
from qutebrowser.config import configdata
def main():
    """Check by which hostblock list a host was blocked."""
    if len(sys.argv) != 2:
        print("Usage: {} <host>".format(sys.argv[0]), file=sys.stderr)
        sys.exit(1)
    configdata.init()
    for url in configdata.DATA['content.host_blocking.lists'].default:
        print("checking {}...".format(url))
        # FIX: close the HTTP response deterministically instead of leaking
        # the socket until garbage collection.
        with urllib.request.urlopen(url) as raw_file:
            byte_io = io.BytesIO(raw_file.read())
        f = adblock.get_fileobj(byte_io)
        for line in f:
            line = line.decode('utf-8')
            if sys.argv[1] in line:
                print("FOUND {} in {}:".format(sys.argv[1], url))
                print(" " + line.rstrip())
if __name__ == '__main__':
    main()
|
from homeassistant.components.abode import (
DOMAIN as ABODE_DOMAIN,
SERVICE_TRIGGER_AUTOMATION,
)
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
from homeassistant.const import (
ATTR_ENTITY_ID,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_OFF,
STATE_ON,
)
from .common import setup_platform
from tests.async_mock import patch
AUTOMATION_ID = "switch.test_automation"
AUTOMATION_UID = "47fae27488f74f55b964a81a066c3a01"
DEVICE_ID = "switch.test_switch"
DEVICE_UID = "0012a4d3614cb7e2b8c9abea31d2fb2a"
async def test_entity_registry(hass):
    """Tests that the devices are registered in the entity registry."""
    await setup_platform(hass, SWITCH_DOMAIN)
    entity_registry = await hass.helpers.entity_registry.async_get_registry()
    # Both the automation switch and the device switch must be registered
    # with their fixture-defined unique IDs.
    entry = entity_registry.async_get(AUTOMATION_ID)
    assert entry.unique_id == AUTOMATION_UID
    entry = entity_registry.async_get(DEVICE_ID)
    assert entry.unique_id == DEVICE_UID
async def test_attributes(hass):
    """Test the switch attributes are correct."""
    await setup_platform(hass, SWITCH_DOMAIN)
    # The mocked fixture reports the physical switch as off.
    state = hass.states.get(DEVICE_ID)
    assert state.state == STATE_OFF
async def test_switch_on(hass):
    """Test the switch can be turned on."""
    await setup_platform(hass, SWITCH_DOMAIN)
    # Patch the abodepy call so no real device traffic happens; the service
    # call must delegate to AbodeSwitch.switch_on exactly once.
    with patch("abodepy.AbodeSwitch.switch_on") as mock_switch_on:
        assert await hass.services.async_call(
            SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: DEVICE_ID}, blocking=True
        )
        await hass.async_block_till_done()
        mock_switch_on.assert_called_once()
async def test_switch_off(hass):
    """Test the switch can be turned off."""
    await setup_platform(hass, SWITCH_DOMAIN)
    # Mirror of test_switch_on for the off service.
    with patch("abodepy.AbodeSwitch.switch_off") as mock_switch_off:
        assert await hass.services.async_call(
            SWITCH_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: DEVICE_ID}, blocking=True
        )
        await hass.async_block_till_done()
        mock_switch_off.assert_called_once()
async def test_automation_attributes(hass):
    """Test the automation attributes are correct."""
    await setup_platform(hass, SWITCH_DOMAIN)
    state = hass.states.get(AUTOMATION_ID)
    # State is set based on "enabled" key in automation JSON.
    assert state.state == STATE_ON
async def test_turn_automation_off(hass):
    """Test the automation can be turned off."""
    # Patch before setup so the entity is created with the mocked method.
    with patch("abodepy.AbodeAutomation.enable") as mock_trigger:
        await setup_platform(hass, SWITCH_DOMAIN)
        await hass.services.async_call(
            SWITCH_DOMAIN,
            SERVICE_TURN_OFF,
            {ATTR_ENTITY_ID: AUTOMATION_ID},
            blocking=True,
        )
        await hass.async_block_till_done()
        # Turning the switch off maps to enable(False) on the automation.
        mock_trigger.assert_called_once_with(False)
async def test_turn_automation_on(hass):
    """Test the automation can be turned on."""
    with patch("abodepy.AbodeAutomation.enable") as mock_trigger:
        await setup_platform(hass, SWITCH_DOMAIN)
        await hass.services.async_call(
            SWITCH_DOMAIN,
            SERVICE_TURN_ON,
            {ATTR_ENTITY_ID: AUTOMATION_ID},
            blocking=True,
        )
        await hass.async_block_till_done()
        # Turning the switch on maps to enable(True) on the automation.
        mock_trigger.assert_called_once_with(True)
async def test_trigger_automation(hass, requests_mock):
    """Test the trigger automation service."""
    # NOTE(review): requests_mock appears unused directly; presumably it is
    # requested so no real HTTP requests escape — confirm it is needed.
    await setup_platform(hass, SWITCH_DOMAIN)
    with patch("abodepy.AbodeAutomation.trigger") as mock:
        await hass.services.async_call(
            ABODE_DOMAIN,
            SERVICE_TRIGGER_AUTOMATION,
            {ATTR_ENTITY_ID: AUTOMATION_ID},
            blocking=True,
        )
        await hass.async_block_till_done()
        mock.assert_called_once()
|
from plumbum.lib import _setdoc, IS_WIN32
from plumbum.machines.remote import BaseRemoteMachine
from plumbum.machines.session import ShellSession
from plumbum.machines.local import local
from plumbum.path.local import LocalPath
from plumbum.path.remote import RemotePath
from plumbum.commands import ProcessExecutionError, shquote
import warnings
class SshTunnel(object):
    """An object representing an SSH tunnel (created by
    :func:`SshMachine.tunnel <plumbum.machines.remote.SshMachine.tunnel>`)"""
    # Keep instances lightweight; __weakref__ retains weak-referencability.
    __slots__ = ["_session", "__weakref__"]
    def __init__(self, session):
        self._session = session
    def __repr__(self):
        if not self._session.alive():
            return "<SshTunnel (defunct)>"
        return "<SshTunnel %s>" % (self._session.proc, )
    def __enter__(self):
        return self
    def __exit__(self, t, v, tb):
        self.close()
    def close(self):
        """Closes(terminates) the tunnel"""
        self._session.close()
class SshMachine(BaseRemoteMachine):
    """
    An implementation of :class:`remote machine <plumbum.machines.remote.BaseRemoteMachine>`
    over SSH. Invoking a remote command translates to invoking it over SSH ::

        with SshMachine("yourhostname") as rem:
            r_ls = rem["ls"]
            # r_ls is the remote `ls`
            # executing r_ls() translates to `ssh yourhostname ls`

    :param host: the host name to connect to (SSH server)
    :param user: the user to connect as (if ``None``, the default will be used)
    :param port: the server's port (if ``None``, the default will be used)
    :param keyfile: the path to the identity file (if ``None``, the default will be used)
    :param ssh_command: the ``ssh`` command to use; this has to be a ``Command`` object;
                        if ``None``, the default ssh client will be used.
    :param scp_command: the ``scp`` command to use; this has to be a ``Command`` object;
                        if ``None``, the default scp program will be used.
    :param ssh_opts: any additional options for ``ssh`` (a list of strings)
    :param scp_opts: any additional options for ``scp`` (a list of strings)
    :param password: the password to use; requires ``sshpass`` be installed. Cannot be used
                     in conjunction with ``ssh_command`` or ``scp_command`` (will be ignored).
                     NOTE: THIS IS A SECURITY RISK!
    :param encoding: the remote machine's encoding (defaults to UTF8)
    :param connect_timeout: specify a connection timeout (the time until shell prompt is seen).
                            The default is 10 seconds. Set to ``None`` to disable
    :param new_session: whether or not to start the background session as a new
                        session leader (setsid). This will prevent it from being killed on
                        Ctrl+C (SIGINT)
    """

    def __init__(self,
                 host,
                 user=None,
                 port=None,
                 keyfile=None,
                 ssh_command=None,
                 scp_command=None,
                 ssh_opts=(),
                 scp_opts=(),
                 password=None,
                 encoding="utf8",
                 connect_timeout=10,
                 new_session=False):
        # A password routes both ssh and scp through sshpass (see the
        # security note in the class docstring); explicit commands win.
        if ssh_command is None:
            if password is not None:
                ssh_command = local["sshpass"]["-p", password, "ssh"]
            else:
                ssh_command = local["ssh"]
        if scp_command is None:
            if password is not None:
                scp_command = local["sshpass"]["-p", password, "scp"]
            else:
                scp_command = local["scp"]

        scp_args = []
        ssh_args = []
        if user:
            self._fqhost = "%s@%s" % (user, host)
        else:
            self._fqhost = host
        # Note the case difference: ssh takes -p, scp takes -P.
        if port:
            ssh_args.extend(["-p", str(port)])
            scp_args.extend(["-P", str(port)])
        if keyfile:
            ssh_args.extend(["-i", str(keyfile)])
            scp_args.extend(["-i", str(keyfile)])
        # scp is always recursive so directory transfers work transparently.
        scp_args.append("-r")
        ssh_args.extend(ssh_opts)
        scp_args.extend(scp_opts)
        # Pre-bind the common arguments; per-call arguments are appended
        # later via Command indexing.
        self._ssh_command = ssh_command[tuple(ssh_args)]
        self._scp_command = scp_command[tuple(scp_args)]
        BaseRemoteMachine.__init__(
            self,
            encoding=encoding,
            connect_timeout=connect_timeout,
            new_session=new_session)

    def __str__(self):
        return "ssh://%s" % (self._fqhost, )

    @_setdoc(BaseRemoteMachine)
    def popen(self, args, ssh_opts=(), **kwargs):
        cmdline = []
        cmdline.extend(ssh_opts)
        cmdline.append(self._fqhost)
        if args and hasattr(self, "env"):
            # Replay the remote cwd and any environment changes before the
            # actual command, since each ssh invocation starts a fresh shell.
            envdelta = self.env.getdelta()
            cmdline.extend(["cd", str(self.cwd), "&&"])
            if envdelta:
                cmdline.append("env")
                cmdline.extend("%s=%s" % (k, shquote(v)) for k, v in envdelta.items())
            if isinstance(args, (tuple, list)):
                cmdline.extend(args)
            else:
                cmdline.append(args)
        return self._ssh_command[tuple(cmdline)].popen(**kwargs)

    def nohup(self, command):
        """
        Runs the given command using ``nohup`` and redirects std handles,
        allowing the command to run "detached" from its controlling TTY or parent.
        Does not return anything. Depreciated (use command.nohup or daemonic_popen).
        """
        warnings.warn("Use .nohup on the command or use daemonic_popen)",
                      DeprecationWarning)
        self.daemonic_popen(
            command, cwd='.', stdout=None, stderr=None, append=False)

    def daemonic_popen(self,
                       command,
                       cwd='.',
                       stdout=None,
                       stderr=None,
                       append=True):
        """
        Runs the given command using ``nohup`` and redirects std handles,
        allowing the command to run "detached" from its controlling TTY or parent.
        Does not return anything.

        .. versionadded:: 1.6.0
        """
        if stdout is None:
            stdout = "/dev/null"
        if stderr is None:
            stderr = "&1"  # i.e. redirect stderr to wherever stdout goes

        if str(cwd) == '.':
            args = []
        else:
            args = ["cd", str(cwd), "&&"]
        args.append("nohup")
        args.extend(command.formulate())
        # Build the shell redirection tokens, e.g. ">>out 2>&1 </dev/null".
        # "2>&1" must never use ">>" or the shell would reject it.
        args.extend(
            [(">>" if append else ">") + str(stdout),
             "2" + (">>"
                    if (append and stderr != "&1") else ">") + str(stderr),
             "</dev/null"])
        # -f sends ssh to the background once the command is launched.
        proc = self.popen(args, ssh_opts=["-f"])
        rc = proc.wait()
        try:
            if rc != 0:
                raise ProcessExecutionError(args, rc, proc.stdout.read(),
                                            proc.stderr.read())
        finally:
            # Close the handles in any case so no pipes leak.
            proc.stdin.close()
            proc.stdout.close()
            proc.stderr.close()

    @_setdoc(BaseRemoteMachine)
    def session(self, isatty=False, new_session=False):
        # -tt forces TTY allocation; -T disables it.
        return ShellSession(
            self.popen(
                ["/bin/sh"], (["-tt"] if isatty else ["-T"]),
                new_session=new_session), self.custom_encoding, isatty,
            self.connect_timeout)

    def tunnel(self,
               lport,
               dport,
               lhost="localhost",
               dhost="localhost",
               connect_timeout=5):
        r"""Creates an SSH tunnel from the TCP port (``lport``) of the local machine
        (``lhost``, defaults to ``"localhost"``, but it can be any IP you can ``bind()``)
        to the remote TCP port (``dport``) of the destination machine (``dhost``, defaults
        to ``"localhost"``, which means *this remote machine*). The returned
        :class:`SshTunnel <plumbum.machines.remote.SshTunnel>` object can be used as a
        *context-manager*.

        The more conventional use case is the following::

            +---------+          +---------+
            | Your    |          | Remote  |
            | Machine |          | Machine |
            +----o----+          +---- ----+
                 |                    ^
                 |                    |
               lport                dport
                 |                    |
                 \______SSH TUNNEL____/
                        (secure)

        Here, you wish to communicate safely between port ``lport`` of your machine and
        port ``dport`` of the remote machine. Communication is tunneled over SSH, so the
        connection is authenticated and encrypted.

        The more general case is shown below (where ``dport != "localhost"``)::

            +---------+          +-------------+      +-------------+
            | Your    |          | Remote      |      | Destination |
            | Machine |          | Machine     |      | Machine     |
            +----o----+          +---- ----o---+      +---- --------+
                 |                    ^    |               ^
                 |                    |    |               |
            lhost:lport               |    |          dhost:dport
                 |                    |    |               |
                 \_____SSH TUNNEL_____/    \_____SOCKET____/
                        (secure)              (not secure)

        Usage::

            rem = SshMachine("megazord")

            with rem.tunnel(1234, 5678):
                sock = socket.socket()
                sock.connect(("localhost", 1234))
                # sock is now tunneled to megazord:5678
        """
        # Bracketed form supports IPv6 addresses in -L specifications.
        ssh_opts = ["-L", "[%s]:%s:[%s]:%s" % (lhost, lport, dhost, dport)]
        proc = self.popen((), ssh_opts=ssh_opts, new_session=True)
        # NOTE(review): the `connect_timeout` parameter is not forwarded here;
        # the machine-level self.connect_timeout is used instead — confirm
        # whether that is intentional.
        return SshTunnel(
            ShellSession(
                proc,
                self.custom_encoding,
                connect_timeout=self.connect_timeout))

    def _translate_drive_letter(self, path):
        # replace c:\some\path with /c/some/path
        path = str(path)
        if ":" in path:
            path = "/" + path.replace(":", "").replace("\\", "/")
        return path

    @_setdoc(BaseRemoteMachine)
    def download(self, src, dst):
        if isinstance(src, LocalPath):
            raise TypeError("src of download cannot be %r" % (src, ))
        if isinstance(src, RemotePath) and src.remote != self:
            raise TypeError(
                "src %r points to a different remote machine" % (src, ))
        if isinstance(dst, RemotePath):
            raise TypeError("dst of download cannot be %r" % (dst, ))
        if IS_WIN32:
            src = self._translate_drive_letter(src)
            dst = self._translate_drive_letter(dst)
        self._scp_command("%s:%s" % (self._fqhost, shquote(src)), dst)

    @_setdoc(BaseRemoteMachine)
    def upload(self, src, dst):
        if isinstance(src, RemotePath):
            raise TypeError("src of upload cannot be %r" % (src, ))
        if isinstance(dst, LocalPath):
            raise TypeError("dst of upload cannot be %r" % (dst, ))
        if isinstance(dst, RemotePath) and dst.remote != self:
            raise TypeError(
                "dst %r points to a different remote machine" % (dst, ))
        if IS_WIN32:
            src = self._translate_drive_letter(src)
            dst = self._translate_drive_letter(dst)
        self._scp_command(src, "%s:%s" % (self._fqhost, shquote(dst)))
class PuttyMachine(SshMachine):
    """
    PuTTY-flavored SSH connection. The programs ``plink`` and ``pscp`` are expected to
    be in the path (or you may provide your own ``ssh_command`` and ``scp_command``)

    Arguments are the same as for :class:`plumbum.machines.remote.SshMachine`
    """

    def __init__(self,
                 host,
                 user=None,
                 port=None,
                 keyfile=None,
                 ssh_command=None,
                 scp_command=None,
                 ssh_opts=(),
                 scp_opts=(),
                 encoding="utf8",
                 connect_timeout=10,
                 new_session=False):
        if ssh_command is None:
            ssh_command = local["plink"]
        if scp_command is None:
            scp_command = local["pscp"]
        # Copy the caller's options into a fresh list so we never mutate a
        # caller-supplied list and so tuples (the parameter's default type)
        # survive the .extend() below — previously `ssh_opts.extend(...)`
        # raised AttributeError for a non-empty tuple when `port` was given.
        # With no options at all, force the SSH protocol for plink.
        ssh_opts = list(ssh_opts) if ssh_opts else ["-ssh"]
        if user is None:
            user = local.env.user
        if port is not None:
            # plink/pscp take the port as -P; clear `port` so SshMachine
            # does not add its own OpenSSH-style -p/-P arguments on top.
            ssh_opts.extend(["-P", str(port)])
            scp_opts = list(scp_opts) + ["-P", str(port)]
            port = None
        SshMachine.__init__(
            self,
            host,
            user,
            port,
            keyfile=keyfile,
            ssh_command=ssh_command,
            scp_command=scp_command,
            ssh_opts=ssh_opts,
            scp_opts=scp_opts,
            encoding=encoding,
            connect_timeout=connect_timeout,
            new_session=new_session)

    def __str__(self):
        return "putty-ssh://%s" % (self._fqhost, )

    def _translate_drive_letter(self, path):
        # pscp takes care of windows paths automatically
        return path

    @_setdoc(BaseRemoteMachine)
    def session(self, isatty=False, new_session=False):
        # plink uses -t (not OpenSSH's -tt) to force TTY allocation.
        return ShellSession(
            self.popen(
                (), (["-t"] if isatty else ["-T"]), new_session=new_session),
            self.custom_encoding, isatty, self.connect_timeout)
|
from homeassistant.core import callback
from homeassistant.helpers.entity import Entity
from . import DOMAIN
from .const import MANUFACTURER
class AugustEntityMixin(Entity):
    """Base implementation for August device."""

    def __init__(self, data, device):
        """Initialize an August device.

        data: integration data service (provides device details and
              per-device update subscriptions).
        device: the August device this entity represents.
        """
        super().__init__()
        self._data = data
        self._device = device

    @property
    def should_poll(self):
        """Return False, updates are controlled via the hub."""
        return False

    @property
    def _device_id(self):
        # Stable identifier used for registry entries and subscriptions.
        return self._device.device_id

    @property
    def _detail(self):
        # Always fetch the latest detail snapshot from the data service.
        return self._data.get_device_detail(self._device.device_id)

    @property
    def device_info(self):
        """Return the device_info of the device."""
        return {
            "identifiers": {(DOMAIN, self._device_id)},
            "name": self._device.device_name,
            "manufacturer": MANUFACTURER,
            "sw_version": self._detail.firmware_version,
            "model": self._detail.model,
        }

    @callback
    def _update_from_data_and_write_state(self):
        # _update_from_data() is expected to be provided by the subclass.
        self._update_from_data()
        self.async_write_ha_state()

    async def async_added_to_hass(self):
        """Subscribe to updates."""
        # Subscribe to both direct device updates and the activity stream;
        # async_on_remove() unsubscribes automatically on entity removal.
        self.async_on_remove(
            self._data.async_subscribe_device_id(
                self._device_id, self._update_from_data_and_write_state
            )
        )
        self.async_on_remove(
            self._data.activity_stream.async_subscribe_device_id(
                self._device_id, self._update_from_data_and_write_state
            )
        )
|
import asyncio
import pytest
from homeassistant import data_entry_flow
from homeassistant.components.point import DOMAIN, config_flow
from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET
from tests.async_mock import AsyncMock, patch
def init_config_flow(hass, side_effect=None):
    """Create a PointFlowHandler with a registered implementation.

    The authorization-URL coroutine is replaced by an AsyncMock so no real
    OAuth round-trip happens; `side_effect` lets tests simulate failures.
    """
    config_flow.register_flow_implementation(hass, DOMAIN, "id", "secret")
    handler = config_flow.PointFlowHandler()
    handler.hass = hass
    handler._get_authorization_url = AsyncMock(  # pylint: disable=protected-access
        side_effect=side_effect, return_value="https://example.com"
    )
    return handler
@pytest.fixture
def is_authorized():
    """Set PointSession authorized."""
    # Overridden per-test via @pytest.mark.parametrize("is_authorized", [...]).
    return True
@pytest.fixture
def mock_pypoint(is_authorized):  # pylint: disable=redefined-outer-name
    """Mock pypoint."""
    with patch(
        "homeassistant.components.point.config_flow.PointSession"
    ) as PointSession:
        # Token exchange always succeeds; the authorized flag is driven by
        # the `is_authorized` fixture so tests can simulate rejection.
        PointSession.return_value.get_access_token = AsyncMock(
            return_value={"access_token": "boo"}
        )
        PointSession.return_value.is_authorized = is_authorized
        PointSession.return_value.user = AsyncMock(
            return_value={"email": "[email protected]"}
        )
        yield PointSession
async def test_abort_if_no_implementation_registered(hass):
    """Test we abort if no implementation is registered."""
    # Deliberately skip register_flow_implementation(): with nothing to
    # offer, the first user step must abort with "no_flows".
    flow = config_flow.PointFlowHandler()
    flow.hass = hass
    result = await flow.async_step_user()
    assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
    assert result["reason"] == "no_flows"
async def test_abort_if_already_setup(hass):
    """Test we abort if Point is already setup."""
    flow = init_config_flow(hass)
    # A non-empty existing-entries list must abort the user flow...
    with patch.object(hass.config_entries, "async_entries", return_value=[{}]):
        result = await flow.async_step_user()
    assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
    assert result["reason"] == "already_setup"
    # ...and the YAML import flow alike.
    with patch.object(hass.config_entries, "async_entries", return_value=[{}]):
        result = await flow.async_step_import()
    assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
    assert result["reason"] == "already_setup"
async def test_full_flow_implementation(
    hass, mock_pypoint  # pylint: disable=redefined-outer-name
):
    """Test registering an implementation and finishing flow works."""
    # Two implementations ("test" from init_config_flow plus "test-other")
    # force the first step to present a picker form.
    config_flow.register_flow_implementation(hass, "test-other", None, None)
    flow = init_config_flow(hass)
    result = await flow.async_step_user()
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "user"
    # Choosing one advances to the auth step with the mocked URL.
    result = await flow.async_step_user({"flow_impl": "test"})
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "auth"
    assert result["description_placeholders"] == {
        "authorization_url": "https://example.com"
    }
    # Supplying the OAuth code finishes the flow and creates the entry.
    result = await flow.async_step_code("123ABC")
    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    assert result["data"]["refresh_args"] == {
        CONF_CLIENT_ID: "id",
        CONF_CLIENT_SECRET: "secret",
    }
    assert result["title"] == "[email protected]"
    assert result["data"]["token"] == {"access_token": "boo"}
async def test_step_import(hass, mock_pypoint):  # pylint: disable=redefined-outer-name
    """Test that we trigger import when configuring with client."""
    flow = init_config_flow(hass)
    # YAML import should jump straight to the auth step.
    result = await flow.async_step_import()
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "auth"
# Force the mocked PointSession to report not-authorized.
@pytest.mark.parametrize("is_authorized", [False])
async def test_wrong_code_flow_implementation(
    hass, mock_pypoint
):  # pylint: disable=redefined-outer-name
    """Test wrong code."""
    flow = init_config_flow(hass)
    result = await flow.async_step_code("123ABC")
    assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
    assert result["reason"] == "auth_error"
async def test_not_pick_implementation_if_only_one(hass):
    """Test we allow picking implementation if we have one flow_imp."""
    flow = init_config_flow(hass)
    # Only one registered implementation: the picker is skipped entirely.
    result = await flow.async_step_user()
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "auth"
async def test_abort_if_timeout_generating_auth_url(hass):
    """Test we abort if generating authorize url fails."""
    # The mocked _get_authorization_url raises TimeoutError.
    flow = init_config_flow(hass, side_effect=asyncio.TimeoutError)
    result = await flow.async_step_user()
    assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
    assert result["reason"] == "authorize_url_timeout"
async def test_abort_if_exception_generating_auth_url(hass):
    """Test we abort if generating authorize url blows up."""
    # Any unexpected exception maps to the generic "authorize_url_fail".
    flow = init_config_flow(hass, side_effect=ValueError)
    result = await flow.async_step_user()
    assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
    assert result["reason"] == "authorize_url_fail"
async def test_abort_no_code(hass):
    """Test if no code is given to step_code."""
    flow = init_config_flow(hass)
    # Calling the code step without an OAuth code must abort.
    result = await flow.async_step_code()
    assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
    assert result["reason"] == "no_code"
|
import unittest
from trashcli.trash import TrashDir
class TestTrashDir_finding_orphans(unittest.TestCase):
    """An orphan is an entry under /files with no matching /info record."""

    def test(self):
        # A lone .trashinfo file is metadata, not an orphan.
        self.fs.create_fake_file('/info/foo.trashinfo')
        self.find_orphan()
        assert [] == self.orphan_found

    def test2(self):
        # A file under /files without a matching trashinfo is an orphan.
        self.fs.create_fake_file('/files/foo')
        self.find_orphan()
        assert ['/files/foo'] == self.orphan_found

    def setUp(self):
        self.orphan_found = []
        self.fs = FakeFileSystem()
        self.trashdir = TrashDir(self.fs)
        # Open the fake trash directory rooted at '/'.
        self.trashdir.open('/', None)

    def find_orphan(self):
        # Collect each reported orphan path into self.orphan_found.
        self.trashdir.each_orphan(self.orphan_found.append)
class FakeFileSystem:
    """In-memory stand-in for the real file system used by these tests."""

    def __init__(self):
        """Start with no files and no directory listings."""
        self.files = {}
        self.dirs = {}

    def contents_of(self, path):
        """Return the contents registered for *path* (KeyError if absent)."""
        return self.files[path]

    def exists(self, path):
        """Return True if a fake file was created at *path*."""
        return path in self.files

    def entries_if_dir_exists(self, path):
        """Return the listing of *path*, or an empty list if unknown."""
        try:
            return self.dirs[path]
        except KeyError:
            return []

    def create_fake_file(self, path, contents=''):
        """Register a fake file and list it inside its parent directory."""
        import os
        self.files[path] = contents
        parent, leaf = os.path.split(path)
        self.create_fake_dir(parent, leaf)

    def create_fake_dir(self, dir_path, *dir_entries):
        """Register *dir_entries* as the complete listing of *dir_path*."""
        self.dirs[dir_path] = dir_entries
class TestFakeFileSystem(unittest.TestCase):
    """Sanity checks for the FakeFileSystem test double itself."""

    def setUp(self):
        self.fs = FakeFileSystem()

    def test_you_can_read_from_files(self):
        self.fs.create_fake_file('/path/to/file', "file contents")
        assert self.fs.contents_of('/path/to/file') == 'file contents'

    def test_when_creating_a_fake_file_it_creates_also_the_dir(self):
        self.fs.create_fake_file('/dir/file')
        assert {'file'} == set(self.fs.entries_if_dir_exists('/dir'))

    def test_you_can_create_multiple_fake_file(self):
        self.fs.create_fake_file('/path/to/file1', "one")
        self.fs.create_fake_file('/path/to/file2', "two")
        assert self.fs.contents_of('/path/to/file1') == 'one'
        assert self.fs.contents_of('/path/to/file2') == 'two'

    def test_no_file_exists_at_beginning(self):
        assert not self.fs.exists('/filename')

    def test_after_a_creation_the_file_exists(self):
        self.fs.create_fake_file('/filename')
        assert self.fs.exists('/filename')

    def test_create_fake_dir(self):
        self.fs.create_fake_dir('/etc', 'passwd', 'shadow', 'hosts')
        assert {'passwd', 'shadow', 'hosts'} == set(
            self.fs.entries_if_dir_exists('/etc'))
|
import itertools
from tensornetwork.tn_keras.layers import DenseMPO
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras import Input
import numpy as np
import pytest
@pytest.mark.parametrize('in_dim_base,dim1,dim2,num_nodes,bond_dim',
                         itertools.product([3, 4], [3, 4], [2, 5], [3, 4],
                                           [2, 3]))
def test_shape_sanity_check(in_dim_base, dim1, dim2, num_nodes, bond_dim):
    """Two stacked DenseMPO layers must map (batch, in_dim_base**num_nodes)
    inputs to (batch, dim2**num_nodes) outputs for every parameter combo."""
    model = Sequential([
        Input(in_dim_base**num_nodes),
        DenseMPO(dim1**num_nodes, num_nodes=num_nodes, bond_dim=bond_dim),
        DenseMPO(dim2**num_nodes, num_nodes=num_nodes, bond_dim=bond_dim),
    ])
    # Hard code batch size.
    result = model.predict(np.ones((32, in_dim_base**num_nodes)))
    assert result.shape == (32, dim2**num_nodes)
|
from flask import current_app, Blueprint
from flask_restful import Api
from lemur.common.schema import validate_schema
from lemur.authorities.service import get_by_name
from lemur.auth.service import AuthenticatedResource
from lemur.defaults.schemas import default_output_schema
mod = Blueprint("default", __name__)
api = Api(mod)
class LemurDefaults(AuthenticatedResource):
    """ Defines the 'defaults' endpoint """

    def __init__(self):
        # Fix: the previous `super(LemurDefaults)` only created an unbound
        # super object and never ran the parent initializer.
        super().__init__()

    @validate_schema(None, default_output_schema)
    def get(self):
        """
        .. http:get:: /defaults

           Returns defaults needed to generate CSRs

           **Example request**:

           .. sourcecode:: http

              GET /defaults HTTP/1.1
              Host: example.com
              Accept: application/json, text/javascript

           **Example response**:

           .. sourcecode:: http

              HTTP/1.1 200 OK
              Vary: Accept
              Content-Type: text/javascript

              {
                 "country": "US",
                 "state": "CA",
                 "location": "Los Gatos",
                 "organization": "Netflix",
                 "organizationalUnit": "Operations",
                 "dnsProviders": [{"name": "test", ...}, {...}],
              }

           :reqheader Authorization: OAuth token to authenticate
           :statuscode 200: no error
           :statuscode 403: unauthenticated
        """
        # Resolve the configured default authority (may be None when unset).
        default_authority = get_by_name(
            current_app.config.get("LEMUR_DEFAULT_AUTHORITY")
        )
        return dict(
            country=current_app.config.get("LEMUR_DEFAULT_COUNTRY"),
            state=current_app.config.get("LEMUR_DEFAULT_STATE"),
            location=current_app.config.get("LEMUR_DEFAULT_LOCATION"),
            organization=current_app.config.get("LEMUR_DEFAULT_ORGANIZATION"),
            organizational_unit=current_app.config.get(
                "LEMUR_DEFAULT_ORGANIZATIONAL_UNIT"
            ),
            issuer_plugin=current_app.config.get("LEMUR_DEFAULT_ISSUER_PLUGIN"),
            authority=default_authority,
        )
# Expose the resource on this blueprint's API at /defaults.
api.add_resource(LemurDefaults, "/defaults", endpoint="default")
|
from datetime import datetime, timedelta
import logging
from typing import Optional
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, CONF_PORT
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DEFAULT_PORT, DOMAIN
from .errors import TemporaryFailure, ValidationFailure
from .helper import get_cert_expiry_timestamp
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(hours=12)
async def async_setup(hass, config):
    """Platform setup, do nothing."""
    # All configuration is handled through config entries
    # (async_setup_entry); YAML setup is intentionally a no-op.
    return True
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry):
    """Load the saved entities."""
    host = entry.data[CONF_HOST]
    port = entry.data[CONF_PORT]
    coordinator = CertExpiryDataUpdateCoordinator(hass, host, port)
    # Do the first refresh up-front; if it failed entirely, raise so Home
    # Assistant retries the setup later.
    await coordinator.async_refresh()
    if not coordinator.last_update_success:
        raise ConfigEntryNotReady
    hass.data.setdefault(DOMAIN, {})
    hass.data[DOMAIN][entry.entry_id] = coordinator
    # Migrate entries created before unique IDs existed.
    if entry.unique_id is None:
        hass.config_entries.async_update_entry(entry, unique_id=f"{host}:{port}")
    hass.async_create_task(
        hass.config_entries.async_forward_entry_setup(entry, "sensor")
    )
    return True
async def async_unload_entry(hass, entry):
    """Unload a config entry."""
    # Only the sensor platform was forwarded, so unloading it is enough.
    return await hass.config_entries.async_forward_entry_unload(entry, "sensor")
class CertExpiryDataUpdateCoordinator(DataUpdateCoordinator[datetime]):
    """Class to manage fetching Cert Expiry data from single endpoint."""

    def __init__(self, hass, host, port):
        """Initialize global Cert Expiry data updater."""
        self.host = host
        self.port = port
        self.cert_error = None  # last ValidationFailure seen, if any
        self.is_cert_valid = False
        # Only show the port in the coordinator name when non-standard.
        display_port = f":{port}" if port != DEFAULT_PORT else ""
        name = f"{self.host}{display_port}"
        super().__init__(
            hass,
            _LOGGER,
            name=name,
            update_interval=SCAN_INTERVAL,
        )

    async def _async_update_data(self) -> Optional[datetime]:
        """Fetch certificate."""
        try:
            timestamp = await get_cert_expiry_timestamp(self.hass, self.host, self.port)
        except TemporaryFailure as err:
            # Transient problem: mark the update failed so the coordinator
            # retries on the next cycle.
            raise UpdateFailed(err.args[0]) from err
        except ValidationFailure as err:
            # An invalid certificate is still a *successful* update: record
            # the error state and report no expiry timestamp.
            self.cert_error = err
            self.is_cert_valid = False
            _LOGGER.error("Certificate validation error: %s [%s]", self.host, err)
            return None
        self.cert_error = None
        self.is_cert_valid = True
        return timestamp
|
import logging
import queue
import socket
import threading
import time
import voluptuous as vol
from homeassistant.const import (
CONF_HOST,
CONF_PORT,
CONF_PREFIX,
EVENT_HOMEASSISTANT_START,
EVENT_HOMEASSISTANT_STOP,
EVENT_STATE_CHANGED,
)
from homeassistant.helpers import state
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DEFAULT_HOST = "localhost"
DEFAULT_PORT = 2003
DEFAULT_PREFIX = "ha"
DOMAIN = "graphite"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_PREFIX, default=DEFAULT_PREFIX): cv.string,
}
)
},
extra=vol.ALLOW_EXTRA,
)
def setup(hass, config):
    """Set up the Graphite feeder.

    Probes the configured endpoint once; aborts setup when it is
    unreachable, otherwise starts the background feeder thread.
    """
    conf = config[DOMAIN]
    host = conf.get(CONF_HOST)
    port = conf.get(CONF_PORT)
    prefix = conf.get(CONF_PREFIX)
    probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        probe.connect((host, port))
        probe.shutdown(2)
    except OSError:
        _LOGGER.error("Not able to connect to Graphite")
        return False
    _LOGGER.debug("Connection to Graphite possible")
    GraphiteFeeder(hass, host, port, prefix)
    return True
class GraphiteFeeder(threading.Thread):
    """Feed data to Graphite."""

    def __init__(self, hass, host, port, prefix):
        """Initialize the feeder."""
        super().__init__(daemon=True)
        self._hass = hass
        self._host = host
        self._port = port
        # rstrip any trailing dots in case they think they need it
        self._prefix = prefix.rstrip(".")
        self._queue = queue.Queue()
        # Sentinel pushed onto the queue to tell run() to exit.
        self._quit_object = object()
        self._we_started = False
        # The thread itself is only started once Home Assistant is up.
        hass.bus.listen_once(EVENT_HOMEASSISTANT_START, self.start_listen)
        hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, self.shutdown)
        hass.bus.listen(EVENT_STATE_CHANGED, self.event_listener)
        _LOGGER.debug("Graphite feeding to %s:%i initialized", self._host, self._port)

    def start_listen(self, event):
        """Start event-processing thread."""
        _LOGGER.debug("Event processing thread started")
        self._we_started = True
        self.start()

    def shutdown(self, event):
        """Signal shutdown of processing event."""
        _LOGGER.debug("Event processing signaled exit")
        self._queue.put(self._quit_object)

    def event_listener(self, event):
        """Queue an event for processing."""
        # Accept events while the thread runs, and also *before* it has been
        # started (they are buffered in the queue until start_listen fires).
        if self.is_alive() or not self._we_started:
            _LOGGER.debug("Received event")
            self._queue.put(event)
        else:
            _LOGGER.error("Graphite feeder thread has died, not queuing event")

    def _send_to_graphite(self, data):
        """Send data to Graphite."""
        # One short-lived connection per batch of lines.
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.settimeout(10)
        sock.connect((self._host, self._port))
        sock.sendall(data.encode("ascii"))
        sock.send(b"\n")
        sock.close()

    def _report_attributes(self, entity_id, new_state):
        """Report the attributes."""
        now = time.time()
        things = dict(new_state.attributes)
        try:
            things["state"] = state.state_as_number(new_state)
        except ValueError:
            # Non-numeric state: skip it, numeric attributes still go out.
            pass
        # Graphite plaintext protocol: "<metric path> <value> <timestamp>".
        lines = [
            "%s.%s.%s %f %i"
            % (self._prefix, entity_id, key.replace(" ", "_"), value, now)
            for key, value in things.items()
            if isinstance(value, (float, int))
        ]
        if not lines:
            return
        _LOGGER.debug("Sending to graphite: %s", lines)
        try:
            self._send_to_graphite("\n".join(lines))
        except socket.gaierror:
            _LOGGER.error("Unable to connect to host %s", self._host)
        except OSError:
            _LOGGER.exception("Failed to send data to graphite")

    def run(self):
        """Run the process to export the data."""
        while True:
            event = self._queue.get()
            if event == self._quit_object:
                _LOGGER.debug("Event processing thread stopped")
                self._queue.task_done()
                return
            if event.event_type == EVENT_STATE_CHANGED:
                # Events without a new_state (entity removals) are skipped.
                if not event.data.get("new_state"):
                    _LOGGER.debug(
                        "Skipping %s without new_state for %s",
                        event.event_type,
                        event.data["entity_id"],
                    )
                    self._queue.task_done()
                    continue
                _LOGGER.debug(
                    "Processing STATE_CHANGED event for %s", event.data["entity_id"]
                )
                try:
                    self._report_attributes(
                        event.data["entity_id"], event.data["new_state"]
                    )
                except Exception:  # pylint: disable=broad-except
                    # Catch this so we can avoid the thread dying and
                    # make it visible.
                    _LOGGER.exception("Failed to process STATE_CHANGED event")
            else:
                _LOGGER.warning("Processing unexpected event type %s", event.event_type)
            self._queue.task_done()
|
import os.path
import collections
from typing import MutableMapping
from PyQt5.QtCore import pyqtSlot, QObject, QTimer
from qutebrowser.config import config
from qutebrowser.api import cmdutils
from qutebrowser.utils import utils, log, message, usertypes, error
from qutebrowser.misc import objects
class Saveable:

    """A single thing which can be saved.

    Attributes:
        _name: The name of the thing to be saved.
        _dirty: Whether the saveable was changed since the last save.
        _save_handler: The function to call to save this Saveable.
        _save_on_exit: Whether to always save this saveable on exit.
        _config_opt: A config option which decides whether to auto-save or not.
                     None if no such option exists.
        _filename: The filename of the underlying file.
    """

    def __init__(self, name, save_handler, changed=None, config_opt=None,
                 filename=None):
        self._name = name
        self._dirty = False
        self._save_handler = save_handler
        self._config_opt = config_opt
        if changed is not None:
            # With a change signal we know exactly when saving is needed.
            changed.connect(self.mark_dirty)
            self._save_on_exit = False
        else:
            # No signal available: save unconditionally when exiting.
            self._save_on_exit = True
        self._filename = filename
        if filename is not None and not os.path.exists(filename):
            # Create the backing file immediately so it exists on disk.
            self._dirty = True
            self.save()

    def __repr__(self):
        return utils.get_repr(self, name=self._name, dirty=self._dirty,
                              save_handler=self._save_handler,
                              config_opt=self._config_opt,
                              save_on_exit=self._save_on_exit,
                              filename=self._filename)

    def mark_dirty(self):
        """Mark this saveable as dirty (having changes)."""
        log.save.debug("Marking {} as dirty.".format(self._name))
        self._dirty = True

    def save(self, is_exit=False, explicit=False, silent=False, force=False):
        """Save this saveable.

        Args:
            is_exit: Whether we're currently exiting qutebrowser.
            explicit: Whether the user explicitly requested this save.
            silent: Don't write information to log.
            force: Force saving, no matter what.
        """
        # Respect a disabled auto-save option unless the user asked for the
        # save explicitly or the caller forces it.
        if (self._config_opt is not None and
                (not config.instance.get(self._config_opt)) and
                (not explicit) and (not force)):
            if not silent:
                log.save.debug("Not saving {name} because autosaving has been "
                               "disabled by {cfg[0]} -> {cfg[1]}.".format(
                                   name=self._name, cfg=self._config_opt))
            return
        do_save = self._dirty or (self._save_on_exit and is_exit) or force
        if not silent:
            log.save.debug("Save of {} requested - dirty {}, save_on_exit {}, "
                           "is_exit {}, force {} -> {}".format(
                               self._name, self._dirty, self._save_on_exit,
                               is_exit, force, do_save))
        if do_save:
            self._save_handler()
            self._dirty = False
class SaveManager(QObject):

    """Responsible to save 'saveables' periodically and on exit.

    Attributes:
        saveables: A dict mapping names to Saveable instances.
        _save_timer: The Timer used to periodically auto-save things.
    """

    def __init__(self, parent=None):
        super().__init__(parent)
        self.saveables: MutableMapping[str, Saveable] = collections.OrderedDict()
        self._save_timer = usertypes.Timer(self, name='save-timer')
        self._save_timer.timeout.connect(self.autosave)
        self._set_autosave_interval()
        # Re-read the interval whenever the config option changes.
        config.instance.changed.connect(self._set_autosave_interval)

    def __repr__(self):
        return utils.get_repr(self, saveables=self.saveables)

    @config.change_filter('auto_save.interval')
    def _set_autosave_interval(self):
        """Set the auto-save interval."""
        interval = config.val.auto_save.interval
        if interval == 0:
            # An interval of 0 means periodic auto-saving is disabled.
            self._save_timer.stop()
        else:
            self._save_timer.setInterval(interval)
            self._save_timer.start()

    def add_saveable(self, name, save, changed=None, config_opt=None,
                     filename=None, dirty=False):
        """Add a new saveable.

        Args:
            name: The name to use.
            save: The function to call to save this saveable.
            changed: The signal emitted when this saveable changed.
            config_opt: An option deciding whether to auto-save or not.
            filename: The filename of the underlying file, so we can force
                      saving if it doesn't exist.
            dirty: Whether the saveable is already dirty.
        """
        if name in self.saveables:
            raise ValueError("Saveable {} already registered!".format(name))
        saveable = Saveable(name, save, changed, config_opt, filename)
        self.saveables[name] = saveable
        if dirty:
            saveable.mark_dirty()
            # Defer the save until the event loop is idle.
            QTimer.singleShot(0, saveable.save)

    def save(self, name, is_exit=False, explicit=False, silent=False,
             force=False):
        """Save a saveable by name.

        Args:
            is_exit: Whether we're currently exiting qutebrowser.
            explicit: Whether this save operation was triggered explicitly.
            silent: Don't write information to log. Used to reduce log spam
                    when autosaving.
            force: Force saving, no matter what.
        """
        self.saveables[name].save(is_exit=is_exit, explicit=explicit,
                                  silent=silent, force=force)

    def save_all(self, *args, **kwargs):
        """Save all saveables."""
        for saveable in self.saveables:
            self.save(saveable, *args, **kwargs)

    @pyqtSlot()
    def autosave(self):
        """Slot used when the configs are auto-saved."""
        for (key, saveable) in self.saveables.items():
            try:
                saveable.save(silent=True)
            except OSError as e:
                # Keep auto-saving the remaining saveables on failure.
                message.error("Failed to auto-save {}: {}".format(key, e))

    @cmdutils.register(instance='save-manager', name='save',
                       star_args_optional=True)
    def save_command(self, *what):
        """Save configs and state.

        Args:
            *what: What to save (`config`/`key-config`/`cookies`/...).
                   If not given, everything is saved.
        """
        if what:
            explicit = True
        else:
            # No arguments: save everything, as an implicit request.
            what = tuple(self.saveables)
            explicit = False
        for key in what:
            if key not in self.saveables:
                message.error("{} is nothing which can be saved".format(key))
            else:
                try:
                    self.save(key, explicit=explicit, force=True)
                except OSError as e:
                    message.error("Could not save {}: {}".format(key, e))
        log.save.debug(":save saved {}".format(', '.join(what)))

    @pyqtSlot()
    def shutdown(self):
        """Save all saveables when shutting down."""
        for key in self.saveables:
            try:
                self.save(key, is_exit=True)
            except OSError as e:
                # Saving failed during shutdown: show a fatal-error dialog
                # (or log it when error windows are suppressed).
                error.handle_fatal_exc(
                    e, "Error while saving!",
                    pre_text="Error while saving {}".format(key),
                    no_err_windows=objects.args.no_err_windows)
|
from contextlib import suppress
import markups
import os
import sys
import tempfile
import time
import unittest
from unittest.mock import patch, MagicMock
import warnings
from markups.abstract import ConvertedMarkup
from PyQt5.QtCore import pyqtSignal, QObject, Qt
from PyQt5.QtWidgets import QApplication
import ReText
from ReText.window import ReTextWindow
defaultEventTimeout = 0.0
path_to_testdata = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'testdata')
QApplication.setAttribute(Qt.AA_ShareOpenGLContexts)
app = QApplication.instance() or QApplication(sys.argv)
ReText.initializeDataDirs()
def handle_timer_event():
    """Debug hook: report that a Qt timer event was received."""
    notice = 'timer event received'
    print(notice)
def processEventsUntilIdle(eventTimeout=defaultEventTimeout):
    '''
    Process Qt events until the application has not had any events for `eventTimeout` seconds
    '''
    # Sleep once up-front when the queue is already empty — presumably to
    # let events posted "just after" the call arrive before we conclude the
    # app is idle (TODO confirm).
    if not app.hasPendingEvents():
        time.sleep(eventTimeout)
    while app.hasPendingEvents():
        #print ('outer loop')
        while app.hasPendingEvents():
            #print('inner loop')
            app.processEvents()
        # Give timers/queued signals a chance to post follow-up events
        # before re-checking the pending queue.
        time.sleep(eventTimeout)
class FakeConverterProcess(QObject):
    """Test double for the converter process: completes instantly."""

    conversionDone = pyqtSignal()

    def start_conversion(self, name, filename, extensions, text, current_dir):
        # Emit completion immediately instead of spawning a worker process.
        self.conversionDone.emit()

    def get_result(self):
        # An empty conversion result is sufficient for the window tests.
        return ConvertedMarkup('')
@patch('ReText.tab.converterprocess.ConverterProcess', FakeConverterProcess)
class TestWindow(unittest.TestCase):
def setUp(self):
    warnings.simplefilter("ignore", Warning)
    # Patch settings storage and file watching so tests never touch the
    # real configuration or file system; patch.stopall() in tearDown
    # undoes all of these .start() calls.
    self.readListFromSettingsMock = patch('ReText.window.readListFromSettings', return_value=[]).start()
    self.writeListToSettingsMock = patch('ReText.window.writeListToSettings').start()
    self.globalSettingsMock = patch('ReText.window.globalSettings', MagicMock(**ReText.configOptions)).start()
    self.fileSystemWatcherMock = patch('ReText.window.QFileSystemWatcher').start()
    # ReText.tab holds its own reference; point it at the mock too.
    ReText.tab.globalSettings = self.globalSettingsMock
def tearDown(self):
patch.stopall()
#
# Helper functions
#
@staticmethod
def get_ui_enabled_states(window):
enabled = set([])
disabled = set([])
for item in ('actionBold',
'actionCopy',
'actionCut',
'actionItalic',
'actionUnderline',
'actionUndo',
'actionRedo',
'actionReload',
'actionSave',
'actionSetEncoding',
'editBar',
'formattingBox',
'symbolBox'):
if getattr(window, item).isEnabled():
enabled.add(item)
else:
disabled.add(item)
return enabled, disabled
def check_widget_state(self, window, expected_enabled, expected_disabled):
actually_enabled, actually_disabled = self.get_ui_enabled_states(window)
self.assertEqual(expected_enabled - actually_enabled, set(), 'These widgets are unexpectedly disabled')
self.assertEqual(expected_disabled - actually_disabled, set(), 'These widgets are unexpectedly enabled')
def check_widgets_enabled_for_markdown(self, window):
self.check_widget_state(window,
set(['actionBold', 'actionItalic', 'actionUnderline', 'formattingBox', 'symbolBox']),
set())
def check_widgets_enabled_for_restructuredtext(self, window):
self.check_widget_state(window,
set(['actionBold', 'actionItalic']),
set(['actionUnderline', 'formattingBox', 'symbolBox']))
def check_widgets_enabled(self, window, widgets):
self.check_widget_state(window,
set(widgets),
set())
def check_widgets_disabled(self, window, widgets):
self.check_widget_state(window,
set(),
set(widgets))
#
# Tests
#
def test_windowTitleAndTabs_afterStartWithEmptyTab(self):
self.window = ReTextWindow()
self.window.createNew('')
processEventsUntilIdle()
self.assertEqual(1, self.window.tabWidget.count())
self.assertEqual('New document[*]', self.window.windowTitle())
self.assertFalse(self.window.currentTab.fileName)
@patch('ReText.window.QFileDialog.getOpenFileNames', return_value=([os.path.join(path_to_testdata, 'existing_file.md')], None))
def test_windowTitleAndTabs_afterLoadingFile(self, getOpenFileNamesMock):
self.window = ReTextWindow()
self.window.createNew('')
self.window.actionOpen.trigger()
processEventsUntilIdle()
# Check that file is opened in the existing empty tab
self.assertEqual(1, self.window.tabWidget.count())
self.assertEqual('existing_file.md[*]', self.window.windowTitle())
self.assertTrue(self.window.currentTab.fileName.endswith('tests/testdata/existing_file.md'))
self.assertEqual(self.window.tabWidget.tabText(0), 'existing_file')
self.assertFalse(self.window.isWindowModified())
@patch('ReText.window.QFileDialog.getOpenFileNames', return_value=([os.path.join(path_to_testdata, 'existing_file.md')], None))
def test_windowTitleAndTabs_afterSwitchingTab(self, getOpenFileNamesMock):
self.window = ReTextWindow()
self.window.createNew('')
self.window.actionOpen.trigger()
processEventsUntilIdle()
tab_with_file = self.window.currentTab
self.window.createNew('bla')
processEventsUntilIdle()
tab_with_unsaved_content = self.window.currentTab
self.assertEqual('New document[*]', self.window.windowTitle())
self.assertIs(self.window.currentTab, tab_with_unsaved_content)
self.assertIs(self.window.tabWidget.currentWidget(), tab_with_unsaved_content)
self.assertEqual(self.window.ind, 1)
self.assertEqual(self.window.tabWidget.tabText(0), 'existing_file')
self.assertEqual(self.window.tabWidget.tabText(1), 'New document*')
self.window.switchTab()
processEventsUntilIdle()
self.assertEqual('existing_file.md[*]', self.window.windowTitle())
self.assertIs(self.window.currentTab, tab_with_file)
self.assertIs(self.window.tabWidget.currentWidget(), tab_with_file)
@patch('ReText.window.QFileDialog.getOpenFileNames', return_value=([os.path.join(path_to_testdata, 'existing_file.md')], None))
def test_activeTab_afterLoadingFileThatIsAlreadyOpenInOtherTab(self, getOpenFileNamesMock):
self.window = ReTextWindow()
self.window.createNew('')
self.window.actionOpen.trigger()
processEventsUntilIdle()
tab_with_file = self.window.currentTab
self.window.createNew('')
processEventsUntilIdle()
# Make sure that the newly created tab is the active one
self.assertFalse(self.window.currentTab.fileName)
# Load the same document again
self.window.actionOpen.trigger()
processEventsUntilIdle()
# Check that we have indeed been switched back to the previous tab
self.assertIs(self.window.currentTab, tab_with_file)
self.assertTrue(self.window.currentTab.fileName.endswith('tests/testdata/existing_file.md'))
def test_markupDependentWidgetStates_afterStartWithEmptyTabAndMarkdownAsDefaultMarkup(self):
self.window = ReTextWindow()
self.window.createNew('')
processEventsUntilIdle()
# markdown is the default markup
self.check_widgets_enabled_for_markdown(self.window)
def test_markupDependentWidgetStates_afterStartWithEmptyTabAndRestructuredtextAsDefaultMarkup(self):
self.globalSettingsMock.defaultMarkup = 'reStructuredText'
self.window = ReTextWindow()
self.window.createNew('')
processEventsUntilIdle()
self.check_widgets_enabled_for_restructuredtext(self.window)
def test_markupDependentWidgetStates_afterChangingDefaultMarkup(self):
self.window = ReTextWindow()
self.window.createNew('')
processEventsUntilIdle()
self.window.setDefaultMarkup(markups.ReStructuredTextMarkup)
self.check_widgets_enabled_for_restructuredtext(self.window)
@patch('ReText.window.QFileDialog.getOpenFileNames', return_value=([os.path.join(path_to_testdata, 'existing_file.md')], None))
def test_markupDependentWidgetStates_afterLoadingMarkdownDocument(self, getOpenFileNamesMock):
self.window = ReTextWindow()
self.window.createNew('')
self.window.actionOpen.trigger()
processEventsUntilIdle()
self.check_widgets_enabled_for_markdown(self.window)
@patch('ReText.window.QFileDialog.getOpenFileNames', return_value=([os.path.join(path_to_testdata, 'existing_file.rst')], None))
def test_markupDependentWidgetStates_afterLoadingRestructuredtextDocument(self, getOpenFileNamesMock):
self.window = ReTextWindow()
self.window.createNew('')
self.window.actionOpen.trigger()
processEventsUntilIdle()
self.check_widgets_enabled_for_restructuredtext(self.window)
@patch('ReText.window.QFileDialog.getOpenFileNames', side_effect=[ ([os.path.join(path_to_testdata, 'existing_file.md')], None),
([os.path.join(path_to_testdata, 'existing_file.rst')], None) ])
def test_markupDependentWidgetStates_afterSwitchingTab(self, getOpenFileNamesMock):
self.window = ReTextWindow()
self.window.createNew('')
self.window.actionOpen.trigger()
self.window.actionOpen.trigger()
processEventsUntilIdle()
# Just to make sure that sending two actionOpen triggers has had the desired effect
self.assertIn('.rst', self.window.windowTitle())
self.window.switchTab()
processEventsUntilIdle()
self.assertIn('.md', self.window.windowTitle())
self.check_widgets_enabled_for_markdown(self.window)
@patch('ReText.window.QFileDialog.getOpenFileNames', return_value=([os.path.join(path_to_testdata, 'existing_file.md')], None))
@patch('ReText.window.QFileDialog.getSaveFileName', return_value=(os.path.join(path_to_testdata, 'not_existing_file.rst'), None))
def test_markupDependentWidgetStates_afterSavingDocumentAsDifferentMarkup(self, getOpenFileNamesMock, getSaveFileNameMock):
self.window = ReTextWindow()
self.window.createNew('')
self.window.actionOpen.trigger()
processEventsUntilIdle()
try:
self.window.actionSaveAs.trigger()
processEventsUntilIdle()
finally:
os.remove(os.path.join(path_to_testdata, 'not_existing_file.rst'))
self.check_widgets_enabled_for_restructuredtext(self.window)
@patch('ReText.window.QFileDialog.getOpenFileNames', return_value=([os.path.join(path_to_testdata, 'existing_file.md')], None))
@patch('ReText.window.QFileDialog.getSaveFileName', return_value=(os.path.join(path_to_testdata, 'not_existing_file.md'), None))
def test_saveWidgetStates(self, getOpenFileNamesMock, getSaveFileNameMock):
self.window = ReTextWindow()
# check if save is disabled at first
self.window.createNew('')
processEventsUntilIdle()
self.check_widgets_disabled(self.window, ('actionSave',))
self.assertFalse(self.window.isWindowModified())
self.assertEqual(self.window.tabWidget.tabText(0), 'New document')
# check if it's enabled after inserting some text
self.window.currentTab.editBox.textCursor().insertText('some text')
processEventsUntilIdle()
self.check_widgets_enabled(self.window, ('actionSave',))
self.assertTrue(self.window.isWindowModified())
self.assertEqual(self.window.tabWidget.tabText(0), 'New document*')
# check if it's disabled again after loading a file in a second tab and switching to it
self.window.actionOpen.trigger()
processEventsUntilIdle()
self.check_widgets_disabled(self.window, ('actionSave',))
self.assertFalse(self.window.isWindowModified())
self.assertEqual(self.window.tabWidget.tabText(0), 'New document*')
self.assertEqual(self.window.tabWidget.tabText(1), 'existing_file')
# check if it's enabled again after switching back
self.window.switchTab()
processEventsUntilIdle()
self.check_widgets_enabled(self.window, ('actionSave',))
self.assertTrue(self.window.isWindowModified())
self.assertEqual(self.window.tabWidget.tabText(0), 'New document*')
self.assertEqual(self.window.tabWidget.tabText(1), 'existing_file')
# check if it's disabled after saving
try:
self.window.actionSaveAs.trigger()
processEventsUntilIdle()
self.check_widgets_disabled(self.window, ('actionSave',))
self.assertFalse(self.window.isWindowModified())
self.assertEqual(self.window.tabWidget.tabText(0), 'not_existing_file')
self.assertEqual(self.window.tabWidget.tabText(1), 'existing_file')
finally:
os.remove(os.path.join(path_to_testdata, 'not_existing_file.md'))
@patch('ReText.window.QFileDialog.getOpenFileNames', return_value=([os.path.join(path_to_testdata, 'existing_file.md')], None))
@patch('ReText.window.QFileDialog.getSaveFileName', return_value=(os.path.join(path_to_testdata, 'not_existing_file.md'), None))
def test_saveWidgetStates_autosaveEnabled(self, getOpenFileNamesMock, getSaveFileNameMock):
self.globalSettingsMock.autoSave = True
self.window = ReTextWindow()
# check if save is disabled at first
self.window.createNew('')
processEventsUntilIdle()
self.check_widgets_disabled(self.window, ('actionSave',))
# check if it stays enabled after inserting some text (because autosave
# can't save without a filename)
self.window.currentTab.editBox.textCursor().insertText('some text')
processEventsUntilIdle()
self.check_widgets_enabled(self.window, ('actionSave',))
# check if it's disabled after saving
try:
self.window.actionSaveAs.trigger()
processEventsUntilIdle()
self.check_widgets_disabled(self.window, ('actionSave',))
# check if it is still disabled after inserting some text (because
# autosave will take care of saving now that the filename is known)
self.window.currentTab.editBox.textCursor().insertText('some text')
processEventsUntilIdle()
self.check_widgets_disabled(self.window, ('actionSave',))
finally:
os.remove(os.path.join(path_to_testdata, 'not_existing_file.md'))
@patch('ReText.window.QFileDialog.getOpenFileNames', return_value=([os.path.join(path_to_testdata, 'existing_file.md')], None))
def test_encodingAndReloadWidgetStates(self, getOpenFileNamesMock):
self.window = ReTextWindow()
# check if reload/set encoding is disabled for a tab without filename set
self.window.createNew('')
processEventsUntilIdle()
self.check_widgets_disabled(self.window, ('actionReload','actionSetEncoding'))
self.window.actionOpen.trigger()
processEventsUntilIdle()
self.check_widgets_enabled(self.window, ('actionReload','actionSetEncoding'))
@patch('ReText.window.QFileDialog.getOpenFileNames', return_value=([os.path.join(path_to_testdata, 'existing_file.md')], None))
def test_encodingAndReloadWidgetStates_alwaysDisabledWhenAutosaveEnabled(self, getOpenFileNamesMock):
self.globalSettingsMock.autoSave = True
self.window = ReTextWindow()
# check if reload/set encoding is disabled for a tab without filename set
self.window.createNew('')
processEventsUntilIdle()
self.check_widgets_disabled(self.window, ('actionReload','actionSetEncoding'))
self.window.actionOpen.trigger()
processEventsUntilIdle()
self.check_widgets_disabled(self.window, ('actionReload','actionSetEncoding'))
def test_doesNotTweakSpecialCharacters(self):
fileName = tempfile.mkstemp(suffix='.mkd')[1]
content = 'Non-breaking\u00a0space\n\nLine\u2028separator\n'
with open(fileName, 'w', encoding='utf-8') as tempFile:
tempFile.write(content)
window = ReTextWindow()
window.openFileWrapper(fileName)
self.assertTrue(window.saveFile())
with open(fileName, encoding='utf-8') as tempFile:
self.assertMultiLineEqual(content, tempFile.read())
with suppress(PermissionError):
os.remove(fileName)
# Allow running this test module directly; test runners import it instead.
if __name__ == '__main__':
	unittest.main()
|
import logging
from typing import Set, Tuple
from pysyncthru import SyncThru
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_URL
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import aiohttp_client, device_registry as dr
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool:
    """Initialize the integration's shared data store under its domain key."""
    if DOMAIN not in hass.data:
        hass.data[DOMAIN] = {}
    return True
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool:
    """Set up config entry.

    Creates the SyncThru API client, verifies the device really is a
    SyncThru printer, registers it in the device registry and forwards
    the entry to the sensor platform.

    Returns False (permanent failure) for non-SyncThru devices and raises
    ConfigEntryNotReady (retry later) when the printer is unreachable.
    """
    session = aiohttp_client.async_get_clientsession(hass)
    printer = hass.data[DOMAIN][entry.entry_id] = SyncThru(
        entry.data[CONF_URL], session
    )
    try:
        await printer.update()
    except ValueError:
        # pysyncthru could not parse the response: not a SyncThru device.
        # BUGFIX: corrected the grammar of the log message ("not appear" ->
        # "does not appear").
        _LOGGER.error(
            "Device at %s does not appear to be a SyncThru printer, aborting setup",
            printer.url,
        )
        return False
    else:
        if printer.is_unknown_state():
            # Reachable but not reporting a usable state yet; let Home
            # Assistant retry the setup later.
            raise ConfigEntryNotReady
    device_registry = await dr.async_get_registry(hass)
    device_registry.async_get_or_create(
        config_entry_id=entry.entry_id,
        connections=device_connections(printer),
        identifiers=device_identifiers(printer),
        model=printer.model(),
        name=printer.hostname(),
    )
    hass.async_create_task(
        hass.config_entries.async_forward_entry_setup(entry, SENSOR_DOMAIN)
    )
    return True
async def async_unload_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool:
    """Tear down a config entry and drop its cached printer object."""
    # Unload the sensor platform first, then forget the SyncThru client.
    await hass.config_entries.async_forward_entry_unload(entry, SENSOR_DOMAIN)
    domain_data = hass.data[DOMAIN]
    domain_data.pop(entry.entry_id, None)
    return True
def device_identifiers(printer: SyncThru) -> Set[Tuple[str, str]]:
    """Build the device-registry identifier set from the printer's serial number."""
    serial = printer.serial_number()
    return {(DOMAIN, serial)}
def device_connections(printer: SyncThru) -> Set[Tuple[str, str]]:
    """Collect MAC-based device-registry connections, if the printer exposes one."""
    conns: Set[Tuple[str, str]] = set()
    try:
        # Older firmware may not expose the identity block at all.
        mac = printer.raw()["identity"]["mac_addr"]
        if mac:
            conns.add((dr.CONNECTION_NETWORK_MAC, mac))
    except AttributeError:
        pass
    return conns
|
import os
import shutil
from stashutils.core import get_stash
from stashutils.fsi.base import BaseFSI
from stashutils.fsi.errors import OperationFailure, IsDir, IsFile
from stashutils.fsi.errors import AlreadyExists
from mlpatches.mount_base import _org_stat, _org_listdir, _org_mkdir
from mlpatches.mount_base import _org_open, _org_remove
_stash = get_stash()
class LocalFSI(BaseFSI):
	"""A FSI for the local filesystem.

	User-supplied paths are joined onto the FSI's working directory and then
	re-anchored below ``self.basepath`` (set by ``connect()``), so the FSI
	cannot address files above its base directory.
	"""

	def __init__(self, logger=None):
		self.logger = logger
		# FSI-local working directory; starts at the process CWD.
		self.path = os.getcwd()

	def _getabs(self, name):
		"""Return the absolute path for *name*, anchored below self.basepath.

		NOTE: self.basepath is only set once connect() has been called.
		"""
		path = os.path.join(self.path, name)
		# Strip leading slashes so the join below cannot escape basepath.
		while path.startswith("/"):
			path = path[1:]
		return os.path.abspath(os.path.join(self.basepath, path))

	def connect(self, *args):
		"""Set the base directory; returns True or an error message string."""
		if len(args) == 0:
			self.basepath = "/"
			return True  # no setup required; connect is always successful
		else:
			self.basepath = args[0]
			if not os.path.isdir(self.basepath):
				return "No such directory: {p}".format(p=self.basepath)
			return True

	def repr(self):
		return "Local Directory '{bp}' [CWD: {p}]".format(p=self.path, bp=self.basepath)

	def listdir(self, path="."):
		ap = self._getabs(path)
		try:
			return _org_listdir(ap)
		except Exception as e:
			raise OperationFailure(str(e))

	def cd(self, name):
		if name == "..":
			# Go up one level; dirname() of the current absolute path.
			self.path = os.path.abspath(os.path.dirname(self.path))
			return
		ap = self._getabs(name)
		if not os.path.exists(ap):
			raise OperationFailure("Not found")
		elif not os.path.isdir(ap):
			raise IsFile()
		else:
			self.path = ap

	def get_path(self):
		return self.path

	def remove(self, name):
		"""Remove a file or a whole directory tree."""
		ap = self._getabs(name)
		if not os.path.exists(ap):
			raise OperationFailure("Not found")
		elif os.path.isdir(ap):
			try:
				shutil.rmtree(ap)
			except Exception as e:
				raise OperationFailure(str(e))
		elif os.path.isfile(ap):
			try:
				_org_remove(ap)
			except Exception as e:
				raise OperationFailure(str(e))
		else:
			raise OperationFailure("Unknown type")

	def open(self, name, mode, buffering=0):
		ap = self._getabs(name)
		if os.path.isdir(ap):
			raise IsDir()
		else:
			try:
				return _org_open(ap, mode, buffering)
			except Exception as e:
				raise OperationFailure(str(e))

	def mkdir(self, name):
		ap = self._getabs(name)
		if os.path.exists(ap):
			raise AlreadyExists("Already exists")
		else:
			try:
				_org_mkdir(ap)
			except Exception as e:
				raise OperationFailure(str(e))

	def close(self):
		# Nothing to release for the local filesystem.
		pass

	def isdir(self, name):
		ap = self._getabs(name)
		return os.path.isdir(ap)

	def isfile(self, name):
		ap = self._getabs(name)
		# BUGFIX: this previously returned os.path.isdir(ap), so isfile()
		# reported directories as files and never recognized regular files.
		return os.path.isfile(ap)

	def stat(self, name):
		ap = self._getabs(name)
		return _org_stat(ap)
|
import asyncio
from datetime import timedelta
import logging
import aiohttp
from pytrafikverket.trafikverket_weather import TrafikverketWeather
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_API_KEY,
CONF_MONITORED_CONDITIONS,
CONF_NAME,
DEGREE,
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_TEMPERATURE,
LENGTH_MILLIMETERS,
PERCENTAGE,
SPEED_METERS_PER_SECOND,
TEMP_CELSIUS,
)
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
ATTRIBUTION = "Data provided by Trafikverket"
ATTR_MEASURE_TIME = "measure_time"
ATTR_ACTIVE = "active"
CONF_STATION = "station"
# Throttle API calls; SCAN_INTERVAL only controls how often HA asks.
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=10)
SCAN_INTERVAL = timedelta(seconds=300)
# Each entry: [friendly name, unit, API attribute name, icon, device class].
SENSOR_TYPES = {
    "air_temp": [
        "Air temperature",
        TEMP_CELSIUS,
        "air_temp",
        "mdi:thermometer",
        DEVICE_CLASS_TEMPERATURE,
    ],
    "road_temp": [
        "Road temperature",
        TEMP_CELSIUS,
        "road_temp",
        "mdi:thermometer",
        DEVICE_CLASS_TEMPERATURE,
    ],
    "precipitation": [
        "Precipitation type",
        None,
        "precipitationtype",
        "mdi:weather-snowy-rainy",
        None,
    ],
    "wind_direction": [
        "Wind direction",
        DEGREE,
        "winddirection",
        "mdi:flag-triangle",
        None,
    ],
    "wind_direction_text": [
        "Wind direction text",
        None,
        "winddirectiontext",
        "mdi:flag-triangle",
        None,
    ],
    "wind_speed": [
        "Wind speed",
        SPEED_METERS_PER_SECOND,
        "windforce",
        "mdi:weather-windy",
        None,
    ],
    "wind_speed_max": [
        "Wind speed max",
        SPEED_METERS_PER_SECOND,
        "windforcemax",
        "mdi:weather-windy-variant",
        None,
    ],
    "humidity": [
        "Humidity",
        PERCENTAGE,
        "humidity",
        "mdi:water-percent",
        DEVICE_CLASS_HUMIDITY,
    ],
    "precipitation_amount": [
        "Precipitation amount",
        LENGTH_MILLIMETERS,
        "precipitation_amount",
        "mdi:cup-water",
        None,
    ],
    "precipitation_amountname": [
        "Precipitation name",
        None,
        "precipitation_amountname",
        "mdi:weather-pouring",
        None,
    ],
}
# YAML platform configuration: name, API key, station and sensor selection.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_NAME): cv.string,
        vol.Required(CONF_API_KEY): cv.string,
        vol.Required(CONF_STATION): cv.string,
        vol.Required(CONF_MONITORED_CONDITIONS, default=[]): [vol.In(SENSOR_TYPES)],
    }
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the Trafikverket sensor platform."""
    web_session = async_get_clientsession(hass)
    weather_api = TrafikverketWeather(web_session, config[CONF_API_KEY])
    station = config[CONF_STATION]
    friendly_name = config[CONF_NAME]
    # One entity per monitored condition, all sharing the API client.
    entities = [
        TrafikverketWeatherStation(weather_api, friendly_name, condition, station)
        for condition in config[CONF_MONITORED_CONDITIONS]
    ]
    if entities:
        async_add_entities(entities, True)
class TrafikverketWeatherStation(Entity):
    """Representation of a Trafikverket sensor."""
    def __init__(self, weather_api, name, sensor_type, sensor_station):
        """Initialize the sensor."""
        self._client = name                                 # user-chosen prefix for the entity name
        self._name = SENSOR_TYPES[sensor_type][0]           # friendly condition name
        self._type = sensor_type
        self._state = None
        self._unit = SENSOR_TYPES[sensor_type][1]
        self._station = sensor_station
        self._weather_api = weather_api
        self._icon = SENSOR_TYPES[sensor_type][3]
        self._device_class = SENSOR_TYPES[sensor_type][4]
        self._weather = None                                # last API response; set in async_update
    @property
    def name(self):
        """Return the name of the sensor."""
        return f"{self._client} {self._name}"
    @property
    def icon(self):
        """Icon to use in the frontend."""
        return self._icon
    @property
    def device_state_attributes(self):
        """Return the state attributes of Trafikverket Weatherstation."""
        # NOTE(review): assumes async_update has already populated
        # self._weather; would raise AttributeError if called before the
        # first successful update -- confirm HA only reads attributes after
        # the initial update requested via async_add_entities(dev, True).
        return {
            ATTR_ATTRIBUTION: ATTRIBUTION,
            ATTR_ACTIVE: self._weather.active,
            ATTR_MEASURE_TIME: self._weather.measure_time,
        }
    @property
    def device_class(self):
        """Return the device class of the sensor."""
        return self._device_class
    @property
    def state(self):
        """Return the state of the device."""
        return self._state
    @property
    def unit_of_measurement(self):
        """Return the unit of measurement of this entity, if any."""
        return self._unit
    @Throttle(MIN_TIME_BETWEEN_UPDATES)
    async def async_update(self):
        """Get the latest data from Trafikverket and updates the states."""
        try:
            self._weather = await self._weather_api.async_get_weather(self._station)
            # Pick the configured attribute off the API response object.
            self._state = getattr(self._weather, SENSOR_TYPES[self._type][2])
        except (asyncio.TimeoutError, aiohttp.ClientError, ValueError) as error:
            # Keep the previous state; just log the failure.
            _LOGGER.error("Could not fetch weather data: %s", error)
|
import logging
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_TOKEN
from homeassistant.helpers.device_registry import format_mac
# pylint: disable=unused-import
from .const import DOMAIN
from .gateway import ConnectXiaomiGateway
_LOGGER = logging.getLogger(__name__)
CONF_FLOW_TYPE = "config_flow_device"
CONF_GATEWAY = "gateway"
DEFAULT_GATEWAY_NAME = "Xiaomi Gateway"
# Zeroconf service-name prefixes of the supported gateway devices.
ZEROCONF_GATEWAY = "lumi-gateway"
ZEROCONF_ACPARTNER = "lumi-acpartner"
# The miIO token is always exactly 32 hex characters.
GATEWAY_SETTINGS = {
    vol.Required(CONF_TOKEN): vol.All(str, vol.Length(min=32, max=32)),
    vol.Optional(CONF_NAME, default=DEFAULT_GATEWAY_NAME): str,
}
GATEWAY_CONFIG = vol.Schema({vol.Required(CONF_HOST): str}).extend(GATEWAY_SETTINGS)
CONFIG_SCHEMA = vol.Schema({vol.Optional(CONF_GATEWAY, default=False): bool})
class XiaomiMiioFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
    """Handle a Xiaomi Miio config flow."""
    VERSION = 1
    CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL
    def __init__(self):
        """Initialize."""
        # Host may come from zeroconf discovery or from the gateway form.
        self.host = None
    async def async_step_user(self, user_input=None):
        """Handle a flow initialized by the user."""
        errors = {}
        if user_input is not None:
            # Check which device needs to be connected.
            if user_input[CONF_GATEWAY]:
                return await self.async_step_gateway()
            errors["base"] = "no_device_selected"
        return self.async_show_form(
            step_id="user", data_schema=CONFIG_SCHEMA, errors=errors
        )
    async def async_step_zeroconf(self, discovery_info):
        """Handle zeroconf discovery."""
        name = discovery_info.get("name")
        self.host = discovery_info.get("host")
        mac_address = discovery_info.get("properties", {}).get("mac")
        # All three fields are required to identify the device.
        if not name or not self.host or not mac_address:
            return self.async_abort(reason="not_xiaomi_miio")
        # Check which device is discovered.
        if name.startswith(ZEROCONF_GATEWAY) or name.startswith(ZEROCONF_ACPARTNER):
            # The MAC is the stable unique id; abort if already configured
            # (also refreshing the stored host in that case).
            unique_id = format_mac(mac_address)
            await self.async_set_unique_id(unique_id)
            self._abort_if_unique_id_configured({CONF_HOST: self.host})
            # pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167
            self.context.update(
                {"title_placeholders": {"name": f"Gateway {self.host}"}}
            )
            return await self.async_step_gateway()
        # Discovered device is not yet supported
        _LOGGER.debug(
            "Not yet supported Xiaomi Miio device '%s' discovered with host %s",
            name,
            self.host,
        )
        return self.async_abort(reason="not_xiaomi_miio")
    async def async_step_gateway(self, user_input=None):
        """Handle a flow initialized by the user to configure a gateway."""
        errors = {}
        if user_input is not None:
            token = user_input[CONF_TOKEN]
            # Host may already be known from zeroconf; only then is it
            # absent from the form (see schema selection below).
            if user_input.get(CONF_HOST):
                self.host = user_input[CONF_HOST]
            # Try to connect to a Xiaomi Gateway.
            connect_gateway_class = ConnectXiaomiGateway(self.hass)
            await connect_gateway_class.async_connect_gateway(self.host, token)
            gateway_info = connect_gateway_class.gateway_info
            if gateway_info is not None:
                mac = format_mac(gateway_info.mac_address)
                unique_id = mac
                await self.async_set_unique_id(unique_id)
                self._abort_if_unique_id_configured()
                return self.async_create_entry(
                    title=user_input[CONF_NAME],
                    data={
                        CONF_FLOW_TYPE: CONF_GATEWAY,
                        CONF_HOST: self.host,
                        CONF_TOKEN: token,
                        "model": gateway_info.model,
                        "mac": mac,
                    },
                )
            errors["base"] = "cannot_connect"
        # Skip asking for the host when it was already discovered.
        if self.host:
            schema = vol.Schema(GATEWAY_SETTINGS)
        else:
            schema = GATEWAY_CONFIG
        return self.async_show_form(
            step_id="gateway", data_schema=schema, errors=errors
        )
|
import json
import sys
import m2r
def write_docstring(fout):
    """Write a placeholder raw module docstring to *fout*."""
    header = (
        'r"""\n'
        'Autogenerated docstring\n'
        '=======================\n'
        'Please replace me.\n'
        '"""\n'
    )
    fout.write(header)
def process_markdown(source, fout):
    """Convert *source* (an iterable of Markdown lines) to reST via m2r and
    write every resulting line to *fout* as a ``# ``-prefixed comment.

    NOTE(review): the original carried a branch guarded by ``and False``
    (intended to suppress blank lines at the start of each section) plus the
    ``skip_flag`` bookkeeping feeding it; the branch was unreachable, so it
    is removed here with no change in output. Re-enable deliberately if the
    suppression is actually wanted.
    """
    for markdown_line in source:
        for line in m2r.convert(markdown_line).split('\n'):
            fout.write('# %s\n' % line)
def output_cell(cell, fout):
    """Write one notebook cell to *fout* as script text.

    Code cells are copied with ``%time`` magics stripped; markdown cells are
    rendered as a comment banner plus commented reST; every cell is followed
    by a blank separator.
    """
    kind = cell['cell_type']
    if kind == 'code':
        for src_line in cell['source']:
            fout.write(src_line.replace('%time ', ''))
    elif kind == 'markdown':
        fout.write('#' * 79 + '\n')
        process_markdown(cell['source'], fout)
    fout.write('\n\n')
def main():
    """Read a Jupyter notebook (JSON) from stdin and emit a script on stdout."""
    sink = sys.stdout
    write_docstring(sink)
    notebook = json.load(sys.stdin)
    for cell in notebook['cells']:
        output_cell(cell, sink)
# Script entry point: notebook JSON on stdin, generated script on stdout.
if __name__ == '__main__':
    main()
|
import logging
from typing import Dict, Optional
import voluptuous as vol
from homeassistant.const import (
ATTR_EDITABLE,
CONF_ICON,
CONF_ID,
CONF_MAXIMUM,
CONF_MINIMUM,
CONF_NAME,
)
from homeassistant.core import callback
from homeassistant.helpers import collection
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.storage import Store
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
_LOGGER = logging.getLogger(__name__)
# State attribute names exposed by Counter.state_attributes.
ATTR_INITIAL = "initial"
ATTR_STEP = "step"
ATTR_MINIMUM = "minimum"
ATTR_MAXIMUM = "maximum"
# Service field carrying an explicit new value for the configure service.
VALUE = "value"
CONF_INITIAL = "initial"
CONF_RESTORE = "restore"
CONF_STEP = "step"
DEFAULT_INITIAL = 0
DEFAULT_STEP = 1
DOMAIN = "counter"
ENTITY_ID_FORMAT = DOMAIN + ".{}"
SERVICE_DECREMENT = "decrement"
SERVICE_INCREMENT = "increment"
SERVICE_RESET = "reset"
SERVICE_CONFIGURE = "configure"
STORAGE_KEY = DOMAIN
STORAGE_VERSION = 1
# Schema for counters created via the storage collection (WS API).
CREATE_FIELDS = {
    vol.Optional(CONF_ICON): cv.icon,
    vol.Optional(CONF_INITIAL, default=DEFAULT_INITIAL): cv.positive_int,
    vol.Required(CONF_NAME): vol.All(cv.string, vol.Length(min=1)),
    vol.Optional(CONF_MAXIMUM, default=None): vol.Any(None, vol.Coerce(int)),
    vol.Optional(CONF_MINIMUM, default=None): vol.Any(None, vol.Coerce(int)),
    vol.Optional(CONF_RESTORE, default=True): cv.boolean,
    vol.Optional(CONF_STEP, default=DEFAULT_STEP): cv.positive_int,
}
# Same fields, but everything optional and without defaults for updates.
UPDATE_FIELDS = {
    vol.Optional(CONF_ICON): cv.icon,
    vol.Optional(CONF_INITIAL): cv.positive_int,
    vol.Optional(CONF_NAME): cv.string,
    vol.Optional(CONF_MAXIMUM): vol.Any(None, vol.Coerce(int)),
    vol.Optional(CONF_MINIMUM): vol.Any(None, vol.Coerce(int)),
    vol.Optional(CONF_RESTORE): cv.boolean,
    vol.Optional(CONF_STEP): cv.positive_int,
}
def _none_to_empty_dict(value):
    """Coerce ``None`` to an empty dict; pass every other value through unchanged."""
    return {} if value is None else value
# YAML schema: slug-keyed counters, where a bare `counter_name:` (None)
# is normalized to an empty dict so the per-counter defaults apply.
CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: cv.schema_with_slug_keys(
            vol.All(
                _none_to_empty_dict,
                {
                    vol.Optional(CONF_ICON): cv.icon,
                    vol.Optional(
                        CONF_INITIAL, default=DEFAULT_INITIAL
                    ): cv.positive_int,
                    vol.Optional(CONF_NAME): cv.string,
                    vol.Optional(CONF_MAXIMUM, default=None): vol.Any(
                        None, vol.Coerce(int)
                    ),
                    vol.Optional(CONF_MINIMUM, default=None): vol.Any(
                        None, vol.Coerce(int)
                    ),
                    vol.Optional(CONF_RESTORE, default=True): cv.boolean,
                    vol.Optional(CONF_STEP, default=DEFAULT_STEP): cv.positive_int,
                },
            )
        )
    },
    extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool:
    """Set up the counters."""
    component = EntityComponent(_LOGGER, DOMAIN, hass)
    id_manager = collection.IDManager()
    # Two collections share the ID space: YAML-defined counters (read-only
    # via the UI) and storage-backed counters (editable via the WS API).
    yaml_collection = collection.YamlCollection(
        logging.getLogger(f"{__name__}.yaml_collection"), id_manager
    )
    collection.attach_entity_component_collection(
        component, yaml_collection, Counter.from_yaml
    )
    storage_collection = CounterStorageCollection(
        Store(hass, STORAGE_VERSION, STORAGE_KEY),
        logging.getLogger(f"{__name__}.storage_collection"),
        id_manager,
    )
    collection.attach_entity_component_collection(
        component, storage_collection, Counter
    )
    # Load YAML first so its IDs win over stored ones on collision.
    await yaml_collection.async_load(
        [{CONF_ID: id_, **(conf or {})} for id_, conf in config.get(DOMAIN, {}).items()]
    )
    await storage_collection.async_load()
    # Expose create/update/delete over the websocket API.
    collection.StorageCollectionWebsocket(
        storage_collection, DOMAIN, DOMAIN, CREATE_FIELDS, UPDATE_FIELDS
    ).async_setup(hass)
    collection.attach_entity_registry_cleaner(hass, DOMAIN, DOMAIN, yaml_collection)
    collection.attach_entity_registry_cleaner(hass, DOMAIN, DOMAIN, storage_collection)
    # Entity services operate on Counter methods of the same names.
    component.async_register_entity_service(SERVICE_INCREMENT, {}, "async_increment")
    component.async_register_entity_service(SERVICE_DECREMENT, {}, "async_decrement")
    component.async_register_entity_service(SERVICE_RESET, {}, "async_reset")
    component.async_register_entity_service(
        SERVICE_CONFIGURE,
        {
            vol.Optional(ATTR_MINIMUM): vol.Any(None, vol.Coerce(int)),
            vol.Optional(ATTR_MAXIMUM): vol.Any(None, vol.Coerce(int)),
            vol.Optional(ATTR_STEP): cv.positive_int,
            vol.Optional(ATTR_INITIAL): cv.positive_int,
            vol.Optional(VALUE): cv.positive_int,
        },
        "async_configure",
    )
    return True
class CounterStorageCollection(collection.StorageCollection):
    """Input storage based collection."""
    # Validation schemas for websocket create/update payloads.
    CREATE_SCHEMA = vol.Schema(CREATE_FIELDS)
    UPDATE_SCHEMA = vol.Schema(UPDATE_FIELDS)
    async def _process_create_data(self, data: Dict) -> Dict:
        """Validate the config is valid."""
        return self.CREATE_SCHEMA(data)
    @callback
    def _get_suggested_id(self, info: Dict) -> str:
        """Suggest an ID based on the config."""
        # The (required) name is slugified upstream to form the entity id.
        return info[CONF_NAME]
    async def _update_data(self, data: dict, update_data: Dict) -> Dict:
        """Return a new updated data object."""
        update_data = self.UPDATE_SCHEMA(update_data)
        # Shallow-merge: update fields override the stored ones.
        return {**data, **update_data}
class Counter(RestoreEntity):
    """Representation of a counter."""
    def __init__(self, config: Dict):
        """Initialize a counter."""
        self._config: Dict = config
        self._state: Optional[int] = config[CONF_INITIAL]
        # Storage-based counters are editable from the UI; from_yaml()
        # flips this off for YAML-defined ones.
        self.editable: bool = True
    @classmethod
    def from_yaml(cls, config: Dict) -> "Counter":
        """Create counter instance from yaml config."""
        counter = cls(config)
        counter.editable = False
        counter.entity_id = ENTITY_ID_FORMAT.format(config[CONF_ID])
        return counter
    @property
    def should_poll(self) -> bool:
        """If entity should be polled."""
        return False
    @property
    def name(self) -> Optional[str]:
        """Return name of the counter."""
        return self._config.get(CONF_NAME)
    @property
    def icon(self) -> Optional[str]:
        """Return the icon to be used for this entity."""
        return self._config.get(CONF_ICON)
    @property
    def state(self) -> Optional[int]:
        """Return the current value of the counter."""
        return self._state
    @property
    def state_attributes(self) -> Dict:
        """Return the state attributes."""
        ret = {
            ATTR_EDITABLE: self.editable,
            ATTR_INITIAL: self._config[CONF_INITIAL],
            ATTR_STEP: self._config[CONF_STEP],
        }
        # Only expose the optional bounds when they are configured.
        if self._config[CONF_MINIMUM] is not None:
            ret[CONF_MINIMUM] = self._config[CONF_MINIMUM]
        if self._config[CONF_MAXIMUM] is not None:
            ret[CONF_MAXIMUM] = self._config[CONF_MAXIMUM]
        return ret
    @property
    def unique_id(self) -> Optional[str]:
        """Return unique id of the entity."""
        return self._config[CONF_ID]
    def compute_next_state(self, state) -> int:
        """Keep the state within the range of min/max values."""
        if self._config[CONF_MINIMUM] is not None:
            state = max(self._config[CONF_MINIMUM], state)
        if self._config[CONF_MAXIMUM] is not None:
            state = min(self._config[CONF_MAXIMUM], state)
        return state
    async def async_added_to_hass(self) -> None:
        """Call when entity about to be added to Home Assistant."""
        await super().async_added_to_hass()
        # __init__ will set self._state to self._initial, only override
        # if needed.
        if self._config[CONF_RESTORE]:
            state = await self.async_get_last_state()
            if state is not None:
                self._state = self.compute_next_state(int(state.state))
                # Also restore settings a configure service call may have
                # changed before the restart.
                self._config[CONF_INITIAL] = state.attributes.get(ATTR_INITIAL)
                self._config[CONF_MAXIMUM] = state.attributes.get(ATTR_MAXIMUM)
                self._config[CONF_MINIMUM] = state.attributes.get(ATTR_MINIMUM)
                self._config[CONF_STEP] = state.attributes.get(ATTR_STEP)
    @callback
    def async_decrement(self) -> None:
        """Decrement the counter."""
        self._state = self.compute_next_state(self._state - self._config[CONF_STEP])
        self.async_write_ha_state()
    @callback
    def async_increment(self) -> None:
        """Increment a counter."""
        self._state = self.compute_next_state(self._state + self._config[CONF_STEP])
        self.async_write_ha_state()
    @callback
    def async_reset(self) -> None:
        """Reset a counter."""
        self._state = self.compute_next_state(self._config[CONF_INITIAL])
        self.async_write_ha_state()
    @callback
    def async_configure(self, **kwargs) -> None:
        """Change the counter's settings with a service."""
        # VALUE (if given) becomes the new state; the remaining kwargs
        # are merged into the stored configuration.
        new_state = kwargs.pop(VALUE, self._state)
        self._config = {**self._config, **kwargs}
        self._state = self.compute_next_state(new_state)
        self.async_write_ha_state()
    async def async_update_config(self, config: Dict) -> None:
        """Change the counter's settings WS CRUD."""
        self._config = config
        # Re-clamp the current state against possibly changed min/max.
        self._state = self.compute_next_state(self._state)
        self.async_write_ha_state()
|
import re
from django.http import Http404
from siphashc import siphash
from weblate.utils.docs import get_doc_url
class Check:
    """Base class for all quality checks.

    Subclasses set the class attributes and implement check_single()
    and/or check_source_unit().
    """
    check_id = ""
    name = ""
    description = ""
    target = False
    source = False
    ignore_untranslated = True
    default_disabled = False
    propagates = False
    param_type = None
    always_display = False
    def get_identifier(self):
        """Return the machine identifier of this check."""
        return self.check_id
    def __init__(self):
        """Precompute flag names and documentation identifiers."""
        id_dash = self.check_id.replace("_", "-")
        self.url_id = f"check:{self.check_id}"
        self.doc_id = f"check-{id_dash}"
        self.enable_string = id_dash
        self.ignore_string = f"ignore-{id_dash}"
    def should_skip(self, unit):
        """Check whether we should skip processing this unit."""
        flags = unit.all_flags
        # Disabled by default and not explicitly enabled on this unit
        if self.default_disabled and self.enable_string not in flags:
            return True
        # Explicitly ignored via flag
        return self.ignore_string in flags
    def should_display(self, unit):
        """Display the check always, not only when failing."""
        if (self.ignore_untranslated and not unit.state) or self.should_skip(unit):
            return False
        # Display if enabled and the check is not triggered
        return self.always_display and self.check_id not in unit.all_checks_names
    def check_target(self, sources, targets, unit):
        """Check target strings."""
        # No checking of not translated units (but we do check needs editing ones)
        if self.ignore_untranslated and not unit.state:
            return False
        if self.should_skip(unit):
            return False
        # Results are memoized per unit to avoid repeated evaluation.
        cache = unit.check_cache
        if self.check_id not in cache:
            cache[self.check_id] = self.check_target_unit(sources, targets, unit)
        return cache[self.check_id]
    def check_target_unit(self, sources, targets, unit):
        """Check single unit, handling plurals."""
        # Singular form first
        if self.check_single(sources[0], targets[0], unit):
            return True
        if len(sources) > 1:
            # Check all plural targets against the plural source
            plural_source = sources[1]
            return any(
                self.check_single(plural_source, target, unit)
                for target in targets[1:]
            )
        return False
    def check_single(self, source, target, unit):
        """Check for single phrase, not dealing with plurals."""
        raise NotImplementedError()
    def check_source(self, source, unit):
        """Check source strings."""
        if self.should_skip(unit):
            return False
        return self.check_source_unit(source, unit)
    def check_source_unit(self, source, unit):
        """Check source string."""
        raise NotImplementedError()
    def check_chars(self, source, target, pos, chars):
        """Generic checker for chars presence."""
        try:
            has_source_char = source[pos] in chars
            has_target_char = target[pos] in chars
        except IndexError:
            # Position does not exist in one of the strings
            return False
        return has_source_char != has_target_char
    def is_language(self, unit, vals):
        """Detect whether language is in given list, ignores variants."""
        return unit.translation.language.base_code in vals
    def get_doc_url(self, user=None):
        """Return link to documentation."""
        return get_doc_url("user/checks", self.doc_id, user=user)
    def check_highlight(self, source, unit):
        """Return parts of the text that match to highlight them.
        Result is list that contains lists of two elements with start position of the
        match and the value of the match
        """
        return []
    def get_description(self, check_obj):
        """Return the human readable description of this check."""
        return self.description
    def get_fixup(self, unit):
        """Return automatic fixup for this check, if any."""
        return None
    def render(self, request, unit):
        """Render check output; not supported by default."""
        raise Http404("Not supported")
    def get_cache_key(self, unit, pos):
        """Build a cache key unique to the unit, its flags and position."""
        flags_hash = siphash("Weblate Checks", unit.all_flags.format())
        return "check:{}:{}:{}:{}".format(
            self.check_id,
            unit.pk,
            flags_hash,
            pos,
        )
    def get_replacement_function(self, unit):
        """Return a callable applying the replacements flag to a text."""
        flags = unit.all_flags
        if not flags.has_value("replacements"):
            # Nothing configured, return identity
            return lambda text: text
        # Parse the flag; consecutive items form (search, replace) pairs
        raw = flags.get_value("replacements")
        mapping = dict(raw[pos : pos + 2] for pos in range(0, len(raw), 2))
        # Build regexp matcher covering all search strings
        pattern = re.compile("|".join(re.escape(key) for key in mapping))
        def replace(text):
            return pattern.sub(lambda match: mapping[match.group(0)], text)
        return replace
class TargetCheck(Check):
    """Base class for checks operating on translation targets."""
    target = True
    def check_single(self, source, target, unit):
        """Check for single phrase, not dealing with plurals."""
        raise NotImplementedError()
    def check_source_unit(self, source, unit):
        """Source strings are never flagged by target checks."""
        return False
class SourceCheck(Check):
    """Base class for checks operating on source strings."""
    source = True
    def check_source_unit(self, source, unit):
        """Check source string."""
        raise NotImplementedError()
    def check_single(self, source, target, unit):
        """Target strings are never flagged by source checks."""
        return False
class TargetCheckParametrized(Check):
    """Base class for target checks taking a value from the enabling flag.

    The check is disabled by default and only runs when the enabling flag
    carries a value (for example ``max-length:100``).
    """
    default_disabled = True
    target = True
    def get_value(self, unit):
        """Return the value carried by the enabling flag."""
        return unit.all_flags.get_value(self.enable_string)
    def has_value(self, unit):
        """Return whether the enabling flag carries a value."""
        return unit.all_flags.has_value(self.enable_string)
    def check_target_unit(self, sources, targets, unit):
        """Run the parametrized check when the flag value is present."""
        # Use the has_value() helper rather than duplicating the flag lookup.
        if self.has_value(unit):
            return self.check_target_params(
                sources, targets, unit, self.get_value(unit)
            )
        return False
    def check_target_params(self, sources, targets, unit, value):
        """Check target strings against the flag value."""
        raise NotImplementedError()
    def check_single(self, source, target, unit):
        """We don't check single phrase here."""
        return False
    def check_source_unit(self, source, unit):
        """We don't check source strings here."""
        return False
class CountingCheck(TargetCheck):
    """Check whether source and target contain a string equally often."""
    string = ""
    def check_single(self, source, target, unit):
        """Flag the unit when occurrence counts differ."""
        if not source or not target:
            return False
        return target.count(self.string) != source.count(self.string)
|
import os
from distutils import log
from distutils.command.build import build
from distutils.core import Command
from distutils.dep_util import newer
from glob import glob
from itertools import chain
from setuptools import find_packages, setup
from setuptools.command.build_py import build_py
from translate.tools.pocompile import convertmo
# Glob masks of gettext PO files compiled into MO files by build_mo.
LOCALE_MASKS = [
    "weblate/locale/*/LC_MESSAGES/*.po",
]
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
# Long description shown on PyPI.
with open("README.rst") as readme:
    README = readme.read()
# Base runtime requirements, one per line.
with open("requirements.txt") as requirements:
    REQUIRES = requirements.read().splitlines()
# Optional feature dependencies, parsed from requirements-optional.txt.
# Section headers ("# Name") introduce an extra; following lines list its
# dependencies. Everything except MySQL is also aggregated into "all".
EXTRAS = {"all": []}
with open("requirements-optional.txt") as requirements:
    section = None
    for line in requirements:
        line = line.strip()
        # Skip includes and blank lines.
        if line.startswith("-r") or not line:
            continue
        if line.startswith("#"):
            section = line[2:]
        else:
            # Strip environment markers from the requirement.
            dep = line.split(";")[0].strip()
            # Accumulate into a list so sections with several dependencies
            # keep all of them instead of only the last one.
            EXTRAS.setdefault(section, []).append(dep)
            if section != "MySQL":
                EXTRAS["all"].append(dep)
class WeblateBuildPy(build_py):
    """build_py variant that excludes the local settings module."""
    def find_package_modules(self, package, package_dir):
        """Filter settings.py from built module."""
        modules = super().find_package_modules(package, package_dir)
        return [
            module for module in modules if module[2] != "weblate/settings.py"
        ]
class BuildMo(Command):
    """Compile gettext PO files into binary MO files."""
    description = "update MO files to match PO"
    user_options = []
    def initialize_options(self):
        self.build_base = None
    def finalize_options(self):
        # Inherit build_base from the main build command.
        self.set_undefined_options("build", ("build_base", "build_base"))
    def run(self):
        """Compile every PO file whose MO counterpart is missing or stale."""
        for name in chain.from_iterable(glob(mask) for mask in LOCALE_MASKS):
            output = os.path.splitext(name)[0] + ".mo"
            # Skip files whose MO is already newer than the PO source.
            if not newer(name, output):
                continue
            self.announce(f"compiling {name} -> {output}", level=log.INFO)
            with open(name, "rb") as pofile, open(output, "wb") as mofile:
                convertmo(pofile, mofile, None)
class WeblateBuild(build):
    """Override the default build with new subcommands."""
    # The build_mo has to be before build_data
    # (the lambda predicate unconditionally enables the subcommand).
    sub_commands = [("build_mo", lambda self: True)] + build.sub_commands
# Package metadata; build behavior is customized via cmdclass below.
setup(
    name="Weblate",
    version="4.4.1",
    python_requires=">=3.6",
    packages=find_packages(),
    include_package_data=True,
    description=(
        "A web-based continuous localization system with "
        "tight version control integration"
    ),
    long_description=README,
    long_description_content_type="text/x-rst",
    license="GPLv3+",
    keywords="i18n l10n gettext git mercurial translate",
    url="https://weblate.org/",
    download_url="https://weblate.org/download/",
    project_urls={
        "Issue Tracker": "https://github.com/WeblateOrg/weblate/issues",
        "Documentation": "https://docs.weblate.org/",
        "Source Code": "https://github.com/WeblateOrg/weblate",
        "Twitter": "https://twitter.com/WeblateOrg",
    },
    author="Michal Čihař",
    author_email="[email protected]",
    install_requires=REQUIRES,
    zip_safe=False,
    extras_require=EXTRAS,
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Environment :: Web Environment",
        "Framework :: Django",
        "Intended Audience :: Developers",
        "Intended Audience :: System Administrators",
        "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Topic :: Software Development :: Internationalization",
        "Topic :: Software Development :: Localization",
        "Topic :: Internet :: WWW/HTTP",
        "Topic :: Internet :: WWW/HTTP :: Dynamic Content",
    ],
    entry_points={"console_scripts": ["weblate = weblate.runner:main"]},
    # Custom commands: compile MO files and skip shipping settings.py.
    cmdclass={"build_py": WeblateBuildPy, "build_mo": BuildMo, "build": WeblateBuild},
)
|
import re
from django.utils.html import escape
from django.utils.safestring import mark_safe
from django.utils.translation import gettext_lazy as _
from weblate.checks.base import TargetCheck
from weblate.checks.data import NON_WORD_CHARS
# Regexp for non word chars (used to split text into words)
NON_WORD = re.compile("[{}\\]]+".format("".join(NON_WORD_CHARS)))
# Per language ignore list: words that legitimately repeat in these
# languages, keyed by base language code.
IGNORES = {
    "fr": {"vous", "nous"},
    "hi": {"कर"},
    "tr": {"tek"},
    "sq": {"të"},
}
class DuplicateCheck(TargetCheck):
    """Check for consecutive duplicated tokens in the translation."""
    check_id = "duplicate"
    name = _("Consecutive duplicated words")
    description = _("Text contains the same word twice in a row:")
    def extract_groups(self, text: str, language_code: str):
        """Return lengths of duplicated runs and the repeated words.

        A group is a run of identical consecutive words; only words of at
        least two characters which are not in the per-language ignore list
        are considered.
        """
        previous = None
        group = 1
        groups = []
        words = []
        # Use an empty set (not dict) as default, matching IGNORES values.
        ignored = IGNORES.get(language_code, set())
        for word in NON_WORD.split(text):
            if not word:
                continue
            if word not in ignored and len(word) >= 2:
                if previous == word:
                    group += 1
                elif group > 1:
                    groups.append(group)
                    words.append(previous)
                    group = 1
                previous = word
        # Flush the trailing group.
        if group > 1:
            groups.append(group)
            words.append(previous)
        return groups, words
    def check_single(self, source, target, unit):
        """Return the set of words duplicated in target but not in source."""
        source_code = unit.translation.component.source_language.base_code
        lang_code = unit.translation.language.base_code
        source_groups, source_words = self.extract_groups(source, source_code)
        target_groups, target_words = self.extract_groups(target, lang_code)
        # The same groups in source and target
        if source_groups == target_groups:
            # Return a set (not a dict) so the result type is consistent.
            return set()
        return set(target_words) - set(source_words)
    def get_description(self, check_obj):
        """Render the check description including the duplicated words."""
        duplicate = set()
        unit = check_obj.unit
        source = unit.source_string
        for target in unit.get_target_plurals():
            duplicate.update(self.check_single(source, target, unit))
        return mark_safe(
            "{} {}".format(
                escape(self.description), escape(", ".join(sorted(duplicate)))
            )
        )
|
from typing import cast, TYPE_CHECKING, Iterator, Optional, Set, Union
import collections.abc
from PyQt5.QtCore import QUrl, Qt, QEvent, QTimer, QRect, QPoint
from PyQt5.QtGui import QMouseEvent
from qutebrowser.config import config
from qutebrowser.keyinput import modeman
from qutebrowser.utils import log, usertypes, utils, qtutils, objreg
if TYPE_CHECKING:
from qutebrowser.browser import browsertab
JsValueType = Union[int, float, str, None]
class Error(Exception):
    """Base class for WebElement errors."""
class OrphanedError(Error):
    """Raised when a webelement's parent has vanished."""
def css_selector(group: str, url: QUrl) -> str:
    """Return the combined CSS selector configured for group at url."""
    per_url = config.instance.get('hints.selectors', url)
    if group in per_url:
        return ','.join(per_url[group])
    # Fall back to the global (non-URL-specific) setting.
    fallback = config.val.hints.selectors
    if group in fallback:
        return ','.join(fallback[group])
    raise Error("Undefined hinting group {!r}".format(group))
class AbstractWebElement(collections.abc.MutableMapping):
    """A wrapper around QtWebKit/QtWebEngine web element.

    The mapping interface exposes the element's HTML attributes; backend
    specific subclasses implement the NotImplementedError methods.
    """
    def __init__(self, tab: 'browsertab.AbstractTab') -> None:
        self._tab = tab
    def __eq__(self, other: object) -> bool:
        raise NotImplementedError
    def __str__(self) -> str:
        raise NotImplementedError
    def __getitem__(self, key: str) -> str:
        raise NotImplementedError
    def __setitem__(self, key: str, val: str) -> None:
        raise NotImplementedError
    def __delitem__(self, key: str) -> None:
        raise NotImplementedError
    def __iter__(self) -> Iterator[str]:
        raise NotImplementedError
    def __len__(self) -> int:
        raise NotImplementedError
    def __repr__(self) -> str:
        try:
            html: Optional[str] = utils.compact_text(self.outer_xml(), 500)
        except Error:
            # The element's XML is unavailable (e.g. vanished frame).
            html = None
        return utils.get_repr(self, html=html)
    def has_frame(self) -> bool:
        """Check if this element has a valid frame attached."""
        raise NotImplementedError
    def geometry(self) -> QRect:
        """Get the geometry for this element."""
        raise NotImplementedError
    def classes(self) -> Set[str]:
        """Get a set of classes assigned to this element."""
        raise NotImplementedError
    def tag_name(self) -> str:
        """Get the tag name of this element.
        The returned name will always be lower-case.
        """
        raise NotImplementedError
    def outer_xml(self) -> str:
        """Get the full HTML representation of this element."""
        raise NotImplementedError
    def value(self) -> JsValueType:
        """Get the value attribute for this element, or None."""
        raise NotImplementedError
    def set_value(self, value: JsValueType) -> None:
        """Set the element value."""
        raise NotImplementedError
    def dispatch_event(self, event: str,
                       bubbles: bool = False,
                       cancelable: bool = False,
                       composed: bool = False) -> None:
        """Dispatch an event to the element.
        Args:
            event: The name of the event to dispatch.
            bubbles: Whether this event should bubble.
            cancelable: Whether this event can be cancelled.
            composed: Whether the event will trigger listeners outside of a
                      shadow root.
        """
        raise NotImplementedError
    def insert_text(self, text: str) -> None:
        """Insert the given text into the element."""
        raise NotImplementedError
    def rect_on_view(self, *, elem_geometry: QRect = None,
                     no_js: bool = False) -> QRect:
        """Get the geometry of the element relative to the webview.
        Args:
            elem_geometry: The geometry of the element, or None.
            no_js: Fall back to the Python implementation.
        """
        raise NotImplementedError
    def is_writable(self) -> bool:
        """Check whether an element is writable."""
        # Writable means neither disabled nor readonly attribute present.
        return not ('disabled' in self or 'readonly' in self)
    def is_content_editable(self) -> bool:
        """Check if an element has a contenteditable attribute.
        Args:
            elem: The QWebElement to check.
        Return:
            True if the element has a contenteditable attribute,
            False otherwise.
        """
        try:
            return self['contenteditable'].lower() not in ['false', 'inherit']
        except KeyError:
            return False
    def is_content_editable_prop(self) -> bool:
        """Get the value of this element's isContentEditable property.
        The is_content_editable() method above checks for the "contenteditable"
        HTML attribute, which does not handle inheritance. However, the actual
        attribute value is still needed for certain cases (like strict=True).
        This instead gets the isContentEditable JS property, which handles
        inheritance.
        """
        raise NotImplementedError
    def _is_editable_object(self) -> bool:
        """Check if an object-element is editable."""
        if 'type' not in self:
            log.webelem.debug("<object> without type clicked...")
            return False
        objtype = self['type'].lower()
        if objtype.startswith('application/') or 'classid' in self:
            # Let's hope flash/java stuff has an application/* mimetype OR
            # at least a classid attribute. Oh, and let's hope images/...
            # DON'T have a classid attribute. HTML sucks.
            log.webelem.debug("<object type='{}'> clicked.".format(objtype))
            return config.val.input.insert_mode.plugins
        else:
            # Image/Audio/...
            return False
    def _is_editable_input(self) -> bool:
        """Check if an input-element is editable.
        Return:
            True if the element is editable, False otherwise.
        """
        try:
            objtype = self['type'].lower()
        except KeyError:
            # An input without a type attribute is a text field.
            return self.is_writable()
        else:
            if objtype in ['text', 'email', 'url', 'tel', 'number', 'password',
                           'search', 'date', 'time', 'datetime',
                           'datetime-local', 'month', 'week']:
                return self.is_writable()
            else:
                return False
    def _is_editable_classes(self) -> bool:
        """Check if an element is editable based on its classes.
        Return:
            True if the element is editable, False otherwise.
        """
        # Beginnings of div-classes which are actually some kind of editor.
        classes = {
            'div': ['CodeMirror', # Javascript editor over a textarea
                    'kix-', # Google Docs editor
                    'ace_'], # http://ace.c9.io/
            'pre': ['CodeMirror'],
            'span': ['cm-'], # Jupyter Notebook
        }
        relevant_classes = classes[self.tag_name()]
        for klass in self.classes():
            if any(klass.strip().startswith(e) for e in relevant_classes):
                return True
        return False
    def is_editable(self, strict: bool = False) -> bool:
        """Check whether we should switch to insert mode for this element.
        Args:
            strict: Whether to do stricter checking so only fields where we can
                    get the value match, for use with the :editor command.
        Return:
            True if we should switch to insert mode, False otherwise.
        """
        roles = ('combobox', 'textbox')
        log.webelem.debug("Checking if element is editable: {}".format(
            repr(self)))
        tag = self.tag_name()
        if self.is_content_editable() and self.is_writable():
            return True
        elif self.get('role', None) in roles and self.is_writable():
            return True
        elif tag == 'input':
            return self._is_editable_input()
        elif tag == 'textarea':
            return self.is_writable()
        elif tag in ['embed', 'applet']:
            # Flash/Java/...
            return config.val.input.insert_mode.plugins and not strict
        elif (not strict and self.is_content_editable_prop() and
              self.is_writable()):
            return True
        elif tag == 'object':
            return self._is_editable_object() and not strict
        elif tag in ['div', 'pre', 'span']:
            return self._is_editable_classes() and not strict
        return False
    def is_text_input(self) -> bool:
        """Check if this element is some kind of text box."""
        roles = ('combobox', 'textbox')
        tag = self.tag_name()
        return self.get('role', None) in roles or tag in ['input', 'textarea']
    def remove_blank_target(self) -> None:
        """Remove target from link."""
        raise NotImplementedError
    def resolve_url(self, baseurl: QUrl) -> Optional[QUrl]:
        """Resolve the URL in the element's src/href attribute.
        Args:
            baseurl: The URL to base relative URLs on as QUrl.
        Return:
            A QUrl with the absolute URL, or None.
        """
        if baseurl.isRelative():
            raise ValueError("Need an absolute base URL!")
        # Prefer href over src when both are present.
        for attr in ['href', 'src']:
            if attr in self:
                text = self[attr].strip()
                break
        else:
            return None
        url = QUrl(text)
        if not url.isValid():
            return None
        if url.isRelative():
            url = baseurl.resolved(url)
        qtutils.ensure_valid(url)
        return url
    def is_link(self) -> bool:
        """Return True if this AbstractWebElement is a link."""
        href_tags = ['a', 'area', 'link']
        return self.tag_name() in href_tags and 'href' in self
    def _requires_user_interaction(self) -> bool:
        """Return True if clicking this element needs user interaction."""
        raise NotImplementedError
    def _mouse_pos(self) -> QPoint:
        """Get the position to click/hover."""
        # Click the center of the largest square fitting into the top/left
        # corner of the rectangle, this will help if part of the <a> element
        # is hidden behind other elements
        # https://github.com/qutebrowser/qutebrowser/issues/1005
        rect = self.rect_on_view()
        if rect.width() > rect.height():
            rect.setWidth(rect.height())
        else:
            rect.setHeight(rect.width())
        pos = rect.center()
        if pos.x() < 0 or pos.y() < 0:
            raise Error("Element position is out of view!")
        return pos
    def _move_text_cursor(self) -> None:
        """Move cursor to end after clicking."""
        raise NotImplementedError
    def _click_fake_event(self, click_target: usertypes.ClickTarget,
                          button: Qt.MouseButton = Qt.LeftButton) -> None:
        """Send a fake click event to the element."""
        pos = self._mouse_pos()
        log.webelem.debug("Sending fake click to {!r} at position {} with "
                          "target {}".format(self, pos, click_target))
        target_modifiers = {
            usertypes.ClickTarget.normal: Qt.NoModifier,
            usertypes.ClickTarget.window: Qt.AltModifier | Qt.ShiftModifier,
            usertypes.ClickTarget.tab: Qt.ControlModifier,
            usertypes.ClickTarget.tab_bg: Qt.ControlModifier,
        }
        # Shift inverts the meaning of Ctrl-click depending on
        # tabs.background.
        if config.val.tabs.background:
            target_modifiers[usertypes.ClickTarget.tab] |= Qt.ShiftModifier
        else:
            target_modifiers[usertypes.ClickTarget.tab_bg] |= Qt.ShiftModifier
        modifiers = cast(Qt.KeyboardModifiers, target_modifiers[click_target])
        events = [
            QMouseEvent(QEvent.MouseMove, pos, Qt.NoButton, Qt.NoButton, Qt.NoModifier),
            QMouseEvent(QEvent.MouseButtonPress, pos, button, button, modifiers),
            QMouseEvent(QEvent.MouseButtonRelease, pos, button, Qt.NoButton, modifiers),
        ]
        for evt in events:
            self._tab.send_event(evt)
        QTimer.singleShot(0, self._move_text_cursor)
    def _click_editable(self, click_target: usertypes.ClickTarget) -> None:
        """Fake a click on an editable input field."""
        raise NotImplementedError
    def _click_js(self, click_target: usertypes.ClickTarget) -> None:
        """Fake a click by using the JS .click() method."""
        raise NotImplementedError
    def delete(self) -> None:
        """Delete this element from the DOM."""
        raise NotImplementedError
    def _click_href(self, click_target: usertypes.ClickTarget) -> None:
        """Fake a click on an element with a href by opening the link."""
        baseurl = self._tab.url()
        url = self.resolve_url(baseurl)
        if url is None:
            # Without a resolvable URL, fall back to a synthesized click.
            self._click_fake_event(click_target)
            return
        tabbed_browser = objreg.get('tabbed-browser', scope='window',
                                    window=self._tab.win_id)
        if click_target in [usertypes.ClickTarget.tab,
                            usertypes.ClickTarget.tab_bg]:
            background = click_target == usertypes.ClickTarget.tab_bg
            tabbed_browser.tabopen(url, background=background)
        elif click_target == usertypes.ClickTarget.window:
            from qutebrowser.mainwindow import mainwindow
            window = mainwindow.MainWindow(private=tabbed_browser.is_private)
            window.show()
            window.tabbed_browser.tabopen(url)
        else:
            raise ValueError("Unknown ClickTarget {}".format(click_target))
    def click(self, click_target: usertypes.ClickTarget, *,
              force_event: bool = False) -> None:
        """Simulate a click on the element.
        Args:
            click_target: A usertypes.ClickTarget member, what kind of click
                          to simulate.
            force_event: Force generating a fake mouse event.
        """
        log.webelem.debug("Clicking {!r} with click_target {}, force_event {}"
                          .format(self, click_target, force_event))
        if force_event:
            self._click_fake_event(click_target)
            return
        if click_target == usertypes.ClickTarget.normal:
            if self.is_link() and not self._requires_user_interaction():
                log.webelem.debug("Clicking via JS click()")
                self._click_js(click_target)
            elif self.is_editable(strict=True):
                log.webelem.debug("Clicking via JS focus()")
                self._click_editable(click_target)
                if config.val.input.insert_mode.auto_enter:
                    modeman.enter(self._tab.win_id, usertypes.KeyMode.insert,
                                  'clicking input')
            else:
                self._click_fake_event(click_target)
        elif click_target in [usertypes.ClickTarget.tab,
                              usertypes.ClickTarget.tab_bg,
                              usertypes.ClickTarget.window]:
            if self.is_link():
                self._click_href(click_target)
            else:
                self._click_fake_event(click_target)
        else:
            raise ValueError("Unknown ClickTarget {}".format(click_target))
    def hover(self) -> None:
        """Simulate a mouse hover over the element."""
        pos = self._mouse_pos()
        event = QMouseEvent(QEvent.MouseMove, pos, Qt.NoButton, Qt.NoButton,
                            Qt.NoModifier)
        self._tab.send_event(event)
    def right_click(self) -> None:
        """Simulate a right-click on the element."""
        self._click_fake_event(usertypes.ClickTarget.normal,
                               button=Qt.RightButton)
|
import pywemo
from homeassistant import config_entries
from homeassistant.helpers import config_entry_flow
from . import DOMAIN
async def _async_has_devices(hass):
    """Return whether any WeMo device can be discovered on the network."""
    # Discovery is blocking, so run it in the executor.
    discovered = await hass.async_add_executor_job(pywemo.discover_devices)
    return bool(discovered)
# Register a discovery-only config flow; WeMo pushes state locally.
config_entry_flow.register_discovery_flow(
    DOMAIN, "Wemo", _async_has_devices, config_entries.CONN_CLASS_LOCAL_PUSH
)
|
import re
from functools import reduce
import misaka
from django.db.models import Q
from django.utils.safestring import mark_safe
from weblate.auth.models import User
# Matches @username mentions; usernames may contain word chars and .@+-
MENTION_RE = re.compile(r"(@[\w.@+-]+)\b", re.UNICODE)
def get_mention_users(text):
    """Return a queryset of users mentioned (@username) in the text.

    Returns an empty queryset when there are no mentions.
    """
    matches = MENTION_RE.findall(text)
    if not matches:
        return User.objects.none()
    # A single IN lookup instead of a chain of OR-ed Q objects; the
    # leading "@" is stripped from each mention.
    return User.objects.filter(username__in=[mention[1:] for mention in matches])
class WeblateHtmlRenderer(misaka.SaferHtmlRenderer):
    """Safer HTML renderer with Weblate-specific tweaks."""
    def link(self, content, raw_url, title=""):
        # Mark all links as user generated content for search engines.
        result = super().link(content, raw_url, title)
        return result.replace(' href="', ' rel="ugc" href="')
    def check_url(self, url, is_image_src=False):
        # Always allow relative links to user profiles.
        if url.startswith("/user/"):
            return True
        return super().check_url(url, is_image_src)
# Shared renderer/parser instances; misaka objects are reusable.
RENDERER = WeblateHtmlRenderer()
MARKDOWN = misaka.Markdown(
    RENDERER,
    extensions=(
        "fenced-code",
        "tables",
        "autolink",
        "space-headers",
        "strikethrough",
        "superscript",
    ),
)
def render_markdown(text):
    """Render markdown to safe HTML, linkifying known @mentions."""
    users = {user.username.lower(): user for user in get_mention_users(text)}
    parts = MENTION_RE.split(text)
    for pos, part in enumerate(parts):
        if not part.startswith("@"):
            continue
        # Replace mentions of existing users with a bold profile link.
        user = users.get(part[1:].lower())
        if user is not None:
            parts[pos] = '**[{}]({} "{}")**'.format(
                part, user.get_absolute_url(), user.get_visible_name()
            )
    return mark_safe(MARKDOWN("".join(parts)))
|
import pytest
from rumps._internal import guard_unexpected_errors
class TestGuardUnexpectedErrors(object):
    """Tests for the guard_unexpected_errors decorator."""
    def test_raises(self, capfd):
        """An exception in the callback is reported on stderr, not raised."""
        @guard_unexpected_errors
        def callback_func():
            raise ValueError('-.-')
        # The decorator must swallow the exception...
        callback_func()
        captured = capfd.readouterr()
        assert not captured.out
        # ...and print the chained traceback to stderr instead.
        assert captured.err.strip().startswith('Traceback (most recent call last):')
        assert captured.err.strip().endswith('''ValueError: -.-
The above exception was the direct cause of the following exception:
rumps.exceptions.InternalRumpsError: an unexpected error occurred within an internal callback''')
    def test_no_raises(self, capfd):
        """A successful callback returns its value and prints nothing."""
        @guard_unexpected_errors
        def callback_func():
            return 88 * 2
        assert callback_func() == 176
        captured = capfd.readouterr()
        assert not captured.out
        assert not captured.err
|
import re
from django.utils.translation import gettext_lazy as _
from weblate.checks.format import BaseFormatCheck
QT_FORMAT_MATCH = re.compile(
r"""
%( # initial %
L? # optional localized representation of numbers
(?P<ord>\d{1,2}) # variable order, like %1
)""",
re.VERBOSE,
)
QT_PLURAL_MATCH = re.compile(
r"""
%( # initial %
L? # optional localized representation of numbers
(?P<type>n) # plural: %n
)""",
re.VERBOSE,
)
class QtFormatCheck(BaseFormatCheck):
    """Check for Qt format string."""
    check_id = "qt_format"
    name = _("Qt format")
    description = _("Qt format string does not match source")
    regexp = QT_FORMAT_MATCH
    def is_position_based(self, string):
        """Qt placeholders carry explicit numbers (%1), never positions."""
        # everything is numbered
        return False
class QtPluralCheck(BaseFormatCheck):
    """Check for Qt plural string."""
    check_id = "qt_plural_format"
    name = _("Qt plural format")
    description = _("Qt plural format string does not match source")
    regexp = QT_PLURAL_MATCH
    def is_position_based(self, string):
        """The %n placeholder has no ordering, so matching is positional."""
        return True
|
from copy import deepcopy
from homeassistant.components.deconz import DOMAIN as DECONZ_DOMAIN
from homeassistant.components.deconz.gateway import get_gateway_from_config_entry
from homeassistant.components.switch import (
DOMAIN as SWITCH_DOMAIN,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
)
from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON
from homeassistant.setup import async_setup_component
from .test_gateway import DECONZ_WEB_REQUEST, setup_deconz_integration
from tests.async_mock import patch
# deCONZ "lights" payload with three supported switch types and one
# unsupported entry ("3") which must not create an entity.
POWER_PLUGS = {
    "1": {
        "id": "On off switch id",
        "name": "On off switch",
        "type": "On/Off plug-in unit",
        "state": {"on": True, "reachable": True},
        "uniqueid": "00:00:00:00:00:00:00:00-00",
    },
    "2": {
        "id": "Smart plug id",
        "name": "Smart plug",
        "type": "Smart plug",
        "state": {"on": False, "reachable": True},
        "uniqueid": "00:00:00:00:00:00:00:01-00",
    },
    "3": {
        "id": "Unsupported switch id",
        "name": "Unsupported switch",
        "type": "Not a switch",
        "state": {"reachable": True},
        "uniqueid": "00:00:00:00:00:00:00:03-00",
    },
    "4": {
        "id": "On off relay id",
        "name": "On off relay",
        "state": {"on": True, "reachable": True},
        "type": "On/Off light",
        "uniqueid": "00:00:00:00:00:00:00:04-00",
    },
}
# deCONZ "lights" payload with one warning device (siren) and one
# unsupported entry which must not create an entity.
SIRENS = {
    "1": {
        "id": "Warning device id",
        "name": "Warning device",
        "type": "Warning device",
        "state": {"alert": "lselect", "reachable": True},
        "uniqueid": "00:00:00:00:00:00:00:00-00",
    },
    "2": {
        "id": "Unsupported switch id",
        "name": "Unsupported switch",
        "type": "Not a switch",
        "state": {"reachable": True},
        "uniqueid": "00:00:00:00:00:00:00:01-00",
    },
}
async def test_platform_manually_configured(hass):
    """Test that we do not discover anything or try to set up a gateway."""
    # Setup succeeds, but no deCONZ data may be created from YAML config.
    assert (
        await async_setup_component(
            hass, SWITCH_DOMAIN, {"switch": {"platform": DECONZ_DOMAIN}}
        )
        is True
    )
    assert DECONZ_DOMAIN not in hass.data
async def test_no_switches(hass):
    """Test that no switch entities are created."""
    # A gateway without any lights yields no entities at all.
    await setup_deconz_integration(hass)
    assert len(hass.states.async_all()) == 0
async def test_power_plugs(hass):
    """Test that all supported switch entities are created."""
    data = deepcopy(DECONZ_WEB_REQUEST)
    data["lights"] = deepcopy(POWER_PLUGS)
    config_entry = await setup_deconz_integration(hass, get_state_response=data)
    gateway = get_gateway_from_config_entry(hass, config_entry)
    assert len(hass.states.async_all()) == 4
    assert hass.states.get("switch.on_off_switch").state == STATE_ON
    assert hass.states.get("switch.smart_plug").state == STATE_OFF
    assert hass.states.get("switch.on_off_relay").state == STATE_ON
    assert hass.states.get("switch.unsupported_switch") is None
    # A pushed state change must be reflected without polling.
    state_changed_event = {
        "t": "event",
        "e": "changed",
        "r": "lights",
        "id": "1",
        "state": {"on": False},
    }
    gateway.api.event_handler(state_changed_event)
    assert hass.states.get("switch.on_off_switch").state == STATE_OFF
    # Verify service calls
    on_off_switch_device = gateway.api.lights["1"]
    # Service turn on power plug
    with patch.object(
        on_off_switch_device, "_request", return_value=True
    ) as set_callback:
        await hass.services.async_call(
            SWITCH_DOMAIN,
            SERVICE_TURN_ON,
            {ATTR_ENTITY_ID: "switch.on_off_switch"},
            blocking=True,
        )
        await hass.async_block_till_done()
        set_callback.assert_called_with("put", "/lights/1/state", json={"on": True})
    # Service turn off power plug
    with patch.object(
        on_off_switch_device, "_request", return_value=True
    ) as set_callback:
        await hass.services.async_call(
            SWITCH_DOMAIN,
            SERVICE_TURN_OFF,
            {ATTR_ENTITY_ID: "switch.on_off_switch"},
            blocking=True,
        )
        await hass.async_block_till_done()
        set_callback.assert_called_with("put", "/lights/1/state", json={"on": False})
    # Unloading the config entry must remove all entities.
    await hass.config_entries.async_unload(config_entry.entry_id)
    assert len(hass.states.async_all()) == 0
async def test_sirens(hass):
    """Test that siren entities are created."""
    data = deepcopy(DECONZ_WEB_REQUEST)
    data["lights"] = deepcopy(SIRENS)
    config_entry = await setup_deconz_integration(hass, get_state_response=data)
    gateway = get_gateway_from_config_entry(hass, config_entry)
    # Fixture contains two lights; only the warning device is supported.
    assert len(hass.states.async_all()) == 2
    assert hass.states.get("switch.warning_device").state == STATE_ON
    assert hass.states.get("switch.unsupported_switch") is None
    # A cleared alert ("alert": None) maps to the switch turning off.
    state_changed_event = {
        "t": "event",
        "e": "changed",
        "r": "lights",
        "id": "1",
        "state": {"alert": None},
    }
    gateway.api.event_handler(state_changed_event)
    assert hass.states.get("switch.warning_device").state == STATE_OFF
    # Verify service calls
    warning_device_device = gateway.api.lights["1"]
    # Service turn on siren
    with patch.object(
        warning_device_device, "_request", return_value=True
    ) as set_callback:
        await hass.services.async_call(
            SWITCH_DOMAIN,
            SERVICE_TURN_ON,
            {ATTR_ENTITY_ID: "switch.warning_device"},
            blocking=True,
        )
        await hass.async_block_till_done()
        # Turning the siren on sends the continuous-alert mode "lselect".
        set_callback.assert_called_with(
            "put", "/lights/1/state", json={"alert": "lselect"}
        )
    # Service turn off siren
    with patch.object(
        warning_device_device, "_request", return_value=True
    ) as set_callback:
        await hass.services.async_call(
            SWITCH_DOMAIN,
            SERVICE_TURN_OFF,
            {ATTR_ENTITY_ID: "switch.warning_device"},
            blocking=True,
        )
        await hass.async_block_till_done()
        set_callback.assert_called_with(
            "put", "/lights/1/state", json={"alert": "none"}
        )
    await hass.config_entries.async_unload(config_entry.entry_id)
    assert len(hass.states.async_all()) == 0
|
import logging
from tesla_powerwall import MeterType
from homeassistant.const import DEVICE_CLASS_BATTERY, DEVICE_CLASS_POWER, PERCENTAGE
from .const import (
ATTR_ENERGY_EXPORTED,
ATTR_ENERGY_IMPORTED,
ATTR_FREQUENCY,
ATTR_INSTANT_AVERAGE_VOLTAGE,
ATTR_IS_ACTIVE,
DOMAIN,
ENERGY_KILO_WATT,
POWERWALL_API_CHARGE,
POWERWALL_API_DEVICE_TYPE,
POWERWALL_API_METERS,
POWERWALL_API_SERIAL_NUMBERS,
POWERWALL_API_SITE_INFO,
POWERWALL_API_STATUS,
POWERWALL_COORDINATOR,
)
from .entity import PowerWallEntity
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up the Powerwall sensors from a config entry."""
    # (The previous docstring said "August sensors" -- a copy-paste from
    # another integration; this platform only creates Powerwall entities.)
    powerwall_data = hass.data[DOMAIN][config_entry.entry_id]
    _LOGGER.debug("Powerwall_data: %s", powerwall_data)
    coordinator = powerwall_data[POWERWALL_COORDINATOR]
    site_info = powerwall_data[POWERWALL_API_SITE_INFO]
    device_type = powerwall_data[POWERWALL_API_DEVICE_TYPE]
    status = powerwall_data[POWERWALL_API_STATUS]
    powerwalls_serial_numbers = powerwall_data[POWERWALL_API_SERIAL_NUMBERS]
    entities = []
    # One energy sensor per known meter type, plus a single charge sensor.
    for meter in MeterType:
        entities.append(
            PowerWallEnergySensor(
                meter,
                coordinator,
                site_info,
                status,
                device_type,
                powerwalls_serial_numbers,
            )
        )
    entities.append(
        PowerWallChargeSensor(
            coordinator, site_info, status, device_type, powerwalls_serial_numbers
        )
    )
    async_add_entities(entities, True)
class PowerWallChargeSensor(PowerWallEntity):
    """Sensor exposing the Powerwall battery charge percentage."""

    @property
    def name(self):
        """Return the display name of the sensor."""
        return "Powerwall Charge"

    @property
    def unique_id(self):
        """Return a unique identifier for this sensor."""
        return f"{self.base_unique_id}_charge"

    @property
    def device_class(self):
        """Return the device class (battery)."""
        return DEVICE_CLASS_BATTERY

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement (percent)."""
        return PERCENTAGE

    @property
    def state(self):
        """Return the charge level, rounded to the nearest whole percent."""
        return round(self.coordinator.data[POWERWALL_API_CHARGE])
class PowerWallEnergySensor(PowerWallEntity):
    """Representation of an Powerwall Energy sensor."""
    def __init__(
        self,
        meter: MeterType,
        coordinator,
        site_info,
        status,
        device_type,
        powerwalls_serial_numbers,
    ):
        """Initialize the sensor for one meter (site, solar, battery, ...)."""
        super().__init__(
            coordinator, site_info, status, device_type, powerwalls_serial_numbers
        )
        # Which meter of the Powerwall this sensor reports on.
        self._meter = meter
    @property
    def unit_of_measurement(self):
        """Return the unit of measurement."""
        return ENERGY_KILO_WATT
    @property
    def name(self):
        """Device Name."""
        return f"Powerwall {self._meter.value.title()} Now"
    @property
    def device_class(self):
        """Device Class."""
        return DEVICE_CLASS_POWER
    @property
    def unique_id(self):
        """Device Uniqueid."""
        return f"{self.base_unique_id}_{self._meter.value}_instant_power"
    @property
    def state(self):
        """Get the current value in kW."""
        return (
            self.coordinator.data[POWERWALL_API_METERS]
            .get_meter(self._meter)
            .get_power(precision=3)
        )
    @property
    def device_state_attributes(self):
        """Return the device specific state attributes."""
        meter = self.coordinator.data[POWERWALL_API_METERS].get_meter(self._meter)
        return {
            ATTR_FREQUENCY: round(meter.frequency, 1),
            ATTR_ENERGY_EXPORTED: meter.get_energy_exported(),
            ATTR_ENERGY_IMPORTED: meter.get_energy_imported(),
            # NOTE(review): "avarage_voltage" matches the attribute name in the
            # tesla_powerwall library (typo upstream) -- do not "fix" locally.
            ATTR_INSTANT_AVERAGE_VOLTAGE: round(meter.avarage_voltage, 1),
            ATTR_IS_ACTIVE: meter.is_active(),
        }
|
import os
from blinker import Namespace
from flask import Flask,send_from_directory
from flask_cors import CORS
from flask_mongoengine import MongoEngine
# Project root; abspath normalizes the appended "../../" away -- presumably
# this resolves to the directory above this package. TODO confirm.
APP_ROOT = os.path.dirname(os.path.abspath(__file__ + "../../"))
app = Flask(__name__)
CORS(app)
# Configurations
try:
    env = os.environ['APPLICATION_ENV']
except KeyError as e:
    # logging.error('Unknown environment key, defaulting to Development')
    env = 'Development'
# Loads config.Development, config.Production, etc. based on the environment.
app.config.from_object('config.%s' % env)
app.config.update(
    DEBUG=True,
    TESTING=True,
    TEMPLATES_AUTO_RELOAD=True)
db = MongoEngine(app)
my_signals = Namespace()
# Blueprint imports are deferred until after 'app' exists so controller
# modules that import this package do not hit a circular import.
from app.agents.controllers import bots
from app.nlu.controllers import nlu
from app.intents.controllers import intents
from app.train.controllers import train
from app.endpoint.controllers import endpoint
from app.entities.controllers import entities_blueprint
app.register_blueprint(nlu)
app.register_blueprint(intents)
app.register_blueprint(train)
app.register_blueprint(endpoint)
app.register_blueprint(bots)
app.register_blueprint(entities_blueprint)
# Location of the pre-built admin frontend bundle served by the routes below.
admin_panel_dist = os.path.join(APP_ROOT, 'frontend/dist/')
@app.route('/<path:path>', methods=['GET'])
def static_proxy(path):
    """Serve a static asset from the built admin-panel bundle."""
    # 'path' is bound by the route rule above; do not rename it.
    asset_dir = admin_panel_dist
    return send_from_directory(asset_dir, path)
@app.route('/')
def root():
    """Serve the admin-panel single-page-app entry point.

    Fix: removed a leftover debug ``print(admin_panel_dist)`` that wrote the
    bundle path to stdout on every request to '/'.
    """
    return send_from_directory(admin_panel_dist, 'index.html')
@app.errorhandler(404)
def not_found(error):
    """Return a plain-text 404 response for unknown routes."""
    # Flask passes the error object; it is intentionally unused here.
    body, status = "Not found", 404
    return body, status
|
from __future__ import absolute_import
from pyspark.ml.param.shared import Param, Params
class HasKerasModelConfig(Params):
    """Parameter mixin for the serialized Keras model YAML.

    Mandatory field: no default is registered, so callers must set it.
    """
    def __init__(self):
        super(HasKerasModelConfig, self).__init__()
        self.keras_model_config = Param(
            self, "keras_model_config", "Serialized Keras model as yaml string")
    def set_keras_model_config(self, keras_model_config):
        """Set the model YAML; returns self for chaining."""
        # NOTE(review): writes Params._paramMap directly rather than using
        # pyspark's self._set(...) helper -- confirm before modernizing.
        self._paramMap[self.keras_model_config] = keras_model_config
        return self
    def get_keras_model_config(self):
        """Return the configured model YAML (raises if never set)."""
        return self.getOrDefault(self.keras_model_config)
class HasMode(Params):
    """Parameter mixin for the Elephas training mode.

    Defaults to 'asynchronous'.
    """
    def __init__(self):
        super(HasMode, self).__init__()
        self.mode = Param(self, "mode", "Elephas mode")
        self._setDefault(mode='asynchronous')
    def set_mode(self, mode):
        """Set the training mode; returns self for chaining."""
        self._paramMap[self.mode] = mode
        return self
    def get_mode(self):
        """Return the configured mode (or the default)."""
        return self.getOrDefault(self.mode)
class HasFrequency(Params):
    """Parameter mixin for the Elephas update frequency.

    Defaults to 'epoch'.
    """
    def __init__(self):
        super(HasFrequency, self).__init__()
        self.frequency = Param(self, "frequency", "Elephas frequency")
        self._setDefault(frequency='epoch')
    def set_frequency(self, frequency):
        """Set the update frequency; returns self for chaining."""
        self._paramMap[self.frequency] = frequency
        return self
    def get_frequency(self):
        """Return the configured frequency (or the default)."""
        return self.getOrDefault(self.frequency)
class HasNumberOfClasses(Params):
    """Parameter mixin for the number of output classes.

    Mandatory in practice; defaults to 10.
    """
    def __init__(self):
        super(HasNumberOfClasses, self).__init__()
        self.nb_classes = Param(self, "nb_classes", "number of classes")
        self._setDefault(nb_classes=10)
    def set_nb_classes(self, nb_classes):
        """Set the class count; returns self for chaining."""
        self._paramMap[self.nb_classes] = nb_classes
        return self
    def get_nb_classes(self):
        """Return the configured class count (or the default)."""
        return self.getOrDefault(self.nb_classes)
class HasCategoricalLabels(Params):
    """Parameter mixin indicating whether labels are categorical.

    Defaults to True.
    """
    def __init__(self):
        super(HasCategoricalLabels, self).__init__()
        self.categorical = Param(
            self, "categorical", "Boolean to indicate if labels are categorical")
        self._setDefault(categorical=True)
    def set_categorical_labels(self, categorical):
        """Set the categorical-labels flag; returns self for chaining."""
        self._paramMap[self.categorical] = categorical
        return self
    def get_categorical_labels(self):
        """Return the categorical-labels flag (or the default)."""
        return self.getOrDefault(self.categorical)
class HasEpochs(Params):
    """Parameter mixin for the number of training epochs (default 10)."""
    def __init__(self):
        super(HasEpochs, self).__init__()
        self.epochs = Param(self, "epochs", "Number of epochs to train")
        self._setDefault(epochs=10)
    def set_epochs(self, epochs):
        """Set the epoch count; returns self for chaining."""
        self._paramMap[self.epochs] = epochs
        return self
    def get_epochs(self):
        """Return the configured epoch count (or the default)."""
        return self.getOrDefault(self.epochs)
class HasBatchSize(Params):
    """Parameter mixin for the training batch size (default 32)."""
    def __init__(self):
        super(HasBatchSize, self).__init__()
        self.batch_size = Param(self, "batch_size", "Batch size")
        self._setDefault(batch_size=32)
    def set_batch_size(self, batch_size):
        """Set the batch size; returns self for chaining."""
        self._paramMap[self.batch_size] = batch_size
        return self
    def get_batch_size(self):
        """Return the configured batch size (or the default)."""
        return self.getOrDefault(self.batch_size)
class HasVerbosity(Params):
    """Parameter mixin for stdout verbosity (default 0, i.e. silent)."""
    def __init__(self):
        super(HasVerbosity, self).__init__()
        self.verbose = Param(self, "verbose", "Stdout verbosity")
        self._setDefault(verbose=0)
    def set_verbosity(self, verbose):
        """Set the verbosity level; returns self for chaining."""
        self._paramMap[self.verbose] = verbose
        return self
    def get_verbosity(self):
        """Return the configured verbosity (or the default)."""
        return self.getOrDefault(self.verbose)
class HasValidationSplit(Params):
    """Parameter mixin for the validation split fraction (default 0.1)."""
    def __init__(self):
        super(HasValidationSplit, self).__init__()
        self.validation_split = Param(
            self, "validation_split", "validation split percentage")
        self._setDefault(validation_split=0.1)
    def set_validation_split(self, validation_split):
        """Set the validation split fraction; returns self for chaining."""
        self._paramMap[self.validation_split] = validation_split
        return self
    def get_validation_split(self):
        """Return the configured validation split (or the default)."""
        return self.getOrDefault(self.validation_split)
class HasNumberOfWorkers(Params):
    """Parameter mixin for the number of Spark workers (default 8)."""
    def __init__(self):
        super(HasNumberOfWorkers, self).__init__()
        self.num_workers = Param(self, "num_workers", "number of workers")
        self._setDefault(num_workers=8)
    def set_num_workers(self, num_workers):
        """Set the worker count; returns self for chaining."""
        self._paramMap[self.num_workers] = num_workers
        return self
    def get_num_workers(self):
        """Return the configured worker count (or the default)."""
        return self.getOrDefault(self.num_workers)
class HasKerasOptimizerConfig(Params):
    """Parameter mixin for the serialized Keras optimizer config.

    Defaults to None (caller is expected to provide one).
    """
    def __init__(self):
        super(HasKerasOptimizerConfig, self).__init__()
        self.optimizer_config = Param(
            self, "optimizer_config", "Serialized Keras optimizer properties")
        self._setDefault(optimizer_config=None)
    def set_optimizer_config(self, optimizer_config):
        """Set the optimizer config; returns self for chaining."""
        self._paramMap[self.optimizer_config] = optimizer_config
        return self
    def get_optimizer_config(self):
        """Return the configured optimizer config (or None)."""
        return self.getOrDefault(self.optimizer_config)
class HasMetrics(Params):
    """Parameter mixin for Keras metrics (default ['acc'])."""
    def __init__(self):
        super(HasMetrics, self).__init__()
        self.metrics = Param(self, "metrics", "Keras metrics")
        self._setDefault(metrics=['acc'])
    def set_metrics(self, metrics):
        """Set the metrics list; returns self for chaining."""
        self._paramMap[self.metrics] = metrics
        return self
    def get_metrics(self):
        """Return the configured metrics (or the default)."""
        return self.getOrDefault(self.metrics)
class HasLoss(Params):
    """Parameter mixin for the Keras loss function.

    Mandatory field: no default is registered, so callers must set it.
    """
    def __init__(self):
        super(HasLoss, self).__init__()
        self.loss = Param(self, "loss", "Keras loss")
    def set_loss(self, loss):
        """Set the loss; returns self for chaining."""
        self._paramMap[self.loss] = loss
        return self
    def get_loss(self):
        """Return the configured loss (raises if never set)."""
        return self.getOrDefault(self.loss)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import os
import uuid
from absl import flags
from perfkitbenchmarker import container_service
from perfkitbenchmarker import context
from perfkitbenchmarker import errors
from perfkitbenchmarker import resource
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.providers import aws
from perfkitbenchmarker.providers.aws import aws_load_balancer
from perfkitbenchmarker.providers.aws import aws_logs
from perfkitbenchmarker.providers.aws import aws_network
from perfkitbenchmarker.providers.aws import s3
from perfkitbenchmarker.providers.aws import util
import requests
import six
import yaml
FLAGS = flags.FLAGS
_ECS_NOT_READY = frozenset(['PROVISIONING', 'PENDING'])
class EcrRepository(resource.BaseResource):
  """Class representing an Elastic Container Registry image repository.

  Attributes:
    name: The repository name.
    region: The AWS region the repository lives in.
  """

  def __init__(self, name, region):
    super(EcrRepository, self).__init__()
    self.name = name
    self.region = region

  def _Create(self):
    """Creates the image repository."""
    if self._Exists():
      # Adopt an existing repository rather than re-creating it, so cleanup
      # does not delete a repository this run did not create.
      self.user_managed = True
      return
    create_cmd = util.AWS_PREFIX + [
        'ecr', 'create-repository',
        '--region', self.region,
        '--repository-name', self.name
    ]
    _, stderr, retcode = vm_util.IssueCommand(create_cmd,
                                              raise_on_failure=False)
    if retcode:
      if 'InsufficientInstanceCapacity' in stderr:
        raise errors.Benchmarks.InsufficientCapacityCloudFailure(stderr)
      if 'InstanceLimitExceeded' in stderr or 'VpcLimitExceeded' in stderr:
        raise errors.Benchmarks.QuotaFailure(stderr)
      # Bug fix: the message previously said "EKS Cluster" (copy-paste from
      # another resource) and swapped stderr/retcode in the format arguments.
      raise errors.Resource.CreationError(
          'Failed to create ECR repository: {} return code: {}'.format(
              stderr, retcode))

  def _Exists(self):
    """Returns True if the repository exists."""
    describe_cmd = util.AWS_PREFIX + [
        'ecr', 'describe-repositories',
        '--region', self.region,
        '--repository-names', self.name
    ]
    stdout, _, _ = vm_util.IssueCommand(
        describe_cmd, suppress_warning=True, raise_on_failure=False)
    # A missing repository yields either empty output or an empty list.
    if not stdout or not json.loads(stdout)['repositories']:
      return False
    return True

  def _Delete(self):
    """Deletes the repository, including any images it contains (--force)."""
    delete_cmd = util.AWS_PREFIX + [
        'ecr', 'delete-repository',
        '--region', self.region,
        '--repository-name', self.name,
        '--force'
    ]
    vm_util.IssueCommand(delete_cmd, raise_on_failure=False)
class ElasticContainerRegistry(container_service.BaseContainerRegistry):
  """Class for building and storing container images on AWS."""
  CLOUD = aws.CLOUD
  def __init__(self, registry_spec):
    super(ElasticContainerRegistry, self).__init__(registry_spec)
    # Fall back to the account of the current credentials when the spec does
    # not pin a project.
    self.account = self.project or util.GetAccount()
    # The zone may be a comma-separated list; the first zone's region wins.
    self.region = util.GetRegionFromZone(self.zone.split(',')[0])
    self.repositories = []
  def _Delete(self):
    """Deletes the repositories."""
    for repository in self.repositories:
      repository.Delete()
  def Push(self, image):
    """Push a locally built image to the registry."""
    repository_name = '{namespace}/{name}'.format(
        namespace=self.name, name=image.name)
    repository = EcrRepository(repository_name, self.region)
    self.repositories.append(repository)
    repository.Create()
    super(ElasticContainerRegistry, self).Push(image)
  def GetFullRegistryTag(self, image):
    """Gets the full tag of the image."""
    # NOTE(review): 'image' is formatted directly here while Push() uses
    # image.name -- presumably this method receives the image name as a
    # string; confirm against callers.
    tag = '{account}.dkr.ecr.{region}.amazonaws.com/{namespace}/{name}'.format(
        account=self.account, region=self.region, namespace=self.name,
        name=image)
    return tag
  def Login(self):
    """Logs in to the registry."""
    get_login_cmd = util.AWS_PREFIX + [
        '--region', self.region,
        'ecr', 'get-login', '--no-include-email'
    ]
    stdout, _, _ = vm_util.IssueCommand(get_login_cmd)
    # 'ecr get-login' prints the docker login command to run; execute it.
    login_cmd = stdout.split()
    vm_util.IssueCommand(login_cmd)
  def RemoteBuild(self, image):
    """Build the image remotely."""
    # TODO(ehankland) use AWS codebuild to build the image.
    raise NotImplementedError()
class TaskDefinition(resource.BaseResource):
  """Class representing an AWS task definition."""
  def __init__(self, name, container_spec, cluster):
    super(TaskDefinition, self).__init__()
    self.name = name
    self.cpus = container_spec.cpus
    self.memory = container_spec.memory
    self.image = container_spec.image
    self.container_port = container_spec.container_port
    self.region = cluster.region
    # Set after registration; None means the definition does not exist yet.
    self.arn = None
    # All task logs go to the shared 'pkb' CloudWatch log group.
    self.log_group = aws_logs.LogGroup(self.region, 'pkb')
  def _CreateDependencies(self):
    """Create the log group if it doesn't exist."""
    if not self.log_group.Exists():
      self.log_group.Create()
  def _Create(self):
    """Create the task definition."""
    register_cmd = util.AWS_PREFIX + [
        '--region', self.region,
        'ecs', 'register-task-definition',
        '--family', self.name,
        '--execution-role-arn', 'ecsTaskExecutionRole',
        '--network-mode', 'awsvpc',
        '--requires-compatibilities=FARGATE',
        # Fargate expresses CPU in units of 1/1024 vCPU.
        '--cpu', str(int(1024 * self.cpus)),
        '--memory', str(self.memory),
        '--container-definitions', self._GetContainerDefinitions()
    ]
    stdout, _, _ = vm_util.IssueCommand(register_cmd)
    response = json.loads(stdout)
    self.arn = response['taskDefinition']['taskDefinitionArn']
  def _Delete(self):
    """Deregister the task definition."""
    if self.arn is None:
      return
    deregister_cmd = util.AWS_PREFIX + [
        '--region', self.region,
        'ecs', 'deregister-task-definition',
        '--task-definition', self.arn
    ]
    vm_util.IssueCommand(deregister_cmd)
  def _GetContainerDefinitions(self):
    """Returns a JSON representation of the container definitions."""
    definitions = [{
        'name': self.name,
        'image': self.image,
        'essential': True,
        'portMappings': [
            {
                'containerPort': self.container_port,
                'protocol': 'TCP'
            }
        ],
        'logConfiguration': {
            'logDriver': 'awslogs',
            'options': {
                'awslogs-group': 'pkb',
                'awslogs-region': self.region,
                'awslogs-stream-prefix': 'pkb'
            }
        }
    }]
    return json.dumps(definitions)
class EcsTask(container_service.BaseContainer):
  """Class representing an ECS/Fargate task."""
  def __init__(self, name, container_spec, cluster):
    super(EcsTask, self).__init__(container_spec)
    self.name = name
    # The task definition must have been created by the cluster beforehand.
    self.task_def = cluster.task_defs[name]
    self.arn = None
    self.region = cluster.region
    self.cluster_name = cluster.name
    self.subnet_id = cluster.network.subnet.id
    self.ip_address = None
    self.security_group_id = (
        cluster.network.regional_network.vpc.default_security_group_id)
  def _GetNetworkConfig(self):
    """Returns the JSON awsvpc network configuration for run-task."""
    network_config = {
        'awsvpcConfiguration': {
            'subnets': [self.subnet_id],
            'securityGroups': [self.security_group_id],
            'assignPublicIp': 'ENABLED',
        }
    }
    return json.dumps(network_config)
  def _GetOverrides(self):
    """Returns a JSON representation of task overrides.
    While the container level resources can be overridden, they have no
    effect on task level resources for Fargate tasks. This means
    that modifying a container spec will only affect the command of any
    new containers launched from it and not cpu/memory.
    """
    overrides = {
        'containerOverrides': [
            {
                'name': self.name,
            }
        ]
    }
    if self.command:
      overrides['containerOverrides'][0]['command'] = self.command
    return json.dumps(overrides)
  def _Create(self):
    """Creates the task."""
    run_cmd = util.AWS_PREFIX + [
        '--region', self.region,
        'ecs', 'run-task',
        '--cluster', self.cluster_name,
        '--task-definition', self.task_def.arn,
        '--launch-type', 'FARGATE',
        '--network-configuration', self._GetNetworkConfig(),
        '--overrides', self._GetOverrides()
    ]
    stdout, _, _ = vm_util.IssueCommand(run_cmd)
    response = json.loads(stdout)
    self.arn = response['tasks'][0]['taskArn']
  def _PostCreate(self):
    """Gets the tasks IP address."""
    container = self._GetTask()['containers'][0]
    self.ip_address = container['networkInterfaces'][0]['privateIpv4Address']
  def _DeleteDependencies(self):
    """Delete the task def."""
    self.task_def.Delete()
  def _Delete(self):
    """Deletes the task."""
    if self.arn is None:
      return
    stop_cmd = util.AWS_PREFIX + [
        '--region', self.region,
        'ecs', 'stop-task',
        '--cluster', self.cluster_name,
        '--task', self.arn
    ]
    vm_util.IssueCommand(stop_cmd)
  def _GetTask(self):
    """Returns a dictionary representation of the task."""
    describe_cmd = util.AWS_PREFIX + [
        '--region', self.region,
        'ecs', 'describe-tasks',
        '--cluster', self.cluster_name,
        '--tasks', self.arn
    ]
    stdout, _, _ = vm_util.IssueCommand(describe_cmd)
    response = json.loads(stdout)
    return response['tasks'][0]
  def _IsReady(self):
    """Returns true if the task has stopped pending."""
    return self._GetTask()['lastStatus'] not in _ECS_NOT_READY
  def WaitForExit(self, timeout=None):
    """Waits until the task has finished running."""
    # Retry wraps the status poll; it raises until the task reports STOPPED
    # or the timeout elapses.
    @vm_util.Retry(timeout=timeout)
    def _WaitForExit():
      status = self._GetTask()['lastStatus']
      if status != 'STOPPED':
        raise Exception('Task is not STOPPED.')
    _WaitForExit()
  def GetLogs(self):
    """Returns the logs from the container."""
    task_id = self.arn.split('/')[-1]
    # Stream name format matches the awslogs-stream-prefix configured in the
    # task definition.
    log_stream = 'pkb/{name}/{task_id}'.format(name=self.name, task_id=task_id)
    return six.text_type(
        aws_logs.GetLogStreamAsString(self.region, log_stream, 'pkb'))
class EcsService(container_service.BaseContainerService):
  """Class representing an ECS/Fargate service."""
  def __init__(self, name, container_spec, cluster):
    super(EcsService, self).__init__(container_spec)
    # Client token makes create-service idempotent; AWS caps it at 32 chars.
    self.client_token = str(uuid.uuid4())[:32]
    self.name = name
    self.task_def = cluster.task_defs[name]
    self.arn = None
    self.region = cluster.region
    self.cluster_name = cluster.name
    self.subnet_id = cluster.network.subnet.id
    self.security_group_id = (
        cluster.network.regional_network.vpc.default_security_group_id)
    self.load_balancer = aws_load_balancer.LoadBalancer([
        cluster.network.subnet])
    self.target_group = aws_load_balancer.TargetGroup(
        cluster.network.regional_network.vpc, self.container_port)
    self.port = 80
  def _CreateDependencies(self):
    """Creates the load balancer for the service."""
    self.load_balancer.Create()
    self.target_group.Create()
    listener = aws_load_balancer.Listener(
        self.load_balancer, self.target_group, self.port)
    listener.Create()
    # Clients reach the service through the load balancer's DNS name.
    self.ip_address = self.load_balancer.dns_name
  def _DeleteDependencies(self):
    """Deletes the service's load balancer."""
    self.task_def.Delete()
    self.load_balancer.Delete()
    self.target_group.Delete()
  # TODO(ferneyhough): Consider supporting the flag container_cluster_version.
  def _Create(self):
    """Creates the service."""
    create_cmd = util.AWS_PREFIX + [
        '--region', self.region,
        'ecs', 'create-service',
        '--desired-count', '1',
        '--client-token', self.client_token,
        '--cluster', self.cluster_name,
        '--service-name', self.name,
        '--task-definition', self.task_def.arn,
        '--launch-type', 'FARGATE',
        '--network-configuration', self._GetNetworkConfig(),
        '--load-balancers', self._GetLoadBalancerConfig(),
    ]
    vm_util.IssueCommand(create_cmd)
  def _Delete(self):
    """Deletes the service."""
    # Scale to zero first; ECS refuses to delete a service with running tasks.
    update_cmd = util.AWS_PREFIX + [
        '--region', self.region,
        'ecs', 'update-service',
        '--cluster', self.cluster_name,
        '--service', self.name,
        '--desired-count', '0'
    ]
    vm_util.IssueCommand(update_cmd)
    delete_cmd = util.AWS_PREFIX + [
        '--region', self.region,
        'ecs', 'delete-service',
        '--cluster', self.cluster_name,
        '--service', self.name
    ]
    vm_util.IssueCommand(delete_cmd, raise_on_failure=False)
  def _GetNetworkConfig(self):
    """Returns the JSON awsvpc network configuration for create-service."""
    network_config = {
        'awsvpcConfiguration': {
            'subnets': [self.subnet_id],
            'securityGroups': [self.security_group_id],
            'assignPublicIp': 'ENABLED',
        }
    }
    return json.dumps(network_config)
  def _GetLoadBalancerConfig(self):
    """Returns the JSON representation of the service load balancers."""
    load_balancer_config = [{
        'targetGroupArn': self.target_group.arn,
        'containerName': self.name,
        'containerPort': self.container_port,
    }]
    return json.dumps(load_balancer_config)
  def _IsReady(self):
    """Returns True if the Service is ready."""
    # Readiness is probed end-to-end through the load balancer.
    url = 'http://%s' % self.ip_address
    try:
      r = requests.get(url)
    except requests.ConnectionError:
      return False
    if r.status_code == 200:
      return True
    return False
class FargateCluster(container_service.BaseContainerCluster):
  """Class representing an AWS Fargate cluster."""
  CLOUD = aws.CLOUD
  CLUSTER_TYPE = 'Fargate'
  def __init__(self, cluster_spec):
    super(FargateCluster, self).__init__(cluster_spec)
    self.region = util.GetRegionFromZone(self.zone)
    self.network = aws_network.AwsNetwork.GetNetwork(self)
    self.firewall = aws_network.AwsFirewall.GetFirewall()
    self.name = 'pkb-%s' % FLAGS.run_uri
    # Task definitions shared by tasks/services, keyed by container name.
    self.task_defs = {}
    self.arn = None
  def _Create(self):
    """Creates the cluster."""
    create_cmd = util.AWS_PREFIX + [
        '--region', self.region,
        'ecs', 'create-cluster',
        '--cluster-name', self.name
    ]
    stdout, _, _ = vm_util.IssueCommand(create_cmd)
    response = json.loads(stdout)
    self.arn = response['cluster']['clusterArn']
  def _Exists(self):
    """Returns True if the cluster exists."""
    if not self.arn:
      return False
    describe_cmd = util.AWS_PREFIX + [
        '--region', self.region,
        'ecs', 'describe-clusters',
        '--clusters', self.arn
    ]
    stdout, _, _ = vm_util.IssueCommand(describe_cmd)
    response = json.loads(stdout)
    clusters = response['clusters']
    # Deleted ECS clusters linger in INACTIVE state; treat them as gone.
    if not clusters or clusters[0]['status'] == 'INACTIVE':
      return False
    return True
  def _Delete(self):
    """Deletes the cluster."""
    delete_cmd = util.AWS_PREFIX + [
        '--region', self.region,
        'ecs', 'delete-cluster',
        '--cluster', self.name
    ]
    vm_util.IssueCommand(delete_cmd, raise_on_failure=False)
  def DeployContainer(self, name, container_spec):
    """Deploys the container according to the spec."""
    # Lazily register one task definition per container name.
    if name not in self.task_defs:
      task_def = TaskDefinition(name, container_spec, self)
      self.task_defs[name] = task_def
      task_def.Create()
    task = EcsTask(name, container_spec, self)
    self.containers[name].append(task)
    task.Create()
  def DeployContainerService(self, name, container_spec):
    """Deploys the container service according to the spec."""
    if name not in self.task_defs:
      task_def = TaskDefinition(name, container_spec, self)
      self.task_defs[name] = task_def
      task_def.Create()
    service = EcsService(name, container_spec, self)
    self.services[name] = service
    # Open the container port so the load balancer can reach the tasks.
    self.firewall.AllowPortInSecurityGroup(
        service.region, service.security_group_id, service.container_port)
    service.Create()
class AwsKopsCluster(container_service.KubernetesCluster):
  """Class representing a kops based Kubernetes cluster."""
  CLOUD = aws.CLOUD
  CLUSTER_TYPE = 'kops'
  def __init__(self, spec):
    super(AwsKopsCluster, self).__init__(spec)
    # '.k8s.local' suffix tells kops to use a gossip-based cluster (no DNS).
    self.name += '.k8s.local'
    self.config_bucket = 'kops-%s-%s' % (FLAGS.run_uri, str(uuid.uuid4()))
    self.region = util.GetRegionFromZone(self.zone)
    self.s3_service = s3.S3Service()
    self.s3_service.PrepareService(self.region)
  def _CreateDependencies(self):
    """Create the bucket to store cluster config."""
    self.s3_service.MakeBucket(self.config_bucket)
  def _DeleteDependencies(self):
    """Delete the bucket that stores cluster config."""
    self.s3_service.DeleteBucket(self.config_bucket)
  def _Create(self):
    """Creates the cluster."""
    # Create the cluster spec but don't provision any resources.
    create_cmd = [
        FLAGS.kops, 'create', 'cluster',
        '--name=%s' % self.name,
        '--zones=%s' % self.zone,
        '--node-count=%s' % self.num_nodes,
        '--node-size=%s' % self.machine_type
    ]
    env = os.environ.copy()
    env['KUBECONFIG'] = FLAGS.kubeconfig
    env['KOPS_STATE_STORE'] = 's3://%s' % self.config_bucket
    vm_util.IssueCommand(create_cmd, env=env)
    # Download the cluster spec and modify it.
    get_cmd = [
        FLAGS.kops, 'get', 'cluster', self.name, '--output=yaml'
    ]
    stdout, _, _ = vm_util.IssueCommand(get_cmd, env=env)
    spec = yaml.safe_load(stdout)
    spec['metadata']['creationTimestamp'] = None
    # Long-running benchmarks need the API load balancer to keep idle
    # connections alive.
    spec['spec']['api']['loadBalancer']['idleTimeoutSeconds'] = 3600
    benchmark_spec = context.GetThreadBenchmarkSpec()
    # Tag cloud resources so they can be attributed to this benchmark run.
    spec['spec']['cloudLabels'] = {
        'owner': FLAGS.owner,
        'perfkitbenchmarker-run': FLAGS.run_uri,
        'benchmark': benchmark_spec.name,
        'perfkit_uuid': benchmark_spec.uuid,
        'benchmark_uid': benchmark_spec.uid
    }
    # Replace the cluster spec.
    with vm_util.NamedTemporaryFile() as tf:
      yaml.dump(spec, tf)
      tf.close()
      replace_cmd = [
          FLAGS.kops, 'replace', '--filename=%s' % tf.name
      ]
      vm_util.IssueCommand(replace_cmd, env=env)
    # Create the actual cluster.
    update_cmd = [
        FLAGS.kops, 'update', 'cluster', self.name, '--yes'
    ]
    vm_util.IssueCommand(update_cmd, env=env)
  def _Delete(self):
    """Deletes the cluster."""
    delete_cmd = [
        FLAGS.kops, 'delete', 'cluster',
        '--name=%s' % self.name,
        '--state=s3://%s' % self.config_bucket,
        '--yes'
    ]
    vm_util.IssueCommand(delete_cmd, raise_on_failure=False)
  def _IsReady(self):
    """Returns True if the cluster is ready, else False."""
    validate_cmd = [
        FLAGS.kops, 'validate', 'cluster',
        '--name=%s' % self.name,
        '--state=s3://%s' % self.config_bucket
    ]
    env = os.environ.copy()
    env['KUBECONFIG'] = FLAGS.kubeconfig
    # 'kops validate' exits non-zero until the cluster passes validation.
    _, _, retcode = vm_util.IssueCommand(validate_cmd, env=env,
                                         suppress_warning=True,
                                         raise_on_failure=False)
    return not retcode
|
from datetime import timedelta
import logging
from homeassistant import setup
from homeassistant.components import binary_sensor
from homeassistant.const import (
ATTR_DEVICE_CLASS,
EVENT_HOMEASSISTANT_START,
STATE_OFF,
STATE_ON,
STATE_UNAVAILABLE,
)
from homeassistant.core import CoreState
import homeassistant.util.dt as dt_util
from tests.async_mock import patch
from tests.common import assert_setup_component, async_fire_time_changed
async def test_setup(hass):
    """Verify that a valid sensor configuration sets up one platform."""
    with assert_setup_component(1):
        assert await setup.async_setup_component(
            hass,
            binary_sensor.DOMAIN,
            {
                "binary_sensor": {
                    "platform": "template",
                    "sensors": {
                        "test": {
                            "friendly_name": "virtual thingy",
                            "value_template": "{{ foo }}",
                            "device_class": "motion",
                        }
                    },
                }
            },
        )
async def test_setup_no_sensors(hass):
    """Verify that a config without a sensors section sets up nothing."""
    config = {"binary_sensor": {"platform": "template"}}
    with assert_setup_component(0):
        assert await setup.async_setup_component(hass, binary_sensor.DOMAIN, config)
async def test_setup_invalid_device(hass):
    """Verify that an invalid object id ('foo bar') sets up nothing."""
    config = {
        "binary_sensor": {"platform": "template", "sensors": {"foo bar": {}}}
    }
    with assert_setup_component(0):
        assert await setup.async_setup_component(hass, binary_sensor.DOMAIN, config)
async def test_setup_invalid_device_class(hass):
    """Verify that an unknown device_class value sets up nothing."""
    config = {
        "binary_sensor": {
            "platform": "template",
            "sensors": {
                "test": {
                    "value_template": "{{ foo }}",
                    "device_class": "foobarnotreal",
                }
            },
        }
    }
    with assert_setup_component(0):
        assert await setup.async_setup_component(hass, binary_sensor.DOMAIN, config)
async def test_setup_invalid_missing_template(hass):
    """Verify that a sensor lacking the required value_template sets up nothing."""
    config = {
        "binary_sensor": {
            "platform": "template",
            "sensors": {"test": {"device_class": "motion"}},
        }
    }
    with assert_setup_component(0):
        assert await setup.async_setup_component(hass, binary_sensor.DOMAIN, config)
async def test_icon_template(hass):
    """Test icon template."""
    with assert_setup_component(1):
        assert await setup.async_setup_component(
            hass,
            binary_sensor.DOMAIN,
            {
                "binary_sensor": {
                    "platform": "template",
                    "sensors": {
                        "test_template_sensor": {
                            "value_template": "{{ states.sensor.xyz.state }}",
                            "icon_template": "{% if "
                            "states.binary_sensor.test_state.state == "
                            "'Works' %}"
                            "mdi:check"
                            "{% endif %}",
                        }
                    },
                }
            },
        )
    await hass.async_block_till_done()
    await hass.async_start()
    await hass.async_block_till_done()
    state = hass.states.get("binary_sensor.test_template_sensor")
    # The template renders empty until test_state becomes 'Works'.
    assert state.attributes.get("icon") == ""
    hass.states.async_set("binary_sensor.test_state", "Works")
    await hass.async_block_till_done()
    state = hass.states.get("binary_sensor.test_template_sensor")
    assert state.attributes["icon"] == "mdi:check"
async def test_entity_picture_template(hass):
    """Test entity_picture template."""
    with assert_setup_component(1):
        assert await setup.async_setup_component(
            hass,
            binary_sensor.DOMAIN,
            {
                "binary_sensor": {
                    "platform": "template",
                    "sensors": {
                        "test_template_sensor": {
                            "value_template": "{{ states.sensor.xyz.state }}",
                            "entity_picture_template": "{% if "
                            "states.binary_sensor.test_state.state == "
                            "'Works' %}"
                            "/local/sensor.png"
                            "{% endif %}",
                        }
                    },
                }
            },
        )
    await hass.async_block_till_done()
    await hass.async_start()
    await hass.async_block_till_done()
    state = hass.states.get("binary_sensor.test_template_sensor")
    # The template renders empty until test_state becomes 'Works'.
    assert state.attributes.get("entity_picture") == ""
    hass.states.async_set("binary_sensor.test_state", "Works")
    await hass.async_block_till_done()
    state = hass.states.get("binary_sensor.test_template_sensor")
    assert state.attributes["entity_picture"] == "/local/sensor.png"
async def test_attribute_templates(hass):
    """Test attribute_templates template."""
    with assert_setup_component(1):
        assert await setup.async_setup_component(
            hass,
            binary_sensor.DOMAIN,
            {
                "binary_sensor": {
                    "platform": "template",
                    "sensors": {
                        "test_template_sensor": {
                            "value_template": "{{ states.sensor.xyz.state }}",
                            "attribute_templates": {
                                "test_attribute": "It {{ states.sensor.test_state.state }}."
                            },
                        }
                    },
                }
            },
        )
    await hass.async_block_till_done()
    await hass.async_start()
    await hass.async_block_till_done()
    state = hass.states.get("binary_sensor.test_template_sensor")
    # The referenced entity does not exist yet, so the template renders empty.
    assert state.attributes.get("test_attribute") == "It ."
    # Two consecutive state changes -- presumably to exercise coalescing of
    # rapid updates; only the final value is asserted. TODO confirm intent.
    hass.states.async_set("sensor.test_state", "Works2")
    await hass.async_block_till_done()
    hass.states.async_set("sensor.test_state", "Works")
    await hass.async_block_till_done()
    state = hass.states.get("binary_sensor.test_template_sensor")
    assert state.attributes["test_attribute"] == "It Works."
async def test_match_all(hass):
    """Test template that is rerendered on any state lifecycle."""
    # Patch the internal update handler so the number of re-renders
    # can be counted.
    with patch(
        "homeassistant.components.template.binary_sensor."
        "BinarySensorTemplate._update_state"
    ) as _update_state:
        with assert_setup_component(1):
            assert await setup.async_setup_component(
                hass,
                binary_sensor.DOMAIN,
                {
                    "binary_sensor": {
                        "platform": "template",
                        "sensors": {
                            "match_all_template_sensor": {
                                # Iterates over *all* states but only reacts
                                # to sensor.humidity.
                                "value_template": (
                                    "{% for state in states %}"
                                    "{% if state.entity_id == 'sensor.humidity' %}"
                                    "{{ state.entity_id }}={{ state.state }}"
                                    "{% endif %}"
                                    "{% endfor %}"
                                ),
                            },
                        },
                    }
                },
            )
        await hass.async_start()
        await hass.async_block_till_done()
        init_calls = len(_update_state.mock_calls)
        # Changing an unrelated entity leaves the call count unchanged.
        hass.states.async_set("sensor.any_state", "update")
        await hass.async_block_till_done()
        assert len(_update_state.mock_calls) == init_calls
async def test_event(hass):
    """Test the event."""
    with assert_setup_component(1):
        assert await setup.async_setup_component(
            hass,
            binary_sensor.DOMAIN,
            {
                "binary_sensor": {
                    "platform": "template",
                    "sensors": {
                        "test": {
                            "friendly_name": "virtual thingy",
                            "value_template": "{{ states.sensor.test_state.state == 'on' }}",
                            "device_class": "motion",
                        }
                    },
                }
            },
        )
    await hass.async_block_till_done()
    await hass.async_start()
    await hass.async_block_till_done()

    # Source entity absent -> template renders false.
    assert hass.states.get("binary_sensor.test").state == "off"

    # Setting the source entity to 'on' must flip the template sensor.
    hass.states.async_set("sensor.test_state", "on")
    await hass.async_block_till_done()
    assert hass.states.get("binary_sensor.test").state == "on"
async def test_template_delay_on(hass):
    """Test binary sensor template delay on."""
    config = {
        "binary_sensor": {
            "platform": "template",
            "sensors": {
                "test": {
                    "friendly_name": "virtual thingy",
                    "value_template": "{{ states.sensor.test_state.state == 'on' }}",
                    "device_class": "motion",
                    # Template must stay true for 5 s before the sensor turns on.
                    "delay_on": 5,
                }
            },
        }
    }
    await setup.async_setup_component(hass, binary_sensor.DOMAIN, config)
    await hass.async_block_till_done()
    await hass.async_start()
    hass.states.async_set("sensor.test_state", "on")
    await hass.async_block_till_done()
    # Still off: the delay window has not elapsed yet.
    state = hass.states.get("binary_sensor.test")
    assert state.state == "off"
    future = dt_util.utcnow() + timedelta(seconds=5)
    async_fire_time_changed(hass, future)
    await hass.async_block_till_done()
    # Delay elapsed with the template still true -> sensor turns on.
    state = hass.states.get("binary_sensor.test")
    assert state.state == "on"
    # check with time changes
    hass.states.async_set("sensor.test_state", "off")
    await hass.async_block_till_done()
    state = hass.states.get("binary_sensor.test")
    assert state.state == "off"
    hass.states.async_set("sensor.test_state", "on")
    await hass.async_block_till_done()
    state = hass.states.get("binary_sensor.test")
    assert state.state == "off"
    # Template went false again before the delay elapsed...
    hass.states.async_set("sensor.test_state", "off")
    await hass.async_block_till_done()
    state = hass.states.get("binary_sensor.test")
    assert state.state == "off"
    future = dt_util.utcnow() + timedelta(seconds=5)
    async_fire_time_changed(hass, future)
    await hass.async_block_till_done()
    # ...so the pending timer was cancelled and the sensor never turned on.
    state = hass.states.get("binary_sensor.test")
    assert state.state == "off"
async def test_template_delay_off(hass):
    """Test binary sensor template delay off."""
    config = {
        "binary_sensor": {
            "platform": "template",
            "sensors": {
                "test": {
                    "friendly_name": "virtual thingy",
                    "value_template": "{{ states.sensor.test_state.state == 'on' }}",
                    "device_class": "motion",
                    # Template must stay false for 5 s before the sensor turns off.
                    "delay_off": 5,
                }
            },
        }
    }
    hass.states.async_set("sensor.test_state", "on")
    await setup.async_setup_component(hass, binary_sensor.DOMAIN, config)
    await hass.async_block_till_done()
    await hass.async_start()
    hass.states.async_set("sensor.test_state", "off")
    await hass.async_block_till_done()
    # Still on: the delay window has not elapsed yet.
    state = hass.states.get("binary_sensor.test")
    assert state.state == "on"
    future = dt_util.utcnow() + timedelta(seconds=5)
    async_fire_time_changed(hass, future)
    await hass.async_block_till_done()
    # Delay elapsed with the template still false -> sensor turns off.
    state = hass.states.get("binary_sensor.test")
    assert state.state == "off"
    # check with time changes
    hass.states.async_set("sensor.test_state", "on")
    await hass.async_block_till_done()
    state = hass.states.get("binary_sensor.test")
    assert state.state == "on"
    hass.states.async_set("sensor.test_state", "off")
    await hass.async_block_till_done()
    state = hass.states.get("binary_sensor.test")
    assert state.state == "on"
    # Template went true again before the delay elapsed...
    hass.states.async_set("sensor.test_state", "on")
    await hass.async_block_till_done()
    state = hass.states.get("binary_sensor.test")
    assert state.state == "on"
    future = dt_util.utcnow() + timedelta(seconds=5)
    async_fire_time_changed(hass, future)
    await hass.async_block_till_done()
    # ...so the pending timer was cancelled and the sensor stays on.
    state = hass.states.get("binary_sensor.test")
    assert state.state == "on"
async def test_available_without_availability_template(hass):
    """Ensure availability is true without an availability_template."""
    await setup.async_setup_component(
        hass,
        binary_sensor.DOMAIN,
        {
            "binary_sensor": {
                "platform": "template",
                "sensors": {
                    "test": {
                        "friendly_name": "virtual thingy",
                        "value_template": "true",
                        "device_class": "motion",
                        "delay_off": 5,
                    }
                },
            }
        },
    )
    await hass.async_block_till_done()
    await hass.async_start()
    await hass.async_block_till_done()

    # No availability_template configured -> the entity is always available.
    state = hass.states.get("binary_sensor.test")
    assert state.state != STATE_UNAVAILABLE
    assert state.attributes[ATTR_DEVICE_CLASS] == "motion"
async def test_availability_template(hass):
    """Test availability template."""
    await setup.async_setup_component(
        hass,
        binary_sensor.DOMAIN,
        {
            "binary_sensor": {
                "platform": "template",
                "sensors": {
                    "test": {
                        "friendly_name": "virtual thingy",
                        "value_template": "true",
                        "device_class": "motion",
                        "delay_off": 5,
                        "availability_template": "{{ is_state('sensor.test_state','on') }}",
                    }
                },
            }
        },
    )
    await hass.async_block_till_done()
    await hass.async_start()
    await hass.async_block_till_done()

    # Availability template renders false -> the entity goes unavailable.
    hass.states.async_set("sensor.test_state", STATE_OFF)
    await hass.async_block_till_done()
    assert hass.states.get("binary_sensor.test").state == STATE_UNAVAILABLE

    # Availability template renders true -> the entity comes back.
    hass.states.async_set("sensor.test_state", STATE_ON)
    await hass.async_block_till_done()
    state = hass.states.get("binary_sensor.test")
    assert state.state != STATE_UNAVAILABLE
    assert state.attributes[ATTR_DEVICE_CLASS] == "motion"
async def test_invalid_attribute_template(hass, caplog):
    """Test that errors are logged if rendering template fails."""
    hass.states.async_set("binary_sensor.test_sensor", "true")
    await setup.async_setup_component(
        hass,
        binary_sensor.DOMAIN,
        {
            "binary_sensor": {
                "platform": "template",
                "sensors": {
                    "invalid_template": {
                        "value_template": "{{ states.binary_sensor.test_sensor }}",
                        "attribute_templates": {
                            # References a non-existent entity, so rendering
                            # this attribute raises a TemplateError.
                            "test_attribute": "{{ states.binary_sensor.unknown.attributes.picture }}"
                        },
                    }
                },
            }
        },
    )
    await hass.async_block_till_done()
    # The source sensor and the template sensor both exist.
    assert len(hass.states.async_all()) == 2
    await hass.async_start()
    await hass.async_block_till_done()
    # The failing attribute and the error type must appear in the log.
    assert "test_attribute" in caplog.text
    assert "TemplateError" in caplog.text
async def test_invalid_availability_template_keeps_component_available(hass, caplog):
    """Test that an invalid availability keeps the device available."""
    config = {
        "binary_sensor": {
            "platform": "template",
            "sensors": {
                "my_sensor": {
                    "value_template": "{{ states.binary_sensor.test_sensor }}",
                    "availability_template": "{{ x - 12 }}",
                }
            },
        }
    }
    await setup.async_setup_component(hass, binary_sensor.DOMAIN, config)
    await hass.async_block_till_done()
    await hass.async_start()
    await hass.async_block_till_done()

    # The availability template raises, so the entity stays available
    # and the error is logged instead.
    assert hass.states.get("binary_sensor.my_sensor").state != STATE_UNAVAILABLE
    assert "UndefinedError: 'x' is undefined" in caplog.text
async def test_no_update_template_match_all(hass, caplog):
    """Test that we do not update sensors that match on all."""
    hass.states.async_set("binary_sensor.test_sensor", "true")
    # Start with core not yet running so entities only render at startup.
    hass.state = CoreState.not_running
    await setup.async_setup_component(
        hass,
        binary_sensor.DOMAIN,
        {
            "binary_sensor": {
                "platform": "template",
                "sensors": {
                    # Each sensor below has one template that matches all
                    # states ('{{ "true" }}' / '{{ 1 + 1 }}').
                    "all_state": {"value_template": '{{ "true" }}'},
                    "all_icon": {
                        "value_template": "{{ states.binary_sensor.test_sensor.state }}",
                        "icon_template": "{{ 1 + 1 }}",
                    },
                    "all_entity_picture": {
                        "value_template": "{{ states.binary_sensor.test_sensor.state }}",
                        "entity_picture_template": "{{ 1 + 1 }}",
                    },
                    "all_attribute": {
                        "value_template": "{{ states.binary_sensor.test_sensor.state }}",
                        "attribute_templates": {"test_attribute": "{{ 1 + 1 }}"},
                    },
                },
            }
        },
    )
    await hass.async_block_till_done()
    # Four template sensors plus the source sensor.
    assert len(hass.states.async_all()) == 5
    # Nothing has rendered before startup.
    assert hass.states.get("binary_sensor.all_state").state == "off"
    assert hass.states.get("binary_sensor.all_icon").state == "off"
    assert hass.states.get("binary_sensor.all_entity_picture").state == "off"
    assert hass.states.get("binary_sensor.all_attribute").state == "off"
    hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
    await hass.async_block_till_done()
    # All sensors render once at startup.
    assert hass.states.get("binary_sensor.all_state").state == "on"
    assert hass.states.get("binary_sensor.all_icon").state == "on"
    assert hass.states.get("binary_sensor.all_entity_picture").state == "on"
    assert hass.states.get("binary_sensor.all_attribute").state == "on"
    hass.states.async_set("binary_sensor.test_sensor", "false")
    await hass.async_block_till_done()
    # all_state only has a match-all template, so it is not re-rendered.
    assert hass.states.get("binary_sensor.all_state").state == "on"
    # Will now process because we have one valid template
    assert hass.states.get("binary_sensor.all_icon").state == "off"
    assert hass.states.get("binary_sensor.all_entity_picture").state == "off"
    assert hass.states.get("binary_sensor.all_attribute").state == "off"
    # Explicit entity updates still do not re-render match-all templates.
    await hass.helpers.entity_component.async_update_entity("binary_sensor.all_state")
    await hass.helpers.entity_component.async_update_entity("binary_sensor.all_icon")
    await hass.helpers.entity_component.async_update_entity(
        "binary_sensor.all_entity_picture"
    )
    await hass.helpers.entity_component.async_update_entity(
        "binary_sensor.all_attribute"
    )
    assert hass.states.get("binary_sensor.all_state").state == "on"
    assert hass.states.get("binary_sensor.all_icon").state == "off"
    assert hass.states.get("binary_sensor.all_entity_picture").state == "off"
    assert hass.states.get("binary_sensor.all_attribute").state == "off"
async def test_unique_id(hass):
    """Test unique_id option only creates one binary sensor per id."""
    config = {
        "binary_sensor": {
            "platform": "template",
            "sensors": {
                "test_template_cover_01": {
                    "unique_id": "not-so-unique-anymore",
                    "value_template": "{{ true }}",
                },
                "test_template_cover_02": {
                    "unique_id": "not-so-unique-anymore",
                    "value_template": "{{ false }}",
                },
            },
        },
    }
    await setup.async_setup_component(hass, binary_sensor.DOMAIN, config)
    await hass.async_block_till_done()
    await hass.async_start()
    await hass.async_block_till_done()

    # Both sensors share a unique_id, so only the first one is registered.
    assert len(hass.states.async_all()) == 1
async def test_template_validation_error(hass, caplog):
    """Test that an invalid icon template result is logged and cleared."""
    caplog.set_level(logging.ERROR)
    config = {
        "binary_sensor": {
            "platform": "template",
            "sensors": {
                "test": {
                    "friendly_name": "virtual thingy",
                    "value_template": "True",
                    # Icon is taken directly from the source sensor's state.
                    "icon_template": "{{ states.sensor.test_state.state }}",
                    "device_class": "motion",
                    "delay_on": 5,
                },
            },
        },
    }
    await setup.async_setup_component(hass, binary_sensor.DOMAIN, config)
    await hass.async_block_till_done()
    await hass.async_start()
    await hass.async_block_till_done()
    state = hass.states.get("binary_sensor.test")
    assert state.attributes.get("icon") == ""
    # A valid mdi: icon passes validation and is published.
    hass.states.async_set("sensor.test_state", "mdi:check")
    await hass.async_block_till_done()
    state = hass.states.get("binary_sensor.test")
    assert state.attributes.get("icon") == "mdi:check"
    # An invalid icon value is rejected: exactly one error log, icon cleared.
    hass.states.async_set("sensor.test_state", "invalid_icon")
    await hass.async_block_till_done()
    assert len(caplog.records) == 1
    assert caplog.records[0].message.startswith(
        "Error validating template result 'invalid_icon' from template"
    )
    state = hass.states.get("binary_sensor.test")
    assert state.attributes.get("icon") is None
|
from datetime import timedelta
from openerz_api.main import OpenERZConnector
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.config_validation import PLATFORM_SCHEMA
from homeassistant.helpers.entity import Entity
# Pickup schedules change rarely; polling the API twice a day is plenty.
SCAN_INTERVAL = timedelta(hours=12)
# Configuration option keys for this platform.
CONF_ZIP = "zip"
CONF_WASTE_TYPE = "waste_type"
CONF_NAME = "name"
# zip is required, waste_type defaults to generic "waste", name is optional.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_ZIP): cv.positive_int,
        vol.Required(CONF_WASTE_TYPE, default="waste"): cv.string,
        vol.Optional(CONF_NAME): cv.string,
    }
)
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the sensor platform.

    Creates one OpenERZ API connector from the validated config and adds a
    single sensor entity; the trailing True requests an immediate first
    update before the entity is added.
    """
    api_connector = OpenERZConnector(config[CONF_ZIP], config[CONF_WASTE_TYPE])
    add_entities([OpenERZSensor(api_connector, config.get(CONF_NAME))], True)
class OpenERZSensor(Entity):
    """Sensor reporting the next waste pickup via the OpenERZ connector."""

    def __init__(self, api_connector, name):
        """Initialize the sensor."""
        self.api_connector = api_connector
        self._name = name
        self._state = None

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def state(self):
        """Return the state of the sensor."""
        return self._state

    def update(self):
        """Fetch new state data for the sensor.

        This is the only method that should fetch new data for Home Assistant.
        """
        self._state = self.api_connector.find_next_pickup(day_offset=31)
|
import unittest
import pandas as pd
from pgmpy.models import BayesianModel
from pgmpy.estimators import BDeuScore
class TestBDeuScore(unittest.TestCase):
    """Tests for the BDeu structure score on toy data and the Titanic set."""

    def setUp(self):
        """Build a toy dataset, two candidate models, and load Titanic data."""
        self.d1 = pd.DataFrame(
            data={"A": [0, 0, 1], "B": [0, 1, 0], "C": [1, 1, 0], "D": ["X", "Y", "Z"]}
        )
        self.m1 = BayesianModel([("A", "C"), ("B", "C"), ("D", "B")])
        self.m2 = BayesianModel([("C", "A"), ("C", "B"), ("A", "D")])
        # data_link - "https://www.kaggle.com/c/titanic/download/train.csv"
        self.titanic_data = pd.read_csv(
            "pgmpy/tests/test_estimators/testdata/titanic_train.csv"
        )
        self.titanic_data2 = self.titanic_data[["Survived", "Sex", "Pclass"]]

    def test_score(self):
        """Score of m1 matches a reference value; an empty model scores 0."""
        self.assertAlmostEqual(BDeuScore(self.d1).score(self.m1), -9.907103407446435)
        self.assertEqual(BDeuScore(self.d1).score(BayesianModel()), 0)

    def test_score_titanic(self):
        """A sensible Titanic structure outscores a clearly worse one."""
        scorer = BDeuScore(self.titanic_data2, equivalent_sample_size=25)
        titanic = BayesianModel([("Sex", "Survived"), ("Pclass", "Survived")])
        self.assertAlmostEqual(scorer.score(titanic), -1892.7383393910427)
        # A model with no parents for Survived must score strictly lower.
        titanic2 = BayesianModel([("Pclass", "Sex")])
        titanic2.add_nodes_from(["Sex", "Survived", "Pclass"])
        self.assertLess(scorer.score(titanic2), scorer.score(titanic))

    def tearDown(self):
        """Drop references to the datasets and models."""
        del self.d1
        del self.m1
        del self.m2
        del self.titanic_data
        del self.titanic_data2
|
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
# Minimal valid configuration for the transport_nsw sensor platform.
VALID_CONFIG = {
    "sensor": {
        "platform": "transport_nsw",
        "stop_id": "209516",
        "route": "199",
        "destination": "",
        "api_key": "YOUR_API_KEY",
    }
}
def get_departuresMock(_stop_id, route, destination, api_key):
    """Mock TransportNSW departures loading."""
    # Fixed departure payload matching what the real API would return.
    return {
        "stop_id": "209516",
        "route": "199",
        "due": 16,
        "delay": 6,
        "real_time": "y",
        "destination": "Palm Beach",
        "mode": "Bus",
    }
@patch("TransportNSW.TransportNSW.get_departures", side_effect=get_departuresMock)
async def test_transportnsw_config(mocked_get_departures, hass):
    """Test minimal TransportNSW configuration.

    Sets up the sensor with VALID_CONFIG and checks the mocked departure
    data is mapped onto the entity state and attributes.
    """
    assert await async_setup_component(hass, "sensor", VALID_CONFIG)
    await hass.async_block_till_done()
    state = hass.states.get("sensor.next_bus")
    # 'due' becomes the state; the remaining keys become attributes.
    assert state.state == "16"
    assert state.attributes["stop_id"] == "209516"
    assert state.attributes["route"] == "199"
    assert state.attributes["delay"] == 6
    assert state.attributes["real_time"] == "y"
    assert state.attributes["destination"] == "Palm Beach"
    assert state.attributes["mode"] == "Bus"
|
import asyncio.subprocess
import collections
import logging
import os
from typing import Any, Dict, Optional, cast
import voluptuous as vol
from homeassistant.exceptions import HomeAssistantError
from . import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow
from ..models import Credentials, UserMeta
# Configuration keys for the command_line auth provider.
CONF_COMMAND = "command"
CONF_ARGS = "args"
CONF_META = "meta"
CONFIG_SCHEMA = AUTH_PROVIDER_SCHEMA.extend(
    {
        # NOTE(review): os.path.normpath only normalizes the string; it does
        # not actually enforce the "must be an absolute path" message here —
        # confirm whether an absoluteness check was intended.
        vol.Required(CONF_COMMAND): vol.All(
            str, os.path.normpath, msg="must be an absolute path"
        ),
        vol.Optional(CONF_ARGS, default=None): vol.Any(vol.DefaultTo(list), [str]),
        vol.Optional(CONF_META, default=False): bool,
    },
    extra=vol.PREVENT_EXTRA,
)
_LOGGER = logging.getLogger(__name__)
class InvalidAuthError(HomeAssistantError):
    """Raised when authentication with given credentials fails."""
@AUTH_PROVIDERS.register("command_line")
class CommandLineAuthProvider(AuthProvider):
    """Auth provider validating credentials by calling a command."""

    DEFAULT_TITLE = "Command Line Authentication"

    # which keys to accept from a program's stdout
    ALLOWED_META_KEYS = ("name",)

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        """Extend parent's __init__.

        Adds self._user_meta dictionary to hold the user-specific
        attributes provided by external programs.
        """
        super().__init__(*args, **kwargs)
        self._user_meta: Dict[str, Dict[str, Any]] = {}

    async def async_login_flow(self, context: Optional[dict]) -> LoginFlow:
        """Return a flow to login."""
        return CommandLineLoginFlow(self)

    async def async_validate_login(self, username: str, password: str) -> None:
        """Validate a username and password.

        Runs the configured command with the credentials exposed through the
        subprocess environment; raises InvalidAuthError on any failure.
        """
        # NOTE(review): this mapping REPLACES the child's environment, so the
        # command runs without PATH etc. — confirm that is intended.
        env = {"username": username, "password": password}
        try:
            process = await asyncio.subprocess.create_subprocess_exec(  # pylint: disable=no-member
                self.config[CONF_COMMAND],
                *self.config[CONF_ARGS],
                env=env,
                # Capture stdout only when meta parsing is enabled.
                stdout=asyncio.subprocess.PIPE if self.config[CONF_META] else None,
            )
            stdout, _ = await process.communicate()
        except OSError as err:
            # happens when command doesn't exist or permission is denied
            _LOGGER.error("Error while authenticating %r: %s", username, err)
            raise InvalidAuthError from err
        if process.returncode != 0:
            # Non-zero exit means the external program rejected the login.
            _LOGGER.error(
                "User %r failed to authenticate, command exited with code %d",
                username,
                process.returncode,
            )
            raise InvalidAuthError
        if self.config[CONF_META]:
            # Parse "key = value" lines from stdout. Lines starting with '#'
            # are comments; malformed or undecodable lines are skipped
            # (UnicodeDecodeError is a subclass of ValueError).
            meta: Dict[str, str] = {}
            for _line in stdout.splitlines():
                try:
                    line = _line.decode().lstrip()
                    if line.startswith("#"):
                        continue
                    key, value = line.split("=", 1)
                except ValueError:
                    # malformed line
                    continue
                key = key.strip()
                value = value.strip()
                if key in self.ALLOWED_META_KEYS:
                    meta[key] = value
            self._user_meta[username] = meta

    async def async_get_or_create_credentials(
        self, flow_result: Dict[str, str]
    ) -> Credentials:
        """Get credentials based on the flow result."""
        username = flow_result["username"]
        # Reuse existing credentials for this username if present.
        for credential in await self.async_credentials():
            if credential.data["username"] == username:
                return credential
        # Create new credentials.
        return self.async_create_credentials({"username": username})

    async def async_user_meta_for_credentials(
        self, credentials: Credentials
    ) -> UserMeta:
        """Return extra user metadata for credentials.

        Currently, only name is supported.
        """
        meta = self._user_meta.get(credentials.data["username"], {})
        return UserMeta(name=meta.get("name"), is_active=True)
class CommandLineLoginFlow(LoginFlow):
    """Handler for the login flow."""

    async def async_step_init(
        self, user_input: Optional[Dict[str, str]] = None
    ) -> Dict[str, Any]:
        """Handle the step of the form.

        Shows the username/password form and, on submission, validates the
        credentials via the provider's external command.
        """
        errors = {}
        if user_input is not None:
            user_input["username"] = user_input["username"].strip()
            try:
                await cast(
                    CommandLineAuthProvider, self._auth_provider
                ).async_validate_login(user_input["username"], user_input["password"])
            except InvalidAuthError:
                errors["base"] = "invalid_auth"
            if not errors:
                # Never keep the password in the flow result.
                user_input.pop("password")
                return await self.async_finish(user_input)
        schema: Dict[str, type] = collections.OrderedDict()
        schema["username"] = str
        schema["password"] = str
        return self.async_show_form(
            step_id="init", data_schema=vol.Schema(schema), errors=errors
        )
|
import pytest
from homeassistant.components import media_source
from homeassistant.components.media_source import const
from homeassistant.config import async_process_ha_core_config
from homeassistant.setup import async_setup_component
async def test_async_browse_media(hass):
    """Test browse media.

    Exercises the error paths (missing path, file instead of directory,
    unknown source, directory traversal) and then successful listings.
    """
    local_media = hass.config.path("media")
    # Register two media dirs that both point at the same folder.
    await async_process_ha_core_config(
        hass, {"media_dirs": {"local": local_media, "recordings": local_media}}
    )
    await hass.async_block_till_done()
    assert await async_setup_component(hass, const.DOMAIN, {})
    await hass.async_block_till_done()
    # Test path not exists
    with pytest.raises(media_source.BrowseError) as excinfo:
        await media_source.async_browse_media(
            hass, f"{const.URI_SCHEME}{const.DOMAIN}/local/test/not/exist"
        )
    assert str(excinfo.value) == "Path does not exist."
    # Test browse file
    with pytest.raises(media_source.BrowseError) as excinfo:
        await media_source.async_browse_media(
            hass, f"{const.URI_SCHEME}{const.DOMAIN}/local/test.mp3"
        )
    assert str(excinfo.value) == "Path is not a directory."
    # Test invalid base
    with pytest.raises(media_source.BrowseError) as excinfo:
        await media_source.async_browse_media(
            hass, f"{const.URI_SCHEME}{const.DOMAIN}/invalid/base"
        )
    assert str(excinfo.value) == "Unknown source directory."
    # Test directory traversal
    with pytest.raises(media_source.BrowseError) as excinfo:
        await media_source.async_browse_media(
            hass, f"{const.URI_SCHEME}{const.DOMAIN}/local/../configuration.yaml"
        )
    assert str(excinfo.value) == "Invalid path."
    # Test successful listing
    media = await media_source.async_browse_media(
        hass, f"{const.URI_SCHEME}{const.DOMAIN}"
    )
    assert media
    media = await media_source.async_browse_media(
        hass, f"{const.URI_SCHEME}{const.DOMAIN}/local/."
    )
    assert media
    media = await media_source.async_browse_media(
        hass, f"{const.URI_SCHEME}{const.DOMAIN}/recordings/."
    )
    assert media
async def test_media_view(hass, hass_client):
    """Test media view.

    Verifies the HTTP endpoint returns 404 for missing, non-media, and
    unknown-source paths, and serves existing media files.
    """
    local_media = hass.config.path("media")
    await async_process_ha_core_config(
        hass, {"media_dirs": {"local": local_media, "recordings": local_media}}
    )
    await hass.async_block_till_done()
    assert await async_setup_component(hass, const.DOMAIN, {})
    await hass.async_block_till_done()
    client = await hass_client()
    # Protects against non-existent files
    resp = await client.get("/media/local/invalid.txt")
    assert resp.status == 404
    resp = await client.get("/media/recordings/invalid.txt")
    assert resp.status == 404
    # Protects against non-media files
    resp = await client.get("/media/local/not_media.txt")
    assert resp.status == 404
    # Protects against unknown local media sources
    resp = await client.get("/media/unknown_source/not_media.txt")
    assert resp.status == 404
    # Fetch available media
    resp = await client.get("/media/local/test.mp3")
    assert resp.status == 200
    resp = await client.get("/media/recordings/test.mp3")
    assert resp.status == 200
|
import datetime
import logging
from typing import Optional
from nsw_fuel import FuelCheckClient, FuelCheckError
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ATTR_ATTRIBUTION, CURRENCY_CENT, VOLUME_LITERS
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
# Keys for the extra state attributes exposed by each sensor.
ATTR_STATION_ID = "station_id"
ATTR_STATION_NAME = "station_name"
# Configuration option keys.
CONF_STATION_ID = "station_id"
CONF_FUEL_TYPES = "fuel_types"
# Fuel type codes accepted by this platform's configuration.
CONF_ALLOWED_FUEL_TYPES = [
    "E10",
    "U91",
    "E85",
    "P95",
    "P98",
    "DL",
    "PDL",
    "B20",
    "LPG",
    "CNG",
    "EV",
]
CONF_DEFAULT_FUEL_TYPES = ["E10", "U91"]
ATTRIBUTION = "Data provided by NSW Government FuelCheck"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_STATION_ID): cv.positive_int,
        vol.Optional(CONF_FUEL_TYPES, default=CONF_DEFAULT_FUEL_TYPES): vol.All(
            cv.ensure_list, [vol.In(CONF_ALLOWED_FUEL_TYPES)]
        ),
    }
)
# Rate limit applied via the Throttle decorator on StationPriceData.update,
# shared by all sensors for the station.
MIN_TIME_BETWEEN_UPDATES = datetime.timedelta(hours=1)
NOTIFICATION_ID = "nsw_fuel_station_notification"
NOTIFICATION_TITLE = "NSW Fuel Station Sensor Setup"
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the NSW Fuel Station sensor.

    Performs an initial fetch; if it fails, a persistent notification is
    created and no entities are added. Otherwise one sensor is added per
    configured fuel type that the station actually sells.
    """
    station_id = config[CONF_STATION_ID]
    fuel_types = config[CONF_FUEL_TYPES]
    client = FuelCheckClient()
    station_data = StationPriceData(client, station_id)
    station_data.update()
    if station_data.error is not None:
        # Surface the failure to the user instead of adding dead entities.
        message = (
            f"Error: {station_data.error}. Check the logs for additional information."
        )
        hass.components.persistent_notification.create(
            message, title=NOTIFICATION_TITLE, notification_id=NOTIFICATION_ID
        )
        return
    # Only create sensors for fuel types the station actually offers.
    available_fuel_types = station_data.get_available_fuel_types()
    add_entities(
        [
            StationPriceSensor(station_data, fuel_type)
            for fuel_type in fuel_types
            if fuel_type in available_fuel_types
        ]
    )
class StationPriceData:
    """An object to store and fetch the latest data for a given station."""

    def __init__(self, client, station_id: int) -> None:
        """Initialize the price data holder.

        `error` holds the message of the last failed API call (or None),
        which setup_platform inspects to decide whether to add entities.
        """
        self.station_id = station_id
        self._client = client
        self._data = None
        self._reference_data = None
        self.error = None
        self._station_name = None

    @Throttle(MIN_TIME_BETWEEN_UPDATES)
    def update(self):
        """Update the internal data using the API client.

        Reference data (station names) is fetched once and cached; prices
        are refreshed on every (throttled) call.
        """
        if self._reference_data is None:
            try:
                self._reference_data = self._client.get_reference_data()
            except FuelCheckError as exc:
                self.error = str(exc)
                _LOGGER.error(
                    "Failed to fetch NSW Fuel station reference data. %s", exc
                )
                return
        try:
            self._data = self._client.get_fuel_prices_for_station(self.station_id)
        except FuelCheckError as exc:
            self.error = str(exc)
            _LOGGER.error("Failed to fetch NSW Fuel station price data. %s", exc)

    def for_fuel_type(self, fuel_type: str):
        """Return the price of the given fuel type, or None if unknown."""
        if self._data is None:
            return None
        return next(
            (price for price in self._data if price.fuel_type == fuel_type), None
        )

    def get_available_fuel_types(self):
        """Return the available fuel types for the station."""
        # Guard against a failed price fetch (self._data stays None) so
        # callers get an empty list instead of a TypeError.
        if self._data is None:
            return []
        return [price.fuel_type for price in self._data]

    def get_station_name(self) -> str:
        """Return the name of the station, resolved lazily and cached."""
        if self._station_name is None:
            name = None
            if self._reference_data is not None:
                name = next(
                    (
                        station.name
                        for station in self._reference_data.stations
                        if station.code == self.station_id
                    ),
                    None,
                )
            # Fall back to a generic label when the station is unknown.
            self._station_name = name or f"station {self.station_id}"
        return self._station_name
class StationPriceSensor(Entity):
    """Implementation of a sensor that reports the fuel price for a station."""

    def __init__(self, station_data: StationPriceData, fuel_type: str):
        """Initialize the sensor."""
        self._station_data = station_data
        self._fuel_type = fuel_type

    @property
    def name(self) -> str:
        """Return the name of the sensor."""
        return f"{self._station_data.get_station_name()} {self._fuel_type}"

    @property
    def state(self) -> Optional[float]:
        """Return the state of the sensor (price, or None if unavailable)."""
        price_info = self._station_data.for_fuel_type(self._fuel_type)
        if price_info:
            return price_info.price
        return None

    @property
    def device_state_attributes(self) -> dict:
        """Return the state attributes of the device."""
        return {
            ATTR_STATION_ID: self._station_data.station_id,
            ATTR_STATION_NAME: self._station_data.get_station_name(),
            ATTR_ATTRIBUTION: ATTRIBUTION,
        }

    @property
    def unit_of_measurement(self) -> str:
        """Return the units of measurement."""
        return f"{CURRENCY_CENT}/{VOLUME_LITERS}"

    def update(self):
        """Update current conditions.

        Delegates to the shared StationPriceData object; its update() is
        throttled, so several sensors cause at most one API call per hour.
        """
        self._station_data.update()
|
from logilab.common.testlib import TestCase, unittest_main
from logilab.common.xmlutils import parse_pi_data
class ProcessingInstructionDataParsingTest(TestCase):
    """Tests for parse_pi_data: parsing of XML processing-instruction data."""

    def test_empty_pi(self):
        """
        Tests the parsing of the data of an empty processing instruction.
        """
        pi_data = u" \t \n "
        data = parse_pi_data(pi_data)
        self.assertEqual(data, {})

    def test_simple_pi_with_double_quotes(self):
        """
        Tests the parsing of the data of a simple processing instruction using
        double quotes for embedding the value.
        """
        pi_data = u""" \t att="value"\n """
        data = parse_pi_data(pi_data)
        self.assertEqual(data, {u"att": u"value"})

    def test_simple_pi_with_simple_quotes(self):
        """
        Tests the parsing of the data of a simple processing instruction using
        simple quotes for embedding the value.
        """
        pi_data = u""" \t att='value'\n """
        data = parse_pi_data(pi_data)
        self.assertEqual(data, {u"att": u"value"})

    def test_complex_pi_with_different_quotes(self):
        """
        Tests the parsing of the data of a complex processing instruction using
        simple quotes or double quotes for embedding the values.
        """
        pi_data = u""" \t att='value'\n att2="value2" att3='value3'"""
        data = parse_pi_data(pi_data)
        self.assertEqual(data, {u"att": u"value", u"att2": u"value2",
                                u"att3": u"value3"})

    def test_pi_with_non_attribute_data(self):
        """
        Tests the parsing of the data of a complex processing instruction
        containing non-attribute data.
        """
        # Bare keywords (no '=value') are mapped to None.
        pi_data = u""" \t keyword att1="value1" """
        data = parse_pi_data(pi_data)
        self.assertEqual(data, {u"keyword": None, u"att1": u"value1"})
# Allow running this test module directly (python <module>.py).
if __name__ == '__main__':
    unittest_main()
|
import unittest
from homeassistant.components.kira import remote as kira
from tests.async_mock import MagicMock
from tests.common import get_test_home_assistant
SERVICE_SEND_COMMAND = "send_command"
TEST_CONFIG = {kira.DOMAIN: {"devices": [{"host": "127.0.0.1", "port": 17324}]}}
DISCOVERY_INFO = {"name": "kira", "device": "kira"}
class TestKiraSensor(unittest.TestCase):
    """Tests the Kira remote platform."""

    # pylint: disable=invalid-name
    DEVICES = []

    def add_entities(self, devices):
        """Mock add devices."""
        for device in devices:
            self.DEVICES.append(device)

    def setUp(self):
        """Initialize values for this testcase class."""
        # Use a fresh per-instance list so entities added in one test cannot
        # leak into another via the shared mutable class attribute.
        self.DEVICES = []
        self.hass = get_test_home_assistant()
        self.mock_kira = MagicMock()
        self.hass.data[kira.DOMAIN] = {kira.CONF_REMOTE: {}}
        self.hass.data[kira.DOMAIN][kira.CONF_REMOTE]["kira"] = self.mock_kira
        self.addCleanup(self.hass.stop)

    def test_service_call(self):
        """Test Kira's ability to send commands."""
        kira.setup_platform(self.hass, TEST_CONFIG, self.add_entities, DISCOVERY_INFO)
        assert len(self.DEVICES) == 1
        remote = self.DEVICES[0]
        assert remote.name == "kira"
        command = ["FAKE_COMMAND"]
        device = "FAKE_DEVICE"
        # The remote forwards (code, device) as a single tuple to sendCode.
        command_tuple = (command[0], device)
        remote.send_command(device=device, command=command)
        self.mock_kira.sendCode.assert_called_with(command_tuple)
|
import unittest
from unittest.mock import Mock
from kaggle_gcp import KaggleKernelCredentials, init_gcs
from test.support import EnvironmentVarGuard
from google.cloud import storage
def _make_credentials():
    """Return a Mock satisfying the google-auth Credentials interface."""
    # Imported lazily so the module can be collected without google-auth.
    import google.auth.credentials
    return Mock(spec=google.auth.credentials.Credentials)
class TestStorage(unittest.TestCase):
    """Tests that init_gcs() integrates Kaggle credentials into google.cloud.storage."""

    def test_version(self):
        """The patched storage library still exposes its version string."""
        self.assertIsNotNone(storage.__version__)

    def test_ctr(self):
        """Explicitly supplied credentials are kept; Kaggle ones are not injected."""
        credentials = _make_credentials()
        env = EnvironmentVarGuard()
        env.set('KAGGLE_USER_SECRETS_TOKEN', 'foobar')
        env.set('KAGGLE_KERNEL_INTEGRATIONS', 'GCS')
        with env:
            init_gcs()
            client = storage.Client(project="xyz", credentials=credentials)
            self.assertEqual(client.project, "xyz")
            self.assertNotIsInstance(client._credentials, KaggleKernelCredentials)
            self.assertIsNotNone(client._credentials)

    def test_annonymous_client(self):
        """Anonymous clients can still be created after patching."""
        env = EnvironmentVarGuard()
        env.set('KAGGLE_USER_SECRETS_TOKEN', 'foobar')
        env.set('KAGGLE_KERNEL_INTEGRATIONS', 'GCS')
        with env:
            init_gcs()
            anonymous = storage.Client.create_anonymous_client()
            self.assertIsNotNone(anonymous)

    def test_default_credentials_gcs_enabled(self):
        """Without explicit credentials, Kaggle credentials are injected."""
        env = EnvironmentVarGuard()
        env.set('KAGGLE_USER_SECRETS_TOKEN', 'foobar')
        env.set('KAGGLE_KERNEL_INTEGRATIONS', 'GCS')
        with env:
            init_gcs()
            client = storage.Client(project="xyz")
            self.assertIsInstance(client._credentials, KaggleKernelCredentials)
            self.assertTrue(client._connection.user_agent.startswith("kaggle-gcp-client/1.0"))

    def test_monkeypatching_idempotent(self):
        """Calling init_gcs() twice must not re-wrap Client.__init__."""
        env = EnvironmentVarGuard()
        env.set('KAGGLE_USER_SECRETS_TOKEN', 'foobar')
        env.set('KAGGLE_KERNEL_INTEGRATIONS', 'GCS')
        with env:
            client1 = storage.Client.__init__
            init_gcs()
            client2 = storage.Client.__init__
            self.assertEqual(client1, client2)
|
import datetime
import os
import random
import sys
import warnings
sys.path = [os.path.abspath(os.path.dirname(__file__))] + sys.path
os.environ['is_test_suite'] = 'True'
from auto_ml import Predictor
from auto_ml.utils_models import load_ml_model
import dill
import numpy as np
from nose.tools import assert_equal, assert_not_equal, with_setup
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split
import utils_testing as utils
def test_feature_learning_getting_single_predictions_classification(model_name=None):
    """Train a feature-learning titanic classifier, save and reload it, then
    verify prediction quality, per-row latency, and repeatability."""
    np.random.seed(0)

    df_titanic_train, df_titanic_test = utils.get_titanic_binary_classification_dataset()

    column_descriptions = {
        'survived': 'output'
        , 'sex': 'categorical'
        , 'embarked': 'categorical'
        , 'pclass': 'categorical'
    }

    ml_predictor = Predictor(type_of_estimator='classifier', column_descriptions=column_descriptions)

    # NOTE: this is bad practice to pass in our same training set as our fl_data set, but we don't have enough data to do it any other way
    df_titanic_train, fl_data = train_test_split(df_titanic_train, test_size=0.2)
    ml_predictor.train(df_titanic_train, model_names=model_name, feature_learning=True, fl_data=fl_data)

    file_name = ml_predictor.save(str(random.random()))

    saved_ml_pipeline = load_ml_model(file_name)

    os.remove(file_name)
    try:
        keras_file_name = file_name[:-5] + '_keras_deep_learning_model.h5'
        os.remove(keras_file_name)
    except OSError:
        # FIX: was a bare `except:`. Only deep-learning models write a
        # companion .h5 file, so a missing file (OSError) is the only
        # expected failure; other exceptions should surface.
        pass

    df_titanic_test_dictionaries = df_titanic_test.to_dict('records')

    # 1. make sure the accuracy is the same
    predictions = []
    for row in df_titanic_test_dictionaries:
        predictions.append(saved_ml_pipeline.predict_proba(row)[1])

    print('predictions')
    print(predictions)

    first_score = utils.calculate_brier_score_loss(df_titanic_test.survived, predictions)
    print('first_score')
    print(first_score)
    # Make sure our score is good, but not unreasonably good
    lower_bound = -0.16
    if model_name == 'DeepLearningClassifier':
        lower_bound = -0.187

    assert lower_bound < first_score < -0.133

    # 2. make sure the speed is reasonable (do it a few extra times)
    data_length = len(df_titanic_test_dictionaries)
    start_time = datetime.datetime.now()
    for idx in range(1000):
        row_num = idx % data_length
        saved_ml_pipeline.predict(df_titanic_test_dictionaries[row_num])
    end_time = datetime.datetime.now()
    duration = end_time - start_time
    print('duration.total_seconds()')
    print(duration.total_seconds())

    # It's very difficult to set a benchmark for speed that will work across all machines.
    # On my 2013 bottom of the line 15" MacBook Pro, this runs in about 0.8 seconds for 1000 predictions
    # That's about 1 millisecond per prediction
    # Assuming we might be running on a test box that's pretty weak, multiply by 3
    # Also make sure we're not running unreasonably quickly
    assert 0.2 < duration.total_seconds() < 15

    # 3. make sure we're not modifying the dictionaries (the score is the same after running a few experiments as it is the first time)
    predictions = []
    for row in df_titanic_test_dictionaries:
        predictions.append(saved_ml_pipeline.predict_proba(row)[1])

    print('predictions')
    print(predictions)
    print('df_titanic_test_dictionaries')
    print(df_titanic_test_dictionaries)
    second_score = utils.calculate_brier_score_loss(df_titanic_test.survived, predictions)
    print('second_score')
    print(second_score)
    # Make sure our score is good, but not unreasonably good
    assert lower_bound < second_score < -0.133
def test_feature_learning_categorical_ensembling_getting_single_predictions_classification(model_name=None):
    """Train a categorical-ensemble classifier with feature learning, save and
    reload it, then verify prediction quality, latency, and repeatability."""
    np.random.seed(0)

    df_titanic_train, df_titanic_test = utils.get_titanic_binary_classification_dataset()

    column_descriptions = {
        'survived': 'output'
        , 'sex': 'categorical'
        , 'embarked': 'categorical'
        , 'pclass': 'categorical'
    }

    ml_predictor = Predictor(type_of_estimator='classifier', column_descriptions=column_descriptions)

    # NOTE: this is bad practice to pass in our same training set as our fl_data set, but we don't have enough data to do it any other way
    df_titanic_train, fl_data = train_test_split(df_titanic_train, test_size=0.2)
    ml_predictor.train_categorical_ensemble(df_titanic_train, model_names=model_name, feature_learning=True, fl_data=fl_data, categorical_column='embarked')

    file_name = ml_predictor.save(str(random.random()))

    # FIX: removed a redundant function-level `from auto_ml.utils_models
    # import load_ml_model`; it is already imported at module level.
    saved_ml_pipeline = load_ml_model(file_name)

    os.remove(file_name)
    try:
        keras_file_name = file_name[:-5] + '_keras_deep_learning_model.h5'
        os.remove(keras_file_name)
    except OSError:
        # FIX: was a bare `except:`; only a missing companion keras file
        # is expected here.
        pass

    df_titanic_test_dictionaries = df_titanic_test.to_dict('records')

    # 1. make sure the accuracy is the same
    predictions = []
    for row in df_titanic_test_dictionaries:
        predictions.append(saved_ml_pipeline.predict_proba(row)[1])

    print('predictions')
    print(predictions)

    first_score = utils.calculate_brier_score_loss(df_titanic_test.survived, predictions)
    print('first_score')
    print(first_score)
    # Make sure our score is good, but not unreasonably good
    lower_bound = -0.17
    if model_name == 'DeepLearningClassifier':
        lower_bound = -0.245
    if model_name == 'CatBoostClassifier':
        lower_bound = -0.265

    assert lower_bound < first_score < -0.140

    # 2. make sure the speed is reasonable (do it a few extra times)
    data_length = len(df_titanic_test_dictionaries)
    start_time = datetime.datetime.now()
    for idx in range(1000):
        row_num = idx % data_length
        saved_ml_pipeline.predict(df_titanic_test_dictionaries[row_num])
    end_time = datetime.datetime.now()
    duration = end_time - start_time
    print('duration.total_seconds()')
    print(duration.total_seconds())

    # It's very difficult to set a benchmark for speed that will work across all machines.
    # On my 2013 bottom of the line 15" MacBook Pro, this runs in about 0.8 seconds for 1000 predictions
    # That's about 1 millisecond per prediction
    # Assuming we might be running on a test box that's pretty weak, multiply by 3
    # Also make sure we're not running unreasonably quickly
    assert 0.2 < duration.total_seconds() < 15

    # 3. make sure we're not modifying the dictionaries (the score is the same after running a few experiments as it is the first time)
    predictions = []
    for row in df_titanic_test_dictionaries:
        predictions.append(saved_ml_pipeline.predict_proba(row)[1])

    print('predictions')
    print(predictions)
    print('df_titanic_test_dictionaries')
    print(df_titanic_test_dictionaries)
    second_score = utils.calculate_brier_score_loss(df_titanic_test.survived, predictions)
    print('second_score')
    print(second_score)
    # Make sure our score is good, but not unreasonably good
    assert lower_bound < second_score < -0.147
def test_feature_learning_getting_single_predictions_regression(model_name=None):
    """Train a feature-learning boston regressor, save and reload it, then
    verify prediction quality, latency, and repeatability."""
    np.random.seed(0)

    df_boston_train, df_boston_test = utils.get_boston_regression_dataset()

    column_descriptions = {
        'MEDV': 'output'
        , 'CHAS': 'categorical'
    }

    ml_predictor = Predictor(type_of_estimator='regressor', column_descriptions=column_descriptions)

    # NOTE: this is bad practice to pass in our same training set as our fl_data set, but we don't have enough data to do it any other way
    df_boston_train, fl_data = train_test_split(df_boston_train, test_size=0.2)
    ml_predictor.train(df_boston_train, model_names=model_name, feature_learning=True, fl_data=fl_data)

    file_name = ml_predictor.save(str(random.random()))

    saved_ml_pipeline = load_ml_model(file_name)

    os.remove(file_name)
    try:
        keras_file_name = file_name[:-5] + '_keras_deep_learning_model.h5'
        os.remove(keras_file_name)
    except OSError:
        # FIX: was a bare `except:`; only a missing companion keras file
        # is expected here.
        pass

    df_boston_test_dictionaries = df_boston_test.to_dict('records')

    # 1. make sure the accuracy is the same
    predictions = []
    for row in df_boston_test_dictionaries:
        predictions.append(saved_ml_pipeline.predict(row))

    first_score = utils.calculate_rmse(df_boston_test.MEDV, predictions)
    print('first_score')
    print(first_score)
    # Make sure our score is good, but not unreasonably good
    lower_bound = -4.0

    assert lower_bound < first_score < -2.8

    # 2. make sure the speed is reasonable (do it a few extra times)
    data_length = len(df_boston_test_dictionaries)
    start_time = datetime.datetime.now()
    for idx in range(1000):
        row_num = idx % data_length
        saved_ml_pipeline.predict(df_boston_test_dictionaries[row_num])
    end_time = datetime.datetime.now()
    duration = end_time - start_time
    print('duration.total_seconds()')
    print(duration.total_seconds())

    # It's very difficult to set a benchmark for speed that will work across all machines.
    # On my 2013 bottom of the line 15" MacBook Pro, this runs in about 0.8 seconds for 1000 predictions
    # That's about 1 millisecond per prediction
    # Assuming we might be running on a test box that's pretty weak, multiply by 3
    # Also make sure we're not running unreasonably quickly
    # FIX: dropped a no-op `/ 1.0` from the duration check.
    assert 0.2 < duration.total_seconds() < 15

    # 3. make sure we're not modifying the dictionaries (the score is the same after running a few experiments as it is the first time)
    predictions = []
    for row in df_boston_test_dictionaries:
        predictions.append(saved_ml_pipeline.predict(row))

    second_score = utils.calculate_rmse(df_boston_test.MEDV, predictions)
    print('second_score')
    print(second_score)
    # Make sure our score is good, but not unreasonably good
    assert lower_bound < second_score < -2.8
def test_feature_learning_categorical_ensembling_getting_single_predictions_regression(model_name=None):
    """Train a categorical-ensemble regressor with feature learning, save and
    reload it, then verify prediction quality, latency, and repeatability."""
    np.random.seed(0)

    df_boston_train, df_boston_test = utils.get_boston_regression_dataset()

    column_descriptions = {
        'MEDV': 'output'
        , 'CHAS': 'categorical'
    }

    ml_predictor = Predictor(type_of_estimator='regressor', column_descriptions=column_descriptions)

    # NOTE: this is bad practice to pass in our same training set as our fl_data set, but we don't have enough data to do it any other way
    df_boston_train, fl_data = train_test_split(df_boston_train, test_size=0.2)
    ml_predictor.train_categorical_ensemble(df_boston_train, model_names=model_name, feature_learning=True, fl_data=fl_data, categorical_column='CHAS')

    file_name = ml_predictor.save(str(random.random()))

    # FIX: removed a redundant function-level import of load_ml_model and
    # dead commented-out dill loading code.
    saved_ml_pipeline = load_ml_model(file_name)

    os.remove(file_name)
    try:
        keras_file_name = file_name[:-5] + '_keras_deep_learning_model.h5'
        os.remove(keras_file_name)
    except OSError:
        # FIX: was a bare `except:`; only a missing companion keras file
        # is expected here.
        pass

    df_boston_test_dictionaries = df_boston_test.to_dict('records')

    # 1. make sure the accuracy is the same
    predictions = []
    for row in df_boston_test_dictionaries:
        predictions.append(saved_ml_pipeline.predict(row))

    first_score = utils.calculate_rmse(df_boston_test.MEDV, predictions)
    print('first_score')
    print(first_score)
    # Make sure our score is good, but not unreasonably good
    lower_bound = -4.5

    assert lower_bound < first_score < -3.4

    # 2. make sure the speed is reasonable (do it a few extra times)
    data_length = len(df_boston_test_dictionaries)
    start_time = datetime.datetime.now()
    for idx in range(1000):
        row_num = idx % data_length
        saved_ml_pipeline.predict(df_boston_test_dictionaries[row_num])
    end_time = datetime.datetime.now()
    duration = end_time - start_time
    print('duration.total_seconds()')
    print(duration.total_seconds())

    # It's very difficult to set a benchmark for speed that will work across all machines.
    # On my 2013 bottom of the line 15" MacBook Pro, this runs in about 0.8 seconds for 1000 predictions
    # That's about 1 millisecond per prediction
    # Assuming we might be running on a test box that's pretty weak, multiply by 3
    # Also make sure we're not running unreasonably quickly
    # FIX: dropped a no-op `/ 1.0` from the duration check.
    assert 0.2 < duration.total_seconds() < 15

    # 3. make sure we're not modifying the dictionaries (the score is the same after running a few experiments as it is the first time)
    predictions = []
    for row in df_boston_test_dictionaries:
        predictions.append(saved_ml_pipeline.predict(row))

    second_score = utils.calculate_rmse(df_boston_test.MEDV, predictions)
    print('second_score')
    print(second_score)
    # Make sure our score is good, but not unreasonably good
    assert lower_bound < second_score < -3.4
def test_all_algos_classification(model_name=None):
    """Smoke-test training across every supported classifier type."""
    np.random.seed(0)

    df_titanic_train, df_titanic_test = utils.get_titanic_binary_classification_dataset()

    column_descriptions = {
        'survived': 'output',
        'sex': 'categorical',
        'embarked': 'categorical',
        'pclass': 'categorical',
    }

    classifier_names = [
        'LogisticRegression',
        'RandomForestClassifier',
        'RidgeClassifier',
        'GradientBoostingClassifier',
        'ExtraTreesClassifier',
        'AdaBoostClassifier',
        'SGDClassifier',
        'Perceptron',
        'PassiveAggressiveClassifier',
        'DeepLearningClassifier',
        'XGBClassifier',
        'LGBMClassifier',
        'LinearSVC',
    ]

    ml_predictor = Predictor(type_of_estimator='classifier', column_descriptions=column_descriptions)
    ml_predictor.train(df_titanic_train, model_names=classifier_names)

    test_score = ml_predictor.score(df_titanic_test, df_titanic_test.survived)

    print('test_score')
    print(test_score)

    # Linear models aren't super great on this dataset...
    assert -0.215 < test_score < -0.131
def test_all_algos_regression():
    """Smoke-test training across every supported regressor type."""
    # a random seed of 42 has ExtraTreesRegressor getting the best CV score, and that model doesn't generalize as well as GradientBoostingRegressor.
    np.random.seed(0)

    df_boston_train, df_boston_test = utils.get_boston_regression_dataset()

    column_descriptions = {
        'MEDV': 'output'
        , 'CHAS': 'categorical'
    }

    ml_predictor = Predictor(type_of_estimator='regressor', column_descriptions=column_descriptions)
    # BUG FIX: the list previously contained the classifier 'XGBClassifier'
    # in a regression run; use the regressor variant 'XGBRegressor'.
    ml_predictor.train(df_boston_train, model_names=['LinearRegression', 'RandomForestRegressor', 'Ridge', 'GradientBoostingRegressor', 'AdaBoostRegressor', 'SGDRegressor', 'PassiveAggressiveRegressor', 'Lasso', 'LassoLars', 'ElasticNet', 'OrthogonalMatchingPursuit', 'BayesianRidge', 'ARDRegression', 'MiniBatchKMeans', 'DeepLearningRegressor', 'LGBMRegressor', 'XGBRegressor', 'LinearSVR', 'CatBoostRegressor'])

    test_score = ml_predictor.score(df_boston_test, df_boston_test.MEDV)

    print('test_score')
    print(test_score)

    assert -3.4 < test_score < -2.8
def test_throws_warning_when_fl_data_equals_df_train():
    """Training with fl_data identical to the training frame must warn."""
    df_titanic_train, df_titanic_test = utils.get_titanic_binary_classification_dataset()

    column_descriptions = {
        'survived': 'output'
        , 'sex': 'categorical'
        , 'embarked': 'categorical'
        , 'pclass': 'categorical'
    }

    ml_predictor = Predictor(type_of_estimator='classifier', column_descriptions=column_descriptions)

    with warnings.catch_warnings(record=True) as w:
        try:
            ml_predictor.train(df_titanic_train, feature_learning=True, fl_data=df_titanic_train)
        except KeyError:
            # FIX: dropped the unused `as e` binding. The KeyError is the
            # expected abort path after the warning fires.
            pass
        # We should not be getting to this line- we should be throwing an error above

    for thing in w:
        print(thing)
    # FIX: removed a no-op trailing `assert True`.
    assert len(w) >= 1
|
import pkgutil
from io import StringIO
import numpy as np
import pandas as pd
from scattertext.Common import DEFAULT_BACKGROUND_SCALER_ALGO, DEFAULT_BACKGROUND_BETA
from scattertext.termscoring import ScaledFScore
class TermCategoryFrequencies(object):
	'''
	This class allows you to produce scatter plots of raw term frequency counts.

	Occasionally, only term frequency statistics are available. This may happen in the case of very large,
	lost, or proprietary data sets. `TermCategoryFrequencies` is a corpus representation,that can accept this
	sort of data, along with any categorized documents that happen to be available.

	Let use the [Corpus of Contemporary American English](https://corpus.byu.edu/coca/) as an example.
	We'll construct a visualization
	to analyze the difference between spoken American English and English that occurs in fiction.

	```python
	convention_df = (pd.read_excel('https://www.wordfrequency.info/files/genres_sample.xls')
	                 .dropna()
	                 .set_index('lemma')[['SPOKEN', 'FICTION']]
	                 .iloc[:1000])
	convention_df.head()
	          SPOKEN    FICTION
	lemma
	the    3859682.0  4092394.0
	I      1346545.0  1382716.0
	they    609735.0   352405.0
	she     212920.0   798208.0
	would   233766.0   229865.0
	```

	Transforming this into a visualization is extremely easy. Just pass a dataframe indexed on
	terms with columns indicating category-counts into the the `TermCategoryFrequencies` constructor.

	```python
	term_cat_freq = st.TermCategoryFrequencies(convention_df)
	```

	And call `produce_scattertext_explorer` normally:

	```python
	html = st.produce_scattertext_explorer(
	    term_cat_freq,
	    category='SPOKEN',
	    category_name='Spoken',
	    not_category_name='Fiction',
	)
	```

	[![demo](https://jasonkessler.github.io/demo_category_frequencies.png)](https://jasonkessler.github.io/demo_category_frequencies.html)

	If you'd like to incorporate some documents into the visualization, you can add them into to the
	`TermCategoyFrequencies` object.

	First, let's extract some example Fiction and Spoken documents from the sample COCA corpus.

	```python
	import requests, zipfile, io
	coca_sample_url = 'http://corpus.byu.edu/cocatext/samples/text.zip'
	zip_file = zipfile.ZipFile(io.BytesIO(requests.get(coca_sample_url).content))

	document_df = pd.DataFrame(
	    [{'text': zip_file.open(fn).read().decode('utf-8'),
	      'category': 'SPOKEN'}
	     for fn in zip_file.filelist if fn.filename.startswith('w_spok')][:2]
	    + [{'text': zip_file.open(fn).read().decode('utf-8'),
	        'category': 'FICTION'}
	       for fn in zip_file.filelist if fn.filename.startswith('w_fic')][:2])
	```

	And we'll pass the `documents_df` dataframe into `TermCategoryFrequencies` via the `document_category_df`
	parameter. Ensure the dataframe has two columns, 'text' and 'category'. Afterward, we can
	call `produce_scattertext_explorer` (or your visualization function of choice) normally.

	```python
	doc_term_cat_freq = st.TermCategoryFrequencies(convention_df, document_category_df=document_df)

	html = st.produce_scattertext_explorer(
	    doc_term_cat_freq,
	    category='SPOKEN',
	    category_name='Spoken',
	    not_category_name='Fiction',
	)
	```
	'''

	def __init__(self,
	             category_frequency_df,
	             document_category_df=None,
	             metadata_frequency_df=None,
	             unigram_frequency_path=None):
		'''
		Parameters
		----------
		category_frequency_df : pd.DataFrame
			Index is term, columns are categories, values are counts
		document_category_df : pd.DataFrame, optional
			Columns are text, category. Values are text (string) and category (string)
		metadata_frequency_df : pd.DataFrame, optional
			Index is term, columns are categories, values are counts
		unigram_frequency_path : See TermDocMatrix, optional
		'''
		if document_category_df is not None:
			#assert set(document_category_df.columns) == set(['text', 'category'])
			assert 'text' in document_category_df.columns and 'category' in document_category_df.columns
		self._document_category_df = document_category_df
		self.metadata_frequency_df = metadata_frequency_df
		self.term_category_freq_df = category_frequency_df
		self._unigram_frequency_path = unigram_frequency_path

	def get_num_terms(self):
		# Number of distinct terms (rows) in the frequency table.
		return len(self.term_category_freq_df)

	def get_categories(self):
		# Category names are the columns of the frequency table.
		return list(self.term_category_freq_df.columns)

	def get_num_metadata(self):
		# NOTE(review): assumes metadata_frequency_df was supplied; raises
		# TypeError when it is None — confirm callers guard against this.
		return len(self.metadata_frequency_df)

	def get_scaled_f_scores_vs_background(self,
	                                      scaler_algo=DEFAULT_BACKGROUND_SCALER_ALGO,
	                                      beta=DEFAULT_BACKGROUND_BETA):
		'''
		Score terms against the background corpus, sorted by scaled f-score.
		'''
		df = self.get_term_and_background_counts()
		df['Scaled f-score'] = ScaledFScore.get_scores_for_category(
			df['corpus'], df['background'], scaler_algo, beta
		)
		return df.sort_values(by='Scaled f-score', ascending=False)

	def get_term_and_background_counts(self):
		'''
		Returns
		-------
		A pd.DataFrame consisting of unigram term counts of words occurring
		 in the TermDocumentMatrix and their corresponding background corpus
		 counts.  The dataframe has two columns, corpus and background.

		>>> corpus.get_unigram_corpus.get_term_and_background_counts()
		                  corpus  background
		obama              702.0    565739.0
		romney             570.0    695398.0
		barack             248.0    227861.0
		...
		'''
		background_df = self._get_background_unigram_frequencies()
		corpus_freq_df = pd.DataFrame({'corpus': self.term_category_freq_df.sum(axis=1)})
		# Background counts are unigram-only, so restrict the corpus side to
		# single-word terms before joining.
		corpus_unigram_freq = corpus_freq_df.loc[[w for w in corpus_freq_df.index if ' ' not in w]]
		df = corpus_unigram_freq.join(background_df, how='outer').fillna(0)
		return df

	def _get_background_unigram_frequencies(self):
		'''
		Load the background unigram frequency table as a DataFrame indexed
		on word, sorted by descending background count.
		'''
		if self._unigram_frequency_path:
			# BUG FIX: the file handle was previously opened and never
			# closed; read it inside a context manager instead.
			with open(self._unigram_frequency_path) as unigram_file:
				table_text = unigram_file.read()
		else:
			table_text = pkgutil.get_data('scattertext', 'data/count_1w.txt').decode('utf-8')
		to_ret = (pd.read_table(StringIO(table_text),
		                        names=['word', 'background'])
		          .sort_values(ascending=False, by='background')
		          .drop_duplicates(['word'])
		          .set_index('word'))
		return to_ret

	def list_extra_features(self):
		# Extra features are only meaningful for document-backed corpora.
		raise Exception("Not implemented in TermCategoryFrequencies")

	def get_doc_indices(self):
		'''
		Returns
		-------
		np.array
			Integer document indices
		'''
		if self._document_category_df is None:
			return np.array([])
		categories_d = {d: i for i, d in enumerate(self.get_categories())}
		return self._document_category_df.category.apply(categories_d.get).values

	def get_texts(self):
		'''
		Returns
		-------
		np.array
			Texts
		'''
		if self._document_category_df is None:
			return np.array([])
		return self._document_category_df.text.values

	def get_term_category_frequencies(self, scatterchartdata):
		'''
		Parameters
		----------
		scatterchartdata : ScatterChartData

		Returns
		-------
		pd.DataFrame
		'''
		df = self.term_category_freq_df.rename(
			columns={c: c + ' freq' for c in self.term_category_freq_df}
		)
		df.index.name = 'term'
		return df

	def apply_ranker(self, term_ranker):
		'''
		Parameters
		----------
		term_ranker : TermRanker
			We'll ignore this

		Returns
		-------
		pd.Dataframe
		'''
		return self.get_term_category_frequencies(None)
|
from typing import List, Optional
import voluptuous as vol
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_DEVICE_ID,
CONF_DOMAIN,
CONF_ENTITY_ID,
CONF_TYPE,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
)
from homeassistant.core import Context, HomeAssistant
from homeassistant.helpers import entity_registry
import homeassistant.helpers.config_validation as cv
from . import DOMAIN
# Device automation action types supported by fan entities.
ACTION_TYPES = {"turn_on", "turn_off"}

# Schema validating a fan device action configuration entry.
ACTION_SCHEMA = cv.DEVICE_ACTION_BASE_SCHEMA.extend(
    {
        vol.Required(CONF_TYPE): vol.In(ACTION_TYPES),
        vol.Required(CONF_ENTITY_ID): cv.entity_domain(DOMAIN),
    }
)
async def async_get_actions(hass: HomeAssistant, device_id: str) -> List[dict]:
    """List device actions for Fan devices."""
    registry = await entity_registry.async_get_registry(hass)
    actions = []

    # Offer one turn_on and one turn_off action per fan entity on the device.
    for entry in entity_registry.async_entries_for_device(registry, device_id):
        if entry.domain != DOMAIN:
            continue

        base_action = {
            CONF_DEVICE_ID: device_id,
            CONF_DOMAIN: DOMAIN,
            CONF_ENTITY_ID: entry.entity_id,
        }
        actions.append({**base_action, CONF_TYPE: "turn_on"})
        actions.append({**base_action, CONF_TYPE: "turn_off"})

    return actions
async def async_call_action_from_config(
    hass: HomeAssistant, config: dict, variables: dict, context: Optional[Context]
) -> None:
    """Execute a device action."""
    config = ACTION_SCHEMA(config)

    service_data = {ATTR_ENTITY_ID: config[CONF_ENTITY_ID]}

    if config[CONF_TYPE] == "turn_on":
        service = SERVICE_TURN_ON
    else:
        # FIX: an `elif` with no `else` left `service` unbound for any other
        # value.  The schema restricts CONF_TYPE to ACTION_TYPES, so anything
        # that is not "turn_on" is "turn_off".
        service = SERVICE_TURN_OFF

    await hass.services.async_call(
        DOMAIN, service, service_data, blocking=True, context=context
    )
|
import asyncio
import logging
from aiohttp import web
from homeassistant.components.http import HomeAssistantView
from homeassistant.const import HTTP_BAD_REQUEST
from homeassistant.helpers.typing import HomeAssistantType
from .const import ATTR_ADMIN, ATTR_ENABLE, ATTR_ICON, ATTR_PANELS, ATTR_TITLE
from .handler import HassioAPIError
_LOGGER = logging.getLogger(__name__)
async def async_setup_addon_panel(hass: HomeAssistantType, hassio):
    """Add-on Ingress Panel setup."""
    hassio_addon_panel = HassIOAddonPanel(hass, hassio)
    hass.http.register_view(hassio_addon_panel)

    # If panels are exists
    panels = await hassio_addon_panel.get_panels()
    if not panels:
        return

    # Register available panels
    jobs = []
    for addon, data in panels.items():
        if not data[ATTR_ENABLE]:
            continue
        jobs.append(_register_panel(hass, addon, data))

    if jobs:
        # FIX: asyncio.wait() with bare coroutines is deprecated (and removed
        # in Python 3.11) and silently discards exceptions; gather() awaits
        # the registrations and propagates any failure.
        await asyncio.gather(*jobs)
class HassIOAddonPanel(HomeAssistantView):
    """Hass.io view to handle base part."""

    name = "api:hassio_push:panel"
    url = "/api/hassio_push/panel/{addon}"

    def __init__(self, hass, hassio):
        """Initialize WebView."""
        self.hass = hass
        self.hassio = hassio

    async def post(self, request, addon):
        """Handle new add-on panel requests."""
        panels = await self.get_panels()

        # Panel exists for add-on slug
        if addon not in panels or not panels[addon][ATTR_ENABLE]:
            _LOGGER.error("Panel is not enable for %s", addon)
            return web.Response(status=HTTP_BAD_REQUEST)
        data = panels[addon]

        # Register panel
        await _register_panel(self.hass, addon, data)
        return web.Response()

    async def delete(self, request, addon):
        """Handle remove add-on panel requests."""
        # Removing the frontend panel is sufficient; no supervisor call needed.
        self.hass.components.frontend.async_remove_panel(addon)
        return web.Response()

    async def get_panels(self):
        """Return panels add-on info data."""
        try:
            data = await self.hassio.get_ingress_panels()
            return data[ATTR_PANELS]
        except HassioAPIError as err:
            # Best effort: log and report no panels rather than failing setup.
            _LOGGER.error("Can't read panel info: %s", err)
        return {}
async def _register_panel(hass, addon, data):
    """Init coroutine to register the panel."""
    panel_options = {
        "frontend_url_path": addon,
        "webcomponent_name": "hassio-main",
        "sidebar_title": data[ATTR_TITLE],
        "sidebar_icon": data[ATTR_ICON],
        "js_url": "/api/hassio/app/entrypoint.js",
        "embed_iframe": True,
        "require_admin": data[ATTR_ADMIN],
        "config": {"ingress": addon},
    }
    await hass.components.panel_custom.async_register_panel(**panel_options)
|
import os
import pytest
from molecule import config
from molecule.driver import linode
@pytest.fixture
def linode_instance(patched_config_validate, config_instance):
    # Driver under test, built from the shared molecule config fixtures.
    return linode.Linode(config_instance)
def test_linode_config_gives_config_object(linode_instance):
    """The driver must hold a molecule Config object."""
    assert isinstance(linode_instance._config, config.Config)


def test_linode_testinfra_options_property(linode_instance):
    expected = {
        'connection': 'ansible',
        'ansible-inventory': linode_instance._config.provisioner.inventory_file,
    }
    assert expected == linode_instance.testinfra_options


def test_linode_name_property(linode_instance):
    assert linode_instance.name == 'linode'


def test_linode_options_property(linode_instance):
    assert linode_instance.options == {'managed': True}


def test_linode_login_cmd_template_property(linode_instance):
    expected_template = 'ssh {address} -l {user} -p {port} -i {identity_file}'
    assert expected_template in linode_instance.login_cmd_template
def test_linode_safe_files_property(linode_instance):
    instance_config = os.path.join(
        linode_instance._config.scenario.ephemeral_directory,
        'instance_config.yml')
    assert [instance_config] == linode_instance.safe_files


def test_linode_default_safe_files_property(linode_instance):
    instance_config = os.path.join(
        linode_instance._config.scenario.ephemeral_directory,
        'instance_config.yml')
    assert [instance_config] == linode_instance.default_safe_files
def test_linode_delegated_property(linode_instance):
    # Linode is a managed (non-delegated) driver.
    assert not linode_instance.delegated


def test_linode_managed_property(linode_instance):
    assert linode_instance.managed


def test_linode_default_ssh_connection_options_property(linode_instance):
    expected = [
        '-o UserKnownHostsFile=/dev/null',
        '-o ControlMaster=auto',
        '-o ControlPersist=60s',
        '-o IdentitiesOnly=yes',
        '-o StrictHostKeyChecking=no',
    ]
    assert expected == linode_instance.default_ssh_connection_options
def test_linode_login_options(linode_instance, mocker):
    """login_options() must surface the stored instance config unchanged."""
    target = 'molecule.driver.linode.Linode._get_instance_config'
    get_instance_config_patch = mocker.patch(target)
    get_instance_config_patch.return_value = {
        'instance': 'linode',
        'address': '172.16.0.2',
        'user': 'linode-admin',
        'port': 22,
        'identity_file': '/foo/bar',
    }

    # FIX: the expected dict previously overwrote the mock handle
    # (`get_instance_config_patch`), obscuring intent; use a dedicated name.
    expected = {
        'instance': 'linode',
        'address': '172.16.0.2',
        'user': 'linode-admin',
        'port': 22,
        'identity_file': '/foo/bar',
    }

    assert expected == linode_instance.login_options('linode')
def test_linode_ansible_connection_options(linode_instance, mocker):
    """ansible_connection_options() must map the instance config to ansible vars."""
    target = 'molecule.driver.linode.Linode._get_instance_config'
    get_instance_config_patch = mocker.patch(target)
    get_instance_config_patch.return_value = {
        'instance': 'linode',
        'address': '172.16.0.2',
        'user': 'linode-admin',
        'port': 22,
        'ssh_pass': 'foobar',
        'identity_file': '/foo/bar',
    }

    # FIX: the expected dict previously overwrote the mock handle and was
    # formatted one-value-per-line; use a dedicated name and normal layout.
    expected = {
        'ansible_host': '172.16.0.2',
        'ansible_port': 22,
        'ansible_user': 'linode-admin',
        'ansible_private_key_file': '/foo/bar',
        'connection': 'ssh',
        'ansible_ssh_common_args': ('-o UserKnownHostsFile=/dev/null '
                                    '-o ControlMaster=auto '
                                    '-o ControlPersist=60s '
                                    '-o IdentitiesOnly=yes '
                                    '-o StrictHostKeyChecking=no'),
        'ansible_ssh_pass': 'foobar',
    }

    connection_options = linode_instance.ansible_connection_options('linode')
    assert expected == connection_options
def test_linode_instance_config_property(linode_instance):
    expected_path = os.path.join(
        linode_instance._config.scenario.ephemeral_directory,
        'instance_config.yml')
    assert linode_instance.instance_config == expected_path


def test_linode_ssh_connection_options_property(linode_instance):
    expected = [
        '-o UserKnownHostsFile=/dev/null',
        '-o ControlMaster=auto',
        '-o ControlPersist=60s',
        '-o IdentitiesOnly=yes',
        '-o StrictHostKeyChecking=no',
    ]
    assert expected == linode_instance.ssh_connection_options
def test_linode_status(mocker, linode_instance):
    results = linode_instance.status()

    assert len(results) == 2

    # Both platform instances share every field except the instance name.
    for status, expected_name in zip(results, ['instance-1', 'instance-2']):
        assert status.instance_name == expected_name
        assert status.driver_name == 'linode'
        assert status.provisioner_name == 'ansible'
        assert status.scenario_name == 'default'
        assert status.created == 'false'
        assert status.converged == 'false'


def test_created(linode_instance):
    assert linode_instance._created() == 'false'


def test_converged(linode_instance):
    assert linode_instance._converged() == 'false'
|
from datetime import datetime
from typing import Callable, Optional
from django.core.cache import cache
from django.http import JsonResponse
from django.shortcuts import get_object_or_404, render
from django.utils.translation import pgettext
from weblate.auth.models import User
from weblate.lang.models import Language
from weblate.trans.models import Change
from weblate.utils.views import get_percent_color, get_project_translation
def cache_key(*args):
    """Build an ``activity-...`` cache key from arbitrary arguments.

    Each argument contributes one dash-separated part: ``"0"`` when falsy,
    its ``id`` attribute when present, and its string form otherwise.
    """
    def _part(value):
        if not value:
            return "0"
        if hasattr(value, "id"):
            return str(value.id)
        return str(value)

    parts = "-".join(_part(value) for value in args)
    return "activity-{}".format(parts)
def get_activity_stats(
    request,
    days: int,
    step: int,
    project: Optional[str] = None,
    component: Optional[str] = None,
    lang: Optional[str] = None,
    user: Optional[str] = None,
):
    """Parse json stats URL params."""
    # Normalize the URL parameters into the (project, component, translation,
    # language, user) tuple expected by Change.objects.base_stats.  Exactly
    # one of the four scopes below is active at a time.
    if project is None and lang is None and user is None:
        # Site-wide stats: no filters at all.
        project = None
        component = None
        translation = None
        language = None
        user = None
    elif user is not None:
        # Per-user stats; the username must exist.
        project = None
        component = None
        translation = None
        language = None
        user = get_object_or_404(User, username=user)
    elif project is None:
        # Per-language stats; the language code must exist.
        project = None
        component = None
        translation = None
        language = get_object_or_404(Language, code=lang)
        user = None
    else:
        # Process parameters
        project, component, translation = get_project_translation(
            request, project, component, lang
        )
        language = None
        user = None

    key = cache_key(days, step, project, component, translation, language, user)
    result = cache.get(key)
    if not result:
        # Get actual stats
        result = Change.objects.base_stats(
            days, step, project, component, translation, language, user
        )
        # Cache for four hours.
        cache.set(key, result, 3600 * 4)

    return result
def get_label_month(pos: int, previous_month: int, timestamp: datetime) -> str:
    """Label only the first data point of each month as month/year."""
    if previous_month == timestamp.month:
        return ""
    return pgettext(
        "Format string for yearly activity chart", "{month}/{year}"
    ).format(month=timestamp.month, year=timestamp.year)
def get_label_day(pos: int, previous_month: int, timestamp: datetime) -> str:
    """Label every fifth data point as day/month."""
    if pos % 5 == 0:
        # FIX: dropped an unused year= keyword; the format string only
        # consumes {day} and {month}.
        return pgettext(
            "Format string for monthly activity chart", "{day}/{month}"
        ).format(day=timestamp.day, month=timestamp.month)
    return ""
def render_activity(
    request,
    days: int,
    step: int,
    label_func: Callable[[int, int, datetime], str],
    project: Optional[str] = None,
    component: Optional[str] = None,
    lang: Optional[str] = None,
    user: Optional[str] = None,
):
    """Return activity for matching changes and interval as SVG chart.

    Each serie entry is (value, label, x-offset, bar color, bar height,
    bar y-position), consumed by the svg/activity.svg template.
    """
    activity = get_activity_stats(request, days, step, project, component, lang, user)
    # default=0 guards against an empty stats list, which would
    # otherwise raise ValueError from max().
    max_value = max((item[1] for item in activity), default=0)
    serie = []
    previous_month = -1
    offset = 0
    for pos, (timestamp, value) in enumerate(activity):
        percent = value * 100 // max_value if max_value else 0
        # Keep tiny non-zero bars visible in the chart.
        if value and percent < 4:
            percent = 4
        label = label_func(pos, previous_month, timestamp)
        previous_month = timestamp.month
        offset += 15
        height = int(1.5 * percent)
        serie.append(
            (
                value,
                label,
                offset,
                get_percent_color(percent),
                height,
                10 + (150 - height),
            )
        )
    return render(
        request, "svg/activity.svg", {"serie": serie}, content_type="image/svg+xml"
    )
def yearly_activity(
    request,
    project: Optional[str] = None,
    component: Optional[str] = None,
    lang: Optional[str] = None,
    user: Optional[str] = None,
):
    """Render yearly activity (364 days in weekly buckets) as SVG chart."""
    return render_activity(
        request,
        364,
        7,
        get_label_month,
        project,
        component,
        lang,
        user,
    )
def monthly_activity(
    request,
    project: Optional[str] = None,
    component: Optional[str] = None,
    lang: Optional[str] = None,
    user: Optional[str] = None,
):
    """Render monthly activity (52 days in daily buckets) as SVG chart."""
    return render_activity(
        request,
        52,
        1,
        get_label_day,
        project,
        component,
        lang,
        user,
    )
def monthly_activity_json(
    request,
    project: Optional[str] = None,
    component: Optional[str] = None,
    lang: Optional[str] = None,
    user: Optional[str] = None,
):
    """Return monthly activity counts for matching changes as JSON list."""
    activity = get_activity_stats(request, 52, 1, project, component, lang, user)
    counts = [value for _timestamp, value in activity]
    return JsonResponse(data=counts, safe=False)
|
from homeassistant.components.switch import SwitchEntity
from homeassistant.const import CONF_NAME
from . import LIGHTWAVE_LINK
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Find and return LightWave switches."""
    # This platform is only set up through discovery.
    if not discovery_info:
        return

    lwlink = hass.data[LIGHTWAVE_LINK]
    entities = [
        LWRFSwitch(device_config[CONF_NAME], device_id, lwlink)
        for device_id, device_config in discovery_info.items()
    ]
    async_add_entities(entities)
class LWRFSwitch(SwitchEntity):
    """Representation of a LightWaveRF switch."""

    def __init__(self, name, device_id, lwlink):
        """Initialize LWRFSwitch entity."""
        self._lwlink = lwlink
        self._device_id = device_id
        self._name = name
        self._state = None

    @property
    def should_poll(self):
        """Disable polling; state changes are pushed via async_write_ha_state."""
        return False

    @property
    def name(self):
        """Return the configured switch name."""
        return self._name

    @property
    def is_on(self):
        """Return the last known on/off state."""
        return self._state

    async def async_turn_on(self, **kwargs):
        """Turn the LightWave switch on."""
        self._state = True
        self._lwlink.turn_on_switch(self._device_id, self._name)
        self.async_write_ha_state()

    async def async_turn_off(self, **kwargs):
        """Turn the LightWave switch off."""
        self._state = False
        self._lwlink.turn_off(self._device_id, self._name)
        self.async_write_ha_state()
|
from homeassistant.components.vacuum import (
ATTR_FAN_SPEED,
ATTR_PARAMS,
DOMAIN,
SERVICE_CLEAN_SPOT,
SERVICE_LOCATE,
SERVICE_PAUSE,
SERVICE_RETURN_TO_BASE,
SERVICE_SEND_COMMAND,
SERVICE_SET_FAN_SPEED,
SERVICE_START,
SERVICE_START_PAUSE,
SERVICE_STOP,
)
from homeassistant.const import (
ATTR_COMMAND,
ATTR_ENTITY_ID,
ENTITY_MATCH_ALL,
SERVICE_TOGGLE,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
)
from homeassistant.loader import bind_hass
@bind_hass
def turn_on(hass, entity_id=ENTITY_MATCH_ALL):
    """Turn all or specified vacuum on."""
    hass.add_job(async_turn_on, hass, entity_id)


async def async_turn_on(hass, entity_id=ENTITY_MATCH_ALL):
    """Turn all or specified vacuum on."""
    data = None
    if entity_id:
        data = {ATTR_ENTITY_ID: entity_id}
    await hass.services.async_call(DOMAIN, SERVICE_TURN_ON, data, blocking=True)
@bind_hass
def turn_off(hass, entity_id=ENTITY_MATCH_ALL):
    """Turn all or specified vacuum off."""
    hass.add_job(async_turn_off, hass, entity_id)


async def async_turn_off(hass, entity_id=ENTITY_MATCH_ALL):
    """Turn all or specified vacuum off."""
    data = None
    if entity_id:
        data = {ATTR_ENTITY_ID: entity_id}
    await hass.services.async_call(DOMAIN, SERVICE_TURN_OFF, data, blocking=True)
@bind_hass
def toggle(hass, entity_id=ENTITY_MATCH_ALL):
    """Toggle all or specified vacuum."""
    hass.add_job(async_toggle, hass, entity_id)


async def async_toggle(hass, entity_id=ENTITY_MATCH_ALL):
    """Toggle all or specified vacuum."""
    data = None
    if entity_id:
        data = {ATTR_ENTITY_ID: entity_id}
    await hass.services.async_call(DOMAIN, SERVICE_TOGGLE, data, blocking=True)
@bind_hass
def locate(hass, entity_id=ENTITY_MATCH_ALL):
    """Locate all or specified vacuum."""
    hass.add_job(async_locate, hass, entity_id)


async def async_locate(hass, entity_id=ENTITY_MATCH_ALL):
    """Locate all or specified vacuum."""
    data = None
    if entity_id:
        data = {ATTR_ENTITY_ID: entity_id}
    await hass.services.async_call(DOMAIN, SERVICE_LOCATE, data, blocking=True)
@bind_hass
def clean_spot(hass, entity_id=ENTITY_MATCH_ALL):
    """Tell all or specified vacuum to perform a spot clean-up."""
    hass.add_job(async_clean_spot, hass, entity_id)


async def async_clean_spot(hass, entity_id=ENTITY_MATCH_ALL):
    """Tell all or specified vacuum to perform a spot clean-up."""
    data = None
    if entity_id:
        data = {ATTR_ENTITY_ID: entity_id}
    await hass.services.async_call(DOMAIN, SERVICE_CLEAN_SPOT, data, blocking=True)
@bind_hass
def return_to_base(hass, entity_id=ENTITY_MATCH_ALL):
    """Tell all or specified vacuum to return to base."""
    hass.add_job(async_return_to_base, hass, entity_id)


async def async_return_to_base(hass, entity_id=ENTITY_MATCH_ALL):
    """Tell all or specified vacuum to return to base."""
    data = None
    if entity_id:
        data = {ATTR_ENTITY_ID: entity_id}
    await hass.services.async_call(DOMAIN, SERVICE_RETURN_TO_BASE, data, blocking=True)
@bind_hass
def start_pause(hass, entity_id=ENTITY_MATCH_ALL):
    """Tell all or specified vacuum to start or pause the current task."""
    hass.add_job(async_start_pause, hass, entity_id)


async def async_start_pause(hass, entity_id=ENTITY_MATCH_ALL):
    """Tell all or specified vacuum to start or pause the current task."""
    data = None
    if entity_id:
        data = {ATTR_ENTITY_ID: entity_id}
    await hass.services.async_call(DOMAIN, SERVICE_START_PAUSE, data, blocking=True)
@bind_hass
def start(hass, entity_id=ENTITY_MATCH_ALL):
    """Tell all or specified vacuum to start or resume the current task."""
    hass.add_job(async_start, hass, entity_id)


async def async_start(hass, entity_id=ENTITY_MATCH_ALL):
    """Tell all or specified vacuum to start or resume the current task."""
    data = None
    if entity_id:
        data = {ATTR_ENTITY_ID: entity_id}
    await hass.services.async_call(DOMAIN, SERVICE_START, data, blocking=True)
@bind_hass
def pause(hass, entity_id=ENTITY_MATCH_ALL):
    """Tell all or the specified vacuum to pause the current task."""
    hass.add_job(async_pause, hass, entity_id)


async def async_pause(hass, entity_id=ENTITY_MATCH_ALL):
    """Tell all or the specified vacuum to pause the current task."""
    data = None
    if entity_id:
        data = {ATTR_ENTITY_ID: entity_id}
    await hass.services.async_call(DOMAIN, SERVICE_PAUSE, data, blocking=True)
@bind_hass
def stop(hass, entity_id=ENTITY_MATCH_ALL):
    """Stop all or specified vacuum."""
    hass.add_job(async_stop, hass, entity_id)


async def async_stop(hass, entity_id=ENTITY_MATCH_ALL):
    """Stop all or specified vacuum."""
    data = None
    if entity_id:
        data = {ATTR_ENTITY_ID: entity_id}
    await hass.services.async_call(DOMAIN, SERVICE_STOP, data, blocking=True)
@bind_hass
def set_fan_speed(hass, fan_speed, entity_id=ENTITY_MATCH_ALL):
    """Set fan speed for all or specified vacuum."""
    hass.add_job(async_set_fan_speed, hass, fan_speed, entity_id)


async def async_set_fan_speed(hass, fan_speed, entity_id=ENTITY_MATCH_ALL):
    """Set fan speed for all or specified vacuum."""
    data = {}
    if entity_id:
        data[ATTR_ENTITY_ID] = entity_id
    data[ATTR_FAN_SPEED] = fan_speed
    await hass.services.async_call(DOMAIN, SERVICE_SET_FAN_SPEED, data, blocking=True)
@bind_hass
def send_command(hass, command, params=None, entity_id=ENTITY_MATCH_ALL):
    """Send command to all or specified vacuum."""
    hass.add_job(async_send_command, hass, command, params, entity_id)


async def async_send_command(hass, command, params=None, entity_id=ENTITY_MATCH_ALL):
    """Send command to all or specified vacuum."""
    data = {}
    if entity_id:
        data[ATTR_ENTITY_ID] = entity_id
    data[ATTR_COMMAND] = command
    if params is not None:
        data[ATTR_PARAMS] = params
    await hass.services.async_call(DOMAIN, SERVICE_SEND_COMMAND, data, blocking=True)
|
from django import forms
from django.conf import settings
from django.http import HttpRequest
from social_django.views import complete
from weblate.accounts.forms import UniqueEmailMixin
from weblate.accounts.models import AuditLog
from weblate.accounts.strategy import create_session
from weblate.accounts.views import store_userid
from weblate.auth.models import User, get_anonymous
from weblate.trans.models import Change
def send_invitation(request: HttpRequest, project_name: str, user: User):
    """Send invitation to user to join project.

    Builds a synthetic anonymous POST request carrying the invited
    user's e-mail and drives the social-auth "email" pipeline with it,
    so the invitation goes out through the regular e-mail login flow.
    """
    # Synthetic request impersonating an anonymous POST of the e-mail form.
    fake = HttpRequest()
    fake.user = get_anonymous()
    fake.method = "POST"
    fake.session = create_session()
    # Context consumed by the invitation e-mail template.
    fake.session["invitation_context"] = {
        "from_user": request.user.full_name,
        "project_name": project_name,
    }
    fake.POST["email"] = user.email
    # Reuse original request metadata (host, client address, ...).
    fake.META = request.META
    # Record the inviting user before handing off to social-auth.
    store_userid(fake, invite=True)
    complete(fake, "email")
class InviteUserForm(forms.ModelForm, UniqueEmailMixin):
    """Form for inviting a new user, optionally into a project."""

    class Meta:
        model = User
        fields = ["email", "username", "full_name"]

    def save(self, request, project=None):
        """Create the invited user and send the invitation e-mail.

        When *project* is given, the user is added to it and the
        invitation is recorded in the project history.
        """
        # Invited users sign in via the e-mailed invitation, not a password.
        self.instance.set_unusable_password()
        user = super().save()
        if project:
            project.add_user(user)
            Change.objects.create(
                project=project,
                action=Change.ACTION_INVITE_USER,
                user=request.user,
                details={"username": user.username},
            )
        AuditLog.objects.create(
            user=user,
            request=request,
            activity="invited",
            username=request.user.username,
        )
        send_invitation(request, project.name if project else settings.SITE_TITLE, user)
class AdminInviteUserForm(InviteUserForm):
    """Invite form variant that additionally exposes superuser status."""

    class Meta:
        model = User
        fields = ["email", "username", "full_name", "is_superuser"]
|
from django.conf import settings
from weblate.machinery.base import (
MachineryRateLimit,
MachineTranslation,
MachineTranslationError,
MissingConfiguration,
)
# Baidu Translate general-translation API endpoint.
BAIDU_API = "http://api.fanyi.baidu.com/api/trans/vip/translate"
class BaiduTranslation(MachineTranslation):
    """Baidu API machine translation support."""

    name = "Baidu"
    max_score = 90

    # Map codes used by Weblate (keys) to codes used by Baidu (values)
    language_map = {
        "zh_Hans": "zh",
        "ja": "jp",
        "ko": "kor",
        "fr": "fra",
        "es": "spa",
        "ar": "ara",
        "bg": "bul",
        "et": "est",
        "da": "dan",
        "fi": "fin",
        "ro": "rom",
        # The slo should map to Slovak, but Baidu uses this code for Slovenian
        "sl": "slo",
        # Baidu's swe is Swedish; the Weblate code for Swedish is sv
        # (the previous "sw" key wrongly mapped Swahili to Swedish)
        "sv": "swe",
        "zh_Hant": "cht",
        "vi": "vie",
    }

    def __init__(self):
        """Check configuration."""
        super().__init__()
        if settings.MT_BAIDU_ID is None:
            raise MissingConfiguration("Baidu Translate requires app key")
        if settings.MT_BAIDU_SECRET is None:
            raise MissingConfiguration("Baidu Translate requires app secret")

    def download_languages(self):
        """List of supported languages (Baidu language codes)."""
        return [
            "zh",
            "en",
            "yue",
            "wyw",
            "jp",
            "kor",
            "fra",
            "spa",
            "th",
            "ara",
            "ru",
            "pt",
            "de",
            "it",
            "el",
            "nl",
            "pl",
            "bul",
            "est",
            "dan",
            "fin",
            "cs",
            "rom",
            "slo",
            "swe",
            "hu",
            "cht",
            "vie",
        ]

    def download_translations(
        self,
        source,
        language,
        text: str,
        unit,
        user,
        search: bool,
        threshold: int = 75,
    ):
        """Download list of possible translations from a service.

        Yields dicts with text, quality, service and source keys; raises
        MachineryRateLimit on rate limiting (error 54003) and
        MachineTranslationError on other API errors.
        """
        salt, sign = self.signed_salt(
            settings.MT_BAIDU_ID, settings.MT_BAIDU_SECRET, text
        )
        args = {
            "q": text,
            "from": source,
            "to": language,
            "appid": settings.MT_BAIDU_ID,
            "salt": salt,
            "sign": sign,
        }
        response = self.request("get", BAIDU_API, params=args)
        payload = response.json()

        if "error_code" in payload:
            try:
                # 54003 is Baidu's rate-limiting error code.
                if int(payload["error_code"]) == 54003:
                    raise MachineryRateLimit(payload["error_msg"])
            except ValueError:
                # Non-numeric error code; fall through to generic error.
                pass
            raise MachineTranslationError(
                "Error {error_code}: {error_msg}".format(**payload)
            )

        for item in payload["trans_result"]:
            yield {
                "text": item["dst"],
                "quality": self.max_score,
                "service": self.name,
                "source": item["src"],
            }
|
import asyncio
import logging
from homeassistant.components.notify import (
ATTR_TITLE,
ATTR_TITLE_DEFAULT,
BaseNotificationService,
)
from . import DOMAIN as TIBBER_DOMAIN
_LOGGER = logging.getLogger(__name__)
async def async_get_service(hass, config, discovery_info=None):
    """Get the Tibber notification service."""
    connection = hass.data[TIBBER_DOMAIN]
    return TibberNotificationService(connection.send_notification)
class TibberNotificationService(BaseNotificationService):
    """Implement the notification service for Tibber."""

    def __init__(self, notify):
        """Initialize the service.

        *notify* is the Tibber connection's send_notification coroutine.
        """
        self._notify = notify

    async def async_send_message(self, message=None, **kwargs):
        """Send a message to Tibber devices.

        Falls back to the default notification title when none is given;
        a timeout is logged rather than raised.
        """
        title = kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT)
        try:
            await self._notify(title=title, message=message)
        except asyncio.TimeoutError:
            _LOGGER.error("Timeout sending message with Tibber")
|
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.