from homeassistant.components.powerwall.const import DOMAIN
from homeassistant.const import STATE_ON
from homeassistant.setup import async_setup_component
from .mocks import _mock_get_config, _mock_powerwall_with_fixtures
from tests.async_mock import patch
async def test_sensors(hass):
"""Test creation of the binary sensors."""
mock_powerwall = await _mock_powerwall_with_fixtures(hass)
with patch(
"homeassistant.components.powerwall.config_flow.Powerwall",
return_value=mock_powerwall,
), patch(
"homeassistant.components.powerwall.Powerwall",
return_value=mock_powerwall,
):
assert await async_setup_component(hass, DOMAIN, _mock_get_config())
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.grid_status")
assert state.state == STATE_ON
expected_attributes = {"friendly_name": "Grid Status", "device_class": "power"}
# Only test for a subset of attributes in case
# HA changes the implementation and a new one appears
assert all(item in state.attributes.items() for item in expected_attributes.items())
state = hass.states.get("binary_sensor.powerwall_status")
assert state.state == STATE_ON
expected_attributes = {
"friendly_name": "Powerwall Status",
"device_class": "power",
}
# Only test for a subset of attributes in case
# HA changes the implementation and a new one appears
assert all(item in state.attributes.items() for item in expected_attributes.items())
state = hass.states.get("binary_sensor.powerwall_connected_to_tesla")
assert state.state == STATE_ON
expected_attributes = {
"friendly_name": "Powerwall Connected to Tesla",
"device_class": "connectivity",
}
# Only test for a subset of attributes in case
# HA changes the implementation and a new one appears
assert all(item in state.attributes.items() for item in expected_attributes.items())
state = hass.states.get("binary_sensor.powerwall_charging")
assert state.state == STATE_ON
expected_attributes = {
"friendly_name": "Powerwall Charging",
"device_class": "battery_charging",
}
# Only test for a subset of attributes in case
# HA changes the implementation and a new one appears
assert all(item in state.attributes.items() for item in expected_attributes.items())
|
import re
from itertools import chain, groupby
from builtins import range
from . import info
from .util import parse_duration, solve_quadratic, proper_round
class Const(object):
'''
Load plan with constant load
'''
def __init__(self, rps, duration):
self.rps = float(rps)
self.duration = duration
def __iter__(self):
if self.rps == 0:
return iter([])
interval = 1000.0 / self.rps
return (
int(i * interval)
for i in range(0, int(self.rps * self.duration / 1000)))
def rps_at(self, t):
'''Return rps for second t'''
if 0 <= t <= self.duration:
return self.rps
else:
return 0
def get_duration(self):
'''Return step duration'''
return self.duration
def __len__(self):
'''Return total ammo count'''
return int(self.duration / 1000 * self.rps)
def get_rps_list(self):
return [(int(self.rps), self.duration / 1000)]
def __repr__(self):
return 'const(%s, %s)' % (self.rps, self.duration / 1000)
class Line(object):
'''Load plan with linear load'''
def __init__(self, minrps, maxrps, duration):
"""
:param minrps:
:param maxrps:
:param duration: milliseconds
"""
self.minrps = float(minrps)
self.maxrps = float(maxrps)
self.duration = duration / 1000.0
self.slope = (self.maxrps - self.minrps) / self.duration
def ts(self, n):
"""
:param n: number of charge
:return: when to shoot nth charge, milliseconds
"""
try:
root1, root2 = solve_quadratic(self.slope / 2.0, self.minrps, -n)
except ZeroDivisionError:
root2 = float(n) / self.minrps
return int(root2 * 1000)
def __iter__(self):
"""
:return: timestamps for each charge
"""
return (self.ts(n) for n in range(0, self.__len__()))
def rps_at(self, t):
'''Return rps for second t'''
if 0 <= t <= self.duration:
return self.minrps + \
float(self.maxrps - self.minrps) * t / self.duration
else:
return 0
def get_duration(self):
        '''Return load duration in milliseconds'''
return int(self.duration * 1000)
def __len__(self):
'''Return total ammo count'''
return int((self.maxrps + self.minrps) / 2.0 * self.duration)
def get_float_rps_list(self):
'''
        get list of constant load parts (the load is not actually constant,
        but the tank will treat it as such), with part durations (float)
'''
int_rps = range(int(self.minrps), int(self.maxrps) + 1)
step_duration = float(self.duration) / len(int_rps)
rps_list = [(rps, int(step_duration)) for rps in int_rps]
return rps_list
def get_rps_list(self):
"""
get list of each second's rps
:returns: list of tuples (rps, duration of corresponding rps in seconds)
:rtype: list
"""
seconds = range(0, int(self.duration) + 1)
rps_groups = groupby([proper_round(self.rps_at(t)) for t in seconds],
lambda x: x)
rps_list = [(rps, len(list(rpl))) for rps, rpl in rps_groups]
return rps_list
class Composite(object):
'''Load plan with multiple steps'''
def __init__(self, steps):
self.steps = steps
def __iter__(self):
base = 0
for step in self.steps:
for ts in step:
yield ts + base
base += step.get_duration()
def get_duration(self):
'''Return total duration'''
return sum(step.get_duration() for step in self.steps)
def __len__(self):
'''Return total ammo count'''
return int(sum(step.__len__() for step in self.steps))
def get_rps_list(self):
return list(
chain.from_iterable(step.get_rps_list() for step in self.steps))
class Stairway(Composite):
def __init__(self, minrps, maxrps, increment, step_duration):
self.duration = step_duration
if maxrps < minrps:
increment = -increment
n_steps = int((maxrps - minrps) / increment)
steps = [
Const(minrps + i * increment, step_duration)
for i in range(0, n_steps + 1)
]
if increment > 0:
if (minrps + n_steps * increment) < maxrps:
steps.append(Const(maxrps, step_duration))
elif increment < 0:
if (minrps + n_steps * increment) > maxrps:
steps.append(Const(maxrps, step_duration))
super(Stairway, self).__init__(steps)
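# For example, Stairway(10, 20, 5, 5000) yields constant steps at 10, 15 and
# 20 rps of 5 seconds each; the trailing Const(maxrps, ...) is appended only
# when the increment does not land exactly on maxrps.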
class StepFactory(object):
DURATION_RE = r'([0-9.]+d)?([0-9.]+h)?([0-9.]+m)?([0-9.]+s?)?'
@classmethod
def line(cls, params):
template = re.compile(r'([0-9.]+),\s*([0-9.]+),\s*({})\)'.format(cls.DURATION_RE))
minrps, maxrps, duration = template.search(params).groups()[:3]
return Line(float(minrps), float(maxrps), parse_duration(duration))
@classmethod
def const(cls, params):
template = re.compile(r'([0-9.]+),\s*({})\)'.format(cls.DURATION_RE))
rps, duration = template.search(params).groups()[:2]
return Const(float(rps), parse_duration(duration))
@classmethod
def stairway(cls, params):
template = re.compile(
r'([0-9.]+),\s*([0-9.]+),\s*([0-9.]+),\s*({})\)'.format(cls.DURATION_RE))
minrps, maxrps, increment, duration = template.search(params).groups()[:4]
return Stairway(
float(minrps),
float(maxrps), float(increment), parse_duration(duration))
@staticmethod
def produce(step_config):
_plans = {
'line': StepFactory.line,
'const': StepFactory.const,
'step': StepFactory.stairway,
}
load_type, params = step_config.split('(')
load_type = load_type.strip()
if load_type in _plans:
return _plans[load_type](params)
else:
raise NotImplementedError(
'No such load type implemented: "%s"' % load_type)
def create(rps_schedule):
"""
Create Load Plan as defined in schedule. Publish info about its duration.
"""
if len(rps_schedule) > 1:
lp = Composite(
[StepFactory.produce(step_config) for step_config in rps_schedule])
else:
lp = StepFactory.produce(rps_schedule[0])
info.status.publish('duration', lp.get_duration() / 1000)
info.status.publish('steps', lp.get_rps_list())
info.status.lp_len = len(lp)
return lp
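# Minimal usage sketch (assumes parse_duration returns milliseconds, as the
# plan classes above expect); illustrative only, not part of the module:
#
#     plan = StepFactory.produce('const(10, 10s)')
#     len(plan)          # -> 100 shots (10 rps * 10 s)
#     list(plan)[:3]     # -> [0, 100, 200] (timestamps in ms)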
|
import sys
from os.path import exists
from setuptools import setup, find_packages
def parse_version(fpath):
"""
Statically parse the version number from a python file
"""
import ast
if not exists(fpath):
raise ValueError("fpath={!r} does not exist".format(fpath))
with open(fpath, "r") as file_:
sourcecode = file_.read()
pt = ast.parse(sourcecode)
class VersionVisitor(ast.NodeVisitor):
def visit_Assign(self, node):
for target in node.targets:
if getattr(target, "id", None) == "__version__":
self.version = node.value.s
visitor = VersionVisitor()
visitor.visit(pt)
return visitor.version
def parse_requirements(fname="requirements.txt", with_version=False):
"""
Parse the package dependencies listed in a requirements file but strips
specific versioning information.
Args:
fname (str): path to requirements file
with_version (bool, default=False): if true include version specs
Returns:
List[str]: list of requirements items
"""
from os.path import exists
import re
require_fpath = fname
def parse_line(line):
"""
Parse information from a line in a requirements text file
"""
if line.startswith("-r "):
# Allow specifying requirements in other files
target = line.split(" ")[1]
for info in parse_require_file(target):
yield info
else:
info = {"line": line}
if line.startswith("-e "):
info["package"] = line.split("#egg=")[1]
else:
# Remove versioning from the package
pat = "(" + "|".join([">=", "==", ">"]) + ")"
parts = re.split(pat, line, maxsplit=1)
parts = [p.strip() for p in parts]
info["package"] = parts[0]
if len(parts) > 1:
op, rest = parts[1:]
if ";" in rest:
# Handle platform specific dependencies
# http://setuptools.readthedocs.io/en/latest/setuptools.html#declaring-platform-specific-dependencies
version, platform_deps = map(str.strip, rest.split(";"))
info["platform_deps"] = platform_deps
else:
version = rest # NOQA
info["version"] = (op, version)
yield info
def parse_require_file(fpath):
with open(fpath, "r") as f:
for line in f.readlines():
line = line.strip()
if line and not line.startswith("#"):
for info in parse_line(line):
yield info
def gen_packages_items():
if exists(require_fpath):
for info in parse_require_file(require_fpath):
parts = [info["package"]]
if with_version and "version" in info:
parts.extend(info["version"])
if not sys.version.startswith("3.4"):
# apparently package_deps are broken in 3.4
platform_deps = info.get("platform_deps")
if platform_deps is not None:
parts.append(";" + platform_deps)
item = "".join(parts)
yield item
packages = list(gen_packages_items())
return packages
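# Example of the stripping above (hypothetical requirements line):
#   "numpy>=1.16.0 ; python_version >= '3.6'"
# yields "numpy;python_version >= '3.6'" by default, or
# "numpy>=1.16.0;python_version >= '3.6'" when with_version=True.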
def parse_description(fpath):
"""
Parse the description in the README file
"""
# Check this exists, otherwise pip install breaks
if exists(fpath):
with open(fpath, "r") as f:
text = f.read()
return text
return ""
NAME = "pgmpy"
VERSION = parse_version("pgmpy/__init__.py") # must be global for git tags
if __name__ == "__main__":
setup(
name=NAME,
version=VERSION,
description="A library for Probabilistic Graphical Models",
packages=find_packages(exclude=["tests"]),
author="Ankur Ankan",
author_email="[email protected]",
url="https://github.com/pgmpy/pgmpy",
license="MIT",
classifiers=[
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Intended Audience :: Education",
"Intended Audience :: Science/Research",
"Operating System :: Unix",
"Operating System :: POSIX",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS",
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Artificial Intelligence",
"Topic :: Scientific/Engineering :: Bio-Informatics",
],
long_description=parse_description("README.md"),
long_description_content_type="text/markdown",
install_requires=parse_requirements("requirements/runtime.txt"),
extras_require={
"all": parse_requirements("requirements.txt"),
"tests": parse_requirements("requirements/tests.txt"),
},
)
|
import logging
from homeassistant.const import CONF_DEVICE, EVENT_HOMEASSISTANT_STOP
from .const import DOMAIN as AXIS_DOMAIN
from .device import AxisNetworkDevice
_LOGGER = logging.getLogger(__name__)
async def async_setup(hass, config):
"""Old way to set up Axis devices."""
return True
async def async_setup_entry(hass, config_entry):
"""Set up the Axis component."""
hass.data.setdefault(AXIS_DOMAIN, {})
device = AxisNetworkDevice(hass, config_entry)
if not await device.async_setup():
return False
# 0.104 introduced config entry unique id, this makes upgrading possible
if config_entry.unique_id is None:
hass.config_entries.async_update_entry(
config_entry, unique_id=device.api.vapix.serial_number
)
hass.data[AXIS_DOMAIN][config_entry.unique_id] = device
await device.async_update_device_registry()
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, device.shutdown)
return True
async def async_unload_entry(hass, config_entry):
"""Unload Axis device config entry."""
device = hass.data[AXIS_DOMAIN].pop(config_entry.unique_id)
return await device.async_reset()
async def async_migrate_entry(hass, config_entry):
"""Migrate old entry."""
_LOGGER.debug("Migrating from version %s", config_entry.version)
    # Flatten configuration but keep old data if the user rolls back HASS to a version prior to 0.106
if config_entry.version == 1:
config_entry.data = {**config_entry.data, **config_entry.data[CONF_DEVICE]}
config_entry.version = 2
_LOGGER.info("Migration to version %s successful", config_entry.version)
return True
|
import numpy as np
from scattertext.CSRMatrixTools import CSRMatrixFactory
from scattertext.indexstore import IndexStore
from scattertext.termranking import AbsoluteFrequencyRanker
class CompactTerms(object):
def __init__(self,
term_ranker=AbsoluteFrequencyRanker,
minimum_term_count=0,
slack=1):
'''
Parameters
----------
term_ranker : TermRanker
Default AbsoluteFrequencyRanker
minimum_term_count : int
Default 0
slack : int
Default 1
'''
self.term_ranker = term_ranker
self.minimum_term_count = minimum_term_count
self.redundancy_slack = slack
def compact(self, term_doc_matrix, non_text=False):
'''
Parameters
----------
term_doc_matrix : TermDocMatrix
Term document matrix object to compact
non_text : bool
Use non-text features instead of terms
Returns
-------
New term doc matrix
'''
return term_doc_matrix.remove_terms_by_indices(self._indices_to_compact(term_doc_matrix, non_text), non_text)
def _indices_to_compact(self, term_doc_matrix, non_text=False):
ranker = self.term_ranker(term_doc_matrix)
if non_text:
ranker = ranker.use_non_text_features()
        indices = self._get_term_indices_to_compact_from_term_freqs(
            ranker.get_ranks(),
            term_doc_matrix,
            non_text
        )
        return list(indices)
def _get_term_indices_to_compact_from_term_freqs(self, term_freqs, term_doc_matrix, non_text):
idx = IndexStore()
tdf_vals = term_freqs.values
valid_terms_mask = tdf_vals.sum(axis=1) >= self.minimum_term_count
tdf_vals = term_freqs[valid_terms_mask].values
terms = np.array(term_freqs.index)[valid_terms_mask]
lengths = []
fact = CSRMatrixFactory()
for i, t in enumerate(terms):
for tok in t.split():
fact[i, idx.getidx(tok)] = 1
lengths.append(len(t.split()))
lengths = np.array(lengths)
mat = fact.get_csr_matrix()
coocs = lengths - (mat * mat.T)
pairs = np.argwhere(coocs == 0).T
pairs = self._limit_to_non_identical_terms(pairs)
pairs = self._limit_to_pairs_of_bigrams_and_a_constituent_unigram(pairs, terms)
pairs = self._limit_to_redundant_unigrams(pairs, tdf_vals)
idx_store = term_doc_matrix._get_relevant_idx_store(non_text)
redundant_terms = idx_store.getidxstrictbatch(terms[np.unique(pairs[:, 1])])
infrequent_terms = np.argwhere(~valid_terms_mask).T[0]
terms_to_remove = np.concatenate([redundant_terms, infrequent_terms])
return terms_to_remove
def _limit_to_redundant_unigrams(self, pairs, tdf_vals):
return pairs[np.all(tdf_vals[pairs[:, 1]] <= tdf_vals[pairs[:, 0]] + self.redundancy_slack, axis=1)]
def _limit_to_pairs_of_bigrams_and_a_constituent_unigram(self, pairs, terms):
return pairs[np.array([terms[i[1]] in terms[i[0]] for i in pairs])]
def _limit_to_non_identical_terms(self, pairs):
return pairs.T[(pairs[0] != pairs[1])]
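# Sketch of the redundancy rule above, with hypothetical counts: if the bigram
# "hello world" occurs 9 times and the unigram "hello" occurs 10 times, then
# with slack=1 the unigram is redundant (10 <= 9 + 1) and its index lands in
# terms_to_remove. mat * mat.T counts tokens shared between term pairs, and
# lengths - shared == 0 flags pairs where one term's tokens are fully
# contained in the other's.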
|
from datetime import timedelta
from typing import Dict
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_DISKS,
DATA_MEGABYTES,
DATA_RATE_KILOBYTES_PER_SECOND,
DATA_TERABYTES,
PRECISION_TENTHS,
TEMP_CELSIUS,
)
from homeassistant.helpers.temperature import display_temp
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.util.dt import utcnow
from . import SynoApi, SynologyDSMDeviceEntity, SynologyDSMEntity
from .const import (
CONF_VOLUMES,
DOMAIN,
INFORMATION_SENSORS,
STORAGE_DISK_SENSORS,
STORAGE_VOL_SENSORS,
SYNO_API,
TEMP_SENSORS_KEYS,
UTILISATION_SENSORS,
)
async def async_setup_entry(
hass: HomeAssistantType, entry: ConfigEntry, async_add_entities
) -> None:
"""Set up the Synology NAS Sensor."""
api = hass.data[DOMAIN][entry.unique_id][SYNO_API]
entities = [
SynoDSMUtilSensor(api, sensor_type, UTILISATION_SENSORS[sensor_type])
for sensor_type in UTILISATION_SENSORS
]
# Handle all volumes
if api.storage.volumes_ids:
for volume in entry.data.get(CONF_VOLUMES, api.storage.volumes_ids):
entities += [
SynoDSMStorageSensor(
api, sensor_type, STORAGE_VOL_SENSORS[sensor_type], volume
)
for sensor_type in STORAGE_VOL_SENSORS
]
# Handle all disks
if api.storage.disks_ids:
for disk in entry.data.get(CONF_DISKS, api.storage.disks_ids):
entities += [
SynoDSMStorageSensor(
api, sensor_type, STORAGE_DISK_SENSORS[sensor_type], disk
)
for sensor_type in STORAGE_DISK_SENSORS
]
entities += [
SynoDSMInfoSensor(api, sensor_type, INFORMATION_SENSORS[sensor_type])
for sensor_type in INFORMATION_SENSORS
]
async_add_entities(entities)
class SynoDSMUtilSensor(SynologyDSMEntity):
"""Representation a Synology Utilisation sensor."""
@property
def state(self):
"""Return the state."""
attr = getattr(self._api.utilisation, self.entity_type)
if callable(attr):
attr = attr()
if attr is None:
return None
# Data (RAM)
if self._unit == DATA_MEGABYTES:
return round(attr / 1024.0 ** 2, 1)
# Network
if self._unit == DATA_RATE_KILOBYTES_PER_SECOND:
return round(attr / 1024.0, 1)
return attr
@property
def available(self) -> bool:
"""Return True if entity is available."""
return bool(self._api.utilisation)
class SynoDSMStorageSensor(SynologyDSMDeviceEntity):
"""Representation a Synology Storage sensor."""
@property
def state(self):
"""Return the state."""
attr = getattr(self._api.storage, self.entity_type)(self._device_id)
if attr is None:
return None
# Data (disk space)
if self._unit == DATA_TERABYTES:
return round(attr / 1024.0 ** 4, 2)
# Temperature
if self.entity_type in TEMP_SENSORS_KEYS:
return display_temp(self.hass, attr, TEMP_CELSIUS, PRECISION_TENTHS)
return attr
class SynoDSMInfoSensor(SynologyDSMEntity):
"""Representation a Synology information sensor."""
def __init__(self, api: SynoApi, entity_type: str, entity_info: Dict[str, str]):
"""Initialize the Synology SynoDSMInfoSensor entity."""
super().__init__(api, entity_type, entity_info)
self._previous_uptime = None
self._last_boot = None
@property
def state(self):
"""Return the state."""
attr = getattr(self._api.information, self.entity_type)
if attr is None:
return None
# Temperature
if self.entity_type in TEMP_SENSORS_KEYS:
return display_temp(self.hass, attr, TEMP_CELSIUS, PRECISION_TENTHS)
if self.entity_type == "uptime":
# reboot happened or entity creation
if self._previous_uptime is None or self._previous_uptime > attr:
last_boot = utcnow() - timedelta(seconds=attr)
self._last_boot = last_boot.replace(microsecond=0).isoformat()
self._previous_uptime = attr
return self._last_boot
return attr
|
import numbers
import numpy as np
import six
import chainer
def _is_iterable(x):
if isinstance(x, str):
return False
return hasattr(x, '__iter__')
def _as_tuple(t):
if _is_iterable(t):
return tuple(t)
else:
return t,
def _bool_to_indices(indices, len_):
true_indices = []
for i, index in enumerate(indices):
if isinstance(index, (bool, np.bool_)):
if index:
true_indices.append(i)
else:
return indices
    if len(indices) != len_:
raise ValueError(
'The number of booleans is different from the length of dataset')
return true_indices
def _as_key_indices(keys, key_names):
key_names = _as_tuple(key_names)
keys = _bool_to_indices(_as_tuple(keys), len(key_names))
for key in keys:
if isinstance(key, numbers.Integral):
key_index = key
if key_index < 0:
key_index += len(key_names)
if key_index not in range(0, len(key_names)):
raise IndexError(
'index {} is out of bounds for keys with size {}'.format(
key, len(key_names)))
else:
try:
key_index = key_names.index(key)
except ValueError:
                raise KeyError('{} does not exist'.format(key))
yield key_index
class SliceableDataset(chainer.dataset.DatasetMixin):
"""An abstract dataset class that supports slicing.
This is a dataset class that supports slicing.
A dataset class inheriting this class should implement
three methods: :meth:`__len__`, :meth:`keys`, and
:meth:`get_example_by_keys`.
Users can easily create sliceable datasets using
:class:`~chainercv.chainer_experimental.datasets.sliceable.GetterDataset`
or
:class:`~chainercv.chainer_experimental.datasets.sliceable.TupleDataset`.
"""
def __len__(self):
raise NotImplementedError
@property
def keys(self):
"""Return names of all keys
Returns:
string or tuple of strings
"""
raise NotImplementedError
def get_example_by_keys(self, index, key_indices):
"""Return data of an example by keys
Args:
index (int): An index of an example.
key_indices (tuple of ints): A tuple of indices of requested keys.
Returns:
tuple of data
"""
raise NotImplementedError
def get_example(self, index):
if isinstance(self.keys, tuple):
return self.get_example_by_keys(
index, tuple(range(len(self.keys))))
else:
return self.get_example_by_keys(index, (0,))[0]
@property
def slice(self):
return SliceHelper(self)
def __iter__(self):
return (self.get_example(i) for i in six.moves.range(len(self)))
class SliceHelper(object):
"""A helper class for :class:`SliceableDataset`."""
def __init__(self, dataset):
self._dataset = dataset
def __getitem__(self, args):
if isinstance(args, tuple):
indices, keys = args
else:
indices = args
keys = self._dataset.keys
if not isinstance(indices, slice):
indices = _bool_to_indices(indices, len(self._dataset))
key_indices = tuple(_as_key_indices(keys, self._dataset.keys))
return_tuple = _is_iterable(keys)
return SlicedDataset(
self._dataset, indices,
tuple(key_indices) if return_tuple else key_indices[0])
class SlicedDataset(SliceableDataset):
"""A sliced view for :class:`SliceableDataset`."""
def __init__(self, dataset, indices, key_indices):
self._dataset = dataset
self._indices = indices
self._key_indices = key_indices
def __len__(self):
if isinstance(self._indices, slice):
start, end, step = self._indices.indices(len(self._dataset))
return len(range(start, end, step))
else:
return len(self._indices)
@property
def keys(self):
keys = _as_tuple(self._dataset.keys)
if isinstance(self._key_indices, tuple):
return tuple(keys[key_index] for key_index in self._key_indices)
else:
return keys[self._key_indices]
def get_example_by_keys(self, index, key_indices):
if isinstance(key_indices, tuple):
key_indices = tuple(
_as_tuple(self._key_indices)[key_index]
for key_index in key_indices)
else:
key_indices = _as_tuple(self._key_indices)[key_indices]
if isinstance(self._indices, slice):
start, _, step = self._indices.indices(len(self._dataset))
return self._dataset.get_example_by_keys(
start + index * step, key_indices)
else:
return self._dataset.get_example_by_keys(
self._indices[index], key_indices)
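# Minimal usage sketch (hypothetical dataset with keys ('img', 'label')):
#
#     labels = dataset.slice[:100, 'label']           # 100 examples, one key
#     pairs = dataset.slice[[0, 5], ('img', 'label')]
#
# A bare string key yields unwrapped values, while a tuple of keys yields
# tuples, mirroring the return_tuple logic in SliceHelper.__getitem__.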
|
from __future__ import print_function
import numpy
import os
import time
from PIL import Image
from src.training_data import load_training_tiles, way_bitmap_for_naip
from src.single_layer_network import list_findings
def render_errors(raster_data_paths, model, training_info, render_results):
"""Render JPEGs showing findings."""
for path in raster_data_paths:
labels, images = load_training_tiles(path)
if len(labels) == 0 or len(images) == 0:
print("WARNING, there is a borked naip image file")
continue
false_positives, fp_images = list_findings(labels, images, model)
path_parts = path.split('/')
filename = path_parts[len(path_parts) - 1]
print("FINDINGS: {} false pos of {} tiles, from {}".format(
len(false_positives), len(images), filename))
render_results_for_analysis([path], false_positives, fp_images, training_info['bands'],
training_info['tile_size'])
def render_results_for_analysis(raster_data_paths, predictions, test_images, band_list, tile_size):
"""Generate a JPEG for each TIFF showing predictions shaded."""
for raster_data_path in raster_data_paths:
way_bitmap_npy = numpy.asarray(way_bitmap_for_naip(None, raster_data_path, None, None,
None))
render_predictions(raster_data_path, predictions, test_images, way_bitmap_npy, band_list,
tile_size)
def render_predictions(raster_data_path, predictions, test_images, way_bitmap_npy, band_list,
tile_size):
"""Generate a JPEG for the given raster_data_path, showing predictions shaded."""
    # index-aligned copies of the test images and their predictions
    test_images_by_naip = list(test_images)
    predictions_by_naip = list(predictions[:len(test_images)])
render_results_as_image(raster_data_path,
way_bitmap_npy,
test_images_by_naip,
band_list,
tile_size,
predictions=predictions_by_naip)
def render_results_as_image(raster_data_path,
way_bitmap,
test_images,
band_list,
tile_size,
predictions=None):
"""Save the source TIFF as a JPEG, with labels and data overlaid."""
timestr = time.strftime("%Y%m%d-%H%M%S")
outfile = os.path.splitext(raster_data_path)[0] + '-' + timestr + ".jpeg"
# TIF to JPEG bit from:
# http://stackoverflow.com/questions/28870504/converting-tiff-to-jpeg-in-python
im = Image.open(raster_data_path)
print("GENERATING JPEG for %s" % raster_data_path)
rows = len(way_bitmap)
cols = len(way_bitmap[0])
t0 = time.time()
r, g, b, ir = im.split()
# visualize single band analysis tinted for R-G-B,
# or grayscale for infrared band
if sum(band_list) == 1:
if band_list[3] == 1:
# visualize IR as grayscale
im = Image.merge("RGB", (ir, ir, ir))
else:
# visualize single-color band analysis as a scale of that color
zeros_band = Image.new('RGB', r.size).split()[0]
if band_list[0] == 1:
im = Image.merge("RGB", (r, zeros_band, zeros_band))
elif band_list[1] == 1:
im = Image.merge("RGB", (zeros_band, g, zeros_band))
elif band_list[2] == 1:
im = Image.merge("RGB", (zeros_band, zeros_band, b))
else:
# visualize multi-band analysis as RGB
im = Image.merge("RGB", (r, g, b))
t1 = time.time()
print("{0:.1f}s to FLATTEN the {1} analyzed bands of TIF to JPEG".format(t1 - t0, sum(
band_list)))
t0 = time.time()
shade_labels(im, test_images, predictions, tile_size)
t1 = time.time()
print("{0:.1f}s to SHADE PREDICTIONS on JPEG".format(t1 - t0))
t0 = time.time()
# show raw data that spawned the labels
for row in range(0, rows):
for col in range(0, cols):
if way_bitmap[row][col] != 0:
im.putpixel((col, row), (255, 0, 0))
t1 = time.time()
print("{0:.1f}s to DRAW WAYS ON JPEG".format(t1 - t0))
im.save(outfile, "JPEG")
def shade_labels(image, labels, predictions, tile_size):
"""Visualize predicted ON labels as blue, OFF as green."""
label_index = 0
for label in labels:
start_x = label[1][0]
start_y = label[1][1]
for x in range(start_x, start_x + tile_size):
for y in range(start_y, start_y + tile_size):
r, g, b = image.getpixel((x, y))
if predictions[label_index][0] < predictions[label_index][1]:
# shade ON predictions blue
image.putpixel((x, y), (r, g, 255))
else:
# shade OFF predictions green
image.putpixel((x, y), (r, 255, b))
label_index += 1
|
from homeassistant import config_entries, setup
from homeassistant.components.avri.const import DOMAIN
from tests.async_mock import patch
async def test_form(hass):
"""Test we get the form."""
await setup.async_setup_component(hass, "avri", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
with patch(
"homeassistant.components.avri.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"zip_code": "1234AB",
"house_number": 42,
"house_number_extension": "",
"country_code": "NL",
},
)
assert result2["type"] == "create_entry"
assert result2["title"] == "1234AB 42"
assert result2["data"] == {
"id": "1234AB 42",
"zip_code": "1234AB",
"house_number": 42,
"house_number_extension": "",
"country_code": "NL",
}
await hass.async_block_till_done()
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_invalid_house_number(hass):
"""Test we handle invalid house number."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"zip_code": "1234AB",
"house_number": -1,
"house_number_extension": "",
"country_code": "NL",
},
)
assert result2["type"] == "form"
assert result2["errors"] == {"house_number": "invalid_house_number"}
async def test_form_invalid_country_code(hass):
"""Test we handle invalid county code."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"zip_code": "1234AB",
"house_number": 42,
"house_number_extension": "",
"country_code": "foo",
},
)
assert result2["type"] == "form"
assert result2["errors"] == {"country_code": "invalid_country_code"}
|
from homeassistant.components.lock import SUPPORT_OPEN, LockEntity
from homeassistant.const import STATE_LOCKED, STATE_UNLOCKED
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Demo lock platform."""
async_add_entities(
[
DemoLock("Front Door", STATE_LOCKED),
DemoLock("Kitchen Door", STATE_UNLOCKED),
DemoLock("Openable Lock", STATE_LOCKED, True),
]
)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Demo config entry."""
await async_setup_platform(hass, {}, async_add_entities)
class DemoLock(LockEntity):
"""Representation of a Demo lock."""
def __init__(self, name, state, openable=False):
"""Initialize the lock."""
self._name = name
self._state = state
self._openable = openable
@property
def should_poll(self):
"""No polling needed for a demo lock."""
return False
@property
def name(self):
"""Return the name of the lock if any."""
return self._name
@property
def is_locked(self):
"""Return true if lock is locked."""
return self._state == STATE_LOCKED
def lock(self, **kwargs):
"""Lock the device."""
self._state = STATE_LOCKED
self.schedule_update_ha_state()
def unlock(self, **kwargs):
"""Unlock the device."""
self._state = STATE_UNLOCKED
self.schedule_update_ha_state()
def open(self, **kwargs):
"""Open the door latch."""
self._state = STATE_UNLOCKED
self.schedule_update_ha_state()
    @property
    def supported_features(self):
        """Flag supported features."""
        if self._openable:
            return SUPPORT_OPEN
        return 0
|
import functools
import numpy as np
from . import utils
# Use as a sentinel value to indicate a dtype appropriate NA value.
NA = utils.ReprObject("<NA>")
@functools.total_ordering
class AlwaysGreaterThan:
def __gt__(self, other):
return True
def __eq__(self, other):
return isinstance(other, type(self))
@functools.total_ordering
class AlwaysLessThan:
def __lt__(self, other):
return True
def __eq__(self, other):
return isinstance(other, type(self))
# Equivalence to np.inf (-np.inf) for object-type
INF = AlwaysGreaterThan()
NINF = AlwaysLessThan()
# Pairs of types that, if both found, should be promoted to object dtype
# instead of following NumPy's own type-promotion rules. These type promotion
# rules match pandas instead. For reference, see the NumPy type hierarchy:
# https://docs.scipy.org/doc/numpy-1.13.0/reference/arrays.scalars.html
PROMOTE_TO_OBJECT = [
{np.number, np.character}, # numpy promotes to character
{np.bool_, np.character}, # numpy promotes to character
{np.bytes_, np.unicode_}, # numpy promotes to unicode
]
def maybe_promote(dtype):
"""Simpler equivalent of pandas.core.common._maybe_promote
Parameters
----------
dtype : np.dtype
Returns
-------
dtype : Promoted dtype that can hold missing values.
fill_value : Valid missing value for the promoted dtype.
"""
# N.B. these casting rules should match pandas
if np.issubdtype(dtype, np.floating):
fill_value = np.nan
elif np.issubdtype(dtype, np.timedelta64):
# See https://github.com/numpy/numpy/issues/10685
# np.timedelta64 is a subclass of np.integer
# Check np.timedelta64 before np.integer
fill_value = np.timedelta64("NaT")
elif np.issubdtype(dtype, np.integer):
if dtype.itemsize <= 2:
dtype = np.float32
else:
dtype = np.float64
fill_value = np.nan
elif np.issubdtype(dtype, np.complexfloating):
fill_value = np.nan + np.nan * 1j
elif np.issubdtype(dtype, np.datetime64):
fill_value = np.datetime64("NaT")
else:
dtype = object
fill_value = np.nan
return np.dtype(dtype), fill_value
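# For example (illustrative): maybe_promote(np.dtype('int16')) returns
# (dtype('float32'), nan), since float32 can hold NaN for small integers,
# while maybe_promote(np.dtype('datetime64[ns]')) keeps the dtype and returns
# NaT as the fill value.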
NAT_TYPES = (np.datetime64("NaT"), np.timedelta64("NaT"))
def get_fill_value(dtype):
"""Return an appropriate fill value for this dtype.
Parameters
----------
dtype : np.dtype
Returns
-------
fill_value : Missing value corresponding to this dtype.
"""
_, fill_value = maybe_promote(dtype)
return fill_value
def get_pos_infinity(dtype):
"""Return an appropriate positive infinity for this dtype.
Parameters
----------
dtype : np.dtype
Returns
-------
fill_value : positive infinity value corresponding to this dtype.
"""
if issubclass(dtype.type, (np.floating, np.integer)):
return np.inf
if issubclass(dtype.type, np.complexfloating):
return np.inf + 1j * np.inf
return INF
def get_neg_infinity(dtype):
    """Return an appropriate negative infinity for this dtype.
    Parameters
    ----------
    dtype : np.dtype
    Returns
    -------
    fill_value : negative infinity value corresponding to this dtype.
    """
if issubclass(dtype.type, (np.floating, np.integer)):
return -np.inf
if issubclass(dtype.type, np.complexfloating):
return -np.inf - 1j * np.inf
return NINF
def is_datetime_like(dtype):
"""Check if a dtype is a subclass of the numpy datetime types"""
return np.issubdtype(dtype, np.datetime64) or np.issubdtype(dtype, np.timedelta64)
def result_type(*arrays_and_dtypes):
"""Like np.result_type, but with type promotion rules matching pandas.
Examples of changed behavior:
number + string -> object (not string)
bytes + unicode -> object (not unicode)
Parameters
----------
*arrays_and_dtypes : list of arrays and dtypes
The dtype is extracted from both numpy and dask arrays.
Returns
-------
numpy.dtype for the result.
"""
types = {np.result_type(t).type for t in arrays_and_dtypes}
for left, right in PROMOTE_TO_OBJECT:
if any(issubclass(t, left) for t in types) and any(
issubclass(t, right) for t in types
):
return np.dtype(object)
return np.result_type(*arrays_and_dtypes)
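# For example (illustrative): result_type(np.array([1]), np.array(['a']))
# returns dtype('O'), where plain np.result_type would promote to a string
# dtype.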
|
import logging
from homeassistant.const import CONF_DEVICE, CONF_NAME, STATE_UNKNOWN
from homeassistant.helpers.entity import Entity
DOMAIN = "kira"
_LOGGER = logging.getLogger(__name__)
ICON = "mdi:remote"
CONF_SENSOR = "sensor"
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up a Kira sensor."""
if discovery_info is not None:
name = discovery_info.get(CONF_NAME)
device = discovery_info.get(CONF_DEVICE)
kira = hass.data[DOMAIN][CONF_SENSOR][name]
add_entities([KiraReceiver(device, kira)])
class KiraReceiver(Entity):
"""Implementation of a Kira Receiver."""
def __init__(self, name, kira):
"""Initialize the sensor."""
self._name = name
self._state = None
self._device = STATE_UNKNOWN
kira.registerCallback(self._update_callback)
def _update_callback(self, code):
code_name, device = code
_LOGGER.debug("Kira Code: %s", code_name)
self._state = code_name
self._device = device
self.schedule_update_ha_state()
@property
def name(self):
"""Return the name of the receiver."""
return self._name
@property
def icon(self):
"""Return icon."""
return ICON
@property
def state(self):
"""Return the state of the receiver."""
return self._state
@property
def device_state_attributes(self):
"""Return the state attributes of the device."""
return {CONF_DEVICE: self._device}
@property
def should_poll(self) -> bool:
"""Entity should not be polled."""
return False
@property
def force_update(self) -> bool:
"""Kira should force updates. Repeated states have meaning."""
return True
|
import arrow
import requests
import xmltodict
from cryptography import x509
from flask import current_app
from lemur.common.utils import get_psuedo_random_string
from lemur.extensions import metrics, sentry
from lemur.plugins import lemur_verisign as verisign
from lemur.plugins.bases import IssuerPlugin, SourcePlugin
# https://support.venafi.com/entries/66445046-Info-VeriSign-Error-Codes
VERISIGN_ERRORS = {
"0x30c5": "Domain Mismatch when enrolling for an SSL certificate, a domain in your request has not been added to verisign",
"0x3a10": "Invalid X509 certificate format.: an unsupported certificate format was submitted",
"0x4002": "Internal QM Error. : Internal Database connection error.",
"0x3301": "Bad transaction id or parent cert not renewable.: User try to renew a certificate that is not yet ready for renew or the transaction id is wrong",
"0x3069": "Challenge phrase mismatch: The challenge phrase submitted does not match the original one",
"0x3111": "Unsupported Product: User submitted a wrong product or requested cipher is not supported",
"0x30e8": "CN or org does not match the original one.: the submitted CSR contains a common name or org that does not match the original one",
"0x1005": "Duplicate certificate: a certificate with the same common name exists already",
"0x0194": "Incorrect Signature Algorithm: The requested signature algorithm is not supported for the key type. i.e. an ECDSA is submitted for an RSA key",
"0x6000": "parameter missing or incorrect: This is a general error code for missing or incorrect parameters. The reason will be in the response message. i.e. 'CSR is missing, 'Unsupported serverType' when no supported serverType could be found., 'invalid transaction id'",
"0x3063": "Certificate not allowed: trying to issue a certificate that is not configured for the account",
"0x23df": "No MDS Data Returned: internal connection lost or server not responding. this should be rare",
"0x3004": "Invalid Account: The users mpki account associated with the certificate is not valid or not yet active",
"0x4101": "Internal Error: internal server error, user should try again later. (Also check that State is spelled out",
"0x3101": "Missing admin role: Your account does not have the admin role required to access the webservice API",
"0x3085": "Account does not have webservice feature.: Your account does not the the webservice role required to access the webservice API",
"0x9511": "Corrupted CSR : the submitted CSR was mal-formed",
"0xa001": "Public key format does not match.: The public key format does not match the original cert at certificate renewal or replacement. E.g. if you try to renew or replace an RSA cert with a DSA or ECC key based CSR",
"0x0143": "Certificate End Date Error: You are trying to replace a certificate with validity end date exceeding the original cert. or the certificate end date is not valid",
"0x482d": "SHA1 validity check error: What error code do we get when we submit the SHA1 SSL requests with the validity more than 12/31/2016?",
"0x482e": "What error code do we get when we cannot complete the re-authentication for domains with a newly-approved gTLD 30 days after the gTLD approval",
"0x4824": "Per CA/B Forum baseline requirements, non-FQDN certs cannot exceed 11/1/2015. Examples: hostname, foo.cba (.cba is a pending gTLD)",
"eE0x48": "Currently the maximum cert validity is 4-years",
"0x4826": "OU misleading. See comments",
"0x4827": "Org re-auth past due. EV org has to go through re-authentication every 13 months; OV org has to go through re-authentication every 39 months",
"0x482a": "Domain re-auth past due. EV domain has to go through re-authentication every 13 months; OV domain has to go through re-authentication every 39 months.",
"0x482b": "No org address was set to default, should not happen",
"0x482c": "signature algorithm does not match intended key type in the CSR (e.g. CSR has an ECC key, but the signature algorithm is sha1WithRSAEncryption)",
"0x600E": "only supports ECC keys with the named curve NIST P-256, aka secp256r1 or prime256v1, other ECC key sizes will get this error ",
"0x6013": "only supports DSA keys with (2048, 256) as the bit lengths of the prime parameter pair (p, q), other DSA key sizes will get this error",
"0x600d": "RSA key size < 2A048",
"0x4828": "Verisign certificates can be at most two years in length",
"0x3043": "Certificates must have a validity of at least 1 day",
"0x950b": "CSR: Invalid State",
"0x3105": "Organization Name Not Matched",
"0x300a": "Domain/SubjectAltName Mismatched -- make sure that the SANs have the proper domain suffix",
"0x950e": "Invalid Common Name -- make sure the CN has a proper domain suffix",
"0xa00e": "Pending. (Insufficient number of tokens.)",
"0x8134": "Pending. (Domain failed CAA validation.)",
}
def log_status_code(r, *args, **kwargs):
"""
    A request hook that records the status code of every Verisign API response as a metric.
:param r:
:param args:
:param kwargs:
:return:
"""
metrics.send("symantec_status_code_{}".format(r.status_code), "counter", 1)
def get_additional_names(options):
"""
    Return a list of strings to be added to a SAN certificate.
:param options:
:return:
"""
names = []
# add SANs if present
if options.get("extensions"):
for san in options["extensions"]["sub_alt_names"]:
if isinstance(san, x509.DNSName):
names.append(san.value)
return names
def process_options(options):
"""
Processes and maps the incoming issuer options to fields/options that
verisign understands
:param options:
    :return: dict of valid verisign options
"""
    # if there is a config variable with VERISIGN_PRODUCT_<upper(authority.name)> take the value as Cert product-type
    # else default to "Server", to be compatible with former versions
authority = options.get("authority").name.upper()
product_type = current_app.config.get("VERISIGN_PRODUCT_{0}".format(authority), "Server")
data = {
"challenge": get_psuedo_random_string(),
"serverType": "Apache",
"certProductType": product_type,
"firstName": current_app.config.get("VERISIGN_FIRST_NAME"),
"lastName": current_app.config.get("VERISIGN_LAST_NAME"),
"signatureAlgorithm": "sha256WithRSAEncryption",
"email": current_app.config.get("VERISIGN_EMAIL"),
"ctLogOption": current_app.config.get("VERISIGN_CS_LOG_OPTION", "public"),
}
data["subject_alt_names"] = ",".join(get_additional_names(options))
if options.get("validity_end"):
        # VeriSign (Symantec) only accepts an end date strictly less than two years out
if options.get("validity_end") < arrow.utcnow().shift(years=2, days=-1):
period = get_default_issuance(options)
data["specificEndDate"] = options["validity_end"].format("MM/DD/YYYY")
data["validityPeriod"] = period
else:
# allowing Symantec website setting the end date, given the validity period
data["validityPeriod"] = str(get_default_issuance(options))
options.pop("validity_end", None)
elif options.get("validity_years"):
if options["validity_years"] in [1, 2]:
data["validityPeriod"] = str(options["validity_years"]) + "Y"
else:
raise Exception(
"Verisign issued certificates cannot exceed two years in validity"
)
return data
def get_default_issuance(options):
"""
Gets the default time range for certificates
:param options:
:return:
"""
now = arrow.utcnow()
if options["validity_end"] < now.shift(years=+1):
validity_period = "1Y"
elif options["validity_end"] < now.shift(years=+2):
validity_period = "2Y"
else:
raise Exception(
"Verisign issued certificates cannot exceed two years in validity"
)
return validity_period
def handle_response(content):
    """
    Helper function for parsing responses from the Verisign API.
    :param content:
    :return: :raise Exception:
    """
    d = xmltodict.parse(content)
    status_code = None
    if d.get("Error"):
        status_code = d["Error"]["StatusCode"]
    elif d.get("Response"):
        status_code = d["Response"]["StatusCode"]
    if status_code in VERISIGN_ERRORS:
        raise Exception(VERISIGN_ERRORS[status_code])
    return d
class VerisignIssuerPlugin(IssuerPlugin):
title = "Verisign"
slug = "verisign-issuer"
description = "Enables the creation of certificates by the VICE2.0 verisign API."
version = verisign.VERSION
author = "Kevin Glisson"
author_url = "https://github.com/netflix/lemur.git"
def __init__(self, *args, **kwargs):
self.session = requests.Session()
self.session.cert = current_app.config.get("VERISIGN_PEM_PATH")
self.session.hooks = dict(response=log_status_code)
super(VerisignIssuerPlugin, self).__init__(*args, **kwargs)
def create_certificate(self, csr, issuer_options):
"""
Creates a Verisign certificate.
:param csr:
:param issuer_options:
:return: :raise Exception:
"""
url = current_app.config.get("VERISIGN_URL") + "/rest/services/enroll"
data = process_options(issuer_options)
data["csr"] = csr
current_app.logger.info(
"Requesting a new verisign certificate: {0}".format(data)
)
response = self.session.post(url, data=data)
try:
response_dict = handle_response(response.content)
except KeyError:
metrics.send(
"verisign_create_certificate_error",
"counter",
1,
metric_tags={"common_name": issuer_options.get("common_name", "")},
)
sentry.captureException(
extra={"common_name": issuer_options.get("common_name", "")}
)
raise Exception(f"Error with Verisign: {response.content}")
authority = issuer_options.get("authority").name.upper()
cert = response_dict['Response']['Certificate']
external_id = None
if 'Transaction_ID' in response_dict['Response'].keys():
external_id = response_dict['Response']['Transaction_ID']
chain = current_app.config.get("VERISIGN_INTERMEDIATE_{0}".format(authority), current_app.config.get("VERISIGN_INTERMEDIATE"))
return cert, chain, external_id
@staticmethod
def create_authority(options):
"""
Creates an authority, this authority is then used by Lemur to allow a user
to specify which Certificate Authority they want to sign their certificate.
:param options:
:return:
"""
role = {"username": "", "password": "", "name": "verisign"}
return current_app.config.get("VERISIGN_ROOT"), "", [role]
def get_available_units(self):
"""
        Uses the Verisign API to fetch the number of available units left. This can be used to keep tabs
        on the number of certificates that can be issued.
:return:
"""
url = current_app.config.get("VERISIGN_URL") + "/rest/services/getTokens"
response = self.session.post(
url, headers={"content-type": "application/x-www-form-urlencoded"}
)
return handle_response(response.content)["Response"]["Order"]
def clear_pending_certificates(self):
"""
Uses Verisign to clear the pending certificates awaiting approval.
:return:
"""
url = current_app.config.get("VERISIGN_URL") + "/reportingws"
end = arrow.now()
start = end.shift(days=-7)
data = {
"reportType": "detail",
"certProductType": "Server",
"certStatus": "Pending",
"startDate": start.format("MM/DD/YYYY"),
"endDate": end.format("MM/DD/YYYY"),
}
response = self.session.post(url, data=data)
url = current_app.config.get("VERISIGN_URL") + "/rest/services/reject"
for order_id in response.json()["orderNumber"]:
response = self.session.get(url, params={"transaction_id": order_id})
if response.status_code == 200:
print("Rejecting certificate. TransactionId: {}".format(order_id))
class VerisignSourcePlugin(SourcePlugin):
title = "Verisign"
slug = "verisign-source"
description = (
"Allows for the polling of issued certificates from the VICE2.0 verisign API."
)
version = verisign.VERSION
author = "Kevin Glisson"
author_url = "https://github.com/netflix/lemur.git"
def __init__(self, *args, **kwargs):
self.session = requests.Session()
self.session.cert = current_app.config.get("VERISIGN_PEM_PATH")
super(VerisignSourcePlugin, self).__init__(*args, **kwargs)
def get_certificates(self):
url = current_app.config.get("VERISIGN_URL") + "/reportingws"
end = arrow.now()
start = end.shift(years=-5)
data = {
"reportType": "detail",
"startDate": start.format("MM/DD/YYYY"),
"endDate": end.format("MM/DD/YYYY"),
"structuredRecord": "Y",
"certStatus": "Valid",
}
current_app.logger.debug(data)
response = self.session.post(url, data=data)
|
import logging
import unittest
from gensim.corpora import mmcorpus, Dictionary
from gensim.models import hdpmodel
from gensim.test import basetmtests
from gensim.test.utils import datapath, common_texts
import numpy as np
dictionary = Dictionary(common_texts)
corpus = [dictionary.doc2bow(text) for text in common_texts]
class TestHdpModel(unittest.TestCase, basetmtests.TestBaseTopicModel):
def setUp(self):
self.corpus = mmcorpus.MmCorpus(datapath('testcorpus.mm'))
self.class_ = hdpmodel.HdpModel
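        # note: np.random.seed(0) returns None, so this seeds NumPy's global
        # RNG and gensim then falls back to that (now seeded) global state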
self.model = self.class_(corpus, id2word=dictionary, random_state=np.random.seed(0))
def testTopicValues(self):
"""
Check show topics method
"""
results = self.model.show_topics()[0]
expected_prob, expected_word = '0.264', 'trees '
prob, word = results[1].split('+')[0].split('*')
self.assertEqual(results[0], 0)
self.assertEqual(prob, expected_prob)
self.assertEqual(word, expected_word)
return
def testLDAmodel(self):
"""
Create ldamodel object, and check if the corresponding alphas are equal.
"""
ldam = self.model.suggested_lda_model()
self.assertEqual(ldam.alpha[0], self.model.lda_alpha[0])
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.DEBUG)
unittest.main()
|
from __future__ import print_function
import json
import re
import sys
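# Expected input (assumed from the regex below): one JSON object per line,
# e.g. {"metric": "latency", "labels": "|phase:connect|", "value": 12.3}.
# Running `script.py samples.json latency phase` would then print
# "connect,12.3".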
def main():
if len(sys.argv) != 4:
print('usage: %s samples_file.json metric_name data_label' % sys.argv[0])
sys.exit(1)
with open(sys.argv[1]) as samples_file:
for line in samples_file:
sample = json.loads(line)
if sample['metric'] == sys.argv[2]:
regex = r'\|%s:(.*?)\|' % sys.argv[3]
data_label = re.search(regex, sample['labels']).group(1)
print(','.join((data_label, str(sample['value']))))
if __name__ == '__main__':
main()
|
revision = "5e680529b666"
down_revision = "131ec6accff5"
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column("endpoints", sa.Column("sensitive", sa.Boolean(), nullable=True))
op.add_column("endpoints", sa.Column("source_id", sa.Integer(), nullable=True))
op.create_foreign_key(None, "endpoints", "sources", ["source_id"], ["id"])
def downgrade():
op.drop_constraint(None, "endpoints", type_="foreignkey")
op.drop_column("endpoints", "source_id")
op.drop_column("endpoints", "sensitive")
|
import unittest
import subprocess
import time
from gensim.models import LdaModel
from gensim.test.utils import datapath, common_dictionary
from gensim.corpora import MmCorpus
from gensim.models.callbacks import CoherenceMetric
try:
from visdom import Visdom
VISDOM_INSTALLED = True
except ImportError:
VISDOM_INSTALLED = False
@unittest.skipIf(VISDOM_INSTALLED is False, "Visdom not installed")
class TestLdaCallback(unittest.TestCase):
def setUp(self):
self.corpus = MmCorpus(datapath('testcorpus.mm'))
self.ch_umass = CoherenceMetric(corpus=self.corpus, coherence="u_mass", logger="visdom", title="Coherence")
self.callback = [self.ch_umass]
self.model = LdaModel(id2word=common_dictionary, num_topics=2, passes=10, callbacks=self.callback)
self.host = "http://localhost"
self.port = 8097
def testCallbackUpdateGraph(self):
with subprocess.Popen(['python', '-m', 'visdom.server', '-port', str(self.port)]) as proc:
# wait for visdom server startup (any better way?)
viz = Visdom(server=self.host, port=self.port)
for attempt in range(5):
time.sleep(1.0) # seconds
if viz.check_connection():
break
assert viz.check_connection()
viz.close()
self.model.update(self.corpus)
proc.kill()
if __name__ == '__main__':
unittest.main()
|
import argparse
import logging
import os
import sys
import manhole
import requests_cache
import service_configuration_lib
import yaml
from pyramid.config import Configurator
from wsgicors import CORS
import paasta_tools.api
from paasta_tools import kubernetes_tools
from paasta_tools import marathon_tools
from paasta_tools.api import settings
from paasta_tools.api.tweens import request_logger
from paasta_tools.utils import load_system_paasta_config
try:
import clog
except ImportError:
clog = None
log = logging.getLogger(__name__)
def parse_paasta_api_args():
parser = argparse.ArgumentParser(description="Runs a PaaSTA API server")
parser.add_argument(
"-D",
"--debug",
dest="debug",
action="store_true",
default=False,
help="output the debug logs",
)
parser.add_argument("port", type=int, help="port number for the api server")
parser.add_argument(
"-d",
"--soa-dir",
dest="soa_dir",
help="define a different soa config directory",
)
parser.add_argument(
"-c",
"--cluster",
dest="cluster",
help="specify a cluster. If no empty, the cluster from /etc/paasta is used",
)
parser.add_argument(
"--max-request-seconds",
default=120,
dest="max_request_seconds",
help="Maximum seconds allowed for a worker to process a request",
)
args = parser.parse_args()
return args
def make_app(global_config=None):
paasta_api_path = os.path.dirname(paasta_tools.api.__file__)
setup_paasta_api()
setup_clog()
config = Configurator(
settings={
"service_name": "paasta-api",
"pyramid_swagger.schema_directory": os.path.join(
paasta_api_path, "api_docs"
),
"pyramid_swagger.skip_validation": [
"/(static)\\b",
"/(status)\\b",
"/(swagger.json)\\b",
],
"pyramid_swagger.swagger_versions": ["2.0"],
}
)
config.include("pyramid_swagger")
config.include(request_logger)
config.add_route("resources.utilization", "/v1/resources/utilization")
config.add_route(
"service.instance.status", "/v1/services/{service}/{instance}/status"
)
config.add_route(
"service.instance.set_state",
"/v1/services/{service}/{instance}/state/{desired_state}",
)
config.add_route(
"service.instance.delay", "/v1/services/{service}/{instance}/delay"
)
config.add_route(
"service.instance.tasks", "/v1/services/{service}/{instance}/tasks"
)
config.add_route(
"service.instance.tasks.task",
"/v1/services/{service}/{instance}/tasks/{task_id}",
)
config.add_route("service.list", "/v1/services/{service}")
config.add_route("services", "/v1/services")
config.add_route(
"service.autoscaler.get",
"/v1/services/{service}/{instance}/autoscaler",
request_method="GET",
)
config.add_route(
"service.autoscaler.post",
"/v1/services/{service}/{instance}/autoscaler",
request_method="POST",
)
config.add_route(
"service_autoscaler.pause.post",
"/v1/service_autoscaler/pause",
request_method="POST",
)
config.add_route(
"service_autoscaler.pause.delete",
"/v1/service_autoscaler/pause",
request_method="DELETE",
)
config.add_route(
"service_autoscaler.pause.get",
"/v1/service_autoscaler/pause",
request_method="GET",
)
config.add_route("version", "/v1/version")
config.add_route(
"marathon_dashboard", "/v1/marathon_dashboard", request_method="GET"
)
config.add_route("metastatus", "/v1/metastatus")
config.add_route("deploy_queue.list", "/v1/deploy_queue")
config.scan()
return CORS(
config.make_wsgi_app(), headers="*", methods="*", maxage="180", origin="*"
)
_app = None
def application(env, start_response):
"""For uwsgi or gunicorn."""
global _app
if not _app:
_app = make_app()
manhole_path = os.environ.get("PAASTA_MANHOLE_PATH")
if manhole_path:
manhole.install(
socket_path=f"{manhole_path}-{os.getpid()}", locals={"_app": _app}
)
return _app(env, start_response)
def setup_paasta_api():
if os.environ.get("PAASTA_API_DEBUG"):
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.WARNING)
# pyinotify is a better solution than turning off file caching completely
service_configuration_lib.disable_yaml_cache()
settings.system_paasta_config = load_system_paasta_config()
if os.environ.get("PAASTA_API_CLUSTER"):
settings.cluster = os.environ.get("PAASTA_API_CLUSTER")
else:
settings.cluster = settings.system_paasta_config.get_cluster()
    settings.marathon_servers = marathon_tools.get_marathon_servers(
        system_paasta_config=settings.system_paasta_config
    )
    settings.marathon_clients = marathon_tools.get_marathon_clients(
        marathon_servers=settings.marathon_servers, cached=False
    )
try:
settings.kubernetes_client = kubernetes_tools.KubeClient()
except FileNotFoundError:
log.info("Kubernetes not found")
settings.kubernetes_client = None
except Exception:
log.exception("Error while initializing KubeClient")
settings.kubernetes_client = None
# Set up transparent cache for http API calls. With expire_after, responses
# are removed only when the same request is made. Expired storage is not a
# concern here. Thus remove_expired_responses is not needed.
requests_cache.install_cache("paasta-api", backend="memory", expire_after=5)
def setup_clog(config_file="/nail/srv/configs/clog.yaml"):
if clog:
if os.path.exists(config_file):
with open(config_file) as fp:
clog_config = yaml.safe_load(fp)
else:
# these are barebones basic configs from /nail/srv/configs/clog.yaml
clog_config = {
"scribe_host": "169.254.255.254",
"scribe_port": 1463,
"monk_disable": False,
"scribe_disable": False,
}
clog.config.configure_from_dict(clog_config)
def main(argv=None):
args = parse_paasta_api_args()
if args.debug:
os.environ["PAASTA_API_DEBUG"] = "1"
if args.soa_dir:
os.environ["PAASTA_API_SOA_DIR"] = args.soa_dir
if args.cluster:
os.environ["PAASTA_API_CLUSTER"] = args.cluster
os.execlp(
os.path.join(sys.exec_prefix, "bin", "gunicorn"),
"gunicorn",
"-w",
"4",
"--bind",
f":{args.port}",
"--timeout",
str(args.max_request_seconds),
"--graceful-timeout",
str(args.max_request_seconds),
"paasta_tools.api.api:application",
)
if __name__ == "__main__":
main()
|
import random
from flexx import flx
COLORS = ('#eee', '#999', '#555', '#111',
'#f00', '#0f0', '#00f', '#ff0', '#f0f', '#0ff',
'#a44', '#4a4', '#44a', '#aa4', '#afa', '#4aa',
)
class Relay(flx.Component):
""" Global object to relay paint events to all participants.
"""
@flx.emitter
def add_paint_for_all(self, pos, color):
return dict(pos=pos, color=color)
# Create global relay object, shared by all connections
relay = Relay()
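# How the relay fan-out works (summary of the code below, not new behavior):
# every ColabPainting instance reacts to relay's 'add_paint_for_all' emitter,
# so one user's add_paint() action ends up drawing on the view of every
# connected session:
#
#   click -> ColabPainting.add_paint(pos)
#         -> relay.add_paint_for_all(pos, color)      # single shared emitter
#         -> _any_user_adds_paint() in *each* session
#         -> ColabPaintingView.add_paint_to_canvas()  # dot drawn everywhere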
class ColabPainting(flx.PyComponent):
""" The Python side of the app. There is one instance per connection.
"""
color = flx.ColorProp(settable=True, doc="Paint color")
status = flx.StringProp('', settable=True, doc="Status text")
def init(self):
self.set_color(random.choice(COLORS))
self.widget = ColabPaintingView(self)
self._update_participants()
@flx.action
def add_paint(self, pos):
""" Add paint at the specified position.
"""
relay.add_paint_for_all(pos, self.color.hex)
@relay.reaction('add_paint_for_all') # note that we connect to relay here
def _any_user_adds_paint(self, *events):
""" Receive global paint event from the relay, invoke action on view.
"""
for ev in events:
self.widget.add_paint_to_canvas(ev.pos, ev.color)
@flx.manager.reaction('connections_changed')
def _update_participants(self, *events):
if self.session.status:
sessions = flx.manager.get_connections(self.session.app_name)
n = len(sessions)
del sessions
self.set_status('%i persons are painting' % n)
class ColabPaintingView(flx.Widget):
""" The part of the app that runs in the browser.
"""
CSS = """
.flx-ColabPaintingView { background: #ddd; }
.flx-ColabPaintingView .flx-CanvasWidget {
background: #fff;
border: 10px solid #000;
}
"""
def init(self, model):
super().init()
self.model = model
# App layout
with flx.VBox():
flx.Label(flex=0, text=lambda: model.status)
flx.Widget(flex=1)
with flx.HBox(flex=2):
flx.Widget(flex=1)
self.canvas = flx.CanvasWidget(flex=0, minsize=400, maxsize=400)
flx.Widget(flex=1)
flx.Widget(flex=1)
# Init context to draw to
self._ctx = self.canvas.node.getContext('2d')
@flx.reaction
def __update_color(self):
self.canvas.apply_style('border: 10px solid ' + self.model.color.hex)
@flx.reaction('canvas.pointer_down')
def __on_click(self, *events):
for ev in events:
self.model.add_paint(ev.pos)
@flx.action
def add_paint_to_canvas(self, pos, color):
""" Actually draw a dot on the canvas.
"""
self._ctx.globalAlpha = 0.8
self._ctx.beginPath()
self._ctx.fillStyle = color
self._ctx.arc(pos[0], pos[1], 5, 0, 6.2831)
self._ctx.fill()
if __name__ == '__main__':
a = flx.App(ColabPainting)
a.serve()
# m = a.launch('browser') # for use during development
flx.start()
|
from django.contrib.auth.decorators import login_required
from django.core.exceptions import PermissionDenied
from django.http import Http404, HttpResponse, HttpResponseBadRequest, JsonResponse
from django.shortcuts import get_object_or_404, render
from django.utils.translation import gettext as _
from django.views.decorators.cache import cache_control
from django.views.decorators.http import require_POST
from weblate.checks.flags import Flags
from weblate.checks.models import Check
from weblate.machinery import MACHINE_TRANSLATION_SERVICES
from weblate.machinery.base import MachineTranslationError
from weblate.trans.models import Change, Unit
from weblate.trans.util import sort_unicode
from weblate.utils.errors import report_error
from weblate.utils.views import get_component, get_project, get_translation
def handle_machinery(request, service, unit, search=None):
if not request.user.has_perm("machinery.view", unit.translation):
raise PermissionDenied()
# Error response
response = {
"responseStatus": 500,
"service": service,
"responseDetails": "",
"translations": [],
"lang": unit.translation.language.code,
"dir": unit.translation.language.direction,
}
try:
translation_service = MACHINE_TRANSLATION_SERVICES[service]
response["service"] = translation_service.name
except KeyError:
response["responseDetails"] = _("Service is currently not available.")
else:
try:
response["translations"] = translation_service.translate(
unit, request.user, search=search
)
response["responseStatus"] = 200
except MachineTranslationError as exc:
response["responseDetails"] = str(exc)
except Exception as error:
report_error()
response["responseDetails"] = f"{error.__class__.__name__}: {error}"
return JsonResponse(data=response)
@require_POST
def translate(request, unit_id, service):
"""AJAX handler for translating."""
if service not in MACHINE_TRANSLATION_SERVICES:
raise Http404("Invalid service specified")
unit = get_object_or_404(Unit, pk=int(unit_id))
return handle_machinery(request, service, unit)
@require_POST
def memory(request, unit_id):
"""AJAX handler for translation memory."""
unit = get_object_or_404(Unit, pk=int(unit_id))
query = request.POST.get("q")
if not query:
return HttpResponseBadRequest("Missing search string")
return handle_machinery(request, "weblate-translation-memory", unit, search=query)
def get_unit_translations(request, unit_id):
"""Return unit's other translations."""
unit = get_object_or_404(Unit, pk=int(unit_id))
user = request.user
user.check_access_component(unit.translation.component)
return render(
request,
"js/translations.html",
{
"units": sort_unicode(
unit.source_unit.unit_set.exclude(pk=unit.pk)
.prefetch()
.prefetch_full(),
lambda unit: "{}-{}".format(
user.profile.get_language_order(unit.translation.language),
unit.translation.language,
),
)
},
)
@require_POST
@login_required
def ignore_check(request, check_id):
obj = get_object_or_404(Check, pk=int(check_id))
if not request.user.has_perm("unit.check", obj):
raise PermissionDenied()
# Mark check for ignoring
obj.set_dismiss("revert" not in request.GET)
# response for AJAX
return HttpResponse("ok")
@require_POST
@login_required
def ignore_check_source(request, check_id):
obj = get_object_or_404(Check, pk=int(check_id))
unit = obj.unit.source_unit
if not request.user.has_perm("unit.check", obj) or not request.user.has_perm(
"source.edit", unit.translation.component
):
raise PermissionDenied()
# Mark check for ignoring
ignore = obj.check_obj.ignore_string
flags = Flags(unit.extra_flags)
if ignore not in flags:
flags.merge(ignore)
unit.extra_flags = flags.format()
unit.save(same_content=True)
# response for AJAX
return HttpResponse("ok")
def git_status_shared(request, obj, changes, repositories):
if not request.user.has_perm("meta:vcs.status", obj):
raise PermissionDenied()
return render(
request,
"js/git-status.html",
{
"object": obj,
"changes": changes.prefetch(),
"repositories": repositories,
"pending_units": obj.count_pending_units,
"outgoing_commits": sum(repo.count_repo_outgoing for repo in repositories),
"missing_commits": sum(repo.count_repo_missing for repo in repositories),
},
)
@login_required
def git_status_project(request, project):
obj = get_project(request, project)
return git_status_shared(
request,
obj,
Change.objects.filter(
project=obj, action__in=Change.ACTIONS_REPOSITORY
).order()[:10],
obj.all_repo_components,
)
@login_required
def git_status_component(request, project, component):
obj = get_component(request, project, component)
target = obj
if target.is_repo_link:
target = target.linked_component
return git_status_shared(
request,
obj,
Change.objects.filter(
action__in=Change.ACTIONS_REPOSITORY, component=target
).order()[:10],
[obj],
)
@login_required
def git_status_translation(request, project, component, lang):
obj = get_translation(request, project, component, lang)
target = obj.component
if target.is_repo_link:
target = target.linked_component
return git_status_shared(
request,
obj,
Change.objects.filter(
action__in=Change.ACTIONS_REPOSITORY, component=target
).order()[:10],
[obj.component],
)
@cache_control(max_age=3600)
def matomo(request):
return render(
request, "js/matomo.js", content_type='text/javascript; charset="utf-8"'
)
|
import importlib.util
import os
import shutil
import sys
import textwrap
import traceback
import doit.cmd_base
from collections import defaultdict
from blinker import signal
from doit.cmd_auto import Auto as DoitAuto
from doit.cmd_base import TaskLoader, _wrap
from doit.cmd_clean import Clean as DoitClean
from doit.cmd_completion import TabCompletion
from doit.cmd_help import Help as DoitHelp
from doit.cmd_run import Run as DoitRun
from doit.doit_cmd import DoitMain
from doit.loader import generate_tasks
from doit.reporter import ExecutedOnlyReporter
from . import __version__
from .nikola import Nikola
from .plugin_categories import Command
from .log import configure_logging, LOGGER, ColorfulFormatter, LoggingMode
from .utils import get_root_dir, req_missing, sys_decode
try:
import readline # NOQA
except ImportError:
pass # This is only so raw_input/input does nicer things if it's available
config = {}
# DO NOT USE unless you know what you are doing!
_RETURN_DOITNIKOLA = False
def main(args=None):
"""Run Nikola."""
colorful = False
if sys.stderr.isatty() and os.name != 'nt' and os.getenv('NIKOLA_MONO') is None and os.getenv('TERM') != 'dumb':
colorful = True
ColorfulFormatter._colorful = colorful
if args is None:
args = sys.argv[1:]
oargs = args
args = [sys_decode(arg) for arg in args]
conf_filename = 'conf.py'
conf_filename_changed = False
for index, arg in enumerate(args):
if arg[:7] == '--conf=':
del args[index]
del oargs[index]
conf_filename = arg[7:]
conf_filename_changed = True
break
quiet = False
if len(args) > 0 and args[0] == 'build' and '--strict' in args:
LOGGER.info('Running in strict mode')
configure_logging(LoggingMode.STRICT)
    elif len(args) > 0 and args[0] == 'build' and ('-q' in args or '--quiet' in args):
configure_logging(LoggingMode.QUIET)
quiet = True
else:
configure_logging()
global config
original_cwd = os.getcwd()
# Those commands do not require a `conf.py`. (Issue #1132)
# Moreover, actually having one somewhere in the tree can be bad, putting
# the output of that command (the new site) in an unknown directory that is
# not the current working directory. (does not apply to `version`)
argname = args[0] if len(args) > 0 else None
if argname and argname not in ['init', 'version'] and not argname.startswith('import_'):
root = get_root_dir()
if root:
os.chdir(root)
# Help and imports don't require config, but can use one if it exists
needs_config_file = (argname != 'help') and not argname.startswith('import_')
LOGGER.debug("Website root: %r", root)
else:
needs_config_file = False
sys.path.insert(0, os.path.dirname(conf_filename))
try:
spec = importlib.util.spec_from_file_location("conf", conf_filename)
conf = importlib.util.module_from_spec(spec)
# Preserve caching behavior of `import conf` if the filename matches
if os.path.splitext(os.path.basename(conf_filename))[0] == "conf":
sys.modules["conf"] = conf
spec.loader.exec_module(conf)
config = conf.__dict__
except Exception:
if os.path.exists(conf_filename):
msg = traceback.format_exc()
LOGGER.error('"{0}" cannot be parsed.\n{1}'.format(conf_filename, msg))
return 1
elif needs_config_file and conf_filename_changed:
LOGGER.error('Cannot find configuration file "{0}".'.format(conf_filename))
return 1
config = {}
if conf_filename_changed:
LOGGER.info("Using config file '{0}'".format(conf_filename))
invariant = False
if len(args) > 0 and args[0] == 'build' and '--invariant' in args:
try:
import freezegun
freeze = freezegun.freeze_time("2038-01-01")
freeze.start()
invariant = True
except ImportError:
req_missing(['freezegun'], 'perform invariant builds')
if config:
if os.path.isdir('plugins') and not os.path.exists('plugins/__init__.py'):
with open('plugins/__init__.py', 'w') as fh:
fh.write('# Plugin modules go here.')
config['__colorful__'] = colorful
config['__invariant__'] = invariant
config['__quiet__'] = quiet
config['__configuration_filename__'] = conf_filename
config['__cwd__'] = original_cwd
site = Nikola(**config)
DN = DoitNikola(site, quiet)
if _RETURN_DOITNIKOLA:
return DN
_ = DN.run(oargs)
if site.invariant:
freeze.stop()
return _
class Help(DoitHelp):
"""Show Nikola usage."""
@staticmethod
def print_usage(cmds):
"""Print nikola "usage" (basic help) instructions."""
# Remove 'run'. Nikola uses 'build', though we support 'run' for
        # people used to it (e.g. doit users).
# WARNING: 'run' is the vanilla doit command, without support for
# --strict, --invariant and --quiet.
del cmds['run']
print("Nikola is a tool to create static websites and blogs. For full documentation and more information, please visit https://getnikola.com/\n\n")
print("Available commands:")
for cmd_name in sorted(cmds.keys()):
cmd = cmds[cmd_name]
print(" nikola {:20s} {}".format(cmd_name, cmd.doc_purpose))
print("")
print(" nikola help show help / reference")
print(" nikola help <command> show command usage")
print(" nikola help <task-name> show task usage")
class Build(DoitRun):
"""Expose "run" command as "build" for backwards compatibility."""
def __init__(self, *args, **kw):
"""Initialize Build."""
opts = list(self.cmd_options)
opts.append(
{
'name': 'strict',
'long': 'strict',
'default': False,
'type': bool,
'help': "Fail on things that would normally be warnings.",
}
)
opts.append(
{
'name': 'invariant',
'long': 'invariant',
'default': False,
'type': bool,
'help': "Generate invariant output (for testing only!).",
}
)
opts.append(
{
'name': 'quiet',
'long': 'quiet',
'short': 'q',
'default': False,
'type': bool,
'help': "Run quietly.",
}
)
self.cmd_options = tuple(opts)
super().__init__(*args, **kw)
class Clean(DoitClean):
"""Clean site, including the cache directory."""
# The unseemly *a is because this API changed between doit 0.30.1 and 0.31
def clean_tasks(self, tasks, dryrun, *a):
"""Clean tasks."""
if not dryrun and config:
cache_folder = config.get('CACHE_FOLDER', 'cache')
if os.path.exists(cache_folder):
shutil.rmtree(cache_folder)
return super(Clean, self).clean_tasks(tasks, dryrun, *a)
# Nikola has its own "auto" command that uses livereload.
# Expose original doit "auto" command as "doit_auto".
DoitAuto.name = 'doit_auto'
class NikolaTaskLoader(TaskLoader):
"""Nikola-specific task loader."""
def __init__(self, nikola, quiet=False):
"""Initialize the loader."""
self.nikola = nikola
self.quiet = quiet
def load_tasks(self, cmd, opt_values, pos_args):
"""Load Nikola tasks."""
if self.quiet:
DOIT_CONFIG = {
'verbosity': 0,
'reporter': 'zero',
}
else:
DOIT_CONFIG = {
'reporter': ExecutedOnlyReporter,
'outfile': sys.stderr,
}
DOIT_CONFIG['default_tasks'] = ['render_site', 'post_render']
DOIT_CONFIG.update(self.nikola._doit_config)
try:
tasks = generate_tasks(
'render_site',
self.nikola.gen_tasks('render_site', "Task", 'Group of tasks to render the site.'))
latetasks = generate_tasks(
'post_render',
self.nikola.gen_tasks('post_render', "LateTask", 'Group of tasks to be executed after site is rendered.'))
signal('initialized').send(self.nikola)
except Exception:
LOGGER.error('Error loading tasks. An unhandled exception occurred.')
if self.nikola.debug or self.nikola.show_tracebacks:
raise
_print_exception()
sys.exit(3)
return tasks + latetasks, DOIT_CONFIG
class DoitNikola(DoitMain):
"""Nikola-specific implementation of DoitMain."""
    # overwrite the help command
DOIT_CMDS = list(DoitMain.DOIT_CMDS) + [Help, Build, Clean, DoitAuto]
TASK_LOADER = NikolaTaskLoader
def __init__(self, nikola, quiet=False):
"""Initialzie DoitNikola."""
super().__init__()
self.nikola = nikola
nikola.doit = self
self.task_loader = self.TASK_LOADER(nikola, quiet)
def get_cmds(self):
"""Get commands."""
# core doit commands
cmds = DoitMain.get_cmds(self)
# load nikola commands
for name, cmd in self.nikola._commands.items():
cmds[name] = cmd
return cmds
def run(self, cmd_args):
"""Run Nikola."""
args = self.process_args(cmd_args)
args = [sys_decode(arg) for arg in args]
if len(args) == 0:
cmd_args = ['help']
args = ['help']
if '--help' in args or '-h' in args:
new_cmd_args = ['help'] + cmd_args
new_args = ['help'] + args
cmd_args = []
args = []
for arg in new_cmd_args:
if arg not in ('--help', '-h'):
cmd_args.append(arg)
for arg in new_args:
if arg not in ('--help', '-h'):
args.append(arg)
if args[0] == 'help':
self.nikola.init_plugins(commands_only=True)
elif args[0] == 'plugin':
self.nikola.init_plugins(load_all=True)
else:
self.nikola.init_plugins()
sub_cmds = self.get_cmds()
if any(arg in ("--version", '-V') for arg in args):
cmd_args = ['version']
args = ['version']
if args[0] not in sub_cmds.keys():
LOGGER.error("Unknown command {0}".format(args[0]))
sugg = defaultdict(list)
sub_filtered = (i for i in sub_cmds.keys() if i != 'run')
for c in sub_filtered:
d = levenshtein(c, args[0])
sugg[d].append(c)
if sugg.keys():
best_sugg = sugg[min(sugg.keys())]
if len(best_sugg) == 1:
LOGGER.info('Did you mean "{}"?'.format(best_sugg[0]))
else:
LOGGER.info('Did you mean "{}" or "{}"?'.format('", "'.join(best_sugg[:-1]), best_sugg[-1]))
return 3
if not sub_cmds[args[0]] in (Help, TabCompletion) and not isinstance(sub_cmds[args[0]], Command):
if not self.nikola.configured:
LOGGER.error("This command needs to run inside an "
"existing Nikola site.")
return 3
try:
return super().run(cmd_args)
except Exception:
LOGGER.error('An unhandled exception occurred.')
if self.nikola.debug or self.nikola.show_tracebacks:
raise
_print_exception()
return 1
@staticmethod
def print_version():
"""Print Nikola version."""
print("Nikola v" + __version__)
# Override Command.help() to make it more readable and to remove
# some doit-specific stuff. Based on doit's implementation.
# (see Issue #3342)
def _command_help(self: Command):
"""Return help text for a command."""
text = []
usage = "{} {} {}".format(self.bin_name, self.name, self.doc_usage)
text.extend(textwrap.wrap(usage, subsequent_indent=' '))
text.extend(_wrap(self.doc_purpose, 4))
text.append("\nOptions:")
options = defaultdict(list)
for opt in self.cmdparser.options:
options[opt.section].append(opt)
for section, opts in sorted(options.items()):
if section:
section_name = '\n{}'.format(section)
text.extend(_wrap(section_name, 2))
for opt in opts:
            # ignore options that can't be modified on the command line
if not (opt.short or opt.long):
continue
text.extend(_wrap(opt.help_param(), 4))
opt_help = opt.help
if '%(default)s' in opt_help:
opt_help = opt.help % {'default': opt.default}
elif opt.default != '' and opt.default is not False and opt.default is not None:
opt_help += ' [default: {}]'.format(opt.default)
opt_choices = opt.help_choices()
desc = '{} {}'.format(opt_help, opt_choices)
text.extend(_wrap(desc, 8))
# print bool inverse option
if opt.inverse:
text.extend(_wrap('--{}'.format(opt.inverse), 4))
text.extend(_wrap('opposite of --{}'.format(opt.long), 8))
if self.doc_description is not None:
text.append("\n\nDescription:")
text.extend(_wrap(self.doc_description, 4))
return "\n".join(text)
doit.cmd_base.Command.help = _command_help
def levenshtein(s1, s2):
u"""Calculate the Levenshtein distance of two strings.
Implementation from Wikibooks:
https://en.wikibooks.org/w/index.php?title=Algorithm_Implementation/Strings/Levenshtein_distance&oldid=2974448#Python
Copyright © The Wikibooks contributors (CC BY-SA/fair use citation); edited to match coding style and add an exception.
"""
if len(s1) < len(s2):
return levenshtein(s2, s1)
# len(s1) >= len(s2)
if len(s2) == 0:
return len(s1)
previous_row = range(len(s2) + 1)
for i, c1 in enumerate(s1):
current_row = [i + 1]
for j, c2 in enumerate(s2):
# j+1 instead of j since previous_row and current_row are one character longer than s2
insertions = previous_row[j + 1] + 1
deletions = current_row[j] + 1
substitutions = previous_row[j] + (c1 != c2)
current_row.append(min(insertions, deletions, substitutions))
previous_row = current_row
return previous_row[-1]
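# Illustration (values computed by hand, doctest-style):
#
#   >>> levenshtein('build', 'biuld')   # two substitutions
#   2
#   >>> levenshtein('serv', 'serve')    # one insertion
#   1
#
# DoitNikola.run() uses these distances to rank its "Did you mean ...?"
# suggestions for mistyped commands.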
def _print_exception():
"""Print an exception in a friendlier, shorter style."""
etype, evalue, _ = sys.exc_info()
LOGGER.error(''.join(traceback.format_exception(etype, evalue, None, limit=0, chain=False)).strip())
LOGGER.warning("To see more details, run Nikola in debug mode (set environment variable NIKOLA_DEBUG=1) or use NIKOLA_SHOW_TRACEBACKS=1")
if __name__ == "__main__":
sys.exit(main(sys.argv[1:]))
|
import hangups
def get_conv_name(conv, truncate=False, show_unread=False):
"""Return a readable name for a conversation.
If the conversation has a custom name, use the custom name. Otherwise, for
one-to-one conversations, the name is the full name of the other user. For
group conversations, the name is a comma-separated list of first names. If
the group conversation is empty, the name is "Empty Conversation".
    If truncate is True, only show up to two names in a group conversation.
If show_unread is True, if there are unread chat messages, show the number
of unread chat messages in parentheses after the conversation name.
"""
num_unread = len([conv_event for conv_event in conv.unread_events if
isinstance(conv_event, hangups.ChatMessageEvent) and
not conv.get_user(conv_event.user_id).is_self])
if show_unread and num_unread > 0:
postfix = ' ({})'.format(num_unread)
else:
postfix = ''
if conv.name is not None:
return conv.name + postfix
else:
participants = sorted(
(user for user in conv.users if not user.is_self),
key=lambda user: user.id_
)
names = [user.first_name for user in participants]
if not participants:
return "Empty Conversation" + postfix
if len(participants) == 1:
return participants[0].full_name + postfix
elif truncate and len(participants) > 2:
return (', '.join(names[:2] + ['+{}'.format(len(names) - 2)]) +
postfix)
else:
return ', '.join(names) + postfix
def add_color_to_scheme(scheme, name, foreground, background, palette_colors):
"""Add foreground and background colours to a color scheme"""
if foreground is None and background is None:
return scheme
new_scheme = []
for item in scheme:
if item[0] == name:
if foreground is None:
foreground = item[1]
if background is None:
background = item[2]
if palette_colors > 16:
new_scheme.append((name, '', '', '', foreground, background))
else:
new_scheme.append((name, foreground, background))
else:
new_scheme.append(item)
return new_scheme
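# Illustration (hypothetical urwid-style palette entries): overriding only the
# foreground of the 'msg' entry keeps its existing background, and the entry
# shape depends on the terminal's color depth:
#
#   scheme = [('msg', 'default', 'default')]
#   add_color_to_scheme(scheme, 'msg', 'dark red', None, 16)
#   # -> [('msg', 'dark red', 'default')]
#   add_color_to_scheme(scheme, 'msg', 'dark red', None, 256)
#   # -> [('msg', '', '', '', 'dark red', 'default')]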
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
__author__ = '[email protected]'
import json
import logging
import time
from absl import app
from absl import flags
import script_driver
import tpc_profile_details
flags.DEFINE_string('profile', None, 'Profile to identify the sequence of '
'scripts.')
FLAGS = flags.FLAGS
def execute_profile(profile):
"""Method to execute a profile (list of sql scripts) on an edw cluster.
Execute a profile (list of sql scripts, identified by names) on a cluster and
report a dictionary with the execution time.
Arguments:
profile: Profile to identify the sequence of scripts.
Returns:
A dictionary containing
1. Individual script metrics: script name and its execution time (-1 if
the script fails).
2. Aggregated execution time: profile name and cumulative execution time.
"""
execution_times = {}
start_time = time.time()
for script_index in tpc_profile_details.profile_dictionary[profile]:
logfile_suffix = '{}_{}'.format(profile, str(script_index))
script = '{}.sql'.format(str(script_index))
script_performance = script_driver.execute_script(script, logfile_suffix)
execution_times.update(json.loads(script_performance))
profile_execution_wall_time = round((time.time() - start_time), 2)
execution_times['wall_time'] = {'execution_time': profile_execution_wall_time,
'job_id': 'undefined_job'}
return json.dumps(execution_times)
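# Shape of the returned JSON (illustrative; the per-script keys come from
# script_driver.execute_script, and the numbers are made up):
#
#   {
#     "1": {"execution_time": 12.34, "job_id": "..."},
#     "2": {"execution_time": -1, "job_id": "..."},
#     "wall_time": {"execution_time": 25.1, "job_id": "undefined_job"}
#   }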
def main(argv):
del argv
print(execute_profile(FLAGS.profile))
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
app.run(main)
|
import asyncio
from collections import OrderedDict
import logging
import async_timeout
from songpal import (
ConnectChange,
ContentChange,
Device,
PowerChange,
SongpalException,
VolumeChange,
)
import voluptuous as vol
from homeassistant.components.media_player import MediaPlayerEntity
from homeassistant.components.media_player.const import (
SUPPORT_SELECT_SOURCE,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
SUPPORT_VOLUME_STEP,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_NAME, EVENT_HOMEASSISTANT_STOP, STATE_OFF, STATE_ON
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers import (
config_validation as cv,
device_registry as dr,
entity_platform,
)
from homeassistant.helpers.typing import HomeAssistantType
from .const import CONF_ENDPOINT, DOMAIN, SET_SOUND_SETTING
_LOGGER = logging.getLogger(__name__)
PARAM_NAME = "name"
PARAM_VALUE = "value"
SUPPORT_SONGPAL = (
SUPPORT_VOLUME_SET
| SUPPORT_VOLUME_STEP
| SUPPORT_VOLUME_MUTE
| SUPPORT_SELECT_SOURCE
| SUPPORT_TURN_ON
| SUPPORT_TURN_OFF
)
INITIAL_RETRY_DELAY = 10
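# Reconnect attempts (see _try_reconnect in SongpalEntity below) start at this
# delay and double after every failure, capped at 300 seconds: 10, 20, 40, ...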
async def async_setup_platform(
hass: HomeAssistantType, config: dict, async_add_entities, discovery_info=None
) -> None:
"""Set up from legacy configuration file. Obsolete."""
_LOGGER.error(
"Configuring Songpal through media_player platform is no longer supported. Convert to songpal platform or UI configuration"
)
async def async_setup_entry(
hass: HomeAssistantType, config_entry: ConfigEntry, async_add_entities
) -> None:
"""Set up songpal media player."""
name = config_entry.data[CONF_NAME]
endpoint = config_entry.data[CONF_ENDPOINT]
device = Device(endpoint)
try:
async with async_timeout.timeout(
10
): # set timeout to avoid blocking the setup process
await device.get_supported_methods()
except (SongpalException, asyncio.TimeoutError) as ex:
_LOGGER.warning("[%s(%s)] Unable to connect", name, endpoint)
_LOGGER.debug("Unable to get methods from songpal: %s", ex)
raise PlatformNotReady from ex
songpal_entity = SongpalEntity(name, device)
async_add_entities([songpal_entity], True)
platform = entity_platform.current_platform.get()
platform.async_register_entity_service(
SET_SOUND_SETTING,
{vol.Required(PARAM_NAME): cv.string, vol.Required(PARAM_VALUE): cv.string},
"async_set_sound_setting",
)
class SongpalEntity(MediaPlayerEntity):
"""Class representing a Songpal device."""
def __init__(self, name, device):
"""Init."""
self._name = name
self._dev = device
self._sysinfo = None
self._model = None
self._state = False
self._available = False
self._initialized = False
self._volume_control = None
self._volume_min = 0
self._volume_max = 1
self._volume = 0
self._is_muted = False
self._active_source = None
self._sources = {}
@property
def should_poll(self):
"""Return True if the device should be polled."""
return False
async def async_added_to_hass(self):
"""Run when entity is added to hass."""
await self.async_activate_websocket()
async def async_will_remove_from_hass(self):
"""Run when entity will be removed from hass."""
await self._dev.stop_listen_notifications()
async def async_activate_websocket(self):
"""Activate websocket for listening if wanted."""
_LOGGER.info("Activating websocket connection")
async def _volume_changed(volume: VolumeChange):
_LOGGER.debug("Volume changed: %s", volume)
self._volume = volume.volume
self._is_muted = volume.mute
self.async_write_ha_state()
async def _source_changed(content: ContentChange):
_LOGGER.debug("Source changed: %s", content)
if content.is_input:
self._active_source = self._sources[content.uri]
_LOGGER.debug("New active source: %s", self._active_source)
self.async_write_ha_state()
else:
_LOGGER.debug("Got non-handled content change: %s", content)
async def _power_changed(power: PowerChange):
_LOGGER.debug("Power changed: %s", power)
self._state = power.status
self.async_write_ha_state()
async def _try_reconnect(connect: ConnectChange):
_LOGGER.warning(
"[%s(%s)] Got disconnected, trying to reconnect",
self.name,
self._dev.endpoint,
)
_LOGGER.debug("Disconnected: %s", connect.exception)
self._available = False
self.async_write_ha_state()
# Try to reconnect forever, a successful reconnect will initialize
# the websocket connection again.
delay = INITIAL_RETRY_DELAY
while not self._available:
_LOGGER.debug("Trying to reconnect in %s seconds", delay)
await asyncio.sleep(delay)
try:
await self._dev.get_supported_methods()
except SongpalException as ex:
_LOGGER.debug("Failed to reconnect: %s", ex)
delay = min(2 * delay, 300)
else:
# We need to inform HA about the state in case we are coming
# back from a disconnected state.
await self.async_update_ha_state(force_refresh=True)
self.hass.loop.create_task(self._dev.listen_notifications())
_LOGGER.warning(
"[%s(%s)] Connection reestablished", self.name, self._dev.endpoint
)
self._dev.on_notification(VolumeChange, _volume_changed)
self._dev.on_notification(ContentChange, _source_changed)
self._dev.on_notification(PowerChange, _power_changed)
self._dev.on_notification(ConnectChange, _try_reconnect)
async def handle_stop(event):
await self._dev.stop_listen_notifications()
self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, handle_stop)
self.hass.loop.create_task(self._dev.listen_notifications())
@property
def name(self):
"""Return name of the device."""
return self._name
@property
def unique_id(self):
"""Return a unique ID."""
return self._sysinfo.macAddr
@property
def device_info(self):
"""Return the device info."""
return {
"connections": {(dr.CONNECTION_NETWORK_MAC, self._sysinfo.macAddr)},
"identifiers": {(DOMAIN, self.unique_id)},
"manufacturer": "Sony Corporation",
"name": self.name,
"sw_version": self._sysinfo.version,
"model": self._model,
}
@property
def available(self):
"""Return availability of the device."""
return self._available
async def async_set_sound_setting(self, name, value):
"""Change a setting on the device."""
_LOGGER.debug("Calling set_sound_setting with %s: %s", name, value)
await self._dev.set_sound_settings(name, value)
async def async_update(self):
"""Fetch updates from the device."""
try:
if self._sysinfo is None:
self._sysinfo = await self._dev.get_system_info()
if self._model is None:
interface_info = await self._dev.get_interface_information()
self._model = interface_info.modelName
volumes = await self._dev.get_volume_information()
if not volumes:
_LOGGER.error("Got no volume controls, bailing out")
self._available = False
return
if len(volumes) > 1:
_LOGGER.debug("Got %s volume controls, using the first one", volumes)
volume = volumes[0]
_LOGGER.debug("Current volume: %s", volume)
self._volume_max = volume.maxVolume
self._volume_min = volume.minVolume
self._volume = volume.volume
self._volume_control = volume
self._is_muted = self._volume_control.is_muted
status = await self._dev.get_power()
self._state = status.status
_LOGGER.debug("Got state: %s", status)
inputs = await self._dev.get_inputs()
_LOGGER.debug("Got ins: %s", inputs)
self._sources = OrderedDict()
for input_ in inputs:
self._sources[input_.uri] = input_
if input_.active:
self._active_source = input_
_LOGGER.debug("Active source: %s", self._active_source)
self._available = True
except SongpalException as ex:
_LOGGER.error("Unable to update: %s", ex)
self._available = False
async def async_select_source(self, source):
"""Select source."""
for out in self._sources.values():
if out.title == source:
await out.activate()
return
_LOGGER.error("Unable to find output: %s", source)
@property
def source_list(self):
"""Return list of available sources."""
return [src.title for src in self._sources.values()]
@property
def state(self):
"""Return current state."""
if self._state:
return STATE_ON
return STATE_OFF
@property
def source(self):
"""Return currently active source."""
# Avoid a KeyError when _active_source is not (yet) populated
return getattr(self._active_source, "title", None)
@property
def volume_level(self):
"""Return volume level."""
volume = self._volume / self._volume_max
return volume
async def async_set_volume_level(self, volume):
"""Set volume level."""
volume = int(volume * self._volume_max)
_LOGGER.debug("Setting volume to %s", volume)
return await self._volume_control.set_volume(volume)
async def async_volume_up(self):
"""Set volume up."""
return await self._volume_control.set_volume(self._volume + 1)
async def async_volume_down(self):
"""Set volume down."""
return await self._volume_control.set_volume(self._volume - 1)
async def async_turn_on(self):
"""Turn the device on."""
return await self._dev.set_power(True)
async def async_turn_off(self):
"""Turn the device off."""
return await self._dev.set_power(False)
async def async_mute_volume(self, mute):
"""Mute or unmute the device."""
_LOGGER.debug("Set mute: %s", mute)
return await self._volume_control.set_mute(mute)
@property
def is_volume_muted(self):
"""Return whether the device is muted."""
return self._is_muted
@property
def supported_features(self):
"""Return supported features."""
return SUPPORT_SONGPAL
|
from flexx import event
class Person(event.Component):
first_name = event.StringProp('Jane', settable=True)
last_name = event.StringProp('Doe', settable=True)
class Greeter(event.Component):
message = event.StringProp('', settable=True)
@event.reaction
def show_message(self):
print('Message:', self.message)
p = Person()
# This is the line that this example is about
g = Greeter(message=lambda: p.first_name + ' ' + p.last_name)
p.set_first_name('Alice')
event.loop.iter()
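# What this demonstrates (behavior summary; output is illustrative): because
# `message` is given a function, Flexx treats it as an implicit reaction that
# re-runs whenever `p.first_name` or `p.last_name` changes. After the loop
# iteration, show_message() is expected to print something like:
#
#   Message: Jane Doe
#   Message: Alice Doe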
|
from unittest import TestCase
import numpy as np
import pandas as pd
from scattertext import LogOddsRatioInformativeDirichletPrior
from scattertext.PriorFactory import PriorFactory
from scattertext.test.test_semioticSquare import get_test_corpus
class TestPriorFactory(TestCase):
def test_all_categories(self):
corpus = get_test_corpus()
priors, my_corpus = (PriorFactory(corpus, starting_count=0, category='hamlet')
.use_all_categories()
.build())
tdf = corpus.get_term_freq_df()
self.assertEqual(len(priors), len(tdf))
np.testing.assert_equal(priors.values,
corpus.get_term_freq_df().sum(axis=1).values)
def test_neutral_categories(self):
corpus = get_test_corpus()
        priors = (PriorFactory(corpus, 'hamlet', starting_count=0.001,
not_categories=['swift'])
.use_neutral_categories()
.get_priors())
self.assertEqual(priors.min(), 0.001)
self.assertEqual(priors.shape[0], corpus._X.shape[1])
corpus = get_test_corpus()
priors = (PriorFactory(corpus, 'hamlet', starting_count=0.001,
not_categories=['swift'])
.use_neutral_categories()
.drop_zero_priors()
.get_priors())
jzcnts = corpus.get_term_freq_df()['jay-z/r. kelly freq'].where(lambda x: x > 0).dropna()
np.testing.assert_equal(priors.values,
jzcnts.values + 0.001)
def test_get_general_term_frequencies(self):
corpus = get_test_corpus()
fact = (PriorFactory(corpus,
category='hamlet',
not_categories=['swift'],
starting_count=0)
.use_general_term_frequencies()
.use_all_categories()
)
priors, clean_corpus = fact.build()
expected_prior = pd.merge(corpus.get_term_doc_count_df(),
corpus.get_term_and_background_counts()[['background']],
left_index=True,
right_index=True,
how='left').fillna(0.).sum(axis=1)
np.testing.assert_allclose(priors.values, expected_prior.values)
def test_align_to_target(self):
full_corpus = get_test_corpus()
corpus = full_corpus.remove_categories(['swift'])
priors = PriorFactory(full_corpus).use_all_categories().get_priors()
with self.assertRaises(ValueError):
(LogOddsRatioInformativeDirichletPrior(priors)
.get_scores(*corpus.get_term_freq_df().values.T))
priors = (PriorFactory(full_corpus)
.use_all_categories()
.align_to_target(corpus)
.get_priors())
(LogOddsRatioInformativeDirichletPrior(priors)
.get_scores(*corpus.get_term_freq_df().values.T))
def test_use_categories(self):
full_corpus = get_test_corpus()
priors = PriorFactory(full_corpus).use_categories(['swift']).get_priors()
corpus = full_corpus.remove_categories(['swift'])
with self.assertRaises(ValueError):
(LogOddsRatioInformativeDirichletPrior(priors)
.get_scores(*corpus.get_term_freq_df().values.T))
priors = (PriorFactory(full_corpus)
.use_all_categories()
.align_to_target(corpus)
.get_priors())
(LogOddsRatioInformativeDirichletPrior(priors)
.get_scores(*corpus.get_term_freq_df().values.T))
def test_get_custom_term_frequencies(self):
corpus = get_test_corpus()
fact = (PriorFactory(corpus, starting_count=0.04)
.use_custom_term_frequencies(pd.Series({'halt': 3, 'i': 8}))
.drop_zero_priors()
)
priors, clean_corpus = fact.build()
self.assertEqual(set(clean_corpus.get_terms()), {'i', 'halt'})
np.testing.assert_equal(priors.sort_values().values, [3.04, 8.04])
|
import functools
from hangups import get_auth
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD
from homeassistant.core import callback
from .const import (
CONF_2FA,
CONF_AUTH_CODE,
CONF_REFRESH_TOKEN,
DOMAIN as HANGOUTS_DOMAIN,
)
from .hangups_utils import (
Google2FAError,
GoogleAuthError,
HangoutsCredentials,
HangoutsRefreshToken,
)
@callback
def configured_hangouts(hass):
"""Return the configures Google Hangouts Account."""
entries = hass.config_entries.async_entries(HANGOUTS_DOMAIN)
if entries:
return entries[0]
return None
@config_entries.HANDLERS.register(HANGOUTS_DOMAIN)
class HangoutsFlowHandler(config_entries.ConfigFlow):
"""Config flow Google Hangouts."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_PUSH
def __init__(self):
"""Initialize Google Hangouts config flow."""
self._credentials = None
self._refresh_token = None
async def async_step_user(self, user_input=None):
"""Handle a flow start."""
errors = {}
if configured_hangouts(self.hass) is not None:
return self.async_abort(reason="already_configured")
if user_input is not None:
user_email = user_input[CONF_EMAIL]
user_password = user_input[CONF_PASSWORD]
user_auth_code = user_input.get(CONF_AUTH_CODE)
manual_login = user_auth_code is not None
user_pin = None
self._credentials = HangoutsCredentials(
user_email, user_password, user_pin, user_auth_code
)
self._refresh_token = HangoutsRefreshToken(None)
try:
await self.hass.async_add_executor_job(
functools.partial(
get_auth,
self._credentials,
self._refresh_token,
manual_login=manual_login,
)
)
return await self.async_step_final()
except GoogleAuthError as err:
if isinstance(err, Google2FAError):
return await self.async_step_2fa()
msg = str(err)
if msg == "Unknown verification code input":
errors["base"] = "invalid_2fa_method"
else:
errors["base"] = "invalid_login"
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(CONF_EMAIL): str,
vol.Required(CONF_PASSWORD): str,
vol.Optional(CONF_AUTH_CODE): str,
}
),
errors=errors,
)
async def async_step_2fa(self, user_input=None):
"""Handle the 2fa step, if needed."""
errors = {}
if user_input is not None:
self._credentials.set_verification_code(user_input[CONF_2FA])
try:
await self.hass.async_add_executor_job(
get_auth, self._credentials, self._refresh_token
)
return await self.async_step_final()
except GoogleAuthError:
errors["base"] = "invalid_2fa"
return self.async_show_form(
step_id=CONF_2FA,
data_schema=vol.Schema({vol.Required(CONF_2FA): str}),
errors=errors,
)
async def async_step_final(self):
"""Handle the final step, create the config entry."""
return self.async_create_entry(
title=self._credentials.get_email(),
data={
CONF_EMAIL: self._credentials.get_email(),
CONF_REFRESH_TOKEN: self._refresh_token.get(),
},
)
async def async_step_import(self, _):
"""Handle a flow import."""
return await self.async_step_user()
|
import argparse # noqa
import logging
import os
import redis
from docker_registry.lib import layers
from docker_registry.lib import rlock
from docker_registry.lib import rqueue
import docker_registry.storage as storage
store = storage.load()
redis_default_host = os.environ.get(
'DOCKER_REDIS_1_PORT_6379_TCP_ADDR',
'0.0.0.0')
redis_default_port = int(os.environ.get(
'DOCKER_REDIS_1_PORT_6379_TCP_PORT',
'6379'))
log = logging.getLogger(__name__)
log.setLevel(logging.DEBUG)
def get_parser():
parser = argparse.ArgumentParser(
description="Daemon for computing layer diffs"
)
parser.add_argument(
"--rhost", default=redis_default_host, dest="redis_host",
help="Host of redis instance to listen to",
)
parser.add_argument(
"--rport", default=redis_default_port, dest="redis_port", type=int,
help="Port of redis instance to listen to",
)
parser.add_argument(
"-d", "--database", default=0, dest="redis_db",
type=int, metavar="redis_db",
help="Redis database to connect to",
)
parser.add_argument(
"-p", "--password", default=None, metavar="redis_pw", dest="redis_pw",
help="Redis database password",
)
return parser
def get_redis_connection(options):
redis_conn = redis.StrictRedis(
host=options.redis_host,
port=options.redis_port,
db=options.redis_db,
password=options.redis_pw,
)
return redis_conn
def handle_request(layer_id, redis_conn):
    '''Handler for any item pulled from the worker job queue.
    This handler is called every time the worker is able to pop a message
    from the job queue filled by the registry. The worker blocks until a
    message is available. This handler will then attempt to acquire a lock
    for the provided layer_id and, if successful, process a diff for the
    layer.
    If the lock for this layer_id has already been acquired, the worker
    immediately times out so it can block waiting for another request.
    '''
try:
# this with-context will attempt to establish a 5 minute lock
# on the key for this layer, immediately passing on LockTimeout
        # if one isn't available
with rlock.Lock(redis_conn,
"diff-worker-lock",
layer_id,
expires=60 * 5):
# first check if a cached result is already available. The registry
# already does this, but hey.
diff_data = layers.get_image_diff_cache(layer_id)
if not diff_data:
log.info("Processing diff for %s" % layer_id)
layers.get_image_diff_json(layer_id)
except rlock.LockTimeout:
log.info("Another worker is processing %s. Skipping." % layer_id)
if __name__ == '__main__':
parser = get_parser()
options = parser.parse_args()
redis_conn = get_redis_connection(options)
# create a bounded queue holding registry requests for diff calculations
queue = rqueue.CappedCollection(redis_conn, "diff-worker", 1024)
# initialize worker factory with the queue and redis connection
worker_factory = rqueue.worker(queue, redis_conn)
# create worker instance with our handler
worker = worker_factory(handle_request)
log.info("Starting worker...")
worker()
|
import math
from homeassistant.components.fan import (
DOMAIN,
SPEED_HIGH,
SPEED_LOW,
SPEED_MEDIUM,
SPEED_OFF,
SUPPORT_SET_SPEED,
FanEntity,
)
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from . import ZWaveDeviceEntity
SPEED_LIST = [SPEED_OFF, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH]
SUPPORTED_FEATURES = SUPPORT_SET_SPEED
# The device value (0-99) is first scaled to an integer index (0-3)
VALUE_TO_SPEED = {0: SPEED_OFF, 1: SPEED_LOW, 2: SPEED_MEDIUM, 3: SPEED_HIGH}
SPEED_TO_VALUE = {SPEED_OFF: 0, SPEED_LOW: 1, SPEED_MEDIUM: 50, SPEED_HIGH: 99}
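# Worked mapping (derived from update_properties below): a device value in the
# 0-99 range is scaled with ceil(value * 3 / 100) before the lookup, e.g.
#   0     -> 0 -> SPEED_OFF
#   1-33  -> 1 -> SPEED_LOW
#   34-66 -> 2 -> SPEED_MEDIUM
#   67-99 -> 3 -> SPEED_HIGH
# while SPEED_TO_VALUE writes back representative raw values (0, 1, 50, 99).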
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Z-Wave Fan from Config Entry."""
@callback
def async_add_fan(fan):
"""Add Z-Wave Fan."""
async_add_entities([fan])
async_dispatcher_connect(hass, "zwave_new_fan", async_add_fan)
def get_device(values, **kwargs):
"""Create Z-Wave entity device."""
return ZwaveFan(values)
class ZwaveFan(ZWaveDeviceEntity, FanEntity):
"""Representation of a Z-Wave fan."""
def __init__(self, values):
"""Initialize the Z-Wave fan device."""
ZWaveDeviceEntity.__init__(self, values, DOMAIN)
self.update_properties()
def update_properties(self):
"""Handle data changes for node values."""
value = math.ceil(self.values.primary.data * 3 / 100)
self._state = VALUE_TO_SPEED[value]
def set_speed(self, speed):
"""Set the speed of the fan."""
self.node.set_dimmer(self.values.primary.value_id, SPEED_TO_VALUE[speed])
def turn_on(self, speed=None, **kwargs):
"""Turn the device on."""
if speed is None:
# Value 255 tells device to return to previous value
self.node.set_dimmer(self.values.primary.value_id, 255)
else:
self.set_speed(speed)
def turn_off(self, **kwargs):
"""Turn the device off."""
self.node.set_dimmer(self.values.primary.value_id, 0)
@property
def speed(self):
"""Return the current speed."""
return self._state
@property
def speed_list(self):
"""Get the list of available speeds."""
return SPEED_LIST
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORTED_FEATURES
|
from typing import Optional, Sequence
from pysmartthings import Attribute, Capability
from homeassistant.components.cover import (
ATTR_POSITION,
DEVICE_CLASS_DOOR,
DEVICE_CLASS_GARAGE,
DEVICE_CLASS_SHADE,
DOMAIN as COVER_DOMAIN,
STATE_CLOSED,
STATE_CLOSING,
STATE_OPEN,
STATE_OPENING,
SUPPORT_CLOSE,
SUPPORT_OPEN,
SUPPORT_SET_POSITION,
CoverEntity,
)
from homeassistant.const import ATTR_BATTERY_LEVEL
from . import SmartThingsEntity
from .const import DATA_BROKERS, DOMAIN
VALUE_TO_STATE = {
"closed": STATE_CLOSED,
"closing": STATE_CLOSING,
"open": STATE_OPEN,
"opening": STATE_OPENING,
"partially open": STATE_OPEN,
"unknown": None,
}
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Add covers for a config entry."""
broker = hass.data[DOMAIN][DATA_BROKERS][config_entry.entry_id]
async_add_entities(
[
SmartThingsCover(device)
for device in broker.devices.values()
if broker.any_assigned(device.device_id, COVER_DOMAIN)
],
True,
)
def get_capabilities(capabilities: Sequence[str]) -> Optional[Sequence[str]]:
"""Return all capabilities supported if minimum required are present."""
min_required = [
Capability.door_control,
Capability.garage_door_control,
Capability.window_shade,
]
# Must have one of the min_required
if any(capability in capabilities for capability in min_required):
# Return all capabilities supported/consumed
return min_required + [Capability.battery, Capability.switch_level]
return None
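# Illustration (hypothetical capability list): a garage door reporting
# [Capability.garage_door_control, Capability.battery] matches one of the
# minimum capabilities, so the function claims the device and returns the full
# consumed set (all three control/shade capabilities plus battery and
# switch_level); a device with neither door nor shade control returns None.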
class SmartThingsCover(SmartThingsEntity, CoverEntity):
"""Define a SmartThings cover."""
def __init__(self, device):
"""Initialize the cover class."""
super().__init__(device)
self._device_class = None
self._state = None
self._state_attrs = None
self._supported_features = SUPPORT_OPEN | SUPPORT_CLOSE
if Capability.switch_level in device.capabilities:
self._supported_features |= SUPPORT_SET_POSITION
async def async_close_cover(self, **kwargs):
"""Close cover."""
# Same command for all 3 supported capabilities
await self._device.close(set_status=True)
# State is set optimistically in the commands above, therefore update
# the entity state ahead of receiving the confirming push updates
self.async_schedule_update_ha_state(True)
async def async_open_cover(self, **kwargs):
"""Open the cover."""
# Same for all capability types
await self._device.open(set_status=True)
# State is set optimistically in the commands above, therefore update
# the entity state ahead of receiving the confirming push updates
self.async_schedule_update_ha_state(True)
async def async_set_cover_position(self, **kwargs):
"""Move the cover to a specific position."""
if not self._supported_features & SUPPORT_SET_POSITION:
return
        # Do not pass set_status=True, as the device will report progress.
await self._device.set_level(kwargs[ATTR_POSITION], 0)
async def async_update(self):
"""Update the attrs of the cover."""
value = None
if Capability.door_control in self._device.capabilities:
self._device_class = DEVICE_CLASS_DOOR
value = self._device.status.door
elif Capability.window_shade in self._device.capabilities:
self._device_class = DEVICE_CLASS_SHADE
value = self._device.status.window_shade
elif Capability.garage_door_control in self._device.capabilities:
self._device_class = DEVICE_CLASS_GARAGE
value = self._device.status.door
self._state = VALUE_TO_STATE.get(value)
self._state_attrs = {}
battery = self._device.status.attributes[Attribute.battery].value
if battery is not None:
self._state_attrs[ATTR_BATTERY_LEVEL] = battery
@property
def is_opening(self):
"""Return if the cover is opening or not."""
return self._state == STATE_OPENING
@property
def is_closing(self):
"""Return if the cover is closing or not."""
return self._state == STATE_CLOSING
@property
def is_closed(self):
"""Return if the cover is closed or not."""
if self._state == STATE_CLOSED:
return True
return None if self._state is None else False
@property
def current_cover_position(self):
"""Return current position of cover."""
if not self._supported_features & SUPPORT_SET_POSITION:
return None
return self._device.status.level
@property
def device_class(self):
"""Define this cover as a garage door."""
return self._device_class
@property
def device_state_attributes(self):
"""Get additional state attributes."""
return self._state_attrs
@property
def supported_features(self):
"""Flag supported features."""
return self._supported_features
|
import pytest
from plumbum.colorlib.styles import ANSIStyle, Color, AttributeNotFound, ColorNotFound
from plumbum.colorlib.names import color_html, FindNearest
# Just check to see if this file is importable
from plumbum.cli.image import Image
class TestNearestColor:
def test_exact(self):
assert FindNearest(0,0,0).all_fast() == 0
for n,color in enumerate(color_html):
# Ignoring duplicates
if n not in (16, 21, 46, 51, 196, 201, 226, 231, 244):
rgb = (int(color[1:3],16), int(color[3:5],16), int(color[5:7],16))
assert FindNearest(*rgb).all_fast() == n
def test_nearby(self):
assert FindNearest(1,2,2).all_fast() == 0
assert FindNearest(7,7,9).all_fast() == 232
def test_simplecolor(self):
assert FindNearest(1,2,4).only_basic() == 0
assert FindNearest(0,255,0).only_basic() == 2
assert FindNearest(100,100,0).only_basic() == 3
assert FindNearest(140,140,140).only_basic() == 7
class TestColorLoad:
def test_rgb(self):
blue = Color(0,0,255) # Red, Green, Blue
assert blue.rgb == (0,0,255)
def test_simple_name(self):
green = Color.from_simple('green')
assert green.number == 2
def test_different_names(self):
assert Color('Dark Blue') == Color('Dark_Blue')
assert Color('Dark_blue') == Color('Dark_Blue')
assert Color('DARKBLUE') == Color('Dark_Blue')
assert Color('DarkBlue') == Color('Dark_Blue')
assert Color('Dark Green') == Color('Dark_Green')
def test_loading_methods(self):
assert Color("Yellow") == Color.from_full("Yellow")
assert (Color.from_full("yellow").representation !=
Color.from_simple("yellow").representation)
class TestANSIColor:
@classmethod
def setup_class(cls):
ANSIStyle.use_color = True
def test_ansi(self):
assert str(ANSIStyle(fgcolor=Color('reset'))) == '\033[39m'
assert str(ANSIStyle(fgcolor=Color.from_full('green'))) == '\033[38;5;2m'
assert str(ANSIStyle(fgcolor=Color.from_simple('red'))) == '\033[31m'
class TestNearestColorExhaustive:
def test_allcolors(self):
myrange = (0,1,2,5,17,39,48,73,82,140,193,210,240,244,250,254,255)
for r in myrange:
for g in myrange:
for b in myrange:
near = FindNearest(r,g,b)
assert near.all_slow() == near.all_fast(), 'Tested: {0}, {1}, {2}'.format(r,g,b)
|
import json
import logging
import voluptuous as vol
from homeassistant.components import mqtt
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_COLOR_TEMP,
ATTR_EFFECT,
ATTR_FLASH,
ATTR_HS_COLOR,
ATTR_TRANSITION,
ATTR_WHITE_VALUE,
FLASH_LONG,
FLASH_SHORT,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_COLOR_TEMP,
SUPPORT_EFFECT,
SUPPORT_FLASH,
SUPPORT_TRANSITION,
SUPPORT_WHITE_VALUE,
LightEntity,
)
from homeassistant.components.mqtt import (
CONF_COMMAND_TOPIC,
CONF_QOS,
CONF_RETAIN,
CONF_STATE_TOPIC,
MqttAttributes,
MqttAvailability,
MqttDiscoveryUpdate,
MqttEntityDeviceInfo,
subscription,
)
from homeassistant.const import (
CONF_BRIGHTNESS,
CONF_COLOR_TEMP,
CONF_DEVICE,
CONF_EFFECT,
CONF_NAME,
CONF_OPTIMISTIC,
CONF_RGB,
CONF_UNIQUE_ID,
CONF_WHITE_VALUE,
CONF_XY,
STATE_ON,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.typing import ConfigType
import homeassistant.util.color as color_util
from ..debug_info import log_messages
from .schema import MQTT_LIGHT_SCHEMA_SCHEMA
from .schema_basic import CONF_BRIGHTNESS_SCALE
_LOGGER = logging.getLogger(__name__)
DOMAIN = "mqtt_json"
DEFAULT_BRIGHTNESS = False
DEFAULT_COLOR_TEMP = False
DEFAULT_EFFECT = False
DEFAULT_FLASH_TIME_LONG = 10
DEFAULT_FLASH_TIME_SHORT = 2
DEFAULT_NAME = "MQTT JSON Light"
DEFAULT_OPTIMISTIC = False
DEFAULT_RGB = False
DEFAULT_WHITE_VALUE = False
DEFAULT_XY = False
DEFAULT_HS = False
DEFAULT_BRIGHTNESS_SCALE = 255
CONF_EFFECT_LIST = "effect_list"
CONF_FLASH_TIME_LONG = "flash_time_long"
CONF_FLASH_TIME_SHORT = "flash_time_short"
CONF_HS = "hs"
CONF_MAX_MIREDS = "max_mireds"
CONF_MIN_MIREDS = "min_mireds"
# Stealing some of these from the base MQTT configs.
PLATFORM_SCHEMA_JSON = (
mqtt.MQTT_RW_PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_BRIGHTNESS, default=DEFAULT_BRIGHTNESS): cv.boolean,
vol.Optional(
CONF_BRIGHTNESS_SCALE, default=DEFAULT_BRIGHTNESS_SCALE
): vol.All(vol.Coerce(int), vol.Range(min=1)),
vol.Optional(CONF_COLOR_TEMP, default=DEFAULT_COLOR_TEMP): cv.boolean,
vol.Optional(CONF_DEVICE): mqtt.MQTT_ENTITY_DEVICE_INFO_SCHEMA,
vol.Optional(CONF_EFFECT, default=DEFAULT_EFFECT): cv.boolean,
vol.Optional(CONF_EFFECT_LIST): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(
CONF_FLASH_TIME_LONG, default=DEFAULT_FLASH_TIME_LONG
): cv.positive_int,
vol.Optional(
CONF_FLASH_TIME_SHORT, default=DEFAULT_FLASH_TIME_SHORT
): cv.positive_int,
vol.Optional(CONF_HS, default=DEFAULT_HS): cv.boolean,
vol.Optional(CONF_MAX_MIREDS): cv.positive_int,
vol.Optional(CONF_MIN_MIREDS): cv.positive_int,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
vol.Optional(CONF_QOS, default=mqtt.DEFAULT_QOS): vol.All(
vol.Coerce(int), vol.In([0, 1, 2])
),
vol.Optional(CONF_RETAIN, default=mqtt.DEFAULT_RETAIN): cv.boolean,
vol.Optional(CONF_RGB, default=DEFAULT_RGB): cv.boolean,
vol.Optional(CONF_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_UNIQUE_ID): cv.string,
vol.Optional(CONF_WHITE_VALUE, default=DEFAULT_WHITE_VALUE): cv.boolean,
vol.Optional(CONF_XY, default=DEFAULT_XY): cv.boolean,
}
)
.extend(mqtt.MQTT_AVAILABILITY_SCHEMA.schema)
.extend(mqtt.MQTT_JSON_ATTRS_SCHEMA.schema)
.extend(MQTT_LIGHT_SCHEMA_SCHEMA.schema)
)
async def async_setup_entity_json(
hass, config: ConfigType, async_add_entities, config_entry, discovery_data
):
"""Set up a MQTT JSON Light."""
async_add_entities([MqttLightJson(config, config_entry, discovery_data)])
class MqttLightJson(
MqttAttributes,
MqttAvailability,
MqttDiscoveryUpdate,
MqttEntityDeviceInfo,
LightEntity,
RestoreEntity,
):
"""Representation of a MQTT JSON light."""
def __init__(self, config, config_entry, discovery_data):
"""Initialize MQTT JSON light."""
self._state = False
self._sub_state = None
self._supported_features = 0
self._topic = None
self._optimistic = False
self._brightness = None
self._color_temp = None
self._effect = None
self._hs = None
self._white_value = None
self._flash_times = None
self._unique_id = config.get(CONF_UNIQUE_ID)
# Load config
self._setup_from_config(config)
device_config = config.get(CONF_DEVICE)
MqttAttributes.__init__(self, config)
MqttAvailability.__init__(self, config)
MqttDiscoveryUpdate.__init__(self, discovery_data, self.discovery_update)
MqttEntityDeviceInfo.__init__(self, device_config, config_entry)
async def async_added_to_hass(self):
"""Subscribe to MQTT events."""
await super().async_added_to_hass()
await self._subscribe_topics()
async def discovery_update(self, discovery_payload):
"""Handle updated discovery message."""
config = PLATFORM_SCHEMA_JSON(discovery_payload)
self._setup_from_config(config)
await self.attributes_discovery_update(config)
await self.availability_discovery_update(config)
await self.device_info_discovery_update(config)
await self._subscribe_topics()
self.async_write_ha_state()
def _setup_from_config(self, config):
"""(Re)Setup the entity."""
self._config = config
self._topic = {
key: config.get(key) for key in (CONF_STATE_TOPIC, CONF_COMMAND_TOPIC)
}
optimistic = config[CONF_OPTIMISTIC]
self._optimistic = optimistic or self._topic[CONF_STATE_TOPIC] is None
self._flash_times = {
key: config.get(key)
for key in (CONF_FLASH_TIME_SHORT, CONF_FLASH_TIME_LONG)
}
self._supported_features = SUPPORT_TRANSITION | SUPPORT_FLASH
self._supported_features |= config[CONF_RGB] and SUPPORT_COLOR
self._supported_features |= config[CONF_BRIGHTNESS] and SUPPORT_BRIGHTNESS
self._supported_features |= config[CONF_COLOR_TEMP] and SUPPORT_COLOR_TEMP
self._supported_features |= config[CONF_EFFECT] and SUPPORT_EFFECT
self._supported_features |= config[CONF_WHITE_VALUE] and SUPPORT_WHITE_VALUE
self._supported_features |= config[CONF_XY] and SUPPORT_COLOR
self._supported_features |= config[CONF_HS] and SUPPORT_COLOR
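    # Note on the `|=` lines above: `config[CONF_RGB] and SUPPORT_COLOR`
    # evaluates to SUPPORT_COLOR when the option is True and to False (== 0)
    # when it is False, so OR-ing it in either sets the feature bit or is a
    # no-op.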
async def _subscribe_topics(self):
"""(Re)Subscribe to topics."""
last_state = await self.async_get_last_state()
@callback
@log_messages(self.hass, self.entity_id)
def state_received(msg):
"""Handle new MQTT messages."""
values = json.loads(msg.payload)
if values["state"] == "ON":
self._state = True
elif values["state"] == "OFF":
self._state = False
if self._supported_features & SUPPORT_COLOR:
try:
red = int(values["color"]["r"])
green = int(values["color"]["g"])
blue = int(values["color"]["b"])
self._hs = color_util.color_RGB_to_hs(red, green, blue)
except KeyError:
pass
except ValueError:
_LOGGER.warning("Invalid RGB color value received")
try:
x_color = float(values["color"]["x"])
y_color = float(values["color"]["y"])
self._hs = color_util.color_xy_to_hs(x_color, y_color)
except KeyError:
pass
except ValueError:
_LOGGER.warning("Invalid XY color value received")
try:
hue = float(values["color"]["h"])
saturation = float(values["color"]["s"])
self._hs = (hue, saturation)
except KeyError:
pass
except ValueError:
_LOGGER.warning("Invalid HS color value received")
if self._supported_features & SUPPORT_BRIGHTNESS:
try:
self._brightness = int(
values["brightness"]
/ float(self._config[CONF_BRIGHTNESS_SCALE])
* 255
)
except KeyError:
pass
except (TypeError, ValueError):
_LOGGER.warning("Invalid brightness value received")
if self._supported_features & SUPPORT_COLOR_TEMP:
try:
self._color_temp = int(values["color_temp"])
except KeyError:
pass
except ValueError:
_LOGGER.warning("Invalid color temp value received")
if self._supported_features & SUPPORT_EFFECT:
try:
self._effect = values["effect"]
except KeyError:
pass
if self._supported_features & SUPPORT_WHITE_VALUE:
try:
self._white_value = int(values["white_value"])
except KeyError:
pass
except ValueError:
_LOGGER.warning("Invalid white value received")
self.async_write_ha_state()
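# A sample state payload the callback above can digest (values are
# illustrative):
#   {"state": "ON", "brightness": 128,
#    "color": {"r": 255, "g": 180, "b": 200},
#    "color_temp": 155, "effect": "colorloop", "white_value": 150}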
if self._topic[CONF_STATE_TOPIC] is not None:
self._sub_state = await subscription.async_subscribe_topics(
self.hass,
self._sub_state,
{
"state_topic": {
"topic": self._topic[CONF_STATE_TOPIC],
"msg_callback": state_received,
"qos": self._config[CONF_QOS],
}
},
)
if self._optimistic and last_state:
self._state = last_state.state == STATE_ON
if last_state.attributes.get(ATTR_BRIGHTNESS):
self._brightness = last_state.attributes.get(ATTR_BRIGHTNESS)
if last_state.attributes.get(ATTR_HS_COLOR):
self._hs = last_state.attributes.get(ATTR_HS_COLOR)
if last_state.attributes.get(ATTR_COLOR_TEMP):
self._color_temp = last_state.attributes.get(ATTR_COLOR_TEMP)
if last_state.attributes.get(ATTR_EFFECT):
self._effect = last_state.attributes.get(ATTR_EFFECT)
if last_state.attributes.get(ATTR_WHITE_VALUE):
self._white_value = last_state.attributes.get(ATTR_WHITE_VALUE)
async def async_will_remove_from_hass(self):
"""Unsubscribe when removed."""
self._sub_state = await subscription.async_unsubscribe_topics(
self.hass, self._sub_state
)
await MqttAttributes.async_will_remove_from_hass(self)
await MqttAvailability.async_will_remove_from_hass(self)
await MqttDiscoveryUpdate.async_will_remove_from_hass(self)
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
return self._brightness
@property
def color_temp(self):
"""Return the color temperature in mired."""
return self._color_temp
@property
def min_mireds(self):
"""Return the coldest color_temp that this light supports."""
return self._config.get(CONF_MIN_MIREDS, super().min_mireds)
@property
def max_mireds(self):
"""Return the warmest color_temp that this light supports."""
return self._config.get(CONF_MAX_MIREDS, super().max_mireds)
@property
def effect(self):
"""Return the current effect."""
return self._effect
@property
def effect_list(self):
"""Return the list of supported effects."""
return self._config.get(CONF_EFFECT_LIST)
@property
def hs_color(self):
"""Return the hs color value."""
return self._hs
@property
def white_value(self):
"""Return the white property."""
return self._white_value
@property
def should_poll(self):
"""No polling needed for a MQTT light."""
return False
@property
def name(self):
"""Return the name of the device if any."""
return self._config[CONF_NAME]
@property
def unique_id(self):
"""Return a unique ID."""
return self._unique_id
@property
def is_on(self):
"""Return true if device is on."""
return self._state
@property
def assumed_state(self):
"""Return true if we do optimistic updates."""
return self._optimistic
@property
def supported_features(self):
"""Flag supported features."""
return self._supported_features
async def async_turn_on(self, **kwargs):
"""Turn the device on.
This method is a coroutine.
"""
should_update = False
message = {"state": "ON"}
if ATTR_HS_COLOR in kwargs and (
self._config[CONF_HS] or self._config[CONF_RGB] or self._config[CONF_XY]
):
hs_color = kwargs[ATTR_HS_COLOR]
message["color"] = {}
if self._config[CONF_RGB]:
# If brightness is supported separately, send the RGB values at full
# value and let the dedicated brightness key control intensity
# instead of scaling the RGB values by the requested brightness.
if self._config[CONF_BRIGHTNESS]:
brightness = 255
else:
brightness = kwargs.get(ATTR_BRIGHTNESS, 255)
rgb = color_util.color_hsv_to_RGB(
hs_color[0], hs_color[1], brightness / 255 * 100
)
message["color"]["r"] = rgb[0]
message["color"]["g"] = rgb[1]
message["color"]["b"] = rgb[2]
if self._config[CONF_XY]:
xy_color = color_util.color_hs_to_xy(*hs_color)
message["color"]["x"] = xy_color[0]
message["color"]["y"] = xy_color[1]
if self._config[CONF_HS]:
message["color"]["h"] = hs_color[0]
message["color"]["s"] = hs_color[1]
if self._optimistic:
self._hs = kwargs[ATTR_HS_COLOR]
should_update = True
if ATTR_FLASH in kwargs:
flash = kwargs.get(ATTR_FLASH)
if flash == FLASH_LONG:
message["flash"] = self._flash_times[CONF_FLASH_TIME_LONG]
elif flash == FLASH_SHORT:
message["flash"] = self._flash_times[CONF_FLASH_TIME_SHORT]
if ATTR_TRANSITION in kwargs:
message["transition"] = kwargs[ATTR_TRANSITION]
if ATTR_BRIGHTNESS in kwargs and self._config[CONF_BRIGHTNESS]:
brightness_normalized = kwargs[ATTR_BRIGHTNESS] / DEFAULT_BRIGHTNESS_SCALE
brightness_scale = self._config[CONF_BRIGHTNESS_SCALE]
device_brightness = min(
round(brightness_normalized * brightness_scale), brightness_scale
)
# Make sure the brightness is not rounded down to 0
device_brightness = max(device_brightness, 1)
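# Worked example, assuming CONF_BRIGHTNESS_SCALE == 100:
#   ATTR_BRIGHTNESS == 128 -> round(128 / 255 * 100) == 50
#   ATTR_BRIGHTNESS == 1   -> round(1 / 255 * 100) == 0, clamped up to 1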
message["brightness"] = device_brightness
if self._optimistic:
self._brightness = kwargs[ATTR_BRIGHTNESS]
should_update = True
if ATTR_COLOR_TEMP in kwargs:
message["color_temp"] = int(kwargs[ATTR_COLOR_TEMP])
if self._optimistic:
self._color_temp = kwargs[ATTR_COLOR_TEMP]
should_update = True
if ATTR_EFFECT in kwargs:
message["effect"] = kwargs[ATTR_EFFECT]
if self._optimistic:
self._effect = kwargs[ATTR_EFFECT]
should_update = True
if ATTR_WHITE_VALUE in kwargs:
message["white_value"] = int(kwargs[ATTR_WHITE_VALUE])
if self._optimistic:
self._white_value = kwargs[ATTR_WHITE_VALUE]
should_update = True
mqtt.async_publish(
self.hass,
self._topic[CONF_COMMAND_TOPIC],
json.dumps(message),
self._config[CONF_QOS],
self._config[CONF_RETAIN],
)
if self._optimistic:
# Optimistically assume that the light has changed state.
self._state = True
should_update = True
if should_update:
self.async_write_ha_state()
async def async_turn_off(self, **kwargs):
"""Turn the device off.
This method is a coroutine.
"""
message = {"state": "OFF"}
if ATTR_TRANSITION in kwargs:
message["transition"] = kwargs[ATTR_TRANSITION]
mqtt.async_publish(
self.hass,
self._topic[CONF_COMMAND_TOPIC],
json.dumps(message),
self._config[CONF_QOS],
self._config[CONF_RETAIN],
)
if self._optimistic:
# Optimistically assume that the light has changed state.
self._state = False
self.async_write_ha_state()
|
import pytest
from homeassistant import config_entries, data_entry_flow, setup
from homeassistant.components.spider.const import DOMAIN
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from tests.async_mock import Mock, patch
from tests.common import MockConfigEntry
USERNAME = "spider-username"
PASSWORD = "spider-password"
SPIDER_USER_DATA = {
CONF_USERNAME: USERNAME,
CONF_PASSWORD: PASSWORD,
}
@pytest.fixture(name="spider")
def spider_fixture() -> Mock:
"""Patch libraries."""
with patch("homeassistant.components.spider.config_flow.SpiderApi") as spider:
yield spider
async def test_user(hass, spider):
"""Test user config."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
with patch(
"homeassistant.components.spider.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.spider.async_setup_entry", return_value=True
) as mock_setup_entry:
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input=SPIDER_USER_DATA
)
await hass.async_block_till_done()
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == DOMAIN
assert result["data"][CONF_USERNAME] == USERNAME
assert result["data"][CONF_PASSWORD] == PASSWORD
assert not result["result"].unique_id
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_import(hass, spider):
"""Test import step."""
await setup.async_setup_component(hass, "persistent_notification", {})
with patch(
"homeassistant.components.spider.async_setup",
return_value=True,
) as mock_setup, patch(
"homeassistant.components.spider.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data=SPIDER_USER_DATA,
)
await hass.async_block_till_done()
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == DOMAIN
assert result["data"][CONF_USERNAME] == USERNAME
assert result["data"][CONF_PASSWORD] == PASSWORD
assert not result["result"].unique_id
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_abort_if_already_setup(hass, spider):
"""Test we abort if Spider is already setup."""
MockConfigEntry(domain=DOMAIN, data=SPIDER_USER_DATA).add_to_hass(hass)
# Should fail, config already exists (user flow)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}, data=SPIDER_USER_DATA
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "single_instance_allowed"
# Should fail, config already exists (import flow)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=SPIDER_USER_DATA
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "single_instance_allowed"
|
import pytest
from PyQt5.QtCore import QUrl, QIODevice
from PyQt5.QtNetwork import QNetworkRequest, QNetworkReply
from qutebrowser.browser.webkit.network import networkreply
@pytest.fixture
def req():
return QNetworkRequest(QUrl('http://www.qutebrowser.org/'))
class TestFixedDataNetworkReply:
def test_attributes(self, req):
reply = networkreply.FixedDataNetworkReply(req, b'', 'test/foo')
assert reply.request() == req
assert reply.url() == req.url()
assert reply.openMode() == QIODevice.ReadOnly
assert reply.header(QNetworkRequest.ContentTypeHeader) == 'test/foo'
http_code = reply.attribute(QNetworkRequest.HttpStatusCodeAttribute)
http_reason = reply.attribute(
QNetworkRequest.HttpReasonPhraseAttribute)
assert http_code == 200
assert http_reason == 'OK'
assert reply.isFinished()
assert not reply.isRunning()
@pytest.mark.parametrize('data', [b'', b'foobar',
b'Hello World! This is a test.'])
def test_data(self, qtbot, req, data):
reply = networkreply.FixedDataNetworkReply(req, data, 'test/foo')
with qtbot.waitSignal(reply.metaDataChanged), \
qtbot.waitSignal(reply.readyRead), \
qtbot.waitSignal(reply.finished):
pass
assert reply.bytesAvailable() == len(data)
assert reply.readAll() == data
@pytest.mark.parametrize('chunk_size', [1, 2, 3])
def test_data_chunked(self, chunk_size, req):
data = b'123'
reply = networkreply.FixedDataNetworkReply(req, data, 'test/foo')
while data:
assert reply.bytesAvailable() == len(data)
assert reply.readData(chunk_size) == data[:chunk_size]
data = data[chunk_size:]
def test_abort(self, req):
reply = networkreply.FixedDataNetworkReply(req, b'foo', 'test/foo')
reply.abort()
assert reply.readAll() == b'foo'
def test_error_network_reply(qtbot, req):
reply = networkreply.ErrorNetworkReply(
req, "This is an error", QNetworkReply.UnknownNetworkError)
with qtbot.waitSignal(reply.error), qtbot.waitSignal(reply.finished):
pass
reply.abort() # shouldn't do anything
assert reply.request() == req
assert reply.url() == req.url()
assert reply.openMode() == QIODevice.ReadOnly
assert reply.isFinished()
assert not reply.isRunning()
assert reply.bytesAvailable() == 0
assert reply.readData(1) == b''
assert reply.error() == QNetworkReply.UnknownNetworkError
assert reply.errorString() == "This is an error"
def test_redirect_network_reply():
url = QUrl('https://www.example.com/')
reply = networkreply.RedirectNetworkReply(url)
assert reply.readData(1) == b''
assert reply.attribute(QNetworkRequest.RedirectionTargetAttribute) == url
reply.abort() # shouldn't do anything
|
from pathlib import Path
from homeassistant.components.camera import SUPPORT_ON_OFF, Camera
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Demo camera platform."""
async_add_entities([DemoCamera("Demo camera")])
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Demo config entry."""
await async_setup_platform(hass, {}, async_add_entities)
class DemoCamera(Camera):
"""The representation of a Demo camera."""
def __init__(self, name):
"""Initialize demo camera component."""
super().__init__()
self._name = name
self._motion_status = False
self.is_streaming = True
self._images_index = 0
async def async_camera_image(self):
"""Return a faked still image response."""
self._images_index = (self._images_index + 1) % 4
image_path = Path(__file__).parent / f"demo_{self._images_index}.jpg"
return await self.hass.async_add_executor_job(image_path.read_bytes)
@property
def name(self):
"""Return the name of this camera."""
return self._name
@property
def supported_features(self):
"""Camera support turn on/off features."""
return SUPPORT_ON_OFF
@property
def is_on(self):
"""Whether camera is on (streaming)."""
return self.is_streaming
@property
def motion_detection_enabled(self):
"""Camera Motion Detection Status."""
return self._motion_status
async def async_enable_motion_detection(self):
"""Enable the Motion detection in base station (Arm)."""
self._motion_status = True
self.async_write_ha_state()
async def async_disable_motion_detection(self):
"""Disable the motion detection in base station (Disarm)."""
self._motion_status = False
self.async_write_ha_state()
async def async_turn_off(self):
"""Turn off camera."""
self.is_streaming = False
self.async_write_ha_state()
async def async_turn_on(self):
"""Turn on camera."""
self.is_streaming = True
self.async_write_ha_state()
|
import asyncio
import time
import random
from collections import Counter
import discord
from redbot.core import bank, errors
from redbot.core.i18n import Translator
from redbot.core.utils.chat_formatting import box, bold, humanize_list, humanize_number
from redbot.core.utils.common_filters import normalize_smartquotes
from .log import LOG
__all__ = ["TriviaSession"]
T_ = Translator("TriviaSession", __file__)
_ = lambda s: s
_REVEAL_MESSAGES = (
_("I know this one! {answer}!"),
_("Easy: {answer}."),
_("Oh really? It's {answer} of course."),
)
_FAIL_MESSAGES = (
_("To the next one I guess..."),
_("Moving on..."),
_("I'm sure you'll know the answer of the next one."),
_("\N{PENSIVE FACE} Next one."),
)
_ = T_
class TriviaSession:
"""Class to run a session of trivia with the user.
To run the trivia session immediately, use `TriviaSession.start` instead of
instantiating directly.
Attributes
----------
ctx : `commands.Context`
Context object from which this session will be run.
This object assumes the session was started in `ctx.channel`
by `ctx.author`.
question_list : `dict`
A dict mapping questions (`str`) to answers (`list` of `str`);
stored internally as a shuffled list of tuples.
settings : `dict`
Settings for the trivia session, with values for the following:
- ``max_score`` (`int`)
- ``delay`` (`float`)
- ``timeout`` (`float`)
- ``reveal_answer`` (`bool`)
- ``bot_plays`` (`bool`)
- ``allow_override`` (`bool`)
- ``payout_multiplier`` (`float`)
scores : `collections.Counter`
A counter with the players as keys, and their scores as values. The
players are of type `discord.Member`.
count : `int`
The number of questions which have been asked.
"""
def __init__(self, ctx, question_list: dict, settings: dict):
self.ctx = ctx
list_ = list(question_list.items())
random.shuffle(list_)
self.question_list = list_
self.settings = settings
self.scores = Counter()
self.count = 0
self._last_response = time.time()
self._task = None
@classmethod
def start(cls, ctx, question_list, settings):
"""Create and start a trivia session.
This allows the session to manage the running and cancellation of its
own tasks.
Parameters
----------
ctx : `commands.Context`
Same as `TriviaSession.ctx`
question_list : `dict`
Same as `TriviaSession.question_list`
settings : `dict`
Same as `TriviaSession.settings`
Returns
-------
TriviaSession
The new trivia session being run.
"""
session = cls(ctx, question_list, settings)
loop = ctx.bot.loop
session._task = loop.create_task(session.run())
session._task.add_done_callback(session._error_handler)
return session
def _error_handler(self, fut):
"""Catches errors in the session task."""
try:
fut.result()
except asyncio.CancelledError:
pass
except Exception as exc:
LOG.error("A trivia session has encountered an error.\n", exc_info=exc)
asyncio.create_task(
self.ctx.send(
_(
"An unexpected error occurred in the trivia session.\nCheck your console or logs for details."
)
)
)
self.stop()
async def run(self):
"""Run the trivia session.
In order for the trivia session to be stopped correctly, this should
only be called internally by `TriviaSession.start`.
"""
await self._send_startup_msg()
max_score = self.settings["max_score"]
delay = self.settings["delay"]
timeout = self.settings["timeout"]
for question, answers in self._iter_questions():
async with self.ctx.typing():
await asyncio.sleep(3)
self.count += 1
msg = bold(_("Question number {num}!").format(num=self.count)) + "\n\n" + question
await self.ctx.send(msg)
continue_ = await self.wait_for_answer(answers, delay, timeout)
if continue_ is False:
break
if any(score >= max_score for score in self.scores.values()):
await self.end_game()
break
else:
await self.ctx.send(_("There are no more questions!"))
await self.end_game()
async def _send_startup_msg(self):
list_names = []
for idx, tup in enumerate(self.settings["lists"].items()):
name, author = tup
if author:
title = _("{trivia_list} (by {author})").format(trivia_list=name, author=author)
else:
title = name
list_names.append(title)
await self.ctx.send(
_("Starting Trivia: {list_names}").format(list_names=humanize_list(list_names))
)
def _iter_questions(self):
"""Iterate over questions and answers for this session.
Yields
------
`tuple`
A tuple containing the question (`str`) and the answers (`tuple` of
`str`).
"""
for question, answers in self.question_list:
answers = _parse_answers(answers)
yield question, answers
async def wait_for_answer(self, answers, delay: float, timeout: float):
"""Wait for a correct answer, and then respond.
Scores are also updated in this method.
Returns False if waiting was cancelled; this is usually due to the
session being forcibly stopped.
Parameters
----------
answers : `iterable` of `str`
A list of valid answers to the current question.
delay : float
How long users have to respond (in seconds).
timeout : float
How long before the session ends due to no responses (in seconds).
Returns
-------
bool
:code:`True` if the session wasn't interrupted.
"""
try:
message = await self.ctx.bot.wait_for(
"message", check=self.check_answer(answers), timeout=delay
)
except asyncio.TimeoutError:
if time.time() - self._last_response >= timeout:
await self.ctx.send(_("Guys...? Well, I guess I'll stop then."))
self.stop()
return False
if self.settings["reveal_answer"]:
reply = T_(random.choice(_REVEAL_MESSAGES)).format(answer=answers[0])
else:
reply = T_(random.choice(_FAIL_MESSAGES))
if self.settings["bot_plays"]:
reply += _(" **+1** for me!")
self.scores[self.ctx.guild.me] += 1
await self.ctx.send(reply)
else:
self.scores[message.author] += 1
reply = _("You got it {user}! **+1** to you!").format(user=message.author.display_name)
await self.ctx.send(reply)
return True
def check_answer(self, answers):
"""Get a predicate to check for correct answers.
The returned predicate takes a message as its only parameter,
and returns ``True`` if the message contains any of the
given answers.
Parameters
----------
answers : `iterable` of `str`
The answers which the predicate must check for.
Returns
-------
function
The message predicate.
"""
answers = tuple(s.lower() for s in answers)
def _pred(message: discord.Message):
early_exit = message.channel != self.ctx.channel or message.author == self.ctx.guild.me
if early_exit:
return False
self._last_response = time.time()
guess = message.content.lower()
guess = normalize_smartquotes(guess)
for answer in answers:
if " " in answer and answer in guess:
# Exact matching, issue #331
return True
elif any(word == answer for word in guess.split(" ")):
return True
return False
return _pred
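# For example, with answers ("new york", "ny"): the guess "It's New York!"
# matches, because multi-word answers use substring matching (issue #331),
# while "ny?" does not match, since single-word answers are compared
# against whole words only.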
async def end_game(self):
"""End the trivia session and display scores."""
if self.scores:
await self.send_table()
multiplier = self.settings["payout_multiplier"]
if multiplier > 0:
await self.pay_winner(multiplier)
self.stop()
async def send_table(self):
"""Send a table of scores to the session's channel."""
table = "+ Results: \n\n"
for user, score in self.scores.most_common():
table += "+ {}\t{}\n".format(user, score)
await self.ctx.send(box(table, lang="diff"))
def stop(self):
"""Stop the trivia session, without showing scores."""
self.ctx.bot.dispatch("trivia_end", self)
def force_stop(self):
"""Cancel whichever tasks this session is running."""
self._task.cancel()
channel = self.ctx.channel
LOG.debug("Force stopping trivia session; #%s in %s", channel, channel.guild.id)
async def pay_winner(self, multiplier: float):
"""Pay the winner of this trivia session.
The winner is only paid if there are at least 3 human contestants.
Parameters
----------
multiplier : float
The coefficient of the winner's score, used to determine the amount
paid.
"""
(winner, score) = next((tup for tup in self.scores.most_common(1)), (None, None))
me_ = self.ctx.guild.me
if winner is not None and winner != me_ and score > 0:
contestants = list(self.scores.keys())
if me_ in contestants:
contestants.remove(me_)
if len(contestants) >= 3:
amount = int(multiplier * score)
if amount > 0:
LOG.debug("Paying trivia winner: %d credits --> %s", amount, str(winner))
try:
await bank.deposit_credits(winner, amount)
except errors.BalanceTooHigh as e:
await bank.set_balance(winner, e.max_balance)
await self.ctx.send(
_(
"Congratulations, {user}, you have received {num} {currency}"
" for coming first."
).format(
user=winner.display_name,
num=humanize_number(amount),
currency=await bank.get_currency_name(self.ctx.guild),
)
)
def _parse_answers(answers):
"""Parse the raw answers to readable strings.
The reason this exists is because of YAML's ambiguous syntax. For example,
if the answer to a question in YAML is ``yes``, YAML will load it as the
boolean value ``True``, which is not necessarily the desired answer. This
function aims to undo that for bools, and possibly for numbers in the
future too.
Parameters
----------
answers : `iterable` of `str`
The raw answers loaded from YAML.
Returns
-------
`tuple` of `str`
The answers as readable/guessable strings.
"""
ret = []
for answer in answers:
if isinstance(answer, bool):
if answer is True:
ret.extend(["True", "Yes", "On"])
else:
ret.extend(["False", "No", "Off"])
else:
ret.append(str(answer))
# Uniquify list
seen = set()
return tuple(x for x in ret if not (x in seen or seen.add(x)))
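# Illustrative behaviour of the YAML workaround described above:
#   _parse_answers([True, "yes", 42]) == ("True", "Yes", "On", "yes", "42")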
|
import logging
import anthemav
import voluptuous as vol
from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity
from homeassistant.components.media_player.const import (
SUPPORT_SELECT_SOURCE,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
)
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PORT,
EVENT_HOMEASSISTANT_STOP,
STATE_OFF,
STATE_ON,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
_LOGGER = logging.getLogger(__name__)
DOMAIN = "anthemav"
DEFAULT_PORT = 14999
SUPPORT_ANTHEMAV = (
SUPPORT_VOLUME_SET
| SUPPORT_VOLUME_MUTE
| SUPPORT_TURN_ON
| SUPPORT_TURN_OFF
| SUPPORT_SELECT_SOURCE
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up our socket to the AVR."""
host = config[CONF_HOST]
port = config[CONF_PORT]
name = config.get(CONF_NAME)
device = None
_LOGGER.info("Provisioning Anthem AVR device at %s:%d", host, port)
@callback
def async_anthemav_update_callback(message):
"""Receive notification from transport that new data exists."""
_LOGGER.debug("Received update callback from AVR: %s", message)
async_dispatcher_send(hass, DOMAIN)
avr = await anthemav.Connection.create(
host=host, port=port, update_callback=async_anthemav_update_callback
)
device = AnthemAVR(avr, name)
_LOGGER.debug("dump_devicedata: %s", device.dump_avrdata)
_LOGGER.debug("dump_conndata: %s", avr.dump_conndata)
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, device.avr.close)
async_add_entities([device])
class AnthemAVR(MediaPlayerEntity):
"""Entity reading values from Anthem AVR protocol."""
def __init__(self, avr, name):
"""Initialize entity with transport."""
super().__init__()
self.avr = avr
self._name = name
def _lookup(self, propname, dval=None):
return getattr(self.avr.protocol, propname, dval)
async def async_added_to_hass(self):
"""When entity is added to hass."""
self.async_on_remove(
async_dispatcher_connect(self.hass, DOMAIN, self.async_write_ha_state)
)
@property
def supported_features(self):
"""Flag media player features that are supported."""
return SUPPORT_ANTHEMAV
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def name(self):
"""Return name of device."""
return self._name or self._lookup("model")
@property
def state(self):
"""Return state of power on/off."""
pwrstate = self._lookup("power")
if pwrstate is True:
return STATE_ON
if pwrstate is False:
return STATE_OFF
return None
@property
def is_volume_muted(self):
"""Return boolean reflecting mute state on device."""
return self._lookup("mute", False)
@property
def volume_level(self):
"""Return volume level from 0 to 1."""
return self._lookup("volume_as_percentage", 0.0)
@property
def media_title(self):
"""Return current input name (closest we have to media title)."""
return self._lookup("input_name", "No Source")
@property
def app_name(self):
"""Return details about current video and audio stream."""
return (
f"{self._lookup('video_input_resolution_text', '')} "
f"{self._lookup('audio_input_name', '')}"
)
@property
def source(self):
"""Return currently selected input."""
return self._lookup("input_name", "Unknown")
@property
def source_list(self):
"""Return all active, configured inputs."""
return self._lookup("input_list", ["Unknown"])
async def async_select_source(self, source):
"""Change AVR to the designated source (by name)."""
self._update_avr("input_name", source)
async def async_turn_off(self):
"""Turn AVR power off."""
self._update_avr("power", False)
async def async_turn_on(self):
"""Turn AVR power on."""
self._update_avr("power", True)
async def async_set_volume_level(self, volume):
"""Set AVR volume (0 to 1)."""
self._update_avr("volume_as_percentage", volume)
async def async_mute_volume(self, mute):
"""Engage AVR mute."""
self._update_avr("mute", mute)
def _update_avr(self, propname, value):
"""Update a property in the AVR."""
_LOGGER.info("Sending command to AVR: set %s to %s", propname, str(value))
setattr(self.avr.protocol, propname, value)
@property
def dump_avrdata(self):
"""Return state of avr object for debugging forensics."""
attrs = vars(self)
items_string = ", ".join(f"{item}: {item}" for item in attrs.items())
return f"dump_avrdata: {items_string}"
|
from homeassistant.core import State
from tests.common import async_mock_service
async def test_reproducing_states(hass, caplog):
"""Test reproducing Automation states."""
hass.states.async_set("automation.entity_off", "off", {})
hass.states.async_set("automation.entity_on", "on", {})
turn_on_calls = async_mock_service(hass, "automation", "turn_on")
turn_off_calls = async_mock_service(hass, "automation", "turn_off")
# These calls should do nothing as entities already in desired state
await hass.helpers.state.async_reproduce_state(
[State("automation.entity_off", "off"), State("automation.entity_on", "on")]
)
assert len(turn_on_calls) == 0
assert len(turn_off_calls) == 0
# Test invalid state is handled
await hass.helpers.state.async_reproduce_state(
[State("automation.entity_off", "not_supported")]
)
assert "not_supported" in caplog.text
assert len(turn_on_calls) == 0
assert len(turn_off_calls) == 0
# Make sure correct services are called
await hass.helpers.state.async_reproduce_state(
[
State("automation.entity_on", "off"),
State("automation.entity_off", "on"),
# Should not raise
State("automation.non_existing", "on"),
]
)
assert len(turn_on_calls) == 1
assert turn_on_calls[0].domain == "automation"
assert turn_on_calls[0].data == {"entity_id": "automation.entity_off"}
assert len(turn_off_calls) == 1
assert turn_off_calls[0].domain == "automation"
assert turn_off_calls[0].data == {"entity_id": "automation.entity_on"}
|
import filecmp
import json
import shutil
import yaml
import vcr.migration
# Use the libYAML versions if possible
try:
from yaml import CLoader as Loader
except ImportError:
from yaml import Loader
def test_try_migrate_with_json(tmpdir):
cassette = tmpdir.join("cassette.json").strpath
shutil.copy("tests/fixtures/migration/old_cassette.json", cassette)
assert vcr.migration.try_migrate(cassette)
with open("tests/fixtures/migration/new_cassette.json", "r") as f:
expected_json = json.load(f)
with open(cassette, "r") as f:
actual_json = json.load(f)
assert actual_json == expected_json
def test_try_migrate_with_yaml(tmpdir):
cassette = tmpdir.join("cassette.yaml").strpath
shutil.copy("tests/fixtures/migration/old_cassette.yaml", cassette)
assert vcr.migration.try_migrate(cassette)
with open("tests/fixtures/migration/new_cassette.yaml", "r") as f:
expected_yaml = yaml.load(f, Loader=Loader)
with open(cassette, "r") as f:
actual_yaml = yaml.load(f, Loader=Loader)
assert actual_yaml == expected_yaml
def test_try_migrate_with_invalid_or_new_cassettes(tmpdir):
cassette = tmpdir.join("cassette").strpath
files = [
"tests/fixtures/migration/not_cassette.txt",
"tests/fixtures/migration/new_cassette.yaml",
"tests/fixtures/migration/new_cassette.json",
]
for file_path in files:
shutil.copy(file_path, cassette)
assert not vcr.migration.try_migrate(cassette)
assert filecmp.cmp(cassette, file_path)  # should not change the file
|
import os
from . import exceptions
from paasta_tools.async_utils import async_ttl_cache
class File:
chunk_size = 1024
def __init__(self, host, task=None, path=None):
self.host = host
self.task = task
self.path = path
if self.task is None:
self._host_path = self.path
else:
self._host_path = None # Defer until later (_fetch) so we don't make HTTP requests in __init__.
self._offset = 0
# Used during fetch, class level so the dict isn't constantly alloc'd
self._params = {
"path": self._host_path,
"offset": -1,
"length": self.chunk_size,
}
def __eq__(self, y):
return self.key() == y.key()
def __hash__(self):
return hash(self.key())  # consistent with __eq__, which compares key()
def __repr__(self):
return f"<open file '{self.path}', for '{self._where}'>"
def __str__(self):
return f"{self._where}:{self.path}"
def key(self):
return "{}:{}".format(self.host.key(), self._host_path)
@property
def _where(self):
return self.task["id"] if self.task is not None else self.host.key()
async def _fetch(self):
# fill in path if it wasn't set in __init__
if self._params["path"] is None:
self._params["path"] = os.path.join(await self.task.directory(), self.path)
resp = await self.host.fetch("/files/read.json", params=self._params)
if resp.status == 404:
raise exceptions.FileDoesNotExist("No such file or directory.")
return await resp.json()
async def exists(self):
try:
await self.size()
return True
except exceptions.FileDoesNotExist:
return False
except exceptions.SlaveDoesNotExist:
return False
# When reading a file, it is common to first check whether it exists, then
# look at the size to determine where to seek. Instead of requiring
# multiple requests to the slave, the size is cached for a very short
# period of time.
@async_ttl_cache(ttl=0.5, cleanup_self=True)
async def size(self):
return (await self._fetch())["offset"]
async def seek(self, offset, whence=os.SEEK_SET):
if whence == os.SEEK_SET:
self._offset = offset
elif whence == os.SEEK_CUR:
self._offset += offset
elif whence == os.SEEK_END:
self._offset = await self.size() + offset
def tell(self):
return self._offset
def _length(self, start, size):
if size and self.tell() - start + self.chunk_size > size:
return size - (self.tell() - start)
return self.chunk_size
async def _get_chunk(self, loc, size=None):
if size is None:
size = self.chunk_size
await self.seek(loc, os.SEEK_SET)
self._params["offset"] = loc
self._params["length"] = size
data = (await self._fetch())["data"]
await self.seek(len(data), os.SEEK_CUR)
return data
async def _read(self, size=None):
start = self.tell()
blob = None
while blob != "" and not (size and (self.tell() - start) >= size):
blob = await self._get_chunk(self.tell(), size=self._length(start, size))
yield blob
async def _read_reverse(self, size=None):
fsize = await self.size()
if not size:
size = fsize
def next_block():
current = fsize
while (current - self.chunk_size) > (fsize - size):
current -= self.chunk_size
yield current
for pos in next_block():
yield await self._get_chunk(pos)
yield await self._get_chunk(fsize - size, size % self.chunk_size)
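# Example, assuming chunk_size == 1024 and a 2500-byte file read in full:
# chunks are fetched at offsets 1476 and 452 (1024 bytes each) and then
# at 0 (2500 % 1024 == 452 bytes), walking the file backwards.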
async def _readlines(self, size=None):
last = ""
async for blob in self._read(size):
# This is not streaming and assumes small chunk sizes
blob_lines = (last + blob).split("\n")
for line in blob_lines[:-1]:
yield line
last = blob_lines[-1]
async def _readlines_reverse(self, size=None):
buf = ""
async for blob in self._read_reverse(size):
blob_lines = (blob + buf).split("\n")
for line in reversed(blob_lines[1:]):
yield line
buf = blob_lines[0]
yield buf
|
import arrow
from sqlalchemy import or_, cast, Integer
from lemur import database
from lemur.authorities.models import Authority
from lemur.authorities import service as authorities_service
from lemur.certificates import service as certificate_service
from lemur.certificates.schemas import CertificateUploadInputSchema
from lemur.common.utils import truthiness, parse_cert_chain, parse_certificate
from lemur.common import validators
from lemur.destinations.models import Destination
from lemur.domains.models import Domain
from lemur.notifications.models import Notification
from lemur.pending_certificates.models import PendingCertificate
from lemur.plugins.base import plugins
from lemur.roles.models import Role
from lemur.users import service as user_service
def get(pending_cert_id):
"""
Retrieve pending certificate by ID
"""
return database.get(PendingCertificate, pending_cert_id)
def get_by_external_id(issuer, external_id):
"""
Retrieve a pending certificate by its issuer and external_id.
Since external_id is not necessarily unique between CAs, both values are
needed to identify the certificate.
:param issuer:
:param external_id:
:return: PendingCertificate or None
"""
if isinstance(external_id, int):
external_id = str(external_id)
return (
PendingCertificate.query.filter(PendingCertificate.authority_id == issuer.id)
.filter(PendingCertificate.external_id == external_id)
.one_or_none()
)
def get_by_name(pending_cert_name):
"""
Retrieve pending certificate by name
"""
return database.get(PendingCertificate, pending_cert_name, field="name")
def delete(pending_certificate):
database.delete(pending_certificate)
def delete_by_id(id):
database.delete(get(id))
def get_unresolved_pending_certs():
"""
Retrieve a list of unresolved pending certs given a list of ids
Filters out non-existing pending certs
"""
query = database.session_query(PendingCertificate).filter(
PendingCertificate.resolved.is_(False)
)
return database.find_all(query, PendingCertificate, {}).all()
def get_pending_certs(pending_ids):
"""
Retrieve a list of pending certs given a list of ids
Filters out non-existing pending certs
"""
pending_certs = []
if "all" in pending_ids:
query = database.session_query(PendingCertificate)
return database.find_all(query, PendingCertificate, {}).all()
else:
for pending_id in pending_ids:
pending_cert = get(pending_id)
if pending_cert:
pending_certs.append(pending_cert)
return pending_certs
def create_certificate(pending_certificate, certificate, user):
"""
Create and store a certificate with pending certificate's info
Args:
pending_certificate: PendingCertificate which will populate the certificate
certificate: dict from Authority, which contains the body, chain and external id
user: User that called this function, used as 'creator' of the certificate if it does
not have an owner
"""
certificate["owner"] = pending_certificate.owner
data, errors = CertificateUploadInputSchema().load(certificate)
if errors:
raise Exception(
"Unable to create certificate: {reasons}".format(reasons=errors)
)
data.update(vars(pending_certificate))
# Copy relationships explicitly; vars() does not include them
data["notifications"] = list(pending_certificate.notifications)
data["destinations"] = list(pending_certificate.destinations)
data["sources"] = list(pending_certificate.sources)
data["roles"] = list(pending_certificate.roles)
data["replaces"] = list(pending_certificate.replaces)
data["rotation_policy"] = pending_certificate.rotation_policy
# Replace external id and chain with the one fetched from source
data["external_id"] = certificate["external_id"]
data["chain"] = certificate["chain"]
creator = user_service.get_by_email(pending_certificate.owner)
if not creator:
# The owner of the pending certificate is not a registered user, so use
# the user who called this function as the creator (usually lemur)
creator = user
if pending_certificate.rename:
# If generating name from certificate, remove the one from pending certificate
del data["name"]
data["creator"] = creator
cert = certificate_service.import_certificate(**data)
database.update(cert)
return cert
def increment_attempt(pending_certificate):
"""
Increments pending certificate attempt counter and updates it in the database.
"""
pending_certificate.number_attempts += 1
database.update(pending_certificate)
return pending_certificate.number_attempts
def update(pending_cert_id, **kwargs):
"""
Updates a pending certificate. The allowed fields are validated by
PendingCertificateEditInputSchema.
"""
pending_cert = get(pending_cert_id)
for key, value in kwargs.items():
setattr(pending_cert, key, value)
return database.update(pending_cert)
def cancel(pending_certificate, **kwargs):
"""
Cancel a pending certificate. A check should be done prior to this function to decide to
revoke the certificate or just abort cancelling.
Args:
pending_certificate: PendingCertificate to be cancelled
Returns: the pending certificate if successful, raises Exception if there was an issue
"""
plugin = plugins.get(pending_certificate.authority.plugin_name)
plugin.cancel_ordered_certificate(pending_certificate, **kwargs)
pending_certificate.status = "Cancelled"
database.update(pending_certificate)
return pending_certificate
def render(args):
query = database.session_query(PendingCertificate)
time_range = args.pop("time_range")
destination_id = args.pop("destination_id")
notification_id = args.pop("notification_id", None)
show = args.pop("show")
# owner = args.pop('owner')
# creator = args.pop('creator')  # TODO we should enable filtering by owner
filt = args.pop("filter")
if filt:
terms = filt.split(";")
if "issuer" in terms:
# we can't rely on issuer being correct in the cert directly so we combine queries
sub_query = (
database.session_query(Authority.id)
.filter(Authority.name.ilike("%{0}%".format(terms[1])))
.subquery()
)
query = query.filter(
or_(
PendingCertificate.issuer.ilike("%{0}%".format(terms[1])),
PendingCertificate.authority_id.in_(sub_query),
)
)
elif "destination" in terms:
query = query.filter(
PendingCertificate.destinations.any(Destination.id == terms[1])
)
elif "notify" in filt:
query = query.filter(PendingCertificate.notify == truthiness(terms[1]))
elif "active" in filt:
query = query.filter(PendingCertificate.active == truthiness(terms[1]))
elif "cn" in terms:
query = query.filter(
or_(
PendingCertificate.cn.ilike("%{0}%".format(terms[1])),
PendingCertificate.domains.any(
Domain.name.ilike("%{0}%".format(terms[1]))
),
)
)
elif "id" in terms:
query = query.filter(PendingCertificate.id == cast(terms[1], Integer))
else:
query = database.filter(query, PendingCertificate, terms)
if show:
sub_query = (
database.session_query(Role.name)
.filter(Role.user_id == args["user"].id)
.subquery()
)
query = query.filter(
or_(
PendingCertificate.user_id == args["user"].id,
PendingCertificate.owner.in_(sub_query),
)
)
if destination_id:
query = query.filter(
PendingCertificate.destinations.any(Destination.id == destination_id)
)
if notification_id:
query = query.filter(
PendingCertificate.notifications.any(Notification.id == notification_id)
)
if time_range:
to = arrow.now().shift(weeks=+time_range).format("YYYY-MM-DD")
now = arrow.now().format("YYYY-MM-DD")
query = query.filter(PendingCertificate.not_after <= to).filter(
PendingCertificate.not_after >= now
)
# Only show unresolved certificates in the UI
query = query.filter(PendingCertificate.resolved.is_(False))
return database.sort_and_page(query, PendingCertificate, args)
def upload(pending_certificate_id, **kwargs):
"""
Uploads a (signed) pending certificate. The allowed fields are validated by
PendingCertificateUploadInputSchema. The certificate is also validated to be
signed by the correct authority.
"""
pending_cert = get(pending_certificate_id)
partial_cert = kwargs
uploaded_chain = partial_cert["chain"]
authority = authorities_service.get(pending_cert.authority.id)
# Construct the chain for cert validation
if uploaded_chain:
chain = uploaded_chain + "\n" + authority.authority_certificate.body
else:
chain = authority.authority_certificate.body
parsed_chain = parse_cert_chain(chain)
# Check that the certificate is actually signed by the CA to avoid incorrect cert pasting
validators.verify_cert_chain(
[parse_certificate(partial_cert["body"])] + parsed_chain
)
final_cert = create_certificate(pending_cert, partial_cert, pending_cert.user)
pending_cert_final_result = update(pending_cert.id, resolved_cert_id=final_cert.id)
update(pending_cert.id, resolved=True)
return pending_cert_final_result
|
from homeassistant.components.vacuum import (
ATTR_CLEANED_AREA,
STATE_CLEANING,
STATE_DOCKED,
STATE_IDLE,
STATE_PAUSED,
STATE_RETURNING,
SUPPORT_BATTERY,
SUPPORT_CLEAN_SPOT,
SUPPORT_FAN_SPEED,
SUPPORT_LOCATE,
SUPPORT_PAUSE,
SUPPORT_RETURN_HOME,
SUPPORT_SEND_COMMAND,
SUPPORT_START,
SUPPORT_STATE,
SUPPORT_STATUS,
SUPPORT_STOP,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
StateVacuumEntity,
VacuumEntity,
)
SUPPORT_MINIMAL_SERVICES = SUPPORT_TURN_ON | SUPPORT_TURN_OFF
SUPPORT_BASIC_SERVICES = (
SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_STATUS | SUPPORT_BATTERY
)
SUPPORT_MOST_SERVICES = (
SUPPORT_TURN_ON
| SUPPORT_TURN_OFF
| SUPPORT_STOP
| SUPPORT_RETURN_HOME
| SUPPORT_STATUS
| SUPPORT_BATTERY
)
SUPPORT_ALL_SERVICES = (
SUPPORT_TURN_ON
| SUPPORT_TURN_OFF
| SUPPORT_PAUSE
| SUPPORT_STOP
| SUPPORT_RETURN_HOME
| SUPPORT_FAN_SPEED
| SUPPORT_SEND_COMMAND
| SUPPORT_LOCATE
| SUPPORT_STATUS
| SUPPORT_BATTERY
| SUPPORT_CLEAN_SPOT
)
SUPPORT_STATE_SERVICES = (
SUPPORT_STATE
| SUPPORT_PAUSE
| SUPPORT_STOP
| SUPPORT_RETURN_HOME
| SUPPORT_FAN_SPEED
| SUPPORT_BATTERY
| SUPPORT_CLEAN_SPOT
| SUPPORT_START
)
FAN_SPEEDS = ["min", "medium", "high", "max"]
DEMO_VACUUM_COMPLETE = "0_Ground_floor"
DEMO_VACUUM_MOST = "1_First_floor"
DEMO_VACUUM_BASIC = "2_Second_floor"
DEMO_VACUUM_MINIMAL = "3_Third_floor"
DEMO_VACUUM_NONE = "4_Fourth_floor"
DEMO_VACUUM_STATE = "5_Fifth_floor"
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Demo config entry."""
await async_setup_platform(hass, {}, async_add_entities)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Demo vacuums."""
async_add_entities(
[
DemoVacuum(DEMO_VACUUM_COMPLETE, SUPPORT_ALL_SERVICES),
DemoVacuum(DEMO_VACUUM_MOST, SUPPORT_MOST_SERVICES),
DemoVacuum(DEMO_VACUUM_BASIC, SUPPORT_BASIC_SERVICES),
DemoVacuum(DEMO_VACUUM_MINIMAL, SUPPORT_MINIMAL_SERVICES),
DemoVacuum(DEMO_VACUUM_NONE, 0),
StateDemoVacuum(DEMO_VACUUM_STATE),
]
)
class DemoVacuum(VacuumEntity):
"""Representation of a demo vacuum."""
def __init__(self, name, supported_features):
"""Initialize the vacuum."""
self._name = name
self._supported_features = supported_features
self._state = False
self._status = "Charging"
self._fan_speed = FAN_SPEEDS[1]
self._cleaned_area = 0
self._battery_level = 100
@property
def name(self):
"""Return the name of the vacuum."""
return self._name
@property
def should_poll(self):
"""No polling needed for a demo vacuum."""
return False
@property
def is_on(self):
"""Return true if vacuum is on."""
return self._state
@property
def status(self):
"""Return the status of the vacuum."""
return self._status
@property
def fan_speed(self):
"""Return the status of the vacuum."""
return self._fan_speed
@property
def fan_speed_list(self):
"""Return the status of the vacuum."""
return FAN_SPEEDS
@property
def battery_level(self):
"""Return the status of the vacuum."""
return max(0, min(100, self._battery_level))
@property
def device_state_attributes(self):
"""Return device state attributes."""
return {ATTR_CLEANED_AREA: round(self._cleaned_area, 2)}
@property
def supported_features(self):
"""Flag supported features."""
return self._supported_features
def turn_on(self, **kwargs):
"""Turn the vacuum on."""
if self.supported_features & SUPPORT_TURN_ON == 0:
return
self._state = True
self._cleaned_area += 5.32
self._battery_level -= 2
self._status = "Cleaning"
self.schedule_update_ha_state()
def turn_off(self, **kwargs):
"""Turn the vacuum off."""
if self.supported_features & SUPPORT_TURN_OFF == 0:
return
self._state = False
self._status = "Charging"
self.schedule_update_ha_state()
def stop(self, **kwargs):
"""Stop the vacuum."""
if self.supported_features & SUPPORT_STOP == 0:
return
self._state = False
self._status = "Stopping the current task"
self.schedule_update_ha_state()
def clean_spot(self, **kwargs):
"""Perform a spot clean-up."""
if self.supported_features & SUPPORT_CLEAN_SPOT == 0:
return
self._state = True
self._cleaned_area += 1.32
self._battery_level -= 1
self._status = "Cleaning spot"
self.schedule_update_ha_state()
def locate(self, **kwargs):
"""Locate the vacuum (usually by playing a song)."""
if self.supported_features & SUPPORT_LOCATE == 0:
return
self._status = "Hi, I'm over here!"
self.schedule_update_ha_state()
def start_pause(self, **kwargs):
"""Start, pause or resume the cleaning task."""
if self.supported_features & SUPPORT_PAUSE == 0:
return
self._state = not self._state
if self._state:
self._status = "Resuming the current task"
self._cleaned_area += 1.32
self._battery_level -= 1
else:
self._status = "Pausing the current task"
self.schedule_update_ha_state()
def set_fan_speed(self, fan_speed, **kwargs):
"""Set the vacuum's fan speed."""
if self.supported_features & SUPPORT_FAN_SPEED == 0:
return
if fan_speed in self.fan_speed_list:
self._fan_speed = fan_speed
self.schedule_update_ha_state()
def return_to_base(self, **kwargs):
"""Tell the vacuum to return to its dock."""
if self.supported_features & SUPPORT_RETURN_HOME == 0:
return
self._state = False
self._status = "Returning home..."
self._battery_level += 5
self.schedule_update_ha_state()
def send_command(self, command, params=None, **kwargs):
"""Send a command to the vacuum."""
if self.supported_features & SUPPORT_SEND_COMMAND == 0:
return
self._status = f"Executing {command}({params})"
self._state = True
self.schedule_update_ha_state()
class StateDemoVacuum(StateVacuumEntity):
"""Representation of a demo vacuum supporting states."""
def __init__(self, name):
"""Initialize the vacuum."""
self._name = name
self._supported_features = SUPPORT_STATE_SERVICES
self._state = STATE_DOCKED
self._fan_speed = FAN_SPEEDS[1]
self._cleaned_area = 0
self._battery_level = 100
@property
def name(self):
"""Return the name of the vacuum."""
return self._name
@property
def should_poll(self):
"""No polling needed for a demo vacuum."""
return False
@property
def supported_features(self):
"""Flag supported features."""
return self._supported_features
@property
def state(self):
"""Return the current state of the vacuum."""
return self._state
@property
def battery_level(self):
"""Return the current battery level of the vacuum."""
return max(0, min(100, self._battery_level))
@property
def fan_speed(self):
"""Return the current fan speed of the vacuum."""
return self._fan_speed
@property
def fan_speed_list(self):
"""Return the list of supported fan speeds."""
return FAN_SPEEDS
@property
def device_state_attributes(self):
"""Return device state attributes."""
return {ATTR_CLEANED_AREA: round(self._cleaned_area, 2)}
def start(self):
"""Start or resume the cleaning task."""
if self.supported_features & SUPPORT_START == 0:
return
if self._state != STATE_CLEANING:
self._state = STATE_CLEANING
self._cleaned_area += 1.32
self._battery_level -= 1
self.schedule_update_ha_state()
def pause(self):
"""Pause the cleaning task."""
if self.supported_features & SUPPORT_PAUSE == 0:
return
if self._state == STATE_CLEANING:
self._state = STATE_PAUSED
self.schedule_update_ha_state()
def stop(self, **kwargs):
"""Stop the cleaning task, do not return to dock."""
if self.supported_features & SUPPORT_STOP == 0:
return
self._state = STATE_IDLE
self.schedule_update_ha_state()
def return_to_base(self, **kwargs):
"""Return dock to charging base."""
if self.supported_features & SUPPORT_RETURN_HOME == 0:
return
self._state = STATE_RETURNING
self.schedule_update_ha_state()
self.hass.loop.call_later(30, self.__set_state_to_dock)
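# Simulate the trip home: flip to STATE_DOCKED 30 seconds from now.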
def clean_spot(self, **kwargs):
"""Perform a spot clean-up."""
if self.supported_features & SUPPORT_CLEAN_SPOT == 0:
return
self._state = STATE_CLEANING
self._cleaned_area += 1.32
self._battery_level -= 1
self.schedule_update_ha_state()
def set_fan_speed(self, fan_speed, **kwargs):
"""Set the vacuum's fan speed."""
if self.supported_features & SUPPORT_FAN_SPEED == 0:
return
if fan_speed in self.fan_speed_list:
self._fan_speed = fan_speed
self.schedule_update_ha_state()
def __set_state_to_dock(self):
self._state = STATE_DOCKED
self.schedule_update_ha_state()
|
from __future__ import absolute_import
import unittest
from lark import Lark
from lark.lexer import Token
from lark.tree import Tree
from lark.visitors import Visitor, Transformer, Discard
from lark.parsers.earley_forest import TreeForestTransformer, handles_ambiguity
class TestTreeForestTransformer(unittest.TestCase):
grammar = """
start: ab bc cd
!ab: "A" "B"?
!bc: "B"? "C"?
!cd: "C"? "D"
"""
parser = Lark(grammar, parser='earley', ambiguity='forest')
forest = parser.parse("ABCD")
def test_identity_resolve_ambiguity(self):
l = Lark(self.grammar, parser='earley', ambiguity='resolve')
tree1 = l.parse("ABCD")
tree2 = TreeForestTransformer(resolve_ambiguity=True).transform(self.forest)
self.assertEqual(tree1, tree2)
def test_identity_explicit_ambiguity(self):
l = Lark(self.grammar, parser='earley', ambiguity='explicit')
tree1 = l.parse("ABCD")
tree2 = TreeForestTransformer(resolve_ambiguity=False).transform(self.forest)
self.assertEqual(tree1, tree2)
def test_tree_class(self):
class CustomTree(Tree):
pass
class TreeChecker(Visitor):
def __default__(self, tree):
assert isinstance(tree, CustomTree)
tree = TreeForestTransformer(resolve_ambiguity=False, tree_class=CustomTree).transform(self.forest)
TreeChecker().visit(tree)
def test_token_calls(self):
visited = [False] * 4
class CustomTransformer(TreeForestTransformer):
def A(self, node):
assert node.type == 'A'
visited[0] = True
def B(self, node):
assert node.type == 'B'
visited[1] = True
def C(self, node):
assert node.type == 'C'
visited[2] = True
def D(self, node):
assert node.type == 'D'
visited[3] = True
tree = CustomTransformer(resolve_ambiguity=False).transform(self.forest)
assert visited == [True] * 4
def test_default_token(self):
token_count = [0]
class CustomTransformer(TreeForestTransformer):
def __default_token__(self, node):
token_count[0] += 1
assert isinstance(node, Token)
tree = CustomTransformer(resolve_ambiguity=True).transform(self.forest)
self.assertEqual(token_count[0], 4)
def test_rule_calls(self):
visited_start = [False]
visited_ab = [False]
visited_bc = [False]
visited_cd = [False]
class CustomTransformer(TreeForestTransformer):
def start(self, data):
visited_start[0] = True
def ab(self, data):
visited_ab[0] = True
def bc(self, data):
visited_bc[0] = True
def cd(self, data):
visited_cd[0] = True
tree = CustomTransformer(resolve_ambiguity=False).transform(self.forest)
self.assertTrue(visited_start[0])
self.assertTrue(visited_ab[0])
self.assertTrue(visited_bc[0])
self.assertTrue(visited_cd[0])
def test_default_rule(self):
rule_count = [0]
class CustomTransformer(TreeForestTransformer):
def __default__(self, name, data):
rule_count[0] += 1
tree = CustomTransformer(resolve_ambiguity=True).transform(self.forest)
self.assertEqual(rule_count[0], 4)
def test_default_ambig(self):
ambig_count = [0]
class CustomTransformer(TreeForestTransformer):
def __default_ambig__(self, name, data):
if len(data) > 1:
ambig_count[0] += 1
tree = CustomTransformer(resolve_ambiguity=False).transform(self.forest)
self.assertEqual(ambig_count[0], 1)
def test_handles_ambiguity(self):
class CustomTransformer(TreeForestTransformer):
@handles_ambiguity
def start(self, data):
assert isinstance(data, list)
assert len(data) == 4
for tree in data:
assert tree.data == 'start'
return 'handled'
@handles_ambiguity
def ab(self, data):
assert isinstance(data, list)
assert len(data) == 1
assert data[0].data == 'ab'
tree = CustomTransformer(resolve_ambiguity=False).transform(self.forest)
self.assertEqual(tree, 'handled')
def test_discard(self):
class CustomTransformer(TreeForestTransformer):
def bc(self, data):
raise Discard()
def D(self, node):
raise Discard()
class TreeChecker(Transformer):
def bc(self, children):
assert False
def D(self, token):
assert False
tree = CustomTransformer(resolve_ambiguity=False).transform(self.forest)
TreeChecker(visit_tokens=True).transform(tree)
def test_aliases(self):
visited_ambiguous = [False]
visited_full = [False]
class CustomTransformer(TreeForestTransformer):
@handles_ambiguity
def start(self, data):
for tree in data:
assert tree.data == 'ambiguous' or tree.data == 'full'
def ambiguous(self, data):
visited_ambiguous[0] = True
assert len(data) == 3
assert data[0].data == 'ab'
assert data[1].data == 'bc'
assert data[2].data == 'cd'
return self.tree_class('ambiguous', data)
def full(self, data):
visited_full[0] = True
assert len(data) == 1
assert data[0].data == 'abcd'
return self.tree_class('full', data)
grammar = """
start: ab bc cd -> ambiguous
| abcd -> full
!ab: "A" "B"?
!bc: "B"? "C"?
!cd: "C"? "D"
!abcd: "ABCD"
"""
l = Lark(grammar, parser='earley', ambiguity='forest')
forest = l.parse('ABCD')
tree = CustomTransformer(resolve_ambiguity=False).transform(forest)
self.assertTrue(visited_ambiguous[0])
self.assertTrue(visited_full[0])
def test_transformation(self):
class CustomTransformer(TreeForestTransformer):
def __default__(self, name, data):
result = []
for item in data:
if isinstance(item, list):
result += item
else:
result.append(item)
return result
def __default_token__(self, node):
return node.lower()
def __default_ambig__(self, name, data):
return data[0]
result = CustomTransformer(resolve_ambiguity=False).transform(self.forest)
expected = ['a', 'b', 'c', 'd']
self.assertEqual(result, expected)
if __name__ == '__main__':
unittest.main()
|
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_PROBLEM,
BinarySensorEntity,
)
from homeassistant.const import CONF_EMAIL
from . import PoolSenseEntity
from .const import DOMAIN
BINARY_SENSORS = {
"pH Status": {
"unit": None,
"icon": None,
"name": "pH Status",
"device_class": DEVICE_CLASS_PROBLEM,
},
"Chlorine Status": {
"unit": None,
"icon": None,
"name": "Chlorine Status",
"device_class": DEVICE_CLASS_PROBLEM,
},
}
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Defer sensor setup to the shared sensor module."""
coordinator = hass.data[DOMAIN][config_entry.entry_id]
binary_sensors_list = []
for binary_sensor in BINARY_SENSORS:
binary_sensors_list.append(
PoolSenseBinarySensor(
coordinator, config_entry.data[CONF_EMAIL], binary_sensor
)
)
async_add_entities(binary_sensors_list, False)
class PoolSenseBinarySensor(PoolSenseEntity, BinarySensorEntity):
"""Representation of PoolSense binary sensors."""
@property
def is_on(self):
"""Return true if the binary sensor is on."""
return self.coordinator.data[self.info_type] == "red"
@property
def icon(self):
"""Return the icon."""
return BINARY_SENSORS[self.info_type]["icon"]
@property
def device_class(self):
"""Return the class of this device."""
return BINARY_SENSORS[self.info_type]["device_class"]
@property
def name(self):
"""Return the name of the binary sensor."""
return f"PoolSense {BINARY_SENSORS[self.info_type]['name']}"
|
import logging
from asterisk_mbox import Client as asteriskClient
from asterisk_mbox.commands import (
CMD_MESSAGE_CDR,
CMD_MESSAGE_CDR_AVAILABLE,
CMD_MESSAGE_LIST,
)
import voluptuous as vol
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT
from homeassistant.core import callback
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_send, dispatcher_connect
_LOGGER = logging.getLogger(__name__)
DOMAIN = "asterisk_mbox"
SIGNAL_DISCOVER_PLATFORM = "asterisk_mbox.discover_platform"
SIGNAL_MESSAGE_REQUEST = "asterisk_mbox.message_request"
SIGNAL_MESSAGE_UPDATE = "asterisk_mbox.message_updated"
SIGNAL_CDR_UPDATE = "asterisk_mbox.cdr_updated"
SIGNAL_CDR_REQUEST = "asterisk_mbox.cdr_request"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_PORT): cv.port,
}
)
},
extra=vol.ALLOW_EXTRA,
)
def setup(hass, config):
"""Set up for the Asterisk Voicemail box."""
conf = config.get(DOMAIN)
host = conf[CONF_HOST]
port = conf[CONF_PORT]
password = conf[CONF_PASSWORD]
hass.data[DOMAIN] = AsteriskData(hass, host, port, password, config)
return True
class AsteriskData:
"""Store Asterisk mailbox data."""
def __init__(self, hass, host, port, password, config):
"""Init the Asterisk data object."""
self.hass = hass
self.config = config
self.messages = None
self.cdr = None
dispatcher_connect(self.hass, SIGNAL_MESSAGE_REQUEST, self._request_messages)
dispatcher_connect(self.hass, SIGNAL_CDR_REQUEST, self._request_cdr)
dispatcher_connect(self.hass, SIGNAL_DISCOVER_PLATFORM, self._discover_platform)
# Only connect after signal connection to ensure we don't miss any
self.client = asteriskClient(host, port, password, self.handle_data)
@callback
def _discover_platform(self, component):
_LOGGER.debug("Adding mailbox %s", component)
self.hass.async_create_task(
discovery.async_load_platform(
self.hass, "mailbox", component, {}, self.config
)
)
@callback
def handle_data(self, command, msg):
"""Handle changes to the mailbox."""
if command == CMD_MESSAGE_LIST:
_LOGGER.debug("AsteriskVM sent updated message list: Len %d", len(msg))
old_messages = self.messages
self.messages = sorted(
msg, key=lambda item: item["info"]["origtime"], reverse=True
)
if not isinstance(old_messages, list):
async_dispatcher_send(self.hass, SIGNAL_DISCOVER_PLATFORM, DOMAIN)
async_dispatcher_send(self.hass, SIGNAL_MESSAGE_UPDATE, self.messages)
elif command == CMD_MESSAGE_CDR:
_LOGGER.debug(
"AsteriskVM sent updated CDR list: Len %d", len(msg.get("entries", []))
)
self.cdr = msg["entries"]
async_dispatcher_send(self.hass, SIGNAL_CDR_UPDATE, self.cdr)
elif command == CMD_MESSAGE_CDR_AVAILABLE:
if not isinstance(self.cdr, list):
_LOGGER.debug("AsteriskVM adding CDR platform")
self.cdr = []
async_dispatcher_send(
self.hass, SIGNAL_DISCOVER_PLATFORM, "asterisk_cdr"
)
async_dispatcher_send(self.hass, SIGNAL_CDR_REQUEST)
else:
_LOGGER.debug(
"AsteriskVM sent unknown message '%d' len: %d", command, len(msg)
)
@callback
def _request_messages(self):
"""Handle changes to the mailbox."""
_LOGGER.debug("Requesting message list")
self.client.messages()
@callback
def _request_cdr(self):
"""Handle changes to the CDR."""
_LOGGER.debug("Requesting CDR list")
self.client.get_cdr()
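# Hedged usage sketch (the consuming platform is hypothetical): a mailbox
# platform would talk to AsteriskData through the dispatcher signals above.
#     from homeassistant.helpers.dispatcher import (
#         async_dispatcher_connect, async_dispatcher_send)
#     def handle_messages(messages):
#         """Receive the sorted message list pushed by handle_data()."""
#     async_dispatcher_connect(hass, SIGNAL_MESSAGE_UPDATE, handle_messages)
#     # Ask for a (re)fetch; the reply arrives via SIGNAL_MESSAGE_UPDATE.
#     async_dispatcher_send(hass, SIGNAL_MESSAGE_REQUEST)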
|
import datetime
import json
import logging
import os
from typing import Any, Dict
from google.cloud import pubsub_v1
import voluptuous as vol
from homeassistant.const import EVENT_STATE_CHANGED, STATE_UNAVAILABLE, STATE_UNKNOWN
from homeassistant.core import Event, HomeAssistant
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entityfilter import FILTER_SCHEMA
_LOGGER = logging.getLogger(__name__)
DOMAIN = "google_pubsub"
CONF_PROJECT_ID = "project_id"
CONF_TOPIC_NAME = "topic_name"
CONF_SERVICE_PRINCIPAL = "credentials_json"
CONF_FILTER = "filter"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_PROJECT_ID): cv.string,
vol.Required(CONF_TOPIC_NAME): cv.string,
vol.Required(CONF_SERVICE_PRINCIPAL): cv.string,
vol.Required(CONF_FILTER): FILTER_SCHEMA,
}
)
},
extra=vol.ALLOW_EXTRA,
)
def setup(hass: HomeAssistant, yaml_config: Dict[str, Any]):
"""Activate Google Pub/Sub component."""
config = yaml_config[DOMAIN]
project_id = config[CONF_PROJECT_ID]
topic_name = config[CONF_TOPIC_NAME]
service_principal_path = os.path.join(
hass.config.config_dir, config[CONF_SERVICE_PRINCIPAL]
)
if not os.path.isfile(service_principal_path):
_LOGGER.error("Path to credentials file cannot be found")
return False
entities_filter = config[CONF_FILTER]
publisher = pubsub_v1.PublisherClient.from_service_account_json(
service_principal_path
)
topic_path = publisher.topic_path( # pylint: disable=no-member
project_id, topic_name
)
encoder = DateTimeJSONEncoder()
def send_to_pubsub(event: Event):
"""Send states to Pub/Sub."""
state = event.data.get("new_state")
if (
state is None
or state.state in (STATE_UNKNOWN, "", STATE_UNAVAILABLE)
or not entities_filter(state.entity_id)
):
return
as_dict = state.as_dict()
        # Use the encoder's ``default`` hook (not ``encode``) so datetimes are
        # serialized once as plain ISO strings instead of being double-encoded.
        data = json.dumps(obj=as_dict, default=encoder.default).encode("utf-8")
publisher.publish(topic_path, data=data)
hass.bus.listen(EVENT_STATE_CHANGED, send_to_pubsub)
return True
class DateTimeJSONEncoder(json.JSONEncoder):
"""Encode python objects.
Additionally add encoding for datetime objects as isoformat.
"""
def default(self, o):
"""Implement encoding logic."""
if isinstance(o, datetime.datetime):
return o.isoformat()
return super().default(o)
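# Example of the encoder in isolation (standard json behavior, no extra
# assumptions): datetimes become ISO-8601 strings, everything else is unchanged.
#     json.dumps({"t": datetime.datetime(2020, 1, 1)}, cls=DateTimeJSONEncoder)
#     # -> '{"t": "2020-01-01T00:00:00"}'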
|
import pytest
from vcr.persisters.filesystem import FilesystemPersister
from vcr.serializers import jsonserializer, yamlserializer
@pytest.mark.parametrize(
"cassette_path, serializer",
[
("tests/fixtures/migration/old_cassette.json", jsonserializer),
("tests/fixtures/migration/old_cassette.yaml", yamlserializer),
],
)
def test_load_cassette_with_old_cassettes(cassette_path, serializer):
with pytest.raises(ValueError) as excinfo:
FilesystemPersister.load_cassette(cassette_path, serializer)
assert "run the migration script" in excinfo.exconly()
@pytest.mark.parametrize(
"cassette_path, serializer",
[
("tests/fixtures/migration/not_cassette.txt", jsonserializer),
("tests/fixtures/migration/not_cassette.txt", yamlserializer),
],
)
def test_load_cassette_with_invalid_cassettes(cassette_path, serializer):
with pytest.raises(Exception) as excinfo:
FilesystemPersister.load_cassette(cassette_path, serializer)
assert "run the migration script" not in excinfo.exconly()
|
from homeassistant.components.binary_sensor import BinarySensorEntity
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from . import DOMAIN, GeniusDevice
GH_STATE_ATTR = "outputOnOff"
async def async_setup_platform(
hass: HomeAssistantType, config: ConfigType, async_add_entities, discovery_info=None
) -> None:
"""Set up the Genius Hub sensor entities."""
if discovery_info is None:
return
broker = hass.data[DOMAIN]["broker"]
switches = [
GeniusBinarySensor(broker, d, GH_STATE_ATTR)
for d in broker.client.device_objs
if GH_STATE_ATTR in d.data["state"]
]
async_add_entities(switches, update_before_add=True)
class GeniusBinarySensor(GeniusDevice, BinarySensorEntity):
"""Representation of a Genius Hub binary_sensor."""
def __init__(self, broker, device, state_attr) -> None:
"""Initialize the binary sensor."""
super().__init__(broker, device)
self._state_attr = state_attr
        if device.type.startswith("Dual Channel Receiver"):
            self._name = f"Dual Channel Receiver {device.id}"
        else:
            self._name = f"{device.type} {device.id}"
@property
def is_on(self) -> bool:
"""Return the status of the sensor."""
return self._device.data["state"][self._state_attr]
|
import copy
import numpy as np
from chainer import reporter
import chainer.training.extensions
from chainercv.evaluations import eval_instance_segmentation_voc
from chainercv.utils import apply_to_iterator
class InstanceSegmentationVOCEvaluator(chainer.training.extensions.Evaluator):
"""An evaluation extension of instance-segmentation by PASCAL VOC metric.
This extension iterates over an iterator and evaluates the prediction
results by average precisions (APs) and mean of them
(mean Average Precision, mAP).
This extension reports the following values with keys.
Please note that :obj:`'ap/<label_names[l]>'` is reported only if
:obj:`label_names` is specified.
* :obj:`'map'`: Mean of average precisions (mAP).
* :obj:`'ap/<label_names[l]>'`: Average precision for class \
:obj:`label_names[l]`, where :math:`l` is the index of the class. \
For example, this evaluator reports :obj:`'ap/aeroplane'`, \
:obj:`'ap/bicycle'`, etc. if :obj:`label_names` is \
:obj:`~chainercv.datasets.sbd_instance_segmentation_label_names`. \
        If there is no instance assigned to class :obj:`label_names[l]` \
in either ground truth or prediction, it reports :obj:`numpy.nan` as \
its average precision. \
In this case, mAP is computed without this class.
Args:
        iterator (chainer.Iterator): An iterator. Each sample should be
            the following tuple :obj:`img, mask, label`.
            :obj:`img` is an image, :obj:`mask` is a pixel-wise mask of the
            instances and :obj:`label` is labels of the instances.
        target (chainer.Link): An instance-segmentation link. This link must
            have a :meth:`predict` method that takes a list of images and
            returns :obj:`masks`, :obj:`labels` and :obj:`scores`.
iou_thresh (float): Intersection over Union (IoU) threshold for
            calculating average precision. The default value is 0.5.
use_07_metric (bool): Whether to use PASCAL VOC 2007 evaluation metric
for calculating average precision. The default value is
:obj:`False`.
label_names (iterable of strings): An iterable of names of classes.
If this value is specified, average precision for each class is
also reported with the key :obj:`'ap/<label_names[l]>'`.
comm (~chainermn.communicators.CommunicatorBase):
A ChainerMN communicator.
If it is specified, this extension scatters the iterator of
root worker and gathers the results to the root worker.
"""
trigger = 1, 'epoch'
default_name = 'validation'
priority = chainer.training.PRIORITY_WRITER
def __init__(
self, iterator, target,
iou_thresh=0.5, use_07_metric=False, label_names=None,
comm=None,
):
if iterator is None:
iterator = {}
super(InstanceSegmentationVOCEvaluator, self).__init__(
iterator, target)
self.iou_thresh = iou_thresh
self.use_07_metric = use_07_metric
self.label_names = label_names
self.comm = comm
def evaluate(self):
target = self._targets['main']
if self.comm is not None and self.comm.rank != 0:
apply_to_iterator(target.predict, None, comm=self.comm)
return {}
iterator = self._iterators['main']
if hasattr(iterator, 'reset'):
iterator.reset()
it = iterator
else:
it = copy.copy(iterator)
in_values, out_values, rest_values = apply_to_iterator(
target.predict, it, comm=self.comm)
# delete unused iterators explicitly
del in_values
pred_masks, pred_labels, pred_scores = out_values
gt_masks, gt_labels = rest_values
result = eval_instance_segmentation_voc(
pred_masks, pred_labels, pred_scores,
gt_masks, gt_labels,
iou_thresh=self.iou_thresh,
use_07_metric=self.use_07_metric)
report = {'map': result['map']}
if self.label_names is not None:
for l, label_name in enumerate(self.label_names):
try:
report['ap/{:s}'.format(label_name)] = result['ap'][l]
except IndexError:
report['ap/{:s}'.format(label_name)] = np.nan
observation = {}
with reporter.report_scope(observation):
reporter.report(report, target)
return observation
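# Usage sketch based on the docstring above (``model``, ``test_iter`` and the
# trainer are assumed to exist; the label names are illustrative):
#     evaluator = InstanceSegmentationVOCEvaluator(
#         test_iter, model, use_07_metric=True,
#         label_names=sbd_instance_segmentation_label_names)
#     trainer.extend(evaluator, trigger=(1, 'epoch'))
# The reported observation then contains 'validation/main/map' plus one
# 'validation/main/ap/<name>' entry per label.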
|
from homeassistant.components.climate import _LOGGER
from homeassistant.components.climate.const import (
ATTR_AUX_HEAT,
ATTR_FAN_MODE,
ATTR_HUMIDITY,
ATTR_HVAC_MODE,
ATTR_PRESET_MODE,
ATTR_SWING_MODE,
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
DOMAIN,
SERVICE_SET_AUX_HEAT,
SERVICE_SET_FAN_MODE,
SERVICE_SET_HUMIDITY,
SERVICE_SET_HVAC_MODE,
SERVICE_SET_PRESET_MODE,
SERVICE_SET_SWING_MODE,
SERVICE_SET_TEMPERATURE,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_TEMPERATURE,
ENTITY_MATCH_ALL,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
)
from homeassistant.loader import bind_hass
async def async_set_preset_mode(hass, preset_mode, entity_id=ENTITY_MATCH_ALL):
"""Set new preset mode."""
data = {ATTR_PRESET_MODE: preset_mode}
if entity_id:
data[ATTR_ENTITY_ID] = entity_id
await hass.services.async_call(DOMAIN, SERVICE_SET_PRESET_MODE, data, blocking=True)
@bind_hass
def set_preset_mode(hass, preset_mode, entity_id=ENTITY_MATCH_ALL):
"""Set new preset mode."""
data = {ATTR_PRESET_MODE: preset_mode}
if entity_id:
data[ATTR_ENTITY_ID] = entity_id
hass.services.call(DOMAIN, SERVICE_SET_PRESET_MODE, data)
async def async_set_aux_heat(hass, aux_heat, entity_id=ENTITY_MATCH_ALL):
"""Turn all or specified climate devices auxiliary heater on."""
data = {ATTR_AUX_HEAT: aux_heat}
if entity_id:
data[ATTR_ENTITY_ID] = entity_id
await hass.services.async_call(DOMAIN, SERVICE_SET_AUX_HEAT, data, blocking=True)
@bind_hass
def set_aux_heat(hass, aux_heat, entity_id=ENTITY_MATCH_ALL):
"""Turn all or specified climate devices auxiliary heater on."""
data = {ATTR_AUX_HEAT: aux_heat}
if entity_id:
data[ATTR_ENTITY_ID] = entity_id
hass.services.call(DOMAIN, SERVICE_SET_AUX_HEAT, data)
async def async_set_temperature(
hass,
temperature=None,
entity_id=ENTITY_MATCH_ALL,
target_temp_high=None,
target_temp_low=None,
hvac_mode=None,
):
"""Set new target temperature."""
kwargs = {
key: value
for key, value in [
(ATTR_TEMPERATURE, temperature),
(ATTR_TARGET_TEMP_HIGH, target_temp_high),
(ATTR_TARGET_TEMP_LOW, target_temp_low),
(ATTR_ENTITY_ID, entity_id),
(ATTR_HVAC_MODE, hvac_mode),
]
if value is not None
}
_LOGGER.debug("set_temperature start data=%s", kwargs)
await hass.services.async_call(
DOMAIN, SERVICE_SET_TEMPERATURE, kwargs, blocking=True
)
@bind_hass
def set_temperature(
hass,
temperature=None,
entity_id=ENTITY_MATCH_ALL,
target_temp_high=None,
target_temp_low=None,
hvac_mode=None,
):
"""Set new target temperature."""
kwargs = {
key: value
for key, value in [
(ATTR_TEMPERATURE, temperature),
(ATTR_TARGET_TEMP_HIGH, target_temp_high),
(ATTR_TARGET_TEMP_LOW, target_temp_low),
(ATTR_ENTITY_ID, entity_id),
(ATTR_HVAC_MODE, hvac_mode),
]
if value is not None
}
_LOGGER.debug("set_temperature start data=%s", kwargs)
hass.services.call(DOMAIN, SERVICE_SET_TEMPERATURE, kwargs)
async def async_set_humidity(hass, humidity, entity_id=ENTITY_MATCH_ALL):
"""Set new target humidity."""
data = {ATTR_HUMIDITY: humidity}
if entity_id is not None:
data[ATTR_ENTITY_ID] = entity_id
await hass.services.async_call(DOMAIN, SERVICE_SET_HUMIDITY, data, blocking=True)
@bind_hass
def set_humidity(hass, humidity, entity_id=ENTITY_MATCH_ALL):
"""Set new target humidity."""
data = {ATTR_HUMIDITY: humidity}
if entity_id is not None:
data[ATTR_ENTITY_ID] = entity_id
hass.services.call(DOMAIN, SERVICE_SET_HUMIDITY, data)
async def async_set_fan_mode(hass, fan, entity_id=ENTITY_MATCH_ALL):
"""Set all or specified climate devices fan mode on."""
data = {ATTR_FAN_MODE: fan}
if entity_id:
data[ATTR_ENTITY_ID] = entity_id
await hass.services.async_call(DOMAIN, SERVICE_SET_FAN_MODE, data, blocking=True)
@bind_hass
def set_fan_mode(hass, fan, entity_id=ENTITY_MATCH_ALL):
"""Set all or specified climate devices fan mode on."""
data = {ATTR_FAN_MODE: fan}
if entity_id:
data[ATTR_ENTITY_ID] = entity_id
hass.services.call(DOMAIN, SERVICE_SET_FAN_MODE, data)
async def async_set_hvac_mode(hass, hvac_mode, entity_id=ENTITY_MATCH_ALL):
"""Set new target operation mode."""
data = {ATTR_HVAC_MODE: hvac_mode}
if entity_id is not None:
data[ATTR_ENTITY_ID] = entity_id
await hass.services.async_call(DOMAIN, SERVICE_SET_HVAC_MODE, data, blocking=True)
@bind_hass
def set_operation_mode(hass, hvac_mode, entity_id=ENTITY_MATCH_ALL):
"""Set new target operation mode."""
data = {ATTR_HVAC_MODE: hvac_mode}
if entity_id is not None:
data[ATTR_ENTITY_ID] = entity_id
hass.services.call(DOMAIN, SERVICE_SET_HVAC_MODE, data)
async def async_set_swing_mode(hass, swing_mode, entity_id=ENTITY_MATCH_ALL):
"""Set new target swing mode."""
data = {ATTR_SWING_MODE: swing_mode}
if entity_id is not None:
data[ATTR_ENTITY_ID] = entity_id
await hass.services.async_call(DOMAIN, SERVICE_SET_SWING_MODE, data, blocking=True)
@bind_hass
def set_swing_mode(hass, swing_mode, entity_id=ENTITY_MATCH_ALL):
"""Set new target swing mode."""
data = {ATTR_SWING_MODE: swing_mode}
if entity_id is not None:
data[ATTR_ENTITY_ID] = entity_id
hass.services.call(DOMAIN, SERVICE_SET_SWING_MODE, data)
async def async_turn_on(hass, entity_id=ENTITY_MATCH_ALL):
"""Turn on device."""
data = {}
if entity_id is not None:
data[ATTR_ENTITY_ID] = entity_id
await hass.services.async_call(DOMAIN, SERVICE_TURN_ON, data, blocking=True)
async def async_turn_off(hass, entity_id=ENTITY_MATCH_ALL):
"""Turn off device."""
data = {}
if entity_id is not None:
data[ATTR_ENTITY_ID] = entity_id
await hass.services.async_call(DOMAIN, SERVICE_TURN_OFF, data, blocking=True)
|
import logging
from aiohttp import web
import voluptuous as vol
from homeassistant.components.sensor import DOMAIN as SENSOR
from homeassistant.const import (
CONF_WEBHOOK_ID,
HTTP_OK,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
VOLUME_GALLONS,
VOLUME_LITERS,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_send
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ["webhook"]
PLAATO_DEVICE_SENSORS = "sensors"
PLAATO_DEVICE_ATTRS = "attrs"
ATTR_DEVICE_ID = "device_id"
ATTR_DEVICE_NAME = "device_name"
ATTR_TEMP_UNIT = "temp_unit"
ATTR_VOLUME_UNIT = "volume_unit"
ATTR_BPM = "bpm"
ATTR_TEMP = "temp"
ATTR_SG = "sg"
ATTR_OG = "og"
ATTR_BUBBLES = "bubbles"
ATTR_ABV = "abv"
ATTR_CO2_VOLUME = "co2_volume"
ATTR_BATCH_VOLUME = "batch_volume"
SENSOR_UPDATE = f"{DOMAIN}_sensor_update"
SENSOR_DATA_KEY = f"{DOMAIN}.{SENSOR}"
WEBHOOK_SCHEMA = vol.Schema(
{
vol.Required(ATTR_DEVICE_NAME): cv.string,
vol.Required(ATTR_DEVICE_ID): cv.positive_int,
vol.Required(ATTR_TEMP_UNIT): vol.Any(TEMP_CELSIUS, TEMP_FAHRENHEIT),
vol.Required(ATTR_VOLUME_UNIT): vol.Any(VOLUME_LITERS, VOLUME_GALLONS),
vol.Required(ATTR_BPM): cv.positive_int,
vol.Required(ATTR_TEMP): vol.Coerce(float),
vol.Required(ATTR_SG): vol.Coerce(float),
vol.Required(ATTR_OG): vol.Coerce(float),
vol.Required(ATTR_ABV): vol.Coerce(float),
vol.Required(ATTR_CO2_VOLUME): vol.Coerce(float),
vol.Required(ATTR_BATCH_VOLUME): vol.Coerce(float),
vol.Required(ATTR_BUBBLES): cv.positive_int,
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, hass_config):
"""Set up the Plaato component."""
return True
async def async_setup_entry(hass, entry):
"""Configure based on config entry."""
if DOMAIN not in hass.data:
hass.data[DOMAIN] = {}
webhook_id = entry.data[CONF_WEBHOOK_ID]
hass.components.webhook.async_register(DOMAIN, "Plaato", webhook_id, handle_webhook)
hass.async_create_task(hass.config_entries.async_forward_entry_setup(entry, SENSOR))
return True
async def async_unload_entry(hass, entry):
"""Unload a config entry."""
hass.components.webhook.async_unregister(entry.data[CONF_WEBHOOK_ID])
hass.data[SENSOR_DATA_KEY]()
await hass.config_entries.async_forward_entry_unload(entry, SENSOR)
return True
async def handle_webhook(hass, webhook_id, request):
"""Handle incoming webhook from Plaato."""
try:
data = WEBHOOK_SCHEMA(await request.json())
except vol.MultipleInvalid as error:
_LOGGER.warning("An error occurred when parsing webhook data <%s>", error)
return
device_id = _device_id(data)
attrs = {
ATTR_DEVICE_NAME: data.get(ATTR_DEVICE_NAME),
ATTR_DEVICE_ID: data.get(ATTR_DEVICE_ID),
ATTR_TEMP_UNIT: data.get(ATTR_TEMP_UNIT),
ATTR_VOLUME_UNIT: data.get(ATTR_VOLUME_UNIT),
}
sensors = {
ATTR_TEMP: data.get(ATTR_TEMP),
ATTR_BPM: data.get(ATTR_BPM),
ATTR_SG: data.get(ATTR_SG),
ATTR_OG: data.get(ATTR_OG),
ATTR_ABV: data.get(ATTR_ABV),
ATTR_CO2_VOLUME: data.get(ATTR_CO2_VOLUME),
ATTR_BATCH_VOLUME: data.get(ATTR_BATCH_VOLUME),
ATTR_BUBBLES: data.get(ATTR_BUBBLES),
}
hass.data[DOMAIN][device_id] = {
PLAATO_DEVICE_ATTRS: attrs,
PLAATO_DEVICE_SENSORS: sensors,
}
async_dispatcher_send(hass, SENSOR_UPDATE, device_id)
return web.Response(text=f"Saving status for {device_id}", status=HTTP_OK)
def _device_id(data):
"""Return name of device sensor."""
return f"{data.get(ATTR_DEVICE_NAME)}_{data.get(ATTR_DEVICE_ID)}"
|
import re
import sys
import requests
def to_int(value):
    value = ''.join(x for x in value if x.isdigit())
    try:
        return int(value)
    except ValueError:
        # value had no digits at all (e.g. a 'dev' or 'rc' component)
        return 0
def to_tuple(version):
return tuple(to_int(x) for x in version.split('.'))
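# Why sort with to_tuple: plain string ordering would put '1.10.0' before
# '1.9.0'; integer tuples compare the components numerically instead:
#     to_tuple('1.9.0')  -> (1, 9, 0)
#     to_tuple('1.10.0') -> (1, 10, 0)   # and (1, 9, 0) < (1, 10, 0)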
def main():
project = sys.argv[1]
json = requests.get('https://pypi.org/pypi/%s/json' % project).json()
for version in sorted(json['releases'], key=to_tuple):
print(version)
wheel_packages = [
p for p in json['releases'][version]
if p['packagetype'] == 'bdist_wheel'
]
for p in wheel_packages:
print(' %(python_version)s %(filename)s' % p)
if __name__ == '__main__':
main()
|
import asyncio
import logging
import math
from typing import Any, Callable, Dict, List, Optional
from aioesphomeapi import (
APIClient,
APIConnectionError,
DeviceInfo,
EntityInfo,
EntityState,
HomeassistantServiceCall,
UserService,
UserServiceArgType,
)
import voluptuous as vol
from homeassistant import const
from homeassistant.components import zeroconf
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_PORT,
EVENT_HOMEASSISTANT_STOP,
)
from homeassistant.core import Event, State, callback
from homeassistant.exceptions import TemplateError
from homeassistant.helpers import template
import homeassistant.helpers.config_validation as cv
import homeassistant.helpers.device_registry as dr
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import async_track_state_change_event
from homeassistant.helpers.json import JSONEncoder
from homeassistant.helpers.storage import Store
from homeassistant.helpers.template import Template
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
# Import config flow so that it's added to the registry
from .config_flow import EsphomeFlowHandler # noqa: F401
from .entry_data import DATA_KEY, RuntimeEntryData
DOMAIN = "esphome"
_LOGGER = logging.getLogger(__name__)
STORAGE_VERSION = 1
# No config schema - only configuration entry
CONFIG_SCHEMA = vol.Schema({}, extra=vol.ALLOW_EXTRA)
async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool:
"""Stub to allow setting up this component.
Configuration through YAML is not supported at this time.
"""
return True
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool:
"""Set up the esphome component."""
hass.data.setdefault(DATA_KEY, {})
host = entry.data[CONF_HOST]
port = entry.data[CONF_PORT]
password = entry.data[CONF_PASSWORD]
device_id = None
zeroconf_instance = await zeroconf.async_get_instance(hass)
cli = APIClient(
hass.loop,
host,
port,
password,
client_info=f"Home Assistant {const.__version__}",
zeroconf_instance=zeroconf_instance,
)
# Store client in per-config-entry hass.data
store = Store(
hass, STORAGE_VERSION, f"esphome.{entry.entry_id}", encoder=JSONEncoder
)
entry_data = hass.data[DATA_KEY][entry.entry_id] = RuntimeEntryData(
client=cli, entry_id=entry.entry_id, store=store
)
async def on_stop(event: Event) -> None:
"""Cleanup the socket client on HA stop."""
await _cleanup_instance(hass, entry)
# Use async_listen instead of async_listen_once so that we don't deregister
# the callback twice when shutting down Home Assistant.
# "Unable to remove unknown listener <function EventBus.async_listen_once.<locals>.onetime_listener>"
entry_data.cleanup_callbacks.append(
hass.bus.async_listen(EVENT_HOMEASSISTANT_STOP, on_stop)
)
@callback
def async_on_state(state: EntityState) -> None:
"""Send dispatcher updates when a new state is received."""
entry_data.async_update_state(hass, state)
@callback
def async_on_service_call(service: HomeassistantServiceCall) -> None:
"""Call service when user automation in ESPHome config is triggered."""
domain, service_name = service.service.split(".", 1)
service_data = service.data
if service.data_template:
try:
data_template = {
key: Template(value) for key, value in service.data_template.items()
}
template.attach(hass, data_template)
service_data.update(
template.render_complex(data_template, service.variables)
)
except TemplateError as ex:
_LOGGER.error("Error rendering data template for %s: %s", host, ex)
return
if service.is_event:
            # ESPHome uses the service-call packet for both events and service calls
# Ensure the user can only send events of form 'esphome.xyz'
if domain != "esphome":
_LOGGER.error(
"Can only generate events under esphome domain! (%s)", host
)
return
# Call native tag scan
if service_name == "tag_scanned":
tag_id = service_data["tag_id"]
hass.async_create_task(
hass.components.tag.async_scan_tag(tag_id, device_id)
)
return
hass.bus.async_fire(service.service, service_data)
else:
hass.async_create_task(
hass.services.async_call(
domain, service_name, service_data, blocking=True
)
)
async def send_home_assistant_state_event(event: Event) -> None:
"""Forward Home Assistant states updates to ESPHome."""
new_state = event.data.get("new_state")
if new_state is None:
return
entity_id = event.data.get("entity_id")
await cli.send_home_assistant_state(entity_id, new_state.state)
async def _send_home_assistant_state(
        entity_id: str, new_state: State
) -> None:
"""Forward Home Assistant states to ESPHome."""
await cli.send_home_assistant_state(entity_id, new_state.state)
@callback
def async_on_state_subscription(entity_id: str) -> None:
"""Subscribe and forward states for requested entities."""
unsub = async_track_state_change_event(
hass, [entity_id], send_home_assistant_state_event
)
entry_data.disconnect_callbacks.append(unsub)
new_state = hass.states.get(entity_id)
if new_state is None:
return
# Send initial state
hass.async_create_task(_send_home_assistant_state(entity_id, new_state))
async def on_login() -> None:
"""Subscribe to states and list entities on successful API login."""
nonlocal device_id
try:
entry_data.device_info = await cli.device_info()
entry_data.available = True
device_id = await _async_setup_device_registry(
hass, entry, entry_data.device_info
)
entry_data.async_update_device_state(hass)
entity_infos, services = await cli.list_entities_services()
await entry_data.async_update_static_infos(hass, entry, entity_infos)
await _setup_services(hass, entry_data, services)
await cli.subscribe_states(async_on_state)
await cli.subscribe_service_calls(async_on_service_call)
await cli.subscribe_home_assistant_states(async_on_state_subscription)
hass.async_create_task(entry_data.async_save_to_store())
except APIConnectionError as err:
_LOGGER.warning("Error getting initial data for %s: %s", host, err)
# Re-connection logic will trigger after this
await cli.disconnect()
try_connect = await _setup_auto_reconnect_logic(hass, cli, entry, host, on_login)
async def complete_setup() -> None:
"""Complete the config entry setup."""
infos, services = await entry_data.async_load_from_store()
await entry_data.async_update_static_infos(hass, entry, infos)
await _setup_services(hass, entry_data, services)
# Create connection attempt outside of HA's tracked task in order
# not to delay startup.
hass.loop.create_task(try_connect(is_disconnect=False))
hass.async_create_task(complete_setup())
return True
async def _setup_auto_reconnect_logic(
hass: HomeAssistantType, cli: APIClient, entry: ConfigEntry, host: str, on_login
):
"""Set up the re-connect logic for the API client."""
async def try_connect(tries: int = 0, is_disconnect: bool = True) -> None:
"""Try connecting to the API client. Will retry if not successful."""
if entry.entry_id not in hass.data[DOMAIN]:
# When removing/disconnecting manually
return
data: RuntimeEntryData = hass.data[DOMAIN][entry.entry_id]
for disconnect_cb in data.disconnect_callbacks:
disconnect_cb()
data.disconnect_callbacks = []
data.available = False
data.async_update_device_state(hass)
if is_disconnect:
# This can happen often depending on WiFi signal strength.
# So therefore all these connection warnings are logged
# as infos. The "unavailable" logic will still trigger so the
# user knows if the device is not connected.
_LOGGER.info("Disconnected from ESPHome API for %s", host)
if tries != 0:
# If not first re-try, wait and print message
# Cap wait time at 1 minute. This is because while working on the
# device (e.g. soldering stuff), users don't want to have to wait
# a long time for their device to show up in HA again (this was
# mentioned a lot in early feedback)
#
# In the future another API will be set up so that the ESP can
            # notify HA of connectivity directly, but for now we'll use a
# really short reconnect interval.
tries = min(tries, 10) # prevent OverflowError
wait_time = int(round(min(1.8 ** tries, 60.0)))
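            # With this formula the waits grow roughly as
            # 2, 3, 6, 10, 19, 34, 60, 60, ... seconds (capped at one minute).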
_LOGGER.info("Trying to reconnect to %s in %s seconds", host, wait_time)
await asyncio.sleep(wait_time)
try:
await cli.connect(on_stop=try_connect, login=True)
except APIConnectionError as error:
_LOGGER.info("Can't connect to ESPHome API for %s: %s", host, error)
# Schedule re-connect in event loop in order not to delay HA
# startup. First connect is scheduled in tracked tasks.
data.reconnect_task = hass.loop.create_task(
try_connect(tries + 1, is_disconnect=False)
)
else:
_LOGGER.info("Successfully connected to %s", host)
hass.async_create_task(on_login())
return try_connect
async def _async_setup_device_registry(
hass: HomeAssistantType, entry: ConfigEntry, device_info: DeviceInfo
):
"""Set up device registry feature for a particular config entry."""
sw_version = device_info.esphome_version
if device_info.compilation_time:
sw_version += f" ({device_info.compilation_time})"
device_registry = await dr.async_get_registry(hass)
entry = device_registry.async_get_or_create(
config_entry_id=entry.entry_id,
connections={(dr.CONNECTION_NETWORK_MAC, device_info.mac_address)},
name=device_info.name,
manufacturer="espressif",
model=device_info.model,
sw_version=sw_version,
)
return entry.id
async def _register_service(
hass: HomeAssistantType, entry_data: RuntimeEntryData, service: UserService
):
service_name = f"{entry_data.device_info.name}_{service.name}"
schema = {}
for arg in service.args:
schema[vol.Required(arg.name)] = {
UserServiceArgType.BOOL: cv.boolean,
UserServiceArgType.INT: vol.Coerce(int),
UserServiceArgType.FLOAT: vol.Coerce(float),
UserServiceArgType.STRING: cv.string,
UserServiceArgType.BOOL_ARRAY: [cv.boolean],
UserServiceArgType.INT_ARRAY: [vol.Coerce(int)],
UserServiceArgType.FLOAT_ARRAY: [vol.Coerce(float)],
UserServiceArgType.STRING_ARRAY: [cv.string],
}[arg.type_]
async def execute_service(call):
await entry_data.client.execute_service(service, call.data)
hass.services.async_register(
DOMAIN, service_name, execute_service, vol.Schema(schema)
)
async def _setup_services(
hass: HomeAssistantType, entry_data: RuntimeEntryData, services: List[UserService]
):
old_services = entry_data.services.copy()
to_unregister = []
to_register = []
for service in services:
if service.key in old_services:
# Already exists
matching = old_services.pop(service.key)
if matching != service:
# Need to re-register
to_unregister.append(matching)
to_register.append(service)
else:
# New service
to_register.append(service)
for service in old_services.values():
to_unregister.append(service)
entry_data.services = {serv.key: serv for serv in services}
for service in to_unregister:
service_name = f"{entry_data.device_info.name}_{service.name}"
hass.services.async_remove(DOMAIN, service_name)
for service in to_register:
await _register_service(hass, entry_data, service)
async def _cleanup_instance(
hass: HomeAssistantType, entry: ConfigEntry
) -> RuntimeEntryData:
"""Cleanup the esphome client if it exists."""
data: RuntimeEntryData = hass.data[DATA_KEY].pop(entry.entry_id)
if data.reconnect_task is not None:
data.reconnect_task.cancel()
for disconnect_cb in data.disconnect_callbacks:
disconnect_cb()
for cleanup_callback in data.cleanup_callbacks:
cleanup_callback()
await data.client.disconnect()
return data
async def async_unload_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool:
"""Unload an esphome config entry."""
entry_data = await _cleanup_instance(hass, entry)
tasks = []
for platform in entry_data.loaded_platforms:
tasks.append(hass.config_entries.async_forward_entry_unload(entry, platform))
if tasks:
await asyncio.wait(tasks)
return True
async def platform_async_setup_entry(
hass: HomeAssistantType,
entry: ConfigEntry,
async_add_entities,
*,
component_key: str,
info_type,
entity_type,
state_type,
) -> None:
"""Set up an esphome platform.
This method is in charge of receiving, distributing and storing
info and state updates.
"""
entry_data: RuntimeEntryData = hass.data[DOMAIN][entry.entry_id]
entry_data.info[component_key] = {}
entry_data.old_info[component_key] = {}
entry_data.state[component_key] = {}
@callback
def async_list_entities(infos: List[EntityInfo]):
"""Update entities of this platform when entities are listed."""
old_infos = entry_data.info[component_key]
new_infos = {}
add_entities = []
for info in infos:
if not isinstance(info, info_type):
# Filter out infos that don't belong to this platform.
continue
if info.key in old_infos:
# Update existing entity
old_infos.pop(info.key)
else:
# Create new entity
entity = entity_type(entry.entry_id, component_key, info.key)
add_entities.append(entity)
new_infos[info.key] = info
# Remove old entities
for info in old_infos.values():
entry_data.async_remove_entity(hass, component_key, info.key)
# First copy the now-old info into the backup object
entry_data.old_info[component_key] = entry_data.info[component_key]
# Then update the actual info
entry_data.info[component_key] = new_infos
# Add entities to Home Assistant
async_add_entities(add_entities)
signal = f"esphome_{entry.entry_id}_on_list"
entry_data.cleanup_callbacks.append(
async_dispatcher_connect(hass, signal, async_list_entities)
)
@callback
def async_entity_state(state: EntityState):
"""Notify the appropriate entity of an updated state."""
if not isinstance(state, state_type):
return
entry_data.state[component_key][state.key] = state
entry_data.async_update_entity(hass, component_key, state.key)
signal = f"esphome_{entry.entry_id}_on_state"
entry_data.cleanup_callbacks.append(
async_dispatcher_connect(hass, signal, async_entity_state)
)
def esphome_state_property(func):
"""Wrap a state property of an esphome entity.
This checks if the state object in the entity is set, and
prevents writing NAN values to the Home Assistant state machine.
"""
@property
def _wrapper(self):
if self._state is None:
return None
val = func(self)
if isinstance(val, float) and math.isnan(val):
# Home Assistant doesn't use NAN values in state machine
# (not JSON serializable)
return None
return val
return _wrapper
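# Illustrative application of the decorator (the entity and attribute are
# hypothetical, not from this module):
#     class ExampleEsphomeSensor(EsphomeEntity):
#         @esphome_state_property
#         def state(self):
#             # Runs only once self._state is set; NAN floats become None.
#             return self._state.state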
class EsphomeEnumMapper:
"""Helper class to convert between hass and esphome enum values."""
def __init__(self, func: Callable[[], Dict[int, str]]):
"""Construct a EsphomeEnumMapper."""
self._func = func
def from_esphome(self, value: int) -> str:
"""Convert from an esphome int representation to a hass string."""
return self._func()[value]
def from_hass(self, value: str) -> int:
"""Convert from a hass string to a esphome int representation."""
inverse = {v: k for k, v in self._func().items()}
return inverse[value]
def esphome_map_enum(func: Callable[[], Dict[int, str]]):
"""Map esphome int enum values to hass string constants.
    This function has to be used as a decorator. It ensures the aioesphomeapi
    import only happens at runtime.
"""
return EsphomeEnumMapper(func)
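# Hedged example of the mapper (the FanSpeed enum and the mapping shown are
# assumptions for illustration): building the dict inside the decorated
# function keeps the aioesphomeapi import at runtime, as the docstring notes.
#     @esphome_map_enum
#     def _fan_speeds():
#         from aioesphomeapi import FanSpeed
#         return {FanSpeed.LOW: "low", FanSpeed.HIGH: "high"}
#     _fan_speeds.from_esphome(FanSpeed.LOW)  # -> "low"
#     _fan_speeds.from_hass("high")           # -> FanSpeed.HIGH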
class EsphomeEntity(Entity):
"""Define a generic esphome entity."""
def __init__(self, entry_id: str, component_key: str, key: int):
"""Initialize."""
self._entry_id = entry_id
self._component_key = component_key
self._key = key
self._remove_callbacks: List[Callable[[], None]] = []
async def async_added_to_hass(self) -> None:
"""Register callbacks."""
kwargs = {
"entry_id": self._entry_id,
"component_key": self._component_key,
"key": self._key,
}
self._remove_callbacks.append(
async_dispatcher_connect(
self.hass,
(
f"esphome_{kwargs.get('entry_id')}"
f"_update_{kwargs.get('component_key')}_{kwargs.get('key')}"
),
self._on_state_update,
)
)
self._remove_callbacks.append(
async_dispatcher_connect(
self.hass,
(
f"esphome_{kwargs.get('entry_id')}_remove_"
f"{kwargs.get('component_key')}_{kwargs.get('key')}"
),
self.async_remove,
)
)
self._remove_callbacks.append(
async_dispatcher_connect(
self.hass,
f"esphome_{kwargs.get('entry_id')}_on_device_update",
self._on_device_update,
)
)
async def _on_state_update(self) -> None:
"""Update the entity state when state or static info changed."""
self.async_write_ha_state()
async def _on_device_update(self) -> None:
"""Update the entity state when device info has changed."""
if self._entry_data.available:
# Don't update the HA state yet when the device comes online.
# Only update the HA state when the full state arrives
# through the next entity state packet.
return
self.async_write_ha_state()
async def async_will_remove_from_hass(self) -> None:
"""Unregister callbacks."""
for remove_callback in self._remove_callbacks:
remove_callback()
self._remove_callbacks = []
@property
def _entry_data(self) -> RuntimeEntryData:
return self.hass.data[DATA_KEY][self._entry_id]
@property
def _static_info(self) -> EntityInfo:
# Check if value is in info database. Use a single lookup.
info = self._entry_data.info[self._component_key].get(self._key)
if info is not None:
return info
        # This entity is in the removal process and has already been removed
        # from .info, so look in old_info
return self._entry_data.old_info[self._component_key].get(self._key)
@property
def _device_info(self) -> DeviceInfo:
return self._entry_data.device_info
@property
def _client(self) -> APIClient:
return self._entry_data.client
@property
def _state(self) -> Optional[EntityState]:
try:
return self._entry_data.state[self._component_key][self._key]
except KeyError:
return None
@property
def available(self) -> bool:
"""Return if the entity is available."""
device = self._device_info
if device.has_deep_sleep:
# During deep sleep the ESP will not be connectable (by design)
# For these cases, show it as available
return True
return self._entry_data.available
@property
def unique_id(self) -> Optional[str]:
"""Return a unique id identifying the entity."""
if not self._static_info.unique_id:
return None
return self._static_info.unique_id
@property
def device_info(self) -> Dict[str, Any]:
"""Return device registry information for this entity."""
return {
"connections": {(dr.CONNECTION_NETWORK_MAC, self._device_info.mac_address)}
}
@property
def name(self) -> str:
"""Return the name of the entity."""
return self._static_info.name
@property
def should_poll(self) -> bool:
"""Disable polling."""
return False
|
from withings_api.common import NotifyAppli
from homeassistant.components.withings.common import (
WITHINGS_MEASUREMENTS_MAP,
async_get_entity_id,
)
from homeassistant.components.withings.const import Measurement
from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_registry import EntityRegistry
from .common import ComponentFactory, new_profile_config
async def test_binary_sensor(
hass: HomeAssistant, component_factory: ComponentFactory
) -> None:
"""Test binary sensor."""
in_bed_attribute = WITHINGS_MEASUREMENTS_MAP[Measurement.IN_BED]
person0 = new_profile_config("person0", 0)
person1 = new_profile_config("person1", 1)
entity_registry: EntityRegistry = (
await hass.helpers.entity_registry.async_get_registry()
)
await component_factory.configure_component(profile_configs=(person0, person1))
assert not await async_get_entity_id(hass, in_bed_attribute, person0.user_id)
assert not await async_get_entity_id(hass, in_bed_attribute, person1.user_id)
# person 0
await component_factory.setup_profile(person0.user_id)
await component_factory.setup_profile(person1.user_id)
entity_id0 = await async_get_entity_id(hass, in_bed_attribute, person0.user_id)
entity_id1 = await async_get_entity_id(hass, in_bed_attribute, person1.user_id)
assert entity_id0
assert entity_id1
assert entity_registry.async_is_registered(entity_id0)
assert hass.states.get(entity_id0).state == STATE_UNAVAILABLE
resp = await component_factory.call_webhook(person0.user_id, NotifyAppli.BED_IN)
assert resp.message_code == 0
await hass.async_block_till_done()
assert hass.states.get(entity_id0).state == STATE_ON
resp = await component_factory.call_webhook(person0.user_id, NotifyAppli.BED_OUT)
assert resp.message_code == 0
await hass.async_block_till_done()
assert hass.states.get(entity_id0).state == STATE_OFF
# person 1
assert hass.states.get(entity_id1).state == STATE_UNAVAILABLE
resp = await component_factory.call_webhook(person1.user_id, NotifyAppli.BED_IN)
assert resp.message_code == 0
await hass.async_block_till_done()
assert hass.states.get(entity_id1).state == STATE_ON
# Unload
await component_factory.unload(person0)
await component_factory.unload(person1)
|
from __future__ import division
import numpy as np
import chainer
import chainer.functions as F
import chainer.links as L
from chainercv.functions import ps_roi_max_align_2d
from chainercv.links.connection.conv_2d_bn_activ import Conv2DBNActiv
from chainercv.links.model.faster_rcnn.region_proposal_network import \
RegionProposalNetwork
from chainercv.links.model.light_head_rcnn.global_context_module import \
GlobalContextModule
from chainercv.links.model.light_head_rcnn.light_head_rcnn import \
LightHeadRCNN
from chainercv.links.model.resnet.resblock import ResBlock
from chainercv.links.model.resnet.resnet import ResNet101
from chainercv import utils
class LightHeadRCNNResNet101(LightHeadRCNN):
"""Light-Head R-CNN based on ResNet101.
When you specify the path of a pre-trained chainer model serialized as
a :obj:`.npz` file in the constructor, this chain model automatically
initializes all the parameters with it.
When a string in prespecified set is provided, a pretrained model is
loaded from weights distributed on the Internet.
The list of pretrained models supported are as follows:
* :obj:`coco`: Loads weights trained with the trainval split of \
COCO Detection Dataset.
    * :obj:`imagenet`: Loads weights trained with ImageNet Classification \
task for the feature extractor and the head modules. \
Weights that do not have a corresponding layer in ResNet101 \
will be randomly initialized.
    For descriptions on the interface of this model, please refer to
    :class:`~light_head_rcnn.links.model.light_head_rcnn_base.LightHeadRCNN`.
:class:`~light_head_rcnn.links.model.light_head_rcnn_base.LightHeadRCNN`
supports finer control on random initializations of weights by arguments
:obj:`resnet_initialW`, :obj:`rpn_initialW`, :obj:`loc_initialW` and
:obj:`score_initialW`.
It accepts a callable that takes an array and edits its values.
If :obj:`None` is passed as an initializer, the default initializer is
used.
Args:
n_fg_class (int): The number of classes excluding the background.
pretrained_model (string): The destination of the pre-trained
chainer model serialized as a :obj:`.npz` file.
If this is one of the strings described
above, it automatically loads weights stored under a directory
:obj:`$CHAINER_DATASET_ROOT/pfnet/chainercv/models/`,
where :obj:`$CHAINER_DATASET_ROOT` is set as
:obj:`$HOME/.chainer/dataset` unless you specify another value
by modifying the environment variable.
        min_size (int): A preprocessing parameter for :meth:`prepare`.
        max_size (int): A preprocessing parameter for :meth:`prepare`.
ratios (list of floats): This is ratios of width to height of
the anchors.
anchor_scales (list of numbers): This is areas of anchors.
Those areas will be the product of the square of an element in
:obj:`anchor_scales` and the original area of the reference
window.
resnet_initialW (callable): Initializer for the layers corresponding to
the ResNet101 layers.
rpn_initialW (callable): Initializer for Region Proposal Network
layers.
loc_initialW (callable): Initializer for the localization head.
score_initialW (callable): Initializer for the score head.
        proposal_creator_params (dict): Key valued parameters for
:class:`~chainercv.links.model.faster_rcnn.ProposalCreator`.
"""
_models = {
'coco': {
'param': {'n_fg_class': 80},
'url': 'https://chainercv-models.preferred.jp/'
'light_head_rcnn_resnet101_trained_2019_06_13.npz',
'cv2': True
},
}
feat_stride = 16
proposal_creator_params = {
'nms_thresh': 0.7,
'n_train_pre_nms': 12000,
'n_train_post_nms': 2000,
'n_test_pre_nms': 6000,
'n_test_post_nms': 1000,
'force_cpu_nms': False,
'min_size': 0,
}
def __init__(
self,
n_fg_class=None,
pretrained_model=None,
min_size=800, max_size=1333, roi_size=7,
ratios=[0.5, 1, 2], anchor_scales=[2, 4, 8, 16, 32],
loc_normalize_mean=(0., 0., 0., 0.),
loc_normalize_std=(0.1, 0.1, 0.2, 0.2),
resnet_initialW=None, rpn_initialW=None,
global_module_initialW=None,
loc_initialW=None, score_initialW=None,
proposal_creator_params=None,
):
param, path = utils.prepare_pretrained_model(
{'n_fg_class': n_fg_class}, pretrained_model, self._models)
if resnet_initialW is None and pretrained_model:
resnet_initialW = chainer.initializers.HeNormal()
if rpn_initialW is None:
rpn_initialW = chainer.initializers.Normal(0.01)
if global_module_initialW is None:
global_module_initialW = chainer.initializers.Normal(0.01)
if loc_initialW is None:
loc_initialW = chainer.initializers.Normal(0.001)
if score_initialW is None:
score_initialW = chainer.initializers.Normal(0.01)
if proposal_creator_params is not None:
self.proposal_creator_params = proposal_creator_params
extractor = ResNet101Extractor(
initialW=resnet_initialW)
rpn = RegionProposalNetwork(
1024, 512,
ratios=ratios,
anchor_scales=anchor_scales,
feat_stride=self.feat_stride,
initialW=rpn_initialW,
proposal_creator_params=self.proposal_creator_params,
)
head = LightHeadRCNNResNet101Head(
param['n_fg_class'] + 1,
roi_size=roi_size,
spatial_scale=1. / self.feat_stride,
global_module_initialW=global_module_initialW,
loc_initialW=loc_initialW,
score_initialW=score_initialW
)
mean = np.array([122.7717, 115.9465, 102.9801],
dtype=np.float32)[:, None, None]
super(LightHeadRCNNResNet101, self).__init__(
extractor, rpn, head, mean, min_size, max_size,
loc_normalize_mean, loc_normalize_std)
if path == 'imagenet':
self._copy_imagenet_pretrained_resnet()
elif path:
chainer.serializers.load_npz(path, self)
def _copy_imagenet_pretrained_resnet(self):
def _copy_conv2dbn(src, dst):
dst.conv.W.array = src.conv.W.array
if src.conv.b is not None and dst.conv.b is not None:
dst.conv.b.array = src.conv.b.array
dst.bn.gamma.array = src.bn.gamma.array
dst.bn.beta.array = src.bn.beta.array
dst.bn.avg_var = src.bn.avg_var
dst.bn.avg_mean = src.bn.avg_mean
def _copy_bottleneck(src, dst):
if hasattr(src, 'residual_conv'):
_copy_conv2dbn(src.residual_conv, dst.residual_conv)
_copy_conv2dbn(src.conv1, dst.conv1)
_copy_conv2dbn(src.conv2, dst.conv2)
_copy_conv2dbn(src.conv3, dst.conv3)
def _copy_resblock(src, dst):
for layer_name in src.layer_names:
_copy_bottleneck(
getattr(src, layer_name), getattr(dst, layer_name))
pretrained_model = ResNet101(arch='he', pretrained_model='imagenet')
_copy_conv2dbn(pretrained_model.conv1, self.extractor.conv1)
_copy_resblock(pretrained_model.res2, self.extractor.res2)
_copy_resblock(pretrained_model.res3, self.extractor.res3)
_copy_resblock(pretrained_model.res4, self.extractor.res4)
_copy_resblock(pretrained_model.res5, self.extractor.res5)
class ResNet101Extractor(chainer.Chain):
"""ResNet101 Extractor for Light-Head R-CNN ResNet101 implementation.
This class is used as an extractor for LightHeadRCNNResNet101.
This outputs feature maps.
Args:
initialW: Initializer for ResNet101 extractor.
"""
def __init__(self, initialW=None):
super(ResNet101Extractor, self).__init__()
if initialW is None:
initialW = chainer.initializers.HeNormal()
kwargs = {
'initialW': initialW,
'bn_kwargs': {'eps': 1e-5, 'decay': 0.997},
'stride_first': True
}
with self.init_scope():
# ResNet
self.conv1 = Conv2DBNActiv(
3, 64, 7, 2, 3, nobias=True, initialW=initialW)
self.pool1 = lambda x: F.max_pooling_2d(
x, ksize=3, stride=2, pad=1, cover_all=False)
self.res2 = ResBlock(3, 64, 64, 256, 1, **kwargs)
self.res3 = ResBlock(4, 256, 128, 512, 2, **kwargs)
self.res4 = ResBlock(23, 512, 256, 1024, 2, **kwargs)
self.res5 = ResBlock(3, 1024, 512, 2048, 1, 2, **kwargs)
def __call__(self, x):
"""Forward the chain.
Args:
x (~chainer.Variable): 4D image variable.
"""
with chainer.using_config('train', False):
h = self.pool1(self.conv1(x))
h = self.res2(h)
h.unchain_backward()
h = self.res3(h)
res4 = self.res4(h)
res5 = self.res5(res4)
return res4, res5
class LightHeadRCNNResNet101Head(chainer.Chain):
def __init__(
self, n_class, roi_size, spatial_scale,
global_module_initialW=None,
loc_initialW=None, score_initialW=None
):
super(LightHeadRCNNResNet101Head, self).__init__()
self.n_class = n_class
self.spatial_scale = spatial_scale
self.roi_size = roi_size
with self.init_scope():
self.global_context_module = GlobalContextModule(
2048, 256, self.roi_size * self.roi_size * 10, 15,
initialW=global_module_initialW)
self.fc1 = L.Linear(
self.roi_size * self.roi_size * 10, 2048,
initialW=score_initialW)
self.score = L.Linear(2048, n_class, initialW=score_initialW)
self.cls_loc = L.Linear(2048, 4 * n_class, initialW=loc_initialW)
def __call__(self, x, rois, roi_indices):
# global context module
h = self.global_context_module(x)
# psroi max align
pool = ps_roi_max_align_2d(
h, rois, roi_indices,
(10, self.roi_size, self.roi_size),
self.spatial_scale, self.roi_size,
sampling_ratio=2)
pool = F.where(
self.xp.isinf(pool.array),
self.xp.zeros(pool.shape, dtype=pool.dtype), pool)
# fc
fc1 = F.relu(self.fc1(pool))
roi_cls_locs = self.cls_loc(fc1)
roi_scores = self.score(fc1)
return roi_cls_locs, roi_scores
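# Usage sketch following the pretrained-model docstring above (the image file
# is illustrative; ``predict`` is inherited from the LightHeadRCNN base):
#     model = LightHeadRCNNResNet101(n_fg_class=80, pretrained_model='coco')
#     img = utils.read_image('example.jpg')  # CHW float32 array
#     bboxes, labels, scores = model.predict([img])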
|
from django.views.debug import SafeExceptionReporterFilter
from weblate.utils.requirements import get_versions_list
class WeblateExceptionReporterFilter(SafeExceptionReporterFilter):
def get_post_parameters(self, request):
if hasattr(request, "META"):
meta = request.META
if hasattr(request, "user"):
meta["WEBLATE_USER"] = repr(request.user.username)
else:
meta["WEBLATE_USER"] = ""
if hasattr(request, "session") and "django_language" in request.session:
meta["WEBLATE_LANGUAGE"] = request.session["django_language"]
else:
meta["WEBLATE_LANGUAGE"] = ""
for name, _url, version in get_versions_list():
meta[f"WEBLATE_VERSION:{name}"] = version
return super().get_post_parameters(request)
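# For illustration only: a crash report rendered through this filter gains META
# entries shaped like the following (actual values depend on the deployment):
#     META["WEBLATE_USER"]            = "'admin'"   # repr() of the username
#     META["WEBLATE_LANGUAGE"]        = "en"
#     META["WEBLATE_VERSION:Weblate"] = "4.x"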
|
from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.files.storage import FileSystemStorage
from django.db import models
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
from weblate.fonts.utils import get_font_name
from weblate.fonts.validators import validate_font
from weblate.lang.models import Language
from weblate.trans.mixins import UserDisplayMixin
from weblate.trans.models import Project
from weblate.utils.data import data_dir
FONT_STORAGE = FileSystemStorage(location=data_dir("fonts"))
class Font(models.Model, UserDisplayMixin):
family = models.CharField(verbose_name=_("Font family"), max_length=100, blank=True)
style = models.CharField(verbose_name=_("Font style"), max_length=100, blank=True)
font = models.FileField(
verbose_name=_("Font file"),
validators=[validate_font],
storage=FONT_STORAGE,
help_text=_("OpenType and TrueType fonts are supported."),
)
project = models.ForeignKey(Project, on_delete=models.deletion.CASCADE)
timestamp = models.DateTimeField(auto_now_add=True)
user = models.ForeignKey(
settings.AUTH_USER_MODEL,
null=True,
blank=True,
on_delete=models.deletion.SET_NULL,
)
class Meta:
unique_together = [("family", "style", "project")]
def __str__(self):
return f"{self.family} {self.style}"
def save(
self, force_insert=False, force_update=False, using=None, update_fields=None
):
self.clean()
super().save(force_insert, force_update, using, update_fields)
def get_absolute_url(self):
return reverse("font", kwargs={"pk": self.pk, "project": self.project.slug})
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.field_errors = {}
def clean_fields(self, exclude=None):
self.field_errors = {}
try:
super().clean_fields(exclude)
except ValidationError as error:
self.field_errors = error.error_dict
raise
def clean(self):
# Try to parse file only if it passed validation
if "font" not in self.field_errors and not self.family:
self.family, self.style = get_font_name(self.font)
def get_usage(self):
related = FontGroup.objects.filter(
models.Q(font=self) | models.Q(fontoverride__font=self)
)
return related.order().distinct()
class FontGroupQuerySet(models.QuerySet):
def order(self):
return self.order_by("name")
class FontGroup(models.Model):
name = models.SlugField(
verbose_name=_("Font group name"),
max_length=100,
help_text=_(
"Identifier you will use in checks to select this font group. "
"Avoid whitespaces and special characters."
),
)
font = models.ForeignKey(
Font,
verbose_name=_("Default font"),
on_delete=models.deletion.CASCADE,
help_text=_("Default font is used unless per language override matches."),
)
project = models.ForeignKey(Project, on_delete=models.deletion.CASCADE)
objects = FontGroupQuerySet.as_manager()
class Meta:
unique_together = [("name", "project")]
def __str__(self):
return self.name
def get_absolute_url(self):
return reverse(
"font_group", kwargs={"pk": self.pk, "project": self.project.slug}
)
class FontOverride(models.Model):
group = models.ForeignKey(FontGroup, on_delete=models.deletion.CASCADE)
font = models.ForeignKey(
Font, on_delete=models.deletion.CASCADE, verbose_name=_("Font")
)
language = models.ForeignKey(
Language, on_delete=models.deletion.CASCADE, verbose_name=_("Language")
)
class Meta:
unique_together = [("group", "language")]
def __str__(self):
return f"{self.group}:{self.font}:{self.language}"
|
import os
from nikola.plugin_categories import Task
from nikola import utils
class Redirect(Task):
"""Generate redirections."""
name = "redirect"
def gen_tasks(self):
"""Generate redirections tasks."""
kw = {
'redirections': self.site.config['REDIRECTIONS'],
'output_folder': self.site.config['OUTPUT_FOLDER'],
'filters': self.site.config['FILTERS'],
'index_file': self.site.config['INDEX_FILE'],
}
yield self.group_task()
if kw['redirections']:
for src, dst in kw["redirections"]:
src_path = os.path.join(kw["output_folder"], src.lstrip('/'))
if src_path.endswith("/"):
src_path += kw['index_file']
yield utils.apply_filters({
'basename': self.name,
'name': src_path,
'targets': [src_path],
'actions': [(utils.create_redirect, (src_path, dst))],
'clean': True,
'uptodate': [utils.config_changed(kw, 'nikola.plugins.task.redirect')],
}, kw["filters"])
|
from __future__ import print_function
try:
import asyncio
except ImportError:
import trollius as asyncio
from datetime import datetime
import sys
import weakref
import urwid
from urwid.raw_display import Screen
from urwid.display_common import BaseScreen
import logging
logging.basicConfig()
loop = asyncio.get_event_loop()
# -----------------------------------------------------------------------------
# General-purpose setup code
def build_widgets():
input1 = urwid.Edit('What is your name? ')
input2 = urwid.Edit('What is your quest? ')
input3 = urwid.Edit('What is the capital of Assyria? ')
inputs = [input1, input2, input3]
def update_clock(widget_ref):
widget = widget_ref()
if not widget:
# widget is dead; the main loop must've been destroyed
return
widget.set_text(datetime.now().isoformat())
# Schedule us to update the clock again in one second
loop.call_later(1, update_clock, widget_ref)
clock = urwid.Text('')
update_clock(weakref.ref(clock))
return urwid.Filler(urwid.Pile([clock] + inputs), 'top')
def unhandled(key):
if key == 'ctrl c':
raise urwid.ExitMainLoop
# -----------------------------------------------------------------------------
# Demo 1
def demo1():
"""Plain old urwid app. Just happens to be run atop asyncio as the event
loop.
Note that the clock is updated using the asyncio loop directly, not via any
of urwid's facilities.
"""
main_widget = build_widgets()
urwid_loop = urwid.MainLoop(
main_widget,
event_loop=urwid.AsyncioEventLoop(loop=loop),
unhandled_input=unhandled,
)
urwid_loop.run()
# -----------------------------------------------------------------------------
# Demo 2
class AsyncScreen(Screen):
"""An urwid screen that speaks to an asyncio stream, rather than polling
file descriptors.
This is fairly limited; it can't, for example, determine the size of the
remote screen. Fixing that depends on the nature of the stream.
"""
def __init__(self, reader, writer, encoding="utf-8"):
self.reader = reader
self.writer = writer
self.encoding = encoding
Screen.__init__(self, None, None)
_pending_task = None
def write(self, data):
self.writer.write(data.encode(self.encoding))
def flush(self):
pass
def hook_event_loop(self, event_loop, callback):
# Wait on the reader's read coro, and when there's data to read, call
# the callback and then wait again
def pump_reader(fut=None):
if fut is None:
# First call, do nothing
pass
elif fut.cancelled():
# This is in response to an earlier .read() call, so don't
# schedule another one!
return
elif fut.exception():
pass
else:
try:
self.parse_input(
event_loop, callback, bytearray(fut.result()))
except urwid.ExitMainLoop:
# This will immediately close the transport and thus the
# connection, which in turn calls connection_lost, which
# stops the screen and the loop
self.writer.abort()
# create_task() schedules a coroutine without using `yield from` or
# `await`, which are syntax errors in Pythons before 3.5
self._pending_task = event_loop._loop.create_task(
self.reader.read(1024))
self._pending_task.add_done_callback(pump_reader)
pump_reader()
def unhook_event_loop(self, event_loop):
if self._pending_task:
self._pending_task.cancel()
del self._pending_task
class UrwidProtocol(asyncio.Protocol):
def connection_made(self, transport):
print("Got a client!")
self.transport = transport
# StreamReader is super convenient here; it has a regular method on our
# end (feed_data), and a coroutine on the other end that will
# faux-block until there's data to be read. We could also just call a
# method directly on the screen, but this keeps the screen somewhat
# separate from the protocol.
self.reader = asyncio.StreamReader(loop=loop)
screen = AsyncScreen(self.reader, transport)
main_widget = build_widgets()
self.urwid_loop = urwid.MainLoop(
main_widget,
event_loop=urwid.AsyncioEventLoop(loop=loop),
screen=screen,
unhandled_input=unhandled,
)
self.urwid_loop.start()
def data_received(self, data):
self.reader.feed_data(data)
def connection_lost(self, exc):
print("Lost a client...")
self.reader.feed_eof()
self.urwid_loop.stop()
def demo2():
"""Urwid app served over the network to multiple clients at once, using an
asyncio Protocol.
"""
coro = loop.create_server(UrwidProtocol, port=12345)
loop.run_until_complete(coro)
print("OK, good to go! Try this in another terminal (or two):")
print()
print(" socat TCP:127.0.0.1:12345 STDIN,rawer")
print()
loop.run_forever()
if __name__ == '__main__':
if len(sys.argv) == 2:
which = sys.argv[1]
else:
which = None
if which == '1':
demo1()
elif which == '2':
demo2()
else:
print("Please run me with an argument of either 1 or 2.")
sys.exit(1)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import unittest
from absl import flags
import mock
from perfkitbenchmarker import errors
from perfkitbenchmarker import os_types
from perfkitbenchmarker import providers
from perfkitbenchmarker import static_virtual_machine
from perfkitbenchmarker import virtual_machine
from perfkitbenchmarker.configs import benchmark_config_spec
from perfkitbenchmarker.configs import option_decoders
from perfkitbenchmarker.providers.aws import aws_disk
from perfkitbenchmarker.providers.gcp import gce_virtual_machine
from tests import pkb_common_test_case
from six.moves import range
FLAGS = flags.FLAGS
_COMPONENT = 'test_component'
_OPTION = 'test_option'
_GCP_ONLY_VM_CONFIG = {'GCP': {'machine_type': 'n1-standard-1'}}
_GCP_AWS_VM_CONFIG = {'GCP': {'machine_type': 'n1-standard-1'},
'AWS': {'machine_type': 'm4.large'}}
_GCP_AWS_DISK_CONFIG = {'GCP': {}, 'AWS': {}}
def _GetFlagDict(flag_values):
return {name: flag_values[name] for name in flag_values}
class PerCloudConfigSpecTestCase(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(PerCloudConfigSpecTestCase, self).setUp()
self._spec_class = option_decoders._PerCloudConfigSpec
def testDefaults(self):
spec = self._spec_class(_COMPONENT)
for cloud in providers.VALID_CLOUDS:
self.assertIsNone(getattr(spec, cloud))
def testDict(self):
spec = self._spec_class(_COMPONENT, GCP={})
self.assertEqual(spec.GCP, {})
for cloud in frozenset(providers.VALID_CLOUDS).difference([providers.GCP]):
self.assertIsNone(getattr(spec, cloud))
def testNonDict(self):
with self.assertRaises(errors.Config.InvalidValue) as cm:
self._spec_class(_COMPONENT, GCP=[])
self.assertEqual(str(cm.exception), (
'Invalid test_component.GCP value: "[]" (of type "list"). Value must '
'be one of the following types: dict.'))
def testUnrecognizedCloud(self):
with self.assertRaises(errors.Config.UnrecognizedOption) as cm:
self._spec_class(_COMPONENT, fake_provider={})
self.assertEqual(str(cm.exception), (
'Unrecognized options were found in test_component: fake_provider.'))
class PerCloudConfigDecoderTestCase(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(PerCloudConfigDecoderTestCase, self).setUp()
self._decoder = option_decoders.PerCloudConfigDecoder(option=_OPTION)
def testRejectNone(self):
with self.assertRaises(errors.Config.InvalidValue):
self._decoder.Decode(None, _COMPONENT, {})
def testAcceptNone(self):
decoder = option_decoders.PerCloudConfigDecoder(none_ok=True,
option=_OPTION)
self.assertIsNone(decoder.Decode(None, _COMPONENT, {}))
def testEmptyDict(self):
result = self._decoder.Decode({}, _COMPONENT, {})
self.assertIsInstance(result, option_decoders._PerCloudConfigSpec)
self.assertEqual(result.__dict__, {
cloud: None for cloud in providers.VALID_CLOUDS})
def testNonEmptyDict(self):
result = self._decoder.Decode(_GCP_ONLY_VM_CONFIG, _COMPONENT, {})
self.assertIsInstance(result, option_decoders._PerCloudConfigSpec)
expected_attributes = {cloud: None for cloud in providers.VALID_CLOUDS}
expected_attributes['GCP'] = {'machine_type': 'n1-standard-1'}
self.assertEqual(result.__dict__, expected_attributes)
class StaticVmDecoderTestCase(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(StaticVmDecoderTestCase, self).setUp()
self._decoder = benchmark_config_spec._StaticVmDecoder()
def testNone(self):
with self.assertRaises(errors.Config.InvalidValue):
self._decoder.Decode(None, _COMPONENT, {})
def testValidInput(self):
result = self._decoder.Decode({'ssh_port': 111}, _COMPONENT, {})
self.assertIsInstance(result, static_virtual_machine.StaticVmSpec)
self.assertEqual(result.ssh_port, 111)
def testVmSpecFlag(self):
FLAGS.install_packages = False
FLAGS['install_packages'].present = True
result = self._decoder.Decode({}, _COMPONENT, FLAGS)
self.assertFalse(result.install_packages)
def testDiskSpecFlag(self):
FLAGS.scratch_dir = '/path/from/flag'
FLAGS['scratch_dir'].present = True
result = self._decoder.Decode({
'disk_specs': [{
'mount_point': '/path/from/spec'
}]
}, _COMPONENT, FLAGS)
self.assertEqual(result.disk_specs[0].mount_point, '/path/from/flag')
class StaticVmListDecoderTestCase(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(StaticVmListDecoderTestCase, self).setUp()
self._decoder = benchmark_config_spec._StaticVmListDecoder()
def testNone(self):
with self.assertRaises(errors.Config.InvalidValue):
self._decoder.Decode(None, _COMPONENT, {})
def testValidList(self):
input_list = [{'ssh_port': i} for i in range(3)]
result = self._decoder.Decode(input_list, _COMPONENT, {})
self.assertIsInstance(result, list)
self.assertEqual([vm_spec.ssh_port for vm_spec in result], list(range(3)))
def testInvalidList(self):
input_list = [{'ssh_port': 0}, {'ssh_port': 1}, {'ssh_pory': 2}]
with self.assertRaises(errors.Config.UnrecognizedOption) as cm:
self._decoder.Decode(input_list, _COMPONENT, {})
self.assertEqual(str(cm.exception), (
'Unrecognized options were found in test_component[2]: ssh_pory.'))
class VmGroupSpecTestCase(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(VmGroupSpecTestCase, self).setUp()
self._spec_class = benchmark_config_spec._VmGroupSpec
self._kwargs = {'cloud': providers.GCP, 'os_type': os_types.UBUNTU1804,
'vm_spec': _GCP_AWS_VM_CONFIG}
def testMissingValues(self):
with self.assertRaises(errors.Config.MissingOption) as cm:
self._spec_class(_COMPONENT)
self.assertEqual(str(cm.exception), (
'Required options were missing from test_component: cloud, os_type, '
'vm_spec.'))
def testDefaults(self):
result = self._spec_class(_COMPONENT, **self._kwargs)
self.assertIsInstance(result, benchmark_config_spec._VmGroupSpec)
self.assertEqual(result.cloud, 'GCP')
self.assertEqual(result.disk_count, 1)
self.assertIsNone(result.disk_spec)
self.assertEqual(result.os_type, 'ubuntu1804')
self.assertEqual(result.static_vms, [])
self.assertEqual(result.vm_count, 1)
self.assertIsInstance(result.vm_spec, gce_virtual_machine.GceVmSpec)
def testInvalidCloud(self):
self._kwargs['cloud'] = 'fake_provider'
with self.assertRaises(errors.Config.InvalidValue) as cm:
self._spec_class(_COMPONENT, **self._kwargs)
self.assertEqual(str(cm.exception), (
'Invalid test_component.cloud value: "fake_provider". Value must be '
'one of the following: {0}.'.format(', '.join(providers.VALID_CLOUDS))))
def testInvalidDiskCount(self):
self._kwargs['disk_count'] = -1
with self.assertRaises(errors.Config.InvalidValue) as cm:
self._spec_class(_COMPONENT, **self._kwargs)
self.assertEqual(str(cm.exception), (
'Invalid test_component.disk_count value: "-1". '
'Value must be at least 0.'))
def testInvalidDiskSpec(self):
self._kwargs['disk_spec'] = {'GCP': None}
with self.assertRaises(errors.Config.InvalidValue) as cm:
self._spec_class(_COMPONENT, **self._kwargs)
self.assertEqual(str(cm.exception), (
'Invalid test_component.disk_spec.GCP value: "None" (of type '
'"NoneType"). Value must be one of the following types: dict.'))
def testInvalidOsType(self):
self._kwargs['os_type'] = 'fake_os_type'
with self.assertRaises(errors.Config.InvalidValue) as cm:
self._spec_class(_COMPONENT, **self._kwargs)
self.assertEqual(str(cm.exception), (
'Invalid test_component.os_type value: "fake_os_type". Value must be '
'one of the following: {0}.'.format(', '.join(os_types.ALL))))
def testInvalidStaticVms(self):
self._kwargs['static_vms'] = [{'fake_option': None}]
with self.assertRaises(errors.Config.UnrecognizedOption) as cm:
self._spec_class(_COMPONENT, **self._kwargs)
self.assertEqual(str(cm.exception), (
'Unrecognized options were found in test_component.static_vms[0]: '
'fake_option.'))
def testInvalidVmCount(self):
self._kwargs['vm_count'] = None
with self.assertRaises(errors.Config.InvalidValue) as cm:
self._spec_class(_COMPONENT, **self._kwargs)
self.assertEqual(str(cm.exception), (
'Invalid test_component.vm_count value: "None" (of type "NoneType"). '
'Value must be one of the following types: int.'))
self._kwargs['vm_count'] = -1
with self.assertRaises(errors.Config.InvalidValue) as cm:
self._spec_class(_COMPONENT, **self._kwargs)
self.assertEqual(str(cm.exception), (
'Invalid test_component.vm_count value: "-1". '
'Value must be at least 0.'))
def testInvalidVmSpec(self):
self._kwargs['vm_spec'] = {'GCP': None}
with self.assertRaises(errors.Config.InvalidValue) as cm:
self._spec_class(_COMPONENT, **self._kwargs)
self.assertEqual(str(cm.exception), (
'Invalid test_component.vm_spec.GCP value: "None" (of type '
'"NoneType"). Value must be one of the following types: dict.'))
def testValidInput(self):
result = self._spec_class(
_COMPONENT, cloud=providers.AWS, disk_count=0,
disk_spec=_GCP_AWS_DISK_CONFIG, os_type=os_types.AMAZONLINUX2,
static_vms=[{}], vm_count=0, vm_spec=_GCP_AWS_VM_CONFIG)
self.assertIsInstance(result, benchmark_config_spec._VmGroupSpec)
self.assertEqual(result.cloud, 'AWS')
self.assertEqual(result.disk_count, 0)
self.assertIsInstance(result.disk_spec, aws_disk.AwsDiskSpec)
self.assertEqual(result.os_type, 'amazonlinux2')
self.assertIsInstance(result.static_vms, list)
self.assertEqual(len(result.static_vms), 1)
self.assertIsInstance(result.static_vms[0],
static_virtual_machine.StaticVmSpec)
self.assertEqual(result.vm_count, 0)
self.assertIsInstance(result.vm_spec, virtual_machine.BaseVmSpec)
def testMissingCloudDiskConfig(self):
with self.assertRaises(errors.Config.MissingOption) as cm:
self._spec_class(
_COMPONENT,
cloud=providers.GCP,
os_type=os_types.UBUNTU1804,
disk_spec={},
vm_spec=_GCP_AWS_VM_CONFIG)
self.assertEqual(
str(cm.exception),
('test_component.cloud is "GCP", but test_component.disk_spec does not '
'contain a configuration for "GCP".'))
def testMissingCloudVmConfig(self):
with self.assertRaises(errors.Config.MissingOption) as cm:
self._spec_class(
_COMPONENT,
cloud=providers.GCP,
os_type=os_types.UBUNTU1804,
vm_spec={})
self.assertEqual(
str(cm.exception),
('test_component.cloud is "GCP", but test_component.vm_spec does not '
'contain a configuration for "GCP".'))
def createNonPresentFlags(self):
FLAGS.cloud = providers.AWS
FLAGS.num_vms = 3
FLAGS.os_type = os_types.WINDOWS2019_CORE
def createPresentFlags(self):
self.createNonPresentFlags()
FLAGS['cloud'].present = True
FLAGS['num_vms'].present = True
FLAGS['os_type'].present = True
def testPresentFlagsAndPresentConfigValues(self):
self.createPresentFlags()
result = self._spec_class(
_COMPONENT, flag_values=FLAGS, vm_count=2, **self._kwargs)
self.assertEqual(result.cloud, 'AWS')
self.assertEqual(result.os_type, 'windows2019_core')
self.assertEqual(result.vm_count, 2)
def testPresentFlagsAndNonPresentConfigValues(self):
self.createPresentFlags()
result = self._spec_class(
_COMPONENT, flag_values=FLAGS, vm_spec=_GCP_AWS_VM_CONFIG)
self.assertEqual(result.cloud, 'AWS')
self.assertEqual(result.os_type, 'windows2019_core')
self.assertEqual(result.vm_count, 1)
def testNonPresentFlagsAndPresentConfigValues(self):
self.createNonPresentFlags()
result = self._spec_class(
        _COMPONENT, flag_values=FLAGS, vm_count=2,
**self._kwargs)
self.assertEqual(result.cloud, 'GCP')
self.assertEqual(result.os_type, 'ubuntu1804')
self.assertEqual(result.vm_count, 2)
def testVmCountNone(self):
self.createNonPresentFlags()
result = self._spec_class(
_COMPONENT, vm_count=None, flag_values=FLAGS, **self._kwargs)
self.assertEqual(result.vm_count, 3)
def testCallsLoadProviderAndChecksRequirements(self):
self.createNonPresentFlags()
FLAGS.ignore_package_requirements = False
with mock.patch(providers.__name__ + '.LoadProvider'):
self._spec_class(_COMPONENT, flag_values=FLAGS, **self._kwargs)
providers.LoadProvider.assert_called_once_with('GCP', False)
def testCallsLoadProviderAndIgnoresRequirements(self):
self.createNonPresentFlags()
FLAGS.ignore_package_requirements = True
with mock.patch(providers.__name__ + '.LoadProvider'):
self._spec_class(_COMPONENT, flag_values=FLAGS, **self._kwargs)
providers.LoadProvider.assert_called_once_with('GCP', True)
class VmGroupsDecoderTestCase(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(VmGroupsDecoderTestCase, self).setUp()
self._decoder = benchmark_config_spec._VmGroupsDecoder()
def testNone(self):
with self.assertRaises(errors.Config.InvalidValue):
self._decoder.Decode(None, _COMPONENT, {})
def testValidInput(self):
result = self._decoder.Decode({
'default': {'cloud': providers.GCP, 'os_type': os_types.UBUNTU1804,
'vm_spec': _GCP_AWS_VM_CONFIG}}, _COMPONENT, {})
self.assertIsInstance(result, dict)
self.assertEqual(len(result), 1)
self.assertIsInstance(result['default'], benchmark_config_spec._VmGroupSpec)
self.assertEqual(result['default'].cloud, 'GCP')
self.assertEqual(result['default'].os_type, 'ubuntu1804')
self.assertIsInstance(result['default'].vm_spec,
gce_virtual_machine.GceVmSpec)
def testInvalidInput(self):
with self.assertRaises(errors.Config.UnrecognizedOption) as cm:
self._decoder.Decode(
{'default': {'cloud': providers.GCP, 'os_type': os_types.UBUNTU1804,
'static_vms': [{}, {'fake_option': 1.2}],
'vm_spec': _GCP_AWS_VM_CONFIG}},
_COMPONENT, {})
self.assertEqual(str(cm.exception), (
'Unrecognized options were found in '
'test_component.default.static_vms[1]: fake_option.'))
class CloudRedisDecoderTestCase(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(CloudRedisDecoderTestCase, self).setUp()
self._decoder = benchmark_config_spec._CloudRedisDecoder()
FLAGS.cloud = providers.GCP
FLAGS.run_uri = 'test'
def testNone(self):
with self.assertRaises(errors.Config.InvalidValue):
self._decoder.Decode(None, _COMPONENT, {})
def testValidInput(self):
result = self._decoder.Decode({
'redis_version': 'redis_3_2'
}, _COMPONENT, FLAGS)
self.assertIsInstance(result, benchmark_config_spec._CloudRedisSpec)
self.assertEqual(result.redis_version, 'redis_3_2')
def testInvalidInput(self):
with self.assertRaises(errors.Config.UnrecognizedOption) as cm:
self._decoder.Decode({'foo': 'bar'}, _COMPONENT, FLAGS)
self.assertEqual(str(cm.exception), (
'Unrecognized options were found in '
'test_component: foo.'))
class CloudRedisSpecTestCase(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(CloudRedisSpecTestCase, self).setUp()
self._spec_class = benchmark_config_spec._CloudRedisSpec
def testMissingValues(self):
with self.assertRaises(errors.Config.MissingOption) as cm:
self._spec_class(_COMPONENT)
self.assertEqual(str(cm.exception), (
'Required options were missing from test_component: cloud.'))
def testDefaults(self):
result = self._spec_class(_COMPONENT, flag_values=FLAGS)
self.assertIsInstance(result, benchmark_config_spec._CloudRedisSpec)
self.assertEqual(result.redis_version, 'redis_3_2')
class BenchmarkConfigSpecTestCase(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(BenchmarkConfigSpecTestCase, self).setUp()
self._spec_class = benchmark_config_spec.BenchmarkConfigSpec
self._description = 'Test description.'
self._vm_groups = {'default': {'cloud': providers.GCP,
'os_type': os_types.UBUNTU1804,
'vm_spec': _GCP_AWS_VM_CONFIG}}
self._kwargs = {'description': self._description,
'vm_groups': self._vm_groups}
def testValidInput(self):
result = self._spec_class(_COMPONENT, flag_values=FLAGS, **self._kwargs)
self.assertIsInstance(result, benchmark_config_spec.BenchmarkConfigSpec)
self.assertEqual(result.description, 'Test description.')
self.assertIsNot(result.flags, _GetFlagDict(flags.FLAGS))
self.assertIsInstance(result.vm_groups, dict)
self.assertEqual(len(result.vm_groups), 1)
self.assertIsInstance(result.vm_groups['default'],
benchmark_config_spec._VmGroupSpec)
self.assertEqual(result.vm_groups['default'].cloud, 'GCP')
self.assertEqual(result.vm_groups['default'].os_type, 'ubuntu1804')
self.assertIsInstance(result.vm_groups['default'].vm_spec,
gce_virtual_machine.GceVmSpec)
def testInvalidVmGroups(self):
self._kwargs['vm_groups']['default']['static_vms'] = [{'disk_specs': [{
'disk_size': 0.5}]}]
with self.assertRaises(errors.Config.InvalidValue) as cm:
self._spec_class(_COMPONENT, flag_values=FLAGS, **self._kwargs)
self.assertEqual(str(cm.exception), (
'Invalid test_component.vm_groups.default.static_vms[0].disk_specs[0]'
'.disk_size value: "0.5" (of type "float"). Value must be one of the '
'following types: NoneType, int.'))
def testMismatchedOsTypes(self):
self._kwargs['vm_groups'] = {
os_type + '_group': {'os_type': os_type, 'vm_spec': _GCP_AWS_VM_CONFIG}
for os_type in (os_types.UBUNTU1804, os_types.RHEL8,
os_types.WINDOWS2019_CORE)}
expected_os_types = os_types.JUJU, os_types.WINDOWS2019_CORE
with self.assertRaises(errors.Config.InvalidValue) as cm:
self._spec_class(
_COMPONENT,
expected_os_types=expected_os_types,
flag_values=FLAGS,
**self._kwargs)
self.assertEqual(str(cm.exception), (
"VM groups in test_component may only have the following OS types: "
"'juju', 'windows2019_core'. The following VM group options are "
"invalid:{sep}"
"test_component.vm_groups['rhel8_group'].os_type: 'rhel8'{sep}"
"test_component.vm_groups['ubuntu1804_group'].os_type: 'ubuntu1804'"
.format(sep=os.linesep)))
def testFlagOverridesPropagate(self):
self._kwargs['flags'] = {'cloud': providers.AWS,
'ignore_package_requirements': True}
result = self._spec_class(_COMPONENT, flag_values=FLAGS, **self._kwargs)
self.assertIsInstance(result, benchmark_config_spec.BenchmarkConfigSpec)
self.assertEqual(result.description, 'Test description.')
self.assertIsInstance(result.flags, dict)
self.assertIsNot(result.flags, _GetFlagDict(flags.FLAGS))
self.assertEqual(result.flags['cloud'], 'AWS')
self.assertEqual(FLAGS['cloud'].value, 'GCP')
self.assertIsInstance(result.vm_groups, dict)
self.assertEqual(len(result.vm_groups), 1)
self.assertIsInstance(result.vm_groups['default'],
benchmark_config_spec._VmGroupSpec)
self.assertEqual(result.vm_groups['default'].cloud, 'AWS')
self.assertEqual(result.vm_groups['default'].os_type, 'ubuntu1804')
self.assertIsInstance(result.vm_groups['default'].vm_spec,
virtual_machine.BaseVmSpec)
if __name__ == '__main__':
unittest.main()
|
import numpy as np
import xarray as xr
from . import requires_dask
class Unstacking:
def setup(self):
data = np.random.RandomState(0).randn(1, 1000, 500)
self.ds = xr.DataArray(data).stack(flat_dim=["dim_1", "dim_2"])
def time_unstack_fast(self):
self.ds.unstack("flat_dim")
def time_unstack_slow(self):
self.ds[:, ::-1].unstack("flat_dim")
class UnstackingDask(Unstacking):
def setup(self, *args, **kwargs):
requires_dask()
super().setup(**kwargs)
self.ds = self.ds.chunk({"flat_dim": 50})
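# Minimal sketch (not part of the benchmark suite) of the round trip these
# benchmarks time: stacking two dimensions into a MultiIndex dimension and
# unstacking it back. Dimension names and values are illustrative.
def _stack_unstack_roundtrip():
    da = xr.DataArray(
        np.arange(6).reshape(2, 3),
        dims=("x", "y"),
        coords={"x": [0, 1], "y": [10, 20, 30]},
    )
    stacked = da.stack(flat_dim=["x", "y"])
    # Unstacking restores the original dimensions and coordinates.
    assert da.equals(stacked.unstack("flat_dim"))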
|
from pylutron import OccupancyGroup
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_OCCUPANCY,
BinarySensorEntity,
)
from . import LUTRON_CONTROLLER, LUTRON_DEVICES, LutronDevice
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Lutron occupancy sensors."""
if discovery_info is None:
return
devs = []
for (area_name, device) in hass.data[LUTRON_DEVICES]["binary_sensor"]:
dev = LutronOccupancySensor(area_name, device, hass.data[LUTRON_CONTROLLER])
devs.append(dev)
add_entities(devs)
class LutronOccupancySensor(LutronDevice, BinarySensorEntity):
"""Representation of a Lutron Occupancy Group.
The Lutron integration API reports "occupancy groups" rather than
individual sensors. If two sensors are in the same room, they're
reported as a single occupancy group.
"""
@property
def is_on(self):
"""Return true if the binary sensor is on."""
        # Error cases are treated as unoccupied.
return self._lutron_device.state == OccupancyGroup.State.OCCUPIED
@property
def device_class(self):
"""Return that this is an occupancy sensor."""
return DEVICE_CLASS_OCCUPANCY
@property
def name(self):
"""Return the name of the device."""
# The default LutronDevice naming would create 'Kitchen Occ Kitchen',
# but since there can only be one OccupancyGroup per area we go
# with something shorter.
return f"{self._area_name} Occupancy"
@property
def device_state_attributes(self):
"""Return the state attributes."""
return {"lutron_integration_id": self._lutron_device.id}
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.windows_packages import nuttcp
from six.moves import range
FLAGS = flags.FLAGS
BENCHMARK_NAME = 'nuttcp'
BENCHMARK_CONFIG = """
nuttcp:
description: Run nuttcp between two VMs.
vm_groups:
vm_1:
vm_spec: *default_single_core
vm_2:
vm_spec: *default_single_core
"""
def GetConfig(user_config):
return configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
def Prepare(benchmark_spec):
for vm in benchmark_spec.vms:
vm.Install('nuttcp')
vm.AllowPort(nuttcp.CONTROL_PORT)
vm.AllowPort(nuttcp.UDP_PORT)
def RunNuttcp(vms, exec_path):
"""Run nuttcps tests.
Args:
vms: list of vms to run the tests.
exec_path: path to the nuttcp executable.
Returns:
list of samples
"""
results = []
def _RunNuttcpTest(sending_vm, receiving_vm, iteration):
if vm_util.ShouldRunOnExternalIpAddress():
results.extend(
nuttcp.RunNuttcp(sending_vm, receiving_vm, exec_path,
receiving_vm.ip_address, 'external', iteration))
if vm_util.ShouldRunOnInternalIpAddress(sending_vm, receiving_vm):
results.extend(
nuttcp.RunNuttcp(sending_vm, receiving_vm, exec_path,
receiving_vm.internal_ip, 'internal', iteration))
# run in both directions just for completeness
for iteration in range(FLAGS.nuttcp_udp_iterations):
_RunNuttcpTest(vms[0], vms[1], iteration)
if FLAGS.nuttcp_udp_run_both_directions:
_RunNuttcpTest(vms[1], vms[0], iteration)
return results
def Run(benchmark_spec):
vms = benchmark_spec.vms
exec_path = nuttcp.GetExecPath()
return RunNuttcp(vms, exec_path)
def Cleanup(unused_benchmark_spec):
pass
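# Illustrative invocation (flag values hypothetical); both flags are read in
# RunNuttcp above:
#   ./pkb.py --benchmarks=nuttcp --nuttcp_udp_iterations=3 \
#       --nuttcp_udp_run_both_directions=true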
|
import asyncio
import json
from homeassistant import config_entries, setup
from homeassistant.components.hunterdouglas_powerview.const import DOMAIN
from tests.async_mock import AsyncMock, MagicMock, patch
from tests.common import MockConfigEntry, load_fixture
def _get_mock_powerview_userdata(userdata=None, get_resources=None):
mock_powerview_userdata = MagicMock()
if not userdata:
userdata = json.loads(load_fixture("hunterdouglas_powerview/userdata.json"))
if get_resources:
type(mock_powerview_userdata).get_resources = AsyncMock(
side_effect=get_resources
)
else:
type(mock_powerview_userdata).get_resources = AsyncMock(return_value=userdata)
return mock_powerview_userdata
async def test_user_form(hass):
"""Test we get the user form."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
mock_powerview_userdata = _get_mock_powerview_userdata()
with patch(
"homeassistant.components.hunterdouglas_powerview.UserData",
return_value=mock_powerview_userdata,
), patch(
"homeassistant.components.hunterdouglas_powerview.async_setup",
return_value=True,
) as mock_setup, patch(
"homeassistant.components.hunterdouglas_powerview.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": "1.2.3.4"},
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == "AlexanderHD"
assert result2["data"] == {
"host": "1.2.3.4",
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
result3 = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result3["type"] == "form"
assert result3["errors"] == {}
result4 = await hass.config_entries.flow.async_configure(
result3["flow_id"],
{"host": "1.2.3.4"},
)
assert result4["type"] == "abort"
async def test_form_import(hass):
"""Test we get the form with import source."""
await setup.async_setup_component(hass, "persistent_notification", {})
mock_powerview_userdata = _get_mock_powerview_userdata()
with patch(
"homeassistant.components.hunterdouglas_powerview.UserData",
return_value=mock_powerview_userdata,
), patch(
"homeassistant.components.hunterdouglas_powerview.async_setup",
return_value=True,
) as mock_setup, patch(
"homeassistant.components.hunterdouglas_powerview.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={"host": "1.2.3.4"},
)
await hass.async_block_till_done()
assert result["type"] == "create_entry"
assert result["title"] == "AlexanderHD"
assert result["data"] == {
"host": "1.2.3.4",
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_homekit(hass):
"""Test we get the form with homekit source."""
await setup.async_setup_component(hass, "persistent_notification", {})
ignored_config_entry = MockConfigEntry(domain=DOMAIN, data={}, source="ignore")
ignored_config_entry.add_to_hass(hass)
mock_powerview_userdata = _get_mock_powerview_userdata()
with patch(
"homeassistant.components.hunterdouglas_powerview.UserData",
return_value=mock_powerview_userdata,
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": "homekit"},
data={
"host": "1.2.3.4",
"properties": {"id": "AA::BB::CC::DD::EE::FF"},
"name": "PowerViewHub._hap._tcp.local.",
},
)
assert result["type"] == "form"
assert result["step_id"] == "link"
assert result["errors"] is None
assert result["description_placeholders"] == {
"host": "1.2.3.4",
"name": "PowerViewHub",
}
with patch(
"homeassistant.components.hunterdouglas_powerview.UserData",
return_value=mock_powerview_userdata,
), patch(
"homeassistant.components.hunterdouglas_powerview.async_setup",
return_value=True,
) as mock_setup, patch(
"homeassistant.components.hunterdouglas_powerview.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {})
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == "PowerViewHub"
assert result2["data"] == {"host": "1.2.3.4"}
assert result2["result"].unique_id == "ABC123"
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
result3 = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": "homekit"},
data={
"host": "1.2.3.4",
"properties": {"id": "AA::BB::CC::DD::EE::FF"},
"name": "PowerViewHub._hap._tcp.local.",
},
)
assert result3["type"] == "abort"
async def test_form_cannot_connect(hass):
"""Test we handle cannot connect error."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
mock_powerview_userdata = _get_mock_powerview_userdata(
get_resources=asyncio.TimeoutError
)
with patch(
"homeassistant.components.hunterdouglas_powerview.UserData",
return_value=mock_powerview_userdata,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": "1.2.3.4"},
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "cannot_connect"}
async def test_form_no_data(hass):
"""Test we handle no data being returned from the hub."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
mock_powerview_userdata = _get_mock_powerview_userdata(userdata={"userData": {}})
with patch(
"homeassistant.components.hunterdouglas_powerview.UserData",
return_value=mock_powerview_userdata,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": "1.2.3.4"},
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "unknown"}
async def test_form_unknown_exception(hass):
"""Test we handle unknown exception."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
mock_powerview_userdata = _get_mock_powerview_userdata(userdata={"userData": {}})
with patch(
"homeassistant.components.hunterdouglas_powerview.UserData",
return_value=mock_powerview_userdata,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": "1.2.3.4"},
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "unknown"}
|
import enum
import attr
from PyQt5.QtCore import (pyqtSignal, pyqtSlot, # type: ignore[attr-defined]
pyqtProperty, Qt, QSize, QTimer)
from PyQt5.QtWidgets import QWidget, QHBoxLayout, QStackedLayout, QSizePolicy
from qutebrowser.browser import browsertab
from qutebrowser.config import config, stylesheet
from qutebrowser.keyinput import modeman
from qutebrowser.utils import usertypes, log, objreg, utils
from qutebrowser.mainwindow.statusbar import (backforward, command, progress,
keystring, percentage, url,
tabindex, textbase)
@attr.s
class ColorFlags:
"""Flags which change the appearance of the statusbar.
Attributes:
prompt: If we're currently in prompt-mode.
insert: If we're currently in insert mode.
command: If we're currently in command mode.
mode: The current caret mode (CaretMode.off/.on/.selection).
private: Whether this window is in private browsing mode.
passthrough: If we're currently in passthrough-mode.
"""
class CaretMode(enum.Enum):
"""The current caret "sub-mode" we're in."""
off = enum.auto()
on = enum.auto()
selection = enum.auto()
prompt = attr.ib(False)
insert = attr.ib(False)
command = attr.ib(False)
caret = attr.ib(CaretMode.off)
private = attr.ib(False)
passthrough = attr.ib(False)
def to_stringlist(self):
"""Get a string list of set flags used in the stylesheet.
This also combines flags in ways they're used in the sheet.
"""
strings = []
if self.prompt:
strings.append('prompt')
if self.insert:
strings.append('insert')
if self.command:
strings.append('command')
if self.private:
strings.append('private')
if self.passthrough:
strings.append('passthrough')
if self.private and self.command:
strings.append('private-command')
if self.caret == self.CaretMode.on:
strings.append('caret')
elif self.caret == self.CaretMode.selection:
strings.append('caret-selection')
else:
assert self.caret == self.CaretMode.off
return strings
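# For illustration (hypothetical state): with insert=True, private=True and
# caret=ColorFlags.CaretMode.selection, to_stringlist() returns
# ['insert', 'private', 'caret-selection'].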
def _generate_stylesheet():
flags = [
('private', 'statusbar.private'),
('caret', 'statusbar.caret'),
('caret-selection', 'statusbar.caret.selection'),
('prompt', 'prompts'),
('insert', 'statusbar.insert'),
('command', 'statusbar.command'),
('passthrough', 'statusbar.passthrough'),
('private-command', 'statusbar.command.private'),
]
qss = """
QWidget#StatusBar,
QWidget#StatusBar QLabel,
QWidget#StatusBar QLineEdit {
font: {{ conf.fonts.statusbar }};
color: {{ conf.colors.statusbar.normal.fg }};
}
QWidget#StatusBar {
background-color: {{ conf.colors.statusbar.normal.bg }};
}
"""
for flag, option in flags:
qss += """
QWidget#StatusBar[color_flags~="%s"],
QWidget#StatusBar[color_flags~="%s"] QLabel,
QWidget#StatusBar[color_flags~="%s"] QLineEdit {
color: {{ conf.colors.%s }};
}
QWidget#StatusBar[color_flags~="%s"] {
background-color: {{ conf.colors.%s }};
}
""" % (flag, flag, flag, # noqa: S001
option + '.fg', flag, option + '.bg')
return qss
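# For illustration, each (flag, option) pair above expands to a fragment like
# the one generated for 'insert':
#   QWidget#StatusBar[color_flags~="insert"],
#   QWidget#StatusBar[color_flags~="insert"] QLabel,
#   QWidget#StatusBar[color_flags~="insert"] QLineEdit {
#       color: {{ conf.colors.statusbar.insert.fg }};
#   }
#   QWidget#StatusBar[color_flags~="insert"] {
#       background-color: {{ conf.colors.statusbar.insert.bg }};
#   }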
class StatusBar(QWidget):
"""The statusbar at the bottom of the mainwindow.
Attributes:
txt: The Text widget in the statusbar.
keystring: The KeyString widget in the statusbar.
percentage: The Percentage widget in the statusbar.
url: The UrlText widget in the statusbar.
prog: The Progress widget in the statusbar.
cmd: The Command widget in the statusbar.
_hbox: The main QHBoxLayout.
_stack: The QStackedLayout with cmd/txt widgets.
_win_id: The window ID the statusbar is associated with.
Signals:
resized: Emitted when the statusbar has resized, so the completion
widget can adjust its size to it.
arg: The new size.
moved: Emitted when the statusbar has moved, so the completion widget
can move to the right position.
arg: The new position.
"""
resized = pyqtSignal('QRect')
moved = pyqtSignal('QPoint')
STYLESHEET = _generate_stylesheet()
def __init__(self, *, win_id, private, parent=None):
super().__init__(parent)
self.setObjectName(self.__class__.__name__)
self.setAttribute(Qt.WA_StyledBackground)
stylesheet.set_register(self)
self.setSizePolicy(QSizePolicy.Ignored, QSizePolicy.Fixed)
self._win_id = win_id
self._color_flags = ColorFlags()
self._color_flags.private = private
self._hbox = QHBoxLayout(self)
self._set_hbox_padding()
self._hbox.setSpacing(5)
self._stack = QStackedLayout()
self._hbox.addLayout(self._stack)
self._stack.setContentsMargins(0, 0, 0, 0)
self.cmd = command.Command(private=private, win_id=win_id)
self._stack.addWidget(self.cmd)
objreg.register('status-command', self.cmd, scope='window',
window=win_id)
self.txt = textbase.TextBase()
self._stack.addWidget(self.txt)
self.cmd.show_cmd.connect(self._show_cmd_widget)
self.cmd.hide_cmd.connect(self._hide_cmd_widget)
self._hide_cmd_widget()
self.url = url.UrlText()
self.percentage = percentage.Percentage()
self.backforward = backforward.Backforward()
self.tabindex = tabindex.TabIndex()
self.keystring = keystring.KeyString()
self.prog = progress.Progress(self)
self._draw_widgets()
config.instance.changed.connect(self._on_config_changed)
QTimer.singleShot(0, self.maybe_hide)
def __repr__(self):
return utils.get_repr(self)
@pyqtSlot(str)
def _on_config_changed(self, option):
if option == 'statusbar.show':
self.maybe_hide()
elif option == 'statusbar.padding':
self._set_hbox_padding()
elif option == 'statusbar.widgets':
self._draw_widgets()
def _draw_widgets(self):
"""Draw statusbar widgets."""
# Start with widgets hidden and show them when needed
for widget in [self.url, self.percentage,
self.backforward, self.tabindex,
self.keystring, self.prog]:
assert isinstance(widget, QWidget)
widget.hide()
self._hbox.removeWidget(widget)
tab = self._current_tab()
# Read the list and set widgets accordingly
for segment in config.val.statusbar.widgets:
if segment == 'url':
self._hbox.addWidget(self.url)
self.url.show()
elif segment == 'scroll':
self._hbox.addWidget(self.percentage)
self.percentage.show()
elif segment == 'scroll_raw':
self._hbox.addWidget(self.percentage)
self.percentage.set_raw()
self.percentage.show()
elif segment == 'history':
self._hbox.addWidget(self.backforward)
self.backforward.enabled = True
if tab:
self.backforward.on_tab_changed(tab)
elif segment == 'tabs':
self._hbox.addWidget(self.tabindex)
self.tabindex.show()
elif segment == 'keypress':
self._hbox.addWidget(self.keystring)
self.keystring.show()
elif segment == 'progress':
self._hbox.addWidget(self.prog)
self.prog.enabled = True
if tab:
self.prog.on_tab_changed(tab)
@pyqtSlot()
def maybe_hide(self):
"""Hide the statusbar if it's configured to do so."""
strategy = config.val.statusbar.show
tab = self._current_tab()
if tab is not None and tab.data.fullscreen:
self.hide()
elif strategy == 'never':
self.hide()
elif strategy == 'in-mode':
try:
mode_manager = modeman.instance(self._win_id)
except modeman.UnavailableError:
self.hide()
else:
if mode_manager.mode == usertypes.KeyMode.normal:
self.hide()
else:
self.show()
elif strategy == 'always':
self.show()
else:
raise utils.Unreachable
def _set_hbox_padding(self):
padding = config.val.statusbar.padding
self._hbox.setContentsMargins(padding.left, 0, padding.right, 0)
@pyqtProperty('QStringList')
def color_flags(self):
"""Getter for self.color_flags, so it can be used as Qt property."""
return self._color_flags.to_stringlist()
def _current_tab(self):
"""Get the currently displayed tab."""
window = objreg.get('tabbed-browser', scope='window',
window=self._win_id)
return window.widget.currentWidget()
def set_mode_active(self, mode, val):
"""Setter for self.{insert,command,caret}_active.
Re-set the stylesheet after setting the value, so everything gets
updated by Qt properly.
"""
if mode == usertypes.KeyMode.insert:
log.statusbar.debug("Setting insert flag to {}".format(val))
self._color_flags.insert = val
if mode == usertypes.KeyMode.passthrough:
log.statusbar.debug("Setting passthrough flag to {}".format(val))
self._color_flags.passthrough = val
if mode == usertypes.KeyMode.command:
log.statusbar.debug("Setting command flag to {}".format(val))
self._color_flags.command = val
elif mode in [usertypes.KeyMode.prompt, usertypes.KeyMode.yesno]:
log.statusbar.debug("Setting prompt flag to {}".format(val))
self._color_flags.prompt = val
elif mode == usertypes.KeyMode.caret:
if not val:
# Turning on is handled in on_current_caret_selection_toggled
log.statusbar.debug("Setting caret mode off")
self._color_flags.caret = ColorFlags.CaretMode.off
stylesheet.set_register(self, update=False)
def _set_mode_text(self, mode):
"""Set the mode text."""
if mode == 'passthrough':
key_instance = config.key_instance
all_bindings = key_instance.get_reverse_bindings_for('passthrough')
bindings = all_bindings.get('leave-mode')
if bindings:
suffix = ' ({} to leave)'.format(' or '.join(bindings))
else:
suffix = ''
else:
suffix = ''
text = "-- {} MODE --{}".format(mode.upper(), suffix)
self.txt.setText(text)
def _show_cmd_widget(self):
"""Show command widget instead of temporary text."""
self._stack.setCurrentWidget(self.cmd)
self.show()
def _hide_cmd_widget(self):
"""Show temporary text instead of command widget."""
log.statusbar.debug("Hiding cmd widget")
self._stack.setCurrentWidget(self.txt)
self.maybe_hide()
@pyqtSlot(str)
def set_text(self, text):
"""Set a normal (persistent) text in the status bar."""
log.message.debug(text)
self.txt.setText(text)
@pyqtSlot(usertypes.KeyMode)
def on_mode_entered(self, mode):
"""Mark certain modes in the commandline."""
mode_manager = modeman.instance(self._win_id)
if config.val.statusbar.show == 'in-mode':
self.show()
if mode_manager.parsers[mode].passthrough:
self._set_mode_text(mode.name)
if mode in [usertypes.KeyMode.insert,
usertypes.KeyMode.command,
usertypes.KeyMode.caret,
usertypes.KeyMode.prompt,
usertypes.KeyMode.yesno,
usertypes.KeyMode.passthrough]:
self.set_mode_active(mode, True)
@pyqtSlot(usertypes.KeyMode, usertypes.KeyMode)
def on_mode_left(self, old_mode, new_mode):
"""Clear marked mode."""
mode_manager = modeman.instance(self._win_id)
if config.val.statusbar.show == 'in-mode':
self.hide()
if mode_manager.parsers[old_mode].passthrough:
if mode_manager.parsers[new_mode].passthrough:
self._set_mode_text(new_mode.name)
else:
self.txt.setText('')
if old_mode in [usertypes.KeyMode.insert,
usertypes.KeyMode.command,
usertypes.KeyMode.caret,
usertypes.KeyMode.prompt,
usertypes.KeyMode.yesno,
usertypes.KeyMode.passthrough]:
self.set_mode_active(old_mode, False)
@pyqtSlot(browsertab.AbstractTab)
def on_tab_changed(self, tab):
"""Notify sub-widgets when the tab has been changed."""
self.url.on_tab_changed(tab)
self.prog.on_tab_changed(tab)
self.percentage.on_tab_changed(tab)
self.backforward.on_tab_changed(tab)
self.maybe_hide()
assert tab.is_private == self._color_flags.private
@pyqtSlot(browsertab.SelectionState)
def on_caret_selection_toggled(self, selection_state):
"""Update the statusbar when entering/leaving caret selection mode."""
log.statusbar.debug("Setting caret selection {}"
.format(selection_state))
if selection_state is browsertab.SelectionState.normal:
self._set_mode_text("caret selection")
self._color_flags.caret = ColorFlags.CaretMode.selection
elif selection_state is browsertab.SelectionState.line:
self._set_mode_text("caret line selection")
self._color_flags.caret = ColorFlags.CaretMode.selection
else:
self._set_mode_text("caret")
self._color_flags.caret = ColorFlags.CaretMode.on
stylesheet.set_register(self, update=False)
def resizeEvent(self, e):
"""Extend resizeEvent of QWidget to emit a resized signal afterwards.
Args:
e: The QResizeEvent.
"""
super().resizeEvent(e)
self.resized.emit(self.geometry())
def moveEvent(self, e):
"""Extend moveEvent of QWidget to emit a moved signal afterwards.
Args:
e: The QMoveEvent.
"""
super().moveEvent(e)
self.moved.emit(e.pos())
def minimumSizeHint(self):
"""Set the minimum height to the text height plus some padding."""
padding = config.cache['statusbar.padding']
width = super().minimumSizeHint().width()
height = self.fontMetrics().height() + padding.top + padding.bottom
return QSize(width, height)
|
import asyncio
import contextlib
import functools
from typing import Iterable, List, Union
import discord
from .. import commands
from .predicates import ReactionPredicate
_ReactableEmoji = Union[str, discord.Emoji]
async def menu(
ctx: commands.Context,
pages: Union[List[str], List[discord.Embed]],
controls: dict,
message: discord.Message = None,
page: int = 0,
timeout: float = 30.0,
):
"""
An emoji-based menu
.. note:: All pages should be of the same type
.. note:: All functions for handling what a particular emoji does
should be coroutines (i.e. :code:`async def`). Additionally,
they must take all of the parameters of this function, in
addition to a string representing the emoji reacted with.
This parameter should be the last one, and none of the
        parameters in the handling functions are optional.
Parameters
----------
ctx: commands.Context
The command context
pages: `list` of `str` or `discord.Embed`
The pages of the menu.
controls: dict
A mapping of emoji to the function which handles the action for the
emoji.
message: discord.Message
The message representing the menu. Usually :code:`None` when first opening
the menu
page: int
The current page number of the menu
timeout: float
The time (in seconds) to wait for a reaction
Raises
------
RuntimeError
If either of the notes above are violated
"""
if not isinstance(pages[0], (discord.Embed, str)):
raise RuntimeError("Pages must be of type discord.Embed or str")
if not all(isinstance(x, discord.Embed) for x in pages) and not all(
isinstance(x, str) for x in pages
):
raise RuntimeError("All pages must be of the same type")
for key, value in controls.items():
maybe_coro = value
if isinstance(value, functools.partial):
maybe_coro = value.func
if not asyncio.iscoroutinefunction(maybe_coro):
raise RuntimeError("Function must be a coroutine")
current_page = pages[page]
if not message:
if isinstance(current_page, discord.Embed):
message = await ctx.send(embed=current_page)
else:
message = await ctx.send(current_page)
# Don't wait for reactions to be added (GH-1797)
# noinspection PyAsyncCall
start_adding_reactions(message, controls.keys())
else:
try:
if isinstance(current_page, discord.Embed):
await message.edit(embed=current_page)
else:
await message.edit(content=current_page)
except discord.NotFound:
return
try:
react, user = await ctx.bot.wait_for(
"reaction_add",
check=ReactionPredicate.with_emojis(tuple(controls.keys()), message, ctx.author),
timeout=timeout,
)
except asyncio.TimeoutError:
if not ctx.me:
return
try:
if message.channel.permissions_for(ctx.me).manage_messages:
await message.clear_reactions()
else:
raise RuntimeError
except (discord.Forbidden, RuntimeError): # cannot remove all reactions
for key in controls.keys():
try:
await message.remove_reaction(key, ctx.bot.user)
except discord.Forbidden:
return
except discord.HTTPException:
pass
except discord.NotFound:
return
else:
return await controls[react.emoji](
ctx, pages, controls, message, page, timeout, react.emoji
)
async def next_page(
ctx: commands.Context,
pages: list,
controls: dict,
message: discord.Message,
page: int,
timeout: float,
emoji: str,
):
perms = message.channel.permissions_for(ctx.me)
if perms.manage_messages: # Can manage messages, so remove react
with contextlib.suppress(discord.NotFound):
await message.remove_reaction(emoji, ctx.author)
if page == len(pages) - 1:
page = 0 # Loop around to the first item
else:
page = page + 1
return await menu(ctx, pages, controls, message=message, page=page, timeout=timeout)
async def prev_page(
ctx: commands.Context,
pages: list,
controls: dict,
message: discord.Message,
page: int,
timeout: float,
emoji: str,
):
perms = message.channel.permissions_for(ctx.me)
if perms.manage_messages: # Can manage messages, so remove react
with contextlib.suppress(discord.NotFound):
await message.remove_reaction(emoji, ctx.author)
if page == 0:
page = len(pages) - 1 # Loop around to the last item
else:
page = page - 1
return await menu(ctx, pages, controls, message=message, page=page, timeout=timeout)
async def close_menu(
ctx: commands.Context,
pages: list,
controls: dict,
message: discord.Message,
page: int,
timeout: float,
emoji: str,
):
with contextlib.suppress(discord.NotFound):
await message.delete()
def start_adding_reactions(
message: discord.Message, emojis: Iterable[_ReactableEmoji]
) -> asyncio.Task:
"""Start adding reactions to a message.
This is a non-blocking operation - calling this will schedule the
reactions being added, but the calling code will continue to
execute asynchronously. There is no need to await this function.
This is particularly useful if you wish to start waiting for a
reaction whilst the reactions are still being added - in fact,
this is exactly what `menu` uses to do that.
Parameters
----------
message: discord.Message
The message to add reactions to.
emojis : Iterable[Union[str, discord.Emoji]]
The emojis to react to the message with.
Returns
-------
asyncio.Task
The task for the coroutine adding the reactions.
"""
async def task():
# The task should exit silently if the message is deleted
with contextlib.suppress(discord.NotFound):
for emoji in emojis:
await message.add_reaction(emoji)
return asyncio.create_task(task())
DEFAULT_CONTROLS = {
"\N{LEFTWARDS BLACK ARROW}\N{VARIATION SELECTOR-16}": prev_page,
"\N{CROSS MARK}": close_menu,
"\N{BLACK RIGHTWARDS ARROW}\N{VARIATION SELECTOR-16}": next_page,
}
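# Hedged usage sketch (hypothetical cog command, not part of this module):
#
#   @commands.command()
#   async def pages(self, ctx: commands.Context):
#       await menu(ctx, ["Page 1", "Page 2"], DEFAULT_CONTROLS, timeout=60.0)
#
# DEFAULT_CONTROLS wires the arrow reactions to prev_page/next_page and the
# cross mark to close_menu, as defined above.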
|
import unittest
import mock
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.providers.digitalocean import util
class TestDoctlAndParse(unittest.TestCase):
def testCommandSucceeds(self):
with mock.patch(vm_util.__name__ + '.IssueCommand',
return_value=('{"a": 1, "b": 2}', '', 0)):
response, retval = util.DoctlAndParse(['foo', 'bar', 'baz'])
self.assertEqual(response, {'a': 1, 'b': 2})
self.assertEqual(retval, 0)
def testCommandFailsWithNull(self):
with mock.patch(vm_util.__name__ + '.IssueCommand',
return_value=(
'null{"errors": [{"detail": "foo"}]}', '', 1)):
response, retval = util.DoctlAndParse(['foo', 'bar', 'baz'])
self.assertEqual(response, {'errors': [{'detail': 'foo'}]})
self.assertEqual(retval, 1)
def testCommandFailsWithoutNull(self):
with mock.patch(vm_util.__name__ + '.IssueCommand',
return_value=('{"errors": [{"detail": "foo"}]}', '', 1)):
response, retval = util.DoctlAndParse(['foo', 'bar', 'baz'])
self.assertEqual(response, {'errors': [{'detail': 'foo'}]})
self.assertEqual(retval, 1)
def testCommandSucceedsNoOutput(self):
with mock.patch(vm_util.__name__ + '.IssueCommand',
return_value=('', '', 0)):
response, retval = util.DoctlAndParse(['foo', 'bar', 'baz'])
self.assertEqual(response, None)
self.assertEqual(retval, 0)
if __name__ == '__main__':
unittest.main()
|
from datetime import timedelta
import logging
from raincloudy.core import RainCloudy
from requests.exceptions import ConnectTimeout, HTTPError
import voluptuous as vol
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_PASSWORD,
CONF_SCAN_INTERVAL,
CONF_USERNAME,
PERCENTAGE,
TIME_DAYS,
TIME_MINUTES,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_connect, dispatcher_send
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import track_time_interval
_LOGGER = logging.getLogger(__name__)
ALLOWED_WATERING_TIME = [5, 10, 15, 30, 45, 60]
ATTRIBUTION = "Data provided by Melnor Aquatimer.com"
CONF_WATERING_TIME = "watering_minutes"
NOTIFICATION_ID = "raincloud_notification"
NOTIFICATION_TITLE = "Rain Cloud Setup"
DATA_RAINCLOUD = "raincloud"
DOMAIN = "raincloud"
DEFAULT_WATERING_TIME = 15
KEY_MAP = {
"auto_watering": "Automatic Watering",
"battery": "Battery",
"is_watering": "Watering",
"manual_watering": "Manual Watering",
"next_cycle": "Next Cycle",
"rain_delay": "Rain Delay",
"status": "Status",
"watering_time": "Remaining Watering Time",
}
ICON_MAP = {
"auto_watering": "mdi:autorenew",
"battery": "",
"is_watering": "",
"manual_watering": "mdi:water-pump",
"next_cycle": "mdi:calendar-clock",
"rain_delay": "mdi:weather-rainy",
"status": "",
"watering_time": "mdi:water-pump",
}
UNIT_OF_MEASUREMENT_MAP = {
"auto_watering": "",
"battery": PERCENTAGE,
"is_watering": "",
"manual_watering": "",
"next_cycle": "",
"rain_delay": TIME_DAYS,
"status": "",
"watering_time": TIME_MINUTES,
}
BINARY_SENSORS = ["is_watering", "status"]
SENSORS = ["battery", "next_cycle", "rain_delay", "watering_time"]
SWITCHES = ["auto_watering", "manual_watering"]
SCAN_INTERVAL = timedelta(seconds=20)
SIGNAL_UPDATE_RAINCLOUD = "raincloud_update"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_SCAN_INTERVAL, default=SCAN_INTERVAL): cv.time_period,
}
)
},
extra=vol.ALLOW_EXTRA,
)
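# Illustrative configuration.yaml entry matching CONFIG_SCHEMA above
# (credentials hypothetical):
#
#   raincloud:
#     username: user@example.com
#     password: !secret raincloud_password
#     scan_interval: 20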
def setup(hass, config):
"""Set up the Melnor RainCloud component."""
conf = config[DOMAIN]
username = conf.get(CONF_USERNAME)
password = conf.get(CONF_PASSWORD)
scan_interval = conf.get(CONF_SCAN_INTERVAL)
try:
raincloud = RainCloudy(username=username, password=password)
if not raincloud.is_connected:
raise HTTPError
hass.data[DATA_RAINCLOUD] = RainCloudHub(raincloud)
except (ConnectTimeout, HTTPError) as ex:
_LOGGER.error("Unable to connect to Rain Cloud service: %s", str(ex))
hass.components.persistent_notification.create(
f"Error: {ex}<br />" "You will need to restart hass after fixing.",
title=NOTIFICATION_TITLE,
notification_id=NOTIFICATION_ID,
)
return False
def hub_refresh(event_time):
"""Call Raincloud hub to refresh information."""
_LOGGER.debug("Updating RainCloud Hub component")
hass.data[DATA_RAINCLOUD].data.update()
dispatcher_send(hass, SIGNAL_UPDATE_RAINCLOUD)
# Call the Raincloud API to refresh updates
track_time_interval(hass, hub_refresh, scan_interval)
return True
class RainCloudHub:
"""Representation of a base RainCloud device."""
def __init__(self, data):
"""Initialize the entity."""
self.data = data
class RainCloudEntity(Entity):
"""Entity class for RainCloud devices."""
def __init__(self, data, sensor_type):
"""Initialize the RainCloud entity."""
self.data = data
self._sensor_type = sensor_type
self._name = f"{self.data.name} {KEY_MAP.get(self._sensor_type)}"
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
async def async_added_to_hass(self):
"""Register callbacks."""
self.async_on_remove(
async_dispatcher_connect(
self.hass, SIGNAL_UPDATE_RAINCLOUD, self._update_callback
)
)
def _update_callback(self):
"""Call update method."""
self.schedule_update_ha_state(True)
@property
def unit_of_measurement(self):
"""Return the units of measurement."""
return UNIT_OF_MEASUREMENT_MAP.get(self._sensor_type)
@property
def device_state_attributes(self):
"""Return the state attributes."""
return {ATTR_ATTRIBUTION: ATTRIBUTION, "identifier": self.data.serial}
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return ICON_MAP.get(self._sensor_type)
|
import os
import re
import sys
import time
import unittest
from spinners.spinners import Spinners
from halo import HaloNotebook
from halo._utils import get_terminal_columns, is_supported
from tests._utils import decode_utf_8_text, encode_utf_8_text, find_colors, strip_ansi
from termcolor import COLORS
if sys.version_info.major == 2:
get_coded_text = encode_utf_8_text
else:
get_coded_text = decode_utf_8_text
if is_supported():
frames = [get_coded_text(frame) for frame in Spinners['dots'].value['frames']]
default_spinner = Spinners['dots'].value
else:
frames = [get_coded_text(frame) for frame in Spinners['line'].value['frames']]
default_spinner = Spinners['line'].value
class TestHaloNotebook(unittest.TestCase):
"""Test HaloNotebook enum for attribute values.
"""
TEST_FOLDER = os.path.dirname(os.path.abspath(__file__))
def setUp(self):
"""Set up things before beginning of each test.
"""
pass
def _get_test_output(self, spinner, no_ansi=True):
"""Clean the output from Output widget and return it in list form.
Returns
-------
list
Clean output from Output widget
"""
output = {}
output_text = []
output_colors = []
for line in spinner.output.outputs:
if no_ansi:
clean_line = strip_ansi(line['text'].strip('\r'))
else:
clean_line = line['text'].strip('\r')
if clean_line != '':
output_text.append(get_coded_text(clean_line))
colors_found = find_colors(line['text'].strip('\r'))
if colors_found:
tmp = []
for color in colors_found:
tmp.append(re.sub(r'[^0-9]', '', color, flags=re.I))
output_colors.append(tmp)
output['text'] = output_text
output['colors'] = output_colors
return output
def test_basic_spinner(self):
"""Test the basic of basic spinners.
"""
spinner = HaloNotebook(text='foo', spinner='dots')
spinner.start()
time.sleep(1)
output = self._get_test_output(spinner)['text']
spinner.stop()
self.assertEqual(output[0], '{} foo'.format(frames[0]))
self.assertEqual(output[1], '{} foo'.format(frames[1]))
self.assertEqual(output[2], '{} foo'.format(frames[2]))
self.assertEqual(spinner.output.outputs, spinner._output(''))
def test_text_spinner_color(self):
"""Test basic spinner with available colors color (both spinner and text)
"""
for color, color_int in COLORS.items():
spinner = HaloNotebook(text='foo', text_color=color, color=color, spinner='dots')
spinner.start()
time.sleep(1)
output = self._get_test_output(spinner)['colors']
spinner.stop()
# check if spinner colors match
self.assertEqual(color_int, int(output[0][0]))
self.assertEqual(color_int, int(output[1][0]))
self.assertEqual(color_int, int(output[2][0]))
# check if text colors match
self.assertEqual(color_int, int(output[0][1]))
self.assertEqual(color_int, int(output[1][1]))
self.assertEqual(color_int, int(output[2][1]))
def test_text_stripping(self):
"""Test the text being stripped before output.
"""
spinner = HaloNotebook(text='foo\n', spinner='dots')
spinner.start()
time.sleep(1)
output = self._get_test_output(spinner)['text']
self.assertEqual(output[0], '{} foo'.format(frames[0]))
self.assertEqual(output[1], '{} foo'.format(frames[1]))
self.assertEqual(output[2], '{} foo'.format(frames[2]))
spinner.succeed('foo\n')
output = self._get_test_output(spinner)['text']
pattern = re.compile(r'(✔|v) foo', re.UNICODE)
self.assertRegexpMatches(output[-1], pattern)
def test_text_ellipsing(self):
"""Test the text gets ellipsed if it's too long
"""
text = 'This is a text that it is too long. In fact, it exceeds the eighty column standard ' \
'terminal width, which forces the text frame renderer to add an ellipse at the end of the ' \
'text. ' * 6
spinner = HaloNotebook(text=text, spinner='dots')
spinner.start()
time.sleep(1)
output = self._get_test_output(spinner)['text']
terminal_width = get_terminal_columns()
# -6 of the ' (...)' ellipsis, -2 of the spinner and space
self.assertEqual(output[0], '{} {} (...)'.format(frames[0], text[:terminal_width - 6 - 2]))
self.assertEqual(output[1], '{} {} (...)'.format(frames[1], text[:terminal_width - 6 - 2]))
self.assertEqual(output[2], '{} {} (...)'.format(frames[2], text[:terminal_width - 6 - 2]))
spinner.succeed('End!')
output = self._get_test_output(spinner)['text']
pattern = re.compile(r'(✔|v) End!', re.UNICODE)
self.assertRegexpMatches(output[-1], pattern)
def test_text_animation(self):
"""Test the text gets animated when it is too long
"""
text = 'This is a text that it is too long. In fact, it exceeds the eighty column standard ' \
'terminal width, which forces the text frame renderer to add an ellipse at the end of the ' \
'text. ' * 6
spinner = HaloNotebook(text=text, spinner='dots', animation='marquee')
spinner.start()
time.sleep(1)
output = self._get_test_output(spinner)['text']
terminal_width = get_terminal_columns()
self.assertEqual(output[0], '{} {}'.format(frames[0], text[:terminal_width - 2]))
self.assertEqual(output[1], '{} {}'.format(frames[1], text[1:terminal_width - 1]))
self.assertEqual(output[2], '{} {}'.format(frames[2], text[2:terminal_width]))
spinner.succeed('End!')
output = self._get_test_output(spinner)['text']
pattern = re.compile(r'(✔|v) End!', re.UNICODE)
self.assertRegexpMatches(output[-1], pattern)
def test_context_manager(self):
"""Test the basic of basic spinners used through the with statement.
"""
with HaloNotebook(text='foo', spinner='dots') as spinner:
time.sleep(1)
output = self._get_test_output(spinner)['text']
self.assertEqual(output[0], '{} foo'.format(frames[0]))
self.assertEqual(output[1], '{} foo'.format(frames[1]))
self.assertEqual(output[2], '{} foo'.format(frames[2]))
self.assertEqual(spinner.output.outputs, spinner._output(''))
def test_decorator_spinner(self):
"""Test basic usage of spinners with the decorator syntax."""
@HaloNotebook(text="foo", spinner="dots")
def decorated_function():
time.sleep(1)
spinner = decorated_function.__closure__[1].cell_contents
output = self._get_test_output(spinner)['text']
return output
output = decorated_function()
self.assertEqual(output[0], '{} foo'.format(frames[0]))
self.assertEqual(output[1], '{} foo'.format(frames[1]))
self.assertEqual(output[2], '{} foo'.format(frames[2]))
def test_initial_title_spinner(self):
"""Test Halo with initial title.
"""
spinner = HaloNotebook('bar')
spinner.start()
time.sleep(1)
output = self._get_test_output(spinner)['text']
spinner.stop()
self.assertEqual(output[0], '{} bar'.format(frames[0]))
self.assertEqual(output[1], '{} bar'.format(frames[1]))
self.assertEqual(output[2], '{} bar'.format(frames[2]))
self.assertEqual(spinner.output.outputs, spinner._output(''))
def test_id_not_created_before_start(self):
"""Test Spinner ID not created before start.
"""
spinner = HaloNotebook()
self.assertEqual(spinner.spinner_id, None)
def test_ignore_multiple_start_calls(self):
"""Test ignoring of multiple start calls.
"""
spinner = HaloNotebook()
spinner.start()
spinner_id = spinner.spinner_id
spinner.start()
self.assertEqual(spinner.spinner_id, spinner_id)
spinner.stop()
def test_chaining_start(self):
"""Test chaining start with constructor
"""
spinner = HaloNotebook().start()
spinner_id = spinner.spinner_id
self.assertIsNotNone(spinner_id)
spinner.stop()
def test_succeed(self):
"""Test succeed method
"""
spinner = HaloNotebook()
spinner.start('foo')
spinner.succeed('foo')
output = self._get_test_output(spinner)['text']
pattern = re.compile(r'(✔|v) foo', re.UNICODE)
self.assertRegexpMatches(output[-1], pattern)
spinner.stop()
def test_succeed_with_new_text(self):
"""Test succeed method with new text
"""
spinner = HaloNotebook()
spinner.start('foo')
spinner.succeed('bar')
output = self._get_test_output(spinner)['text']
pattern = re.compile(r'(✔|v) bar', re.UNICODE)
self.assertRegexpMatches(output[-1], pattern)
spinner.stop()
def test_info(self):
"""Test info method
"""
spinner = HaloNotebook()
spinner.start('foo')
spinner.info()
output = self._get_test_output(spinner)['text']
pattern = re.compile(r'(ℹ|¡) foo', re.UNICODE)
self.assertRegexpMatches(output[-1], pattern)
spinner.stop()
def test_fail(self):
"""Test fail method
"""
spinner = HaloNotebook()
spinner.start('foo')
spinner.fail()
output = self._get_test_output(spinner)['text']
pattern = re.compile(r'(✖|×) foo', re.UNICODE)
self.assertRegexpMatches(output[-1], pattern)
spinner.stop()
def test_warning(self):
"""Test warn method
"""
spinner = HaloNotebook()
spinner.start('foo')
spinner.warn('Warning!')
output = self._get_test_output(spinner)['text']
pattern = re.compile(r'(⚠|!!) Warning!', re.UNICODE)
self.assertRegexpMatches(output[-1], pattern)
spinner.stop()
def test_spinner_getters_setters(self):
"""Test spinner getters and setters.
"""
spinner = HaloNotebook()
self.assertEqual(spinner.text, '')
self.assertEqual(spinner.color, 'cyan')
self.assertIsNone(spinner.spinner_id)
spinner.spinner = 'dots12'
spinner.text = 'bar'
spinner.color = 'red'
self.assertEqual(spinner.text, 'bar')
self.assertEqual(spinner.color, 'red')
if is_supported():
self.assertEqual(spinner.spinner, Spinners['dots12'].value)
else:
self.assertEqual(spinner.spinner, default_spinner)
spinner.spinner = 'dots11'
if is_supported():
self.assertEqual(spinner.spinner, Spinners['dots11'].value)
else:
self.assertEqual(spinner.spinner, default_spinner)
spinner.spinner = 'foo_bar'
self.assertEqual(spinner.spinner, default_spinner)
# Color is None
spinner.color = None
spinner.start()
spinner.stop()
self.assertIsNone(spinner.color)
def test_unavailable_spinner_defaults(self):
"""Test unavailable spinner defaults.
"""
spinner = HaloNotebook('dot')
self.assertEqual(spinner.text, 'dot')
self.assertEqual(spinner.spinner, default_spinner)
def test_if_enabled(self):
"""Test if spinner is enabled
"""
spinner = HaloNotebook(text="foo", enabled=False)
spinner.start()
time.sleep(1)
output = self._get_test_output(spinner)['text']
spinner.clear()
spinner.stop()
self.assertEqual(len(output), 0)
self.assertEqual(output, [])
def test_invalid_placement(self):
"""Test invalid placement of spinner.
"""
        with self.assertRaises(ValueError):
            HaloNotebook(placement='')
        with self.assertRaises(ValueError):
            HaloNotebook(placement='foo')
        with self.assertRaises(ValueError):
            HaloNotebook(placement=None)
        spinner = HaloNotebook(placement='left')
        with self.assertRaises(ValueError):
            spinner.placement = ''
        with self.assertRaises(ValueError):
            spinner.placement = 'foo'
        with self.assertRaises(ValueError):
            spinner.placement = None
def test_default_placement(self):
"""Test default placement of spinner.
"""
spinner = HaloNotebook()
self.assertEqual(spinner.placement, 'left')
def test_right_placement(self):
"""Test right placement of spinner.
"""
spinner = HaloNotebook(text="foo", placement="right")
spinner.start()
time.sleep(1)
output = self._get_test_output(spinner)['text']
(text, _) = output[-1].split(" ")
self.assertEqual(text, "foo")
spinner.succeed()
output = self._get_test_output(spinner)['text']
(text, symbol) = output[-1].split(" ")
pattern = re.compile(r"(✔|v)", re.UNICODE)
self.assertEqual(text, "foo")
self.assertRegexpMatches(symbol, pattern)
spinner.stop()
def test_spinner_color(self):
"""Test ANSI escape characters are present
"""
for color, color_int in COLORS.items():
spinner = HaloNotebook(color=color)
spinner.start()
output = self._get_test_output(spinner, no_ansi=False)
spinner.stop()
output_merged = [arr for c in output['colors'] for arr in c]
            self.assertIn(str(color_int), output_merged)
def tearDown(self):
"""Clean up things after every test.
"""
pass
if __name__ == '__main__':
SUITE = unittest.TestLoader().loadTestsFromTestCase(TestHaloNotebook)
unittest.TextTestRunner(verbosity=2).run(SUITE)
|
import unittest
import mock
from perfkitbenchmarker import errors
from perfkitbenchmarker.linux_packages import intel_repo
from tests import pkb_common_test_case
class IntelRepoTestCase(pkb_common_test_case.PkbCommonTestCase):
def testAptPrepare(self) -> None:
vm = mock.Mock()
intel_repo.AptPrepare(vm)
    # call_args_list holds mock.call objects; compare them directly, since
    # calling assert_called_with on a call object is a silent no-op.
    self.assertEqual(
        vm.PushDataFile.call_args_list[0],
        mock.call('intel_repo_key.txt', '/tmp/pkb/intel_repo_key.txt'))
    self.assertEqual(
        vm.PushDataFile.call_args_list[1],
        mock.call('intel_repo_list.txt', '/tmp/pkb/intel.list'))
vm.InstallPackages.assert_called_with('libgomp1')
def testYumPrepare(self) -> None:
vm = mock.Mock()
vm.RemoteCommandWithReturnCode.return_value = ('', '', 0)
intel_repo.YumPrepare(vm)
vm.PushDataFile.assert_called_with('intel_repo_key.txt',
'/tmp/pkb/intel_repo_key.txt')
vm.InstallPackages.assert_called_with('yum-utils')
vm.RemoteCommandWithReturnCode.assert_called_with(
'diff /tmp/pkb/intel_repo_key.txt /tmp/pkb/mpi.yumkey')
def testYumPrepareBadKey(self) -> None:
vm = mock.Mock()
vm.RemoteCommandWithReturnCode.return_value = ('', '', 1)
with self.assertRaises(errors.Setup.InvalidConfigurationError):
intel_repo.YumPrepare(vm)
if __name__ == '__main__':
unittest.main()
|
import mock
from paasta_tools import deployment_utils
from paasta_tools.utils import DeploymentsJsonV2
@mock.patch("paasta_tools.deployment_utils.load_v2_deployments_json", autospec=True)
def test_get_currently_deployed_sha(mock_load_v2_deployments_json):
mock_load_v2_deployments_json.return_value = DeploymentsJsonV2(
service="fake-service",
config_dict={
"controls": {},
"deployments": {"everything": {"git_sha": "abc", "docker_image": "foo"}},
},
)
actual = deployment_utils.get_currently_deployed_sha(
service="service", deploy_group="everything"
)
assert actual == "abc"
|
import argparse
import chainer
from chainer import iterators
import chainermn
from chainercv.utils import apply_to_iterator
from chainercv.utils import ProgressHook
from eval_detection import models
from eval_detection import setup
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--dataset', choices=('voc', 'coco'))
parser.add_argument('--model', choices=sorted(models.keys()))
parser.add_argument('--pretrained-model')
parser.add_argument('--batchsize', type=int)
args = parser.parse_args()
comm = chainermn.create_communicator('pure_nccl')
device = comm.intra_rank
dataset, eval_, model, batchsize = setup(
args.dataset, args.model, args.pretrained_model, args.batchsize)
chainer.cuda.get_device_from_id(device).use()
model.to_gpu()
model.use_preset('evaluate')
    if comm.rank != 0:
apply_to_iterator(model.predict, None, comm=comm)
return
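    # Only rank 0 reaches this point: it drives the iterator and the
    # evaluation while the other ranks serve predict() via the communicator.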
iterator = iterators.MultithreadIterator(
dataset, batchsize * comm.size, repeat=False, shuffle=False)
in_values, out_values, rest_values = apply_to_iterator(
model.predict, iterator, hook=ProgressHook(len(dataset)), comm=comm)
# delete unused iterators explicitly
del in_values
eval_(out_values, rest_values)
if __name__ == '__main__':
main()
|
import os.path as op
import numpy as np
from numpy.testing import assert_array_almost_equal
import pytest
from mne.io import read_raw_fif
from mne.event import read_events
from mne.epochs import Epochs
from mne.preprocessing.stim import fix_stim_artifact
data_path = op.join(op.dirname(__file__), '..', '..', 'io', 'tests', 'data')
raw_fname = op.join(data_path, 'test_raw.fif')
event_fname = op.join(data_path, 'test-eve.fif')
def test_fix_stim_artifact():
"""Test fix stim artifact."""
events = read_events(event_fname)
raw = read_raw_fif(raw_fname)
pytest.raises(RuntimeError, fix_stim_artifact, raw)
raw = read_raw_fif(raw_fname, preload=True)
# use window before stimulus in epochs
tmin, tmax, event_id = -0.2, 0.5, 1
picks = ('meg', 'eeg', 'eog')
epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks,
preload=True, reject=None)
e_start = int(np.ceil(epochs.info['sfreq'] * epochs.tmin))
tmin, tmax = -0.045, -0.015
tmin_samp = int(-0.035 * epochs.info['sfreq']) - e_start
tmax_samp = int(-0.015 * epochs.info['sfreq']) - e_start
epochs = fix_stim_artifact(epochs, tmin=tmin, tmax=tmax, mode='linear',
picks=('eeg', 'eog'))
data = epochs.copy().pick(
('eeg', 'eog')).get_data()[:, :, tmin_samp:tmax_samp]
diff_data0 = np.diff(data[0][0])
diff_data0 -= np.mean(diff_data0)
assert_array_almost_equal(diff_data0, np.zeros(len(diff_data0)))
    data = epochs.copy().pick('meg').get_data()[:, :, tmin_samp:tmax_samp]
diff_data0 = np.diff(data[0][0])
diff_data0 -= np.mean(diff_data0)
assert np.all(diff_data0 != 0)
epochs = fix_stim_artifact(epochs, tmin=tmin, tmax=tmax, mode='window')
data_from_epochs_fix = epochs.get_data()[:, :, tmin_samp:tmax_samp]
    # XXX This is a very weird check: np.all() returns a single bool, so the
    # comparison only asserts that at least one sample is exactly zero, not
    # that the whole fixed window is zeroed.
    assert np.all(data_from_epochs_fix) == 0.
# use window before stimulus in raw
event_idx = np.where(events[:, 2] == 1)[0][0]
tmin, tmax = -0.045, -0.015
tmin_samp = int(-0.035 * raw.info['sfreq'])
tmax_samp = int(-0.015 * raw.info['sfreq'])
tidx = int(events[event_idx, 0] - raw.first_samp)
pytest.raises(ValueError, fix_stim_artifact, raw, events=np.array([]))
raw = fix_stim_artifact(raw, events=None, event_id=1, tmin=tmin,
tmax=tmax, mode='linear', stim_channel='STI 014')
data, times = raw[:, (tidx + tmin_samp):(tidx + tmax_samp)]
diff_data0 = np.diff(data[0])
diff_data0 -= np.mean(diff_data0)
assert_array_almost_equal(diff_data0, np.zeros(len(diff_data0)))
raw = fix_stim_artifact(raw, events, event_id=1, tmin=tmin,
tmax=tmax, mode='window')
data, times = raw[:, (tidx + tmin_samp):(tidx + tmax_samp)]
assert np.all(data) == 0.
# get epochs from raw with fixed data
tmin, tmax, event_id = -0.2, 0.5, 1
epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks,
preload=True, reject=None, baseline=None)
e_start = int(np.ceil(epochs.info['sfreq'] * epochs.tmin))
tmin_samp = int(-0.035 * epochs.info['sfreq']) - e_start
tmax_samp = int(-0.015 * epochs.info['sfreq']) - e_start
data_from_raw_fix = epochs.get_data()[:, :, tmin_samp:tmax_samp]
assert np.all(data_from_raw_fix) == 0.
# use window after stimulus
evoked = epochs.average()
tmin, tmax = 0.005, 0.045
tmin_samp = int(0.015 * evoked.info['sfreq']) - evoked.first
tmax_samp = int(0.035 * evoked.info['sfreq']) - evoked.first
evoked = fix_stim_artifact(evoked, tmin=tmin, tmax=tmax, mode='linear')
data = evoked.data[:, tmin_samp:tmax_samp]
diff_data0 = np.diff(data[0])
diff_data0 -= np.mean(diff_data0)
assert_array_almost_equal(diff_data0, np.zeros(len(diff_data0)))
evoked = fix_stim_artifact(evoked, tmin=tmin, tmax=tmax, mode='window')
data = evoked.data[:, tmin_samp:tmax_samp]
assert np.all(data) == 0.
|
from django.template.loader import render_to_string
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
from weblate.addons.models import ADDONS, Addon
from weblate.trans.models import Change
from weblate.utils.docs import get_doc_url
GUIDELINES = []
def register(cls):
GUIDELINES.append(cls)
return cls
class Guideline:
description = ""
group = False
url = ""
anchor = ""
def __init__(self, component):
self.component = component
self.passed = self.is_passing()
def is_passing(self):
raise NotImplementedError()
def is_relevant(self):
return True
def get_url(self):
url = reverse(self.url, kwargs=self.component.get_reverse_url_kwargs())
if self.anchor:
url = f"{url}#{self.anchor}"
return url
def get_doc_url(self, user=None):
return ""
class Group(Guideline):
group = True
def is_passing(self):
# Not used
return False
def get_url(self):
# Not used
return ""
@register
class VCSGroup(Group):
description = _("Version control integration")
def get_doc_url(self, user=None):
return get_doc_url("vcs", user=user)
@register
class HookGuideline(Guideline):
description = _(
"Configure repository hooks for automated flow of updates to Weblate."
)
url = "settings"
anchor = "vcs"
def is_passing(self):
return self.component.change_set.filter(action=Change.ACTION_HOOK).exists()
def is_relevant(self):
return not self.component.is_repo_link
def get_url(self):
return self.get_doc_url()
def get_doc_url(self, user=None):
return get_doc_url("admin/continuous", "update-vcs", user=user)
@register
class PushGuideline(Guideline):
description = _(
"Configure push URL for automated flow of translations from Weblate."
)
url = "settings"
anchor = "vcs"
def is_passing(self):
return self.component.can_push()
def get_doc_url(self, user=None):
return get_doc_url("admin/continuous", "push-changes", user=user)
@register
class CommunityGroup(Group):
description = _("Building community")
def get_doc_url(self, user=None):
return get_doc_url("devel/community", user=user)
@register
class InstructionsGuideline(Guideline):
description = _("Define translation instructions to give translators a guideline.")
def is_passing(self):
return bool(self.component.project.instructions)
def get_url(self):
return reverse(
"settings", kwargs=self.component.project.get_reverse_url_kwargs()
)
def get_doc_url(self, user=None):
return get_doc_url("admin/projects", "project", user=user)
@register
class LicenseGuideline(Guideline):
description = _("Make your translations available under a libre license.")
url = "settings"
anchor = "basic"
def is_passing(self):
return self.component.libre_license
def get_doc_url(self, user=None):
return "https://choosealicense.com/"
@register
class AlertGuideline(Guideline):
description = _("Fix this component to clear its alerts.")
url = "component"
anchor = "alerts"
def is_passing(self):
return not self.component.all_alerts
def get_doc_url(self, user=None):
return get_doc_url("devel/alerts", user=user)
@register
class ContextGroup(Group):
description = _("Provide context to the translators")
def get_doc_url(self, user=None):
return get_doc_url("admin/translating", "additional", user=user)
@register
class ScreenshotGuideline(Guideline):
description = _("Add screenshots to show where strings are being used.")
url = "screenshots"
def is_passing(self):
from weblate.screenshots.models import Screenshot
return Screenshot.objects.filter(translation__component=self.component).exists()
def get_doc_url(self, user=None):
return get_doc_url("admin/translating", "screenshots", user=user)
@register
class FlagsGuideline(Guideline):
description = _("Use flags to indicate special strings in your translation.")
url = "settings"
anchor = "translation"
def is_passing(self):
return (
bool(self.component.check_flags)
or self.component.source_translation.unit_set.exclude(
extra_flags=""
).exists()
)
def get_doc_url(self, user=None):
return get_doc_url("admin/checks", "custom-checks", user=user)
@register
class SafeHTMLGuideline(Guideline):
description = _("Add safe-html flag to avoid dangerous HTML from translators.")
url = "settings"
anchor = "translation"
def is_relevant(self):
return self.component.source_translation.unit_set.filter(
source__contains="<a "
).exists()
def is_passing(self):
return (
"safe-html" in self.component.check_flags
or self.component.source_translation.unit_set.filter(
extra_flags__contains="safe-html"
).exists()
)
def get_doc_url(self, user=None):
return get_doc_url("user/checks", "check-safe-html", user=user)
@register
class AddonsGroup(Group):
description = _("Workflow customization")
def get_doc_url(self, user=None):
return get_doc_url("admin/addons", user=user)
class AddonGuideline(Guideline):
addon = ""
url = "addons"
def is_passing(self):
return (
Addon.objects.filter_component(self.component)
.filter(name=self.addon)
.exists()
)
def is_relevant(self):
if self.addon not in ADDONS:
return False
addon = ADDONS[self.addon]
return addon.can_install(self.component, None)
def get_doc_url(self, user=None):
return get_doc_url(
"admin/addons", ADDONS[self.addon].get_doc_anchor(), user=user
)
@property
def description(self):
return render_to_string(
"trans/guide/addon.html",
{
"name": ADDONS[self.addon].verbose,
"description": ADDONS[self.addon].description,
},
)
@register
class LanguageConsistencyGuideline(AddonGuideline):
addon = "weblate.consistency.languages"
def is_relevant(self):
if self.component.project.component_set.count() <= 1:
return False
return super().is_relevant()
@register
class LinguasGuideline(AddonGuideline):
addon = "weblate.gettext.linguas"
@register
class ConfigureGuideline(AddonGuideline):
addon = "weblate.gettext.configure"
|
import io
import os
import pytest
from nikola import __main__
from .helper import cd, patch_config
from .test_demo_build import prepare_demo_site
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
)
def test_index_in_sitemap(build, output_dir):
"""
Test that the correct path is in sitemap, and not the wrong one.
The correct path ends in /foo/ because this is where we deploy to.
"""
sitemap_path = os.path.join(output_dir, "sitemap.xml")
with io.open(sitemap_path, "r", encoding="utf8") as inf:
sitemap_data = inf.read()
assert "<loc>https://example.com/foo/</loc>" in sitemap_data
@pytest.fixture(scope="module")
def build(target_dir):
"""Fill the site with demo content and build it."""
prepare_demo_site(target_dir)
patch_config(
target_dir,
('SITE_URL = "https://example.com/"', 'SITE_URL = "https://example.com/foo/"'),
("# URL_TYPE = 'rel_path'", "URL_TYPE = 'absolute'"),
)
with cd(target_dir):
__main__.main(["build"])
|
import os
import os.path
import sys
from coverage.misc import CoverageException, isolate_module
from coverage.plugin import CoveragePlugin, FileTracer, FileReporter
os = isolate_module(os)
class Plugins(object):
"""The currently loaded collection of coverage.py plugins."""
def __init__(self):
self.order = []
self.names = {}
self.file_tracers = []
self.configurers = []
self.context_switchers = []
self.current_module = None
self.debug = None
@classmethod
def load_plugins(cls, modules, config, debug=None):
"""Load plugins from `modules`.
Returns a Plugins object with the loaded and configured plugins.
"""
plugins = cls()
plugins.debug = debug
for module in modules:
plugins.current_module = module
__import__(module)
mod = sys.modules[module]
coverage_init = getattr(mod, "coverage_init", None)
if not coverage_init:
raise CoverageException(
"Plugin module %r didn't define a coverage_init function" % module
)
options = config.get_plugin_options(module)
coverage_init(plugins, options)
plugins.current_module = None
return plugins
def add_file_tracer(self, plugin):
"""Add a file tracer plugin.
`plugin` is an instance of a third-party plugin class. It must
implement the :meth:`CoveragePlugin.file_tracer` method.
"""
self._add_plugin(plugin, self.file_tracers)
def add_configurer(self, plugin):
"""Add a configuring plugin.
`plugin` is an instance of a third-party plugin class. It must
implement the :meth:`CoveragePlugin.configure` method.
"""
self._add_plugin(plugin, self.configurers)
def add_dynamic_context(self, plugin):
"""Add a dynamic context plugin.
`plugin` is an instance of a third-party plugin class. It must
implement the :meth:`CoveragePlugin.dynamic_context` method.
"""
self._add_plugin(plugin, self.context_switchers)
def add_noop(self, plugin):
"""Add a plugin that does nothing.
This is only useful for testing the plugin support.
"""
self._add_plugin(plugin, None)
def _add_plugin(self, plugin, specialized):
"""Add a plugin object.
`plugin` is a :class:`CoveragePlugin` instance to add. `specialized`
is a list to append the plugin to.
"""
plugin_name = "%s.%s" % (self.current_module, plugin.__class__.__name__)
if self.debug and self.debug.should('plugin'):
self.debug.write("Loaded plugin %r: %r" % (self.current_module, plugin))
labelled = LabelledDebug("plugin %r" % (self.current_module,), self.debug)
plugin = DebugPluginWrapper(plugin, labelled)
# pylint: disable=attribute-defined-outside-init
plugin._coverage_plugin_name = plugin_name
plugin._coverage_enabled = True
self.order.append(plugin)
self.names[plugin_name] = plugin
if specialized is not None:
specialized.append(plugin)
def __nonzero__(self):
return bool(self.order)
__bool__ = __nonzero__
def __iter__(self):
return iter(self.order)
def get(self, plugin_name):
"""Return a plugin by name."""
return self.names[plugin_name]
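# --- Illustrative sketch (not part of coverage.py) ---
# The smallest module `Plugins.load_plugins` could import: a plugin module
# must expose a coverage_init(reg, options) entry point and register its
# plugin objects there.
class _ExampleConfigurer(CoveragePlugin):
    """Hypothetical plugin implementing only configure()."""
    def configure(self, config):
        # The config object supports get_option/set_option.
        config.set_option("run:branch", True)
def _example_coverage_init(reg, options):
    # A real plugin module would name this function `coverage_init`.
    reg.add_configurer(_ExampleConfigurer())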
class LabelledDebug(object):
"""A Debug writer, but with labels for prepending to the messages."""
def __init__(self, label, debug, prev_labels=()):
self.labels = list(prev_labels) + [label]
self.debug = debug
def add_label(self, label):
"""Add a label to the writer, and return a new `LabelledDebug`."""
return LabelledDebug(label, self.debug, self.labels)
def message_prefix(self):
"""The prefix to use on messages, combining the labels."""
prefixes = self.labels + ['']
return ":\n".join(" "*i+label for i, label in enumerate(prefixes))
def write(self, message):
"""Write `message`, but with the labels prepended."""
self.debug.write("%s%s" % (self.message_prefix(), message))
class DebugPluginWrapper(CoveragePlugin):
"""Wrap a plugin, and use debug to report on what it's doing."""
def __init__(self, plugin, debug):
super(DebugPluginWrapper, self).__init__()
self.plugin = plugin
self.debug = debug
def file_tracer(self, filename):
tracer = self.plugin.file_tracer(filename)
self.debug.write("file_tracer(%r) --> %r" % (filename, tracer))
if tracer:
debug = self.debug.add_label("file %r" % (filename,))
tracer = DebugFileTracerWrapper(tracer, debug)
return tracer
def file_reporter(self, filename):
reporter = self.plugin.file_reporter(filename)
self.debug.write("file_reporter(%r) --> %r" % (filename, reporter))
if reporter:
debug = self.debug.add_label("file %r" % (filename,))
reporter = DebugFileReporterWrapper(filename, reporter, debug)
return reporter
def dynamic_context(self, frame):
context = self.plugin.dynamic_context(frame)
self.debug.write("dynamic_context(%r) --> %r" % (frame, context))
return context
def find_executable_files(self, src_dir):
executable_files = self.plugin.find_executable_files(src_dir)
self.debug.write("find_executable_files(%r) --> %r" % (src_dir, executable_files))
return executable_files
def configure(self, config):
self.debug.write("configure(%r)" % (config,))
self.plugin.configure(config)
def sys_info(self):
return self.plugin.sys_info()
class DebugFileTracerWrapper(FileTracer):
"""A debugging `FileTracer`."""
def __init__(self, tracer, debug):
self.tracer = tracer
self.debug = debug
def _show_frame(self, frame):
"""A short string identifying a frame, for debug messages."""
return "%s@%d" % (
os.path.basename(frame.f_code.co_filename),
frame.f_lineno,
)
def source_filename(self):
sfilename = self.tracer.source_filename()
self.debug.write("source_filename() --> %r" % (sfilename,))
return sfilename
def has_dynamic_source_filename(self):
has = self.tracer.has_dynamic_source_filename()
self.debug.write("has_dynamic_source_filename() --> %r" % (has,))
return has
def dynamic_source_filename(self, filename, frame):
dyn = self.tracer.dynamic_source_filename(filename, frame)
self.debug.write("dynamic_source_filename(%r, %s) --> %r" % (
filename, self._show_frame(frame), dyn,
))
return dyn
def line_number_range(self, frame):
pair = self.tracer.line_number_range(frame)
self.debug.write("line_number_range(%s) --> %r" % (self._show_frame(frame), pair))
return pair
class DebugFileReporterWrapper(FileReporter):
"""A debugging `FileReporter`."""
def __init__(self, filename, reporter, debug):
super(DebugFileReporterWrapper, self).__init__(filename)
self.reporter = reporter
self.debug = debug
def relative_filename(self):
ret = self.reporter.relative_filename()
self.debug.write("relative_filename() --> %r" % (ret,))
return ret
def lines(self):
ret = self.reporter.lines()
self.debug.write("lines() --> %r" % (ret,))
return ret
def excluded_lines(self):
ret = self.reporter.excluded_lines()
self.debug.write("excluded_lines() --> %r" % (ret,))
return ret
def translate_lines(self, lines):
ret = self.reporter.translate_lines(lines)
self.debug.write("translate_lines(%r) --> %r" % (lines, ret))
return ret
def translate_arcs(self, arcs):
ret = self.reporter.translate_arcs(arcs)
self.debug.write("translate_arcs(%r) --> %r" % (arcs, ret))
return ret
def no_branch_lines(self):
ret = self.reporter.no_branch_lines()
self.debug.write("no_branch_lines() --> %r" % (ret,))
return ret
def exit_counts(self):
ret = self.reporter.exit_counts()
self.debug.write("exit_counts() --> %r" % (ret,))
return ret
def arcs(self):
ret = self.reporter.arcs()
self.debug.write("arcs() --> %r" % (ret,))
return ret
def source(self):
ret = self.reporter.source()
self.debug.write("source() --> %d chars" % (len(ret),))
return ret
def source_token_lines(self):
ret = list(self.reporter.source_token_lines())
self.debug.write("source_token_lines() --> %d tokens" % (len(ret),))
return ret
|
import logging
import pysmarthab
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD
# pylint: disable=unused-import
from . import DOMAIN
_LOGGER = logging.getLogger(__name__)
class SmartHabConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""SmartHab config flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
def _show_setup_form(self, user_input=None, errors=None):
"""Show the setup form to the user."""
if user_input is None:
user_input = {}
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(
CONF_EMAIL, default=user_input.get(CONF_EMAIL, "")
): str,
vol.Required(CONF_PASSWORD): str,
}
),
errors=errors or {},
)
async def async_step_user(self, user_input=None):
"""Handle a flow initiated by the user."""
errors = {}
if user_input is None:
return self._show_setup_form(user_input, None)
username = user_input[CONF_EMAIL]
password = user_input[CONF_PASSWORD]
# Check if already configured
if self.unique_id is None:
await self.async_set_unique_id(username)
self._abort_if_unique_id_configured()
# Setup connection with SmartHab API
hub = pysmarthab.SmartHab()
try:
await hub.async_login(username, password)
# Verify that passed in configuration works
if hub.is_logged_in():
return self.async_create_entry(
title=username, data={CONF_EMAIL: username, CONF_PASSWORD: password}
)
errors["base"] = "invalid_auth"
except pysmarthab.RequestFailedException:
_LOGGER.exception("Error while trying to reach SmartHab API")
errors["base"] = "service"
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected error during login")
errors["base"] = "unknown"
return self._show_setup_form(user_input, errors)
async def async_step_import(self, import_info):
"""Handle import from legacy config."""
return await self.async_step_user(import_info)
|
from os import mkdir, path
CHUNK_SIZE = 4096 * 10
diff_definition = {
'a': {
'a.txt': lambda: b'',
'c': {
'c.txt': lambda: b''
},
'd': {
'd.txt': lambda: (b'd' * CHUNK_SIZE) + b'd'
},
'e': {
'f': {},
'g': {
'g.txt': lambda: b'g'
},
'h': {
'h.txt': lambda: b'h'
},
'e.txt': lambda: b''
},
'crlf.txt': lambda: b'foo\r\nbar\r\n',
'crlftrailing.txt': lambda: b'foo\r\nbar\r\n\r\n',
},
'b': {
'b.txt': lambda: b'',
'c': {
'c.txt': lambda: b''
},
'd': {
'd.txt': lambda: (b'd' * CHUNK_SIZE) + b'd',
'd.1.txt': lambda: (b'D' * CHUNK_SIZE) + b'D',
'd.2.txt': lambda: (b'd' * CHUNK_SIZE) + b'D'
},
'e': {
'f': {
'f.txt': lambda: b''
},
'g': {
'g.txt': lambda: b''
},
'h': {
'h.txt': lambda: b'h'
},
'e.txt': lambda: b''
},
'lf.txt': lambda: b'foo\nbar\n',
'lftrailing.txt': lambda: b'foo\nbar\n\n',
}
}
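# Dicts above become directories and zero-argument lambdas become files, so
# file contents are generated lazily when make() walks the definition.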
CUR_DIR = path.dirname(__file__)
ROOT_DIR = path.join(CUR_DIR, 'diffs')
def make(definition=diff_definition, root_dir=ROOT_DIR):
if not path.exists(root_dir):
mkdir(root_dir, 0o755)
for k, v in definition.items():
file_path = path.join(root_dir, k)
if isinstance(v, dict):
make(v, file_path)
else:
with open(file_path, 'bw') as open_file:
open_file.write(v())
if __name__ == '__main__':
make()
|
from __future__ import print_function
import os
import urwid
from urwid.raw_display import Screen
from zope.interface import Interface, Attribute, implements
from twisted.application.service import Application
from twisted.application.internet import TCPServer
from twisted.cred.portal import Portal
from twisted.conch.interfaces import IConchUser, ISession
from twisted.conch.insults.insults import TerminalProtocol, ServerProtocol
from twisted.conch.manhole_ssh import (ConchFactory, TerminalRealm,
TerminalUser, TerminalSession, TerminalSessionTransport)
from twisted.python.components import Componentized, Adapter
class IUrwidUi(Interface):
"""Toplevel urwid widget
"""
toplevel = Attribute('Urwid Toplevel Widget')
palette = Attribute('Urwid Palette')
screen = Attribute('Urwid Screen')
loop = Attribute('Urwid Main Loop')
def create_urwid_toplevel():
"""Create a toplevel widget.
"""
def create_urwid_mainloop():
"""Create the urwid main loop.
"""
class IUrwidMind(Interface):
ui = Attribute('')
terminalProtocol = Attribute('')
terminal = Attribute('')
checkers = Attribute('')
avatar = Attribute('The avatar')
def push(data):
"""Push data"""
def draw():
"""Refresh the UI"""
class UrwidUi(object):
def __init__(self, urwid_mind):
self.mind = urwid_mind
self.toplevel = self.create_urwid_toplevel()
self.palette = self.create_urwid_palette()
self.screen = TwistedScreen(self.mind.terminalProtocol)
self.loop = self.create_urwid_mainloop()
def create_urwid_toplevel(self):
raise NotImplementedError
def create_urwid_palette(self):
return
def create_urwid_mainloop(self):
evl = urwid.TwistedEventLoop(manage_reactor=False)
loop = urwid.MainLoop(self.toplevel, screen=self.screen,
event_loop=evl,
unhandled_input=self.mind.unhandled_key,
palette=self.palette)
self.screen.loop = loop
loop.run()
return loop
class UnhandledKeyHandler(object):
def __init__(self, mind):
self.mind = mind
def push(self, key):
if isinstance(key, tuple):
pass
else:
f = getattr(self, 'key_%s' % key.replace(' ', '_'), None)
if f is None:
return
else:
return f(key)
def key_ctrl_c(self, key):
self.mind.terminal.loseConnection()
class UrwidMind(Adapter):
implements(IUrwidMind)
cred_checkers = []
ui = None
ui_factory = None
unhandled_key_factory = UnhandledKeyHandler
@property
def avatar(self):
return IConchUser(self.original)
def set_terminalProtocol(self, terminalProtocol):
self.terminalProtocol = terminalProtocol
self.terminal = terminalProtocol.terminal
self.unhandled_key_handler = self.unhandled_key_factory(self)
self.unhandled_key = self.unhandled_key_handler.push
self.ui = self.ui_factory(self)
def push(self, data):
self.ui.screen.push(data)
def draw(self):
self.ui.loop.draw_screen()
class TwistedScreen(Screen):
"""A Urwid screen which knows about the Twisted terminal protocol that is
driving it.
A Urwid screen is responsible for:
1. Input
2. Output
Input is achieved in normal urwid by passing a list of available readable
file descriptors to the event loop for polling/selecting etc. In the
Twisted situation, this is not necessary because Twisted polls the input
descriptors itself. Urwid allows this by being driven using the main loop
instance's `process_input` method which is triggered on Twisted protocol's
standard `dataReceived` method.
"""
def __init__(self, terminalProtocol):
# We will need these later
self.terminalProtocol = terminalProtocol
self.terminal = terminalProtocol.terminal
Screen.__init__(self)
self.colors = 16
self._pal_escape = {}
self.bright_is_bold = True
self.register_palette_entry(None, 'black', 'white')
urwid.signals.connect_signal(self, urwid.UPDATE_PALETTE_ENTRY,
self._on_update_palette_entry)
# Don't need to wait for anything to start
self._started = True
# Urwid Screen API
def get_cols_rows(self):
"""Get the size of the terminal as (cols, rows)
"""
return self.terminalProtocol.width, self.terminalProtocol.height
    def draw_screen(self, maxres, r):
"""Render a canvas to the terminal.
The canvas contains all the information required to render the Urwid
UI. The content method returns a list of rows as (attr, cs, text)
tuples. This very simple implementation iterates each row and simply
writes it out.
"""
(maxcol, maxrow) = maxres
#self.terminal.eraseDisplay()
lasta = None
for i, row in enumerate(r.content()):
self.terminal.cursorPosition(0, i)
for (attr, cs, text) in row:
if attr != lasta:
text = '%s%s' % (self._attr_to_escape(attr), text)
lasta = attr
#if cs or attr:
# print cs, attr
self.write(text)
cursor = r.get_cursor()
if cursor is not None:
self.terminal.cursorPosition(*cursor)
# XXX from base screen
def set_mouse_tracking(self, enable=True):
"""
Enable (or disable) mouse tracking.
After calling this function get_input will include mouse
click events along with keystrokes.
"""
if enable:
self.write(urwid.escape.MOUSE_TRACKING_ON)
else:
self.write(urwid.escape.MOUSE_TRACKING_OFF)
# twisted handles polling, so we don't need the loop to do it, we just
# push what we get to the loop from dataReceived.
def hook_event_loop(self, event_loop, callback):
self._urwid_callback = callback
self._evl = event_loop
def unhook_event_loop(self, event_loop):
pass
# Do nothing here either. Not entirely sure when it gets called.
def get_input(self, raw_keys=False):
return
def get_available_raw_input(self):
data = self._data
self._data = []
return data
# Twisted driven
def push(self, data):
"""Receive data from Twisted and push it into the urwid main loop.
We must here:
1. filter the input data against urwid's input filter.
2. Calculate escapes and other clever things using urwid's
`escape.process_keyqueue`.
3. Pass the calculated keys as a list to the Urwid main loop.
4. Redraw the screen
"""
self._data = list(map(ord, data))
self.parse_input(self._evl, self._urwid_callback)
self.loop.draw_screen()
# Convenience
def write(self, data):
self.terminal.write(data)
# Private
def _on_update_palette_entry(self, name, *attrspecs):
# copy the attribute to a dictionary containing the escape sequences
self._pal_escape[name] = self._attrspec_to_escape(
attrspecs[{16:0,1:1,88:2,256:3}[self.colors]])
def _attr_to_escape(self, a):
if a in self._pal_escape:
return self._pal_escape[a]
elif isinstance(a, urwid.AttrSpec):
return self._attrspec_to_escape(a)
# undefined attributes use default/default
# TODO: track and report these
return self._attrspec_to_escape(
urwid.AttrSpec('default','default'))
def _attrspec_to_escape(self, a):
"""
Convert AttrSpec instance a to an escape sequence for the terminal
>>> s = Screen()
>>> s.set_terminal_properties(colors=256)
>>> a2e = s._attrspec_to_escape
>>> a2e(s.AttrSpec('brown', 'dark green'))
'\\x1b[0;33;42m'
>>> a2e(s.AttrSpec('#fea,underline', '#d0d'))
'\\x1b[0;38;5;229;4;48;5;164m'
"""
if a.foreground_high:
fg = "38;5;%d" % a.foreground_number
elif a.foreground_basic:
if a.foreground_number > 7:
if self.bright_is_bold:
fg = "1;%d" % (a.foreground_number - 8 + 30)
else:
fg = "%d" % (a.foreground_number - 8 + 90)
else:
fg = "%d" % (a.foreground_number + 30)
else:
fg = "39"
st = "1;" * a.bold + "4;" * a.underline + "7;" * a.standout
if a.background_high:
bg = "48;5;%d" % a.background_number
elif a.background_basic:
if a.background_number > 7:
# this doesn't work on most terminals
bg = "%d" % (a.background_number - 8 + 100)
else:
bg = "%d" % (a.background_number + 40)
else:
bg = "49"
return urwid.escape.ESC + "[0;%s;%s%sm" % (fg, st, bg)
class UrwidTerminalProtocol(TerminalProtocol):
"""A terminal protocol that knows to proxy input and receive output from
Urwid.
    This integrates with the TwistedScreen in a 1:1 relationship.
"""
def __init__(self, urwid_mind):
self.urwid_mind = urwid_mind
self.width = 80
self.height = 24
def connectionMade(self):
self.urwid_mind.set_terminalProtocol(self)
self.terminalSize(self.height, self.width)
def terminalSize(self, height, width):
"""Resize the terminal.
"""
self.width = width
self.height = height
self.urwid_mind.ui.loop.screen_size = None
self.terminal.eraseDisplay()
self.urwid_mind.draw()
def dataReceived(self, data):
"""Received data from the connection.
This overrides the default implementation which parses and passes to
the keyReceived method. We don't do that here, and must not do that so
that Urwid can get the right juice (which includes things like mouse
tracking).
Instead we just pass the data to the screen instance's dataReceived,
which handles the proxying to Urwid.
"""
self.urwid_mind.push(data)
def _unhandled_input(self, input):
# evil
proceed = True
if hasattr(self.urwid_toplevel, 'app'):
proceed = self.urwid_toplevel.app.unhandled_input(self, input)
if not proceed:
return
if input == 'ctrl c':
self.terminal.loseConnection()
class UrwidServerProtocol(ServerProtocol):
def dataReceived(self, data):
self.terminalProtocol.dataReceived(data)
class UrwidUser(TerminalUser):
"""A terminal user that remembers its avatarId
The default implementation doesn't
"""
def __init__(self, original, avatarId):
TerminalUser.__init__(self, original, avatarId)
self.avatarId = avatarId
class UrwidTerminalSession(TerminalSession):
"""A terminal session that remembers the avatar and chained protocol for
later use. And implements a missing method for changed Window size.
Note: This implementation assumes that each SSH connection will only
request a single shell, which is not an entirely safe assumption, but is
by far the most common case.
"""
def openShell(self, proto):
"""Open a shell.
"""
self.chained_protocol = UrwidServerProtocol(
UrwidTerminalProtocol, IUrwidMind(self.original))
TerminalSessionTransport(
proto, self.chained_protocol,
IConchUser(self.original),
self.height, self.width)
def windowChanged(self, dimensions):
"""Called when the window size has changed.
"""
(h, w, x, y) = dimensions
self.chained_protocol.terminalProtocol.terminalSize(h, w)
class UrwidRealm(TerminalRealm):
"""Custom terminal realm class-configured to use our custom Terminal User
Terminal Session.
"""
def __init__(self, mind_factory):
self.mind_factory = mind_factory
def _getAvatar(self, avatarId):
comp = Componentized()
user = UrwidUser(comp, avatarId)
comp.setComponent(IConchUser, user)
sess = UrwidTerminalSession(comp)
comp.setComponent(ISession, sess)
mind = self.mind_factory(comp)
comp.setComponent(IUrwidMind, mind)
return user
def requestAvatar(self, avatarId, mind, *interfaces):
for i in interfaces:
if i is IConchUser:
return (IConchUser,
self._getAvatar(avatarId),
lambda: None)
raise NotImplementedError()
def create_server_factory(urwid_mind_factory):
"""Convenience to create a server factory with a portal that uses a realm
    serving the given urwid widget, authenticated against the provided checkers.
"""
rlm = UrwidRealm(urwid_mind_factory)
ptl = Portal(rlm, urwid_mind_factory.cred_checkers)
return ConchFactory(ptl)
def create_service(urwid_mind_factory, port, *args, **kw):
"""Convenience to create a service for use in tac-ish situations.
"""
f = create_server_factory(urwid_mind_factory)
return TCPServer(port, f, *args, **kw)
def create_application(application_name, urwid_mind_factory,
port, *args, **kw):
"""Convenience to create an application suitable for tac file
"""
application = Application(application_name)
    svc = create_service(urwid_mind_factory, port, *args, **kw)
svc.setServiceParent(application)
return application
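# --- Illustrative sketch (assumption, not part of this module) ---
# Minimal wiring for a .tac file; the widget and mind below are hypothetical
# and a real deployment would also set cred_checkers on the mind class.
class _DemoUi(UrwidUi):
    def create_urwid_toplevel(self):
        return urwid.Filler(urwid.Text(u'hello over ssh'))
class _DemoMind(UrwidMind):
    ui_factory = _DemoUi
# application = create_application('demo', _DemoMind, 6022)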
|
from homeassistant.core import callback
from homeassistant.helpers.event import async_track_time_interval
class AugustSubscriberMixin:
"""Base implementation for a subscriber."""
def __init__(self, hass, update_interval):
"""Initialize an subscriber."""
super().__init__()
self._hass = hass
self._update_interval = update_interval
self._subscriptions = {}
self._unsub_interval = None
@callback
def async_subscribe_device_id(self, device_id, update_callback):
"""Add an callback subscriber.
Returns a callable that can be used to unsubscribe.
"""
if not self._subscriptions:
self._unsub_interval = async_track_time_interval(
self._hass, self._async_refresh, self._update_interval
)
self._subscriptions.setdefault(device_id, []).append(update_callback)
def _unsubscribe():
self.async_unsubscribe_device_id(device_id, update_callback)
return _unsubscribe
@callback
def async_unsubscribe_device_id(self, device_id, update_callback):
"""Remove a callback subscriber."""
self._subscriptions[device_id].remove(update_callback)
if not self._subscriptions[device_id]:
del self._subscriptions[device_id]
if not self._subscriptions:
self._unsub_interval()
self._unsub_interval = None
@callback
def async_signal_device_id_update(self, device_id):
"""Call the callbacks for a device_id."""
if not self._subscriptions.get(device_id):
return
for update_callback in self._subscriptions[device_id]:
update_callback()
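# --- Illustrative sketch (assumption, not part of this module) ---
# Typical flow: the callable returned by async_subscribe_device_id undoes the
# subscription and suits Entity.async_on_remove(); names are hypothetical.
def _example_subscribe(subscriber, device_id):
    """Hypothetical helper showing the subscribe/unsubscribe round trip."""
    @callback
    def _device_updated():
        # React to a polled refresh for this device_id.
        pass
    unsub = subscriber.async_subscribe_device_id(device_id, _device_updated)
    return unsub  # call later to stop updates (and the shared interval)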
|
import os
import datetime
import pytest
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from flask import current_app
from flask_principal import identity_changed, Identity
from sqlalchemy.sql import text
from lemur import create_app
from lemur.common.utils import parse_private_key
from lemur.database import db as _db
from lemur.auth.service import create_token
from lemur.tests.vectors import (
SAN_CERT_KEY,
INTERMEDIATE_KEY,
ROOTCA_CERT_STR,
ROOTCA_KEY,
)
from .factories import (
ApiKeyFactory,
AuthorityFactory,
NotificationFactory,
DestinationFactory,
CertificateFactory,
UserFactory,
RoleFactory,
SourceFactory,
EndpointFactory,
RotationPolicyFactory,
PendingCertificateFactory,
AsyncAuthorityFactory,
InvalidCertificateFactory,
CryptoAuthorityFactory,
CACertificateFactory,
)
def pytest_runtest_setup(item):
if "slow" in item.keywords and not item.config.getoption("--runslow"):
pytest.skip("need --runslow option to run")
if "incremental" in item.keywords:
previousfailed = getattr(item.parent, "_previousfailed", None)
if previousfailed is not None:
pytest.xfail("previous test failed ({0})".format(previousfailed.name))
def pytest_runtest_makereport(item, call):
if "incremental" in item.keywords:
if call.excinfo is not None:
parent = item.parent
parent._previousfailed = item
@pytest.yield_fixture(scope="session")
def app(request):
"""
    Creates a new Flask application for the duration of the test session.
Uses application factory `create_app`.
"""
_app = create_app(
config_path=os.path.dirname(os.path.realpath(__file__)) + "/conf.py"
)
ctx = _app.app_context()
ctx.push()
yield _app
ctx.pop()
@pytest.yield_fixture(scope="session")
def db(app, request):
_db.drop_all()
_db.engine.execute(text("CREATE EXTENSION IF NOT EXISTS pg_trgm"))
_db.create_all()
_db.app = app
UserFactory()
r = RoleFactory(name="admin")
u = UserFactory(roles=[r])
rp = RotationPolicyFactory(name="default")
ApiKeyFactory(user=u)
_db.session.commit()
yield _db
_db.drop_all()
@pytest.yield_fixture(scope="function")
def session(db, request):
"""
    Creates a new database session (with a working transaction)
    for the test duration.
"""
db.session.begin_nested()
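    # begin_nested() opens a SAVEPOINT; the rollback below discards each
    # test's writes while keeping the session-scoped seed data intact.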
yield db.session
db.session.rollback()
@pytest.yield_fixture(scope="function")
def client(app, session, client):
yield client
@pytest.fixture
def authority(session):
a = AuthorityFactory()
session.commit()
return a
@pytest.fixture
def crypto_authority(session):
a = CryptoAuthorityFactory()
session.commit()
return a
@pytest.fixture
def async_authority(session):
a = AsyncAuthorityFactory()
session.commit()
return a
@pytest.fixture
def destination(session):
d = DestinationFactory()
session.commit()
return d
@pytest.fixture
def source(session):
s = SourceFactory()
session.commit()
return s
@pytest.fixture
def notification(session):
n = NotificationFactory()
session.commit()
return n
@pytest.fixture
def certificate(session):
u = UserFactory()
a = AuthorityFactory()
c = CertificateFactory(user=u, authority=a)
session.commit()
return c
@pytest.fixture
def endpoint(session):
s = SourceFactory()
e = EndpointFactory(source=s)
session.commit()
return e
@pytest.fixture
def role(session):
r = RoleFactory()
session.commit()
return r
@pytest.fixture
def user(session):
u = UserFactory()
session.commit()
user_token = create_token(u)
token = {"Authorization": "Basic " + user_token}
return {"user": u, "token": token}
@pytest.fixture
def pending_certificate(session):
u = UserFactory()
a = AsyncAuthorityFactory()
p = PendingCertificateFactory(user=u, authority=a)
session.commit()
return p
@pytest.fixture
def pending_certificate_from_full_chain_ca(session):
u = UserFactory()
a = AuthorityFactory()
p = PendingCertificateFactory(user=u, authority=a)
session.commit()
return p
@pytest.fixture
def pending_certificate_from_partial_chain_ca(session):
u = UserFactory()
c = CACertificateFactory(body=ROOTCA_CERT_STR, private_key=ROOTCA_KEY, chain=None)
a = AuthorityFactory(authority_certificate=c)
p = PendingCertificateFactory(user=u, authority=a)
session.commit()
return p
@pytest.fixture
def invalid_certificate(session):
u = UserFactory()
a = AsyncAuthorityFactory()
i = InvalidCertificateFactory(user=u, authority=a)
session.commit()
return i
@pytest.fixture
def admin_user(session):
u = UserFactory()
admin_role = RoleFactory(name="admin")
u.roles.append(admin_role)
session.commit()
user_token = create_token(u)
token = {"Authorization": "Basic " + user_token}
return {"user": u, "token": token}
@pytest.fixture
def async_issuer_plugin():
from lemur.plugins.base import register
from .plugins.issuer_plugin import TestAsyncIssuerPlugin
register(TestAsyncIssuerPlugin)
return TestAsyncIssuerPlugin
@pytest.fixture
def issuer_plugin():
from lemur.plugins.base import register
from .plugins.issuer_plugin import TestIssuerPlugin
register(TestIssuerPlugin)
return TestIssuerPlugin
@pytest.fixture
def notification_plugin():
from lemur.plugins.base import register
from .plugins.notification_plugin import TestNotificationPlugin
register(TestNotificationPlugin)
return TestNotificationPlugin
@pytest.fixture
def destination_plugin():
from lemur.plugins.base import register
from .plugins.destination_plugin import TestDestinationPlugin
register(TestDestinationPlugin)
return TestDestinationPlugin
@pytest.fixture
def source_plugin():
from lemur.plugins.base import register
from .plugins.source_plugin import TestSourcePlugin
register(TestSourcePlugin)
return TestSourcePlugin
@pytest.yield_fixture(scope="function")
def logged_in_user(session, app):
with app.test_request_context():
identity_changed.send(current_app._get_current_object(), identity=Identity(1))
yield
@pytest.yield_fixture(scope="function")
def logged_in_admin(session, app):
with app.test_request_context():
identity_changed.send(current_app._get_current_object(), identity=Identity(2))
yield
@pytest.fixture
def private_key():
return parse_private_key(SAN_CERT_KEY)
@pytest.fixture
def issuer_private_key():
return parse_private_key(INTERMEDIATE_KEY)
@pytest.fixture
def cert_builder(private_key):
return (
x509.CertificateBuilder()
.subject_name(
x509.Name([x509.NameAttribute(x509.NameOID.COMMON_NAME, "foo.com")])
)
.issuer_name(
x509.Name([x509.NameAttribute(x509.NameOID.COMMON_NAME, "foo.com")])
)
.serial_number(1)
.public_key(private_key.public_key())
.not_valid_before(datetime.datetime(2017, 12, 22))
.not_valid_after(datetime.datetime(2040, 1, 1))
)
@pytest.fixture
def selfsigned_cert(cert_builder, private_key):
# cert_builder uses the same cert public key as 'private_key'
return cert_builder.sign(private_key, hashes.SHA256(), default_backend())
@pytest.fixture(scope="function")
def aws_credentials():
os.environ["AWS_ACCESS_KEY_ID"] = "testing"
os.environ["AWS_SECRET_ACCESS_KEY"] = "testing"
os.environ["AWS_SECURITY_TOKEN"] = "testing"
os.environ["AWS_SESSION_TOKEN"] = "testing"
|
import jsonschema
from jsonschema import ValidationError
from werkzeug.exceptions import abort
from flask import Flask
try:
from http import HTTPStatus
except ImportError:
import httplib as HTTPStatus
try:
import simplejson as json
except ImportError:
import json
from flask import jsonify
from flask import Response
from flask import request
from flasgger import Swagger
def validation_error_inform_error(err, data, schema):
"""
    Custom validation error handler which produces a 400 Bad Request
    response in case validation fails and returns the error
"""
abort(Response(
json.dumps({'error': str(err), 'data': data, 'schema': schema}),
status=HTTPStatus.BAD_REQUEST))
def validation_error_404(err, data, schema):
"""
    Custom validation error handler which produces a 404 Not Found
    response in case validation fails, instead of a 400 Bad Request
"""
abort(Response(status=HTTPStatus.NOT_FOUND))
def validation_error_try_to_accept(err, data, schema):
"""
Custom validation error handler which attempts alternative
validation
"""
if not isinstance(err, ValidationError):
abort(Response(err, status=HTTPStatus.BAD_REQUEST))
    alternative_schema = dict(schema)
    alternative_schema['properties']['running_time'].update({
        'description': "Film's running time",
        'type': 'integer',
        'example': 169
    })
try:
        jsonschema.validate(data, alternative_schema)
except ValidationError as err:
abort(Response(str(err), status=400))
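# All three handlers follow flasgger's validation_error_handler contract:
# they receive (err, data, schema) and must abort (or raise) to reject the
# request; returning normally lets the view run as if validation had passed.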
app = Flask(__name__)
swag = Swagger(app, validation_error_handler=validation_error_inform_error)
@app.route('/film', methods=['POST'])
@swag.validate('Film')
def create_film():
"""
Film creation endpoint
---
tags:
- film
summary: Creates a new Film
consumes:
- application/json
produces:
- application/json
parameters:
- in: body
name: body
description:
Film object that needs to be persisted to the database
required: true
schema:
id: Film
required:
- title
- director
- distributor
- release_date
- running_time
properties:
title:
description: Film's title
type: string
example: Interstellar
director:
description: Films's director
type: string
example: Christopher Nolan
distributor:
description: Films's distributor
type: string
example: Warner Bros. Pictures
release_date:
description: Films's release date
type: string
example: October 26, 2014
running_time:
description: Films's running time
type: string
example: 169 minutes
responses:
200:
description: Successful operation
400:
description: Invalid input
"""
return jsonify(request.json), HTTPStatus.OK
@app.route('/not_found/film', methods=['POST'])
@swag.validate('Film', validation_error_handler=validation_error_404)
def create_film_2():
"""
Film creation endpoint
---
tags:
- film
summary: Creates a new Film
consumes:
- application/json
produces:
- application/json
parameters:
- in: body
name: body
description:
Film object that needs to be persisted to the database
required: true
schema:
$ref: '#/definitions/Film'
responses:
200:
description: Successful operation
400:
description: Invalid input
"""
return jsonify(request.json), HTTPStatus.OK
@app.route('/retry/film', methods=['POST'])
@swag.validate('Film', validation_error_handler=validation_error_try_to_accept)
def create_film_3():
"""
Film creation endpoint
---
tags:
- film
summary: Creates a new Film
consumes:
- application/json
produces:
- application/json
parameters:
- in: body
name: body
description:
Film object that needs to be persisted to the database
required: true
schema:
$ref: '#/definitions/Film'
responses:
200:
description: Successful operation
400:
description: Invalid input
"""
return jsonify(request.json), HTTPStatus.OK
def test_swag(client, specs_data):
"""
    This test runs automatically in Travis CI
:param client: Flask app test client
:param specs_data: {'url': {swag_specs}} for every spec in app
"""
invalid_film = {
'_id': "594dba7b2879334e411f3dcc",
'title': "The Last Airbender",
'director': "M. Night Shyamalan",
'distributor': "Paramount Pictures",
'running_time': 103,
'release_date': "June 30, 2010"
}
super_invalid_film = {
'title': "The Last Airbender",
'release_date': 2010
}
with client.post(
'/film', data=json.dumps(invalid_film),
content_type='application/json') as response:
assert response.status_code == HTTPStatus.BAD_REQUEST
received = json.loads(response.data.decode('utf-8'))
assert received.get('error') is not None
assert received.get('schema') is not None
assert received.get('data') == invalid_film
with client.post(
'/not_found/film', data=json.dumps(invalid_film),
content_type='application/json') as response:
assert response.status_code == HTTPStatus.NOT_FOUND
with client.post(
'/retry/film', data=json.dumps(invalid_film),
content_type='application/json') as response:
assert response.status_code == HTTPStatus.OK
with client.post(
'/retry/film', data=json.dumps(super_invalid_film),
content_type='application/json') as response:
assert response.status_code == HTTPStatus.BAD_REQUEST
if __name__ == "__main__":
app.run(debug=True)
|
from datetime import timedelta
from homeassistant.const import POWER_WATT, TIME_HOURS, TIME_MINUTES, TIME_SECONDS
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.async_mock import patch
async def test_state(hass):
"""Test derivative sensor state."""
config = {
"sensor": {
"platform": "derivative",
"name": "derivative",
"source": "sensor.energy",
"unit": "kW",
"round": 2,
}
}
assert await async_setup_component(hass, "sensor", config)
entity_id = config["sensor"]["source"]
hass.states.async_set(entity_id, 1, {})
await hass.async_block_till_done()
now = dt_util.utcnow() + timedelta(seconds=3600)
with patch("homeassistant.util.dt.utcnow", return_value=now):
hass.states.async_set(entity_id, 1, {}, force_update=True)
await hass.async_block_till_done()
state = hass.states.get("sensor.derivative")
assert state is not None
    # Testing an energy sensor at 1 kWh for 1 hour = 0 kW
assert round(float(state.state), config["sensor"]["round"]) == 0.0
assert state.attributes.get("unit_of_measurement") == "kW"
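# A note on the expected values in these tests (inferred from the assertions
# in this file, not from the platform source): the derivative platform reports
# (new_value - old_value) / elapsed_time, scaled by unit_prefix and unit_time.
# Above, the source stays at 1 kWh for one hour, so (1 - 1) / 1 h = 0 kW.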
async def _setup_sensor(hass, config):
default_config = {
"platform": "derivative",
"name": "power",
"source": "sensor.energy",
"round": 2,
}
config = {"sensor": dict(default_config, **config)}
assert await async_setup_component(hass, "sensor", config)
entity_id = config["sensor"]["source"]
hass.states.async_set(entity_id, 0, {})
await hass.async_block_till_done()
return config, entity_id
async def setup_tests(hass, config, times, values, expected_state):
"""Test derivative sensor state."""
config, entity_id = await _setup_sensor(hass, config)
    # Testing an energy sensor with non-monotonic intervals and values
for time, value in zip(times, values):
now = dt_util.utcnow() + timedelta(seconds=time)
with patch("homeassistant.util.dt.utcnow", return_value=now):
hass.states.async_set(entity_id, value, {}, force_update=True)
await hass.async_block_till_done()
state = hass.states.get("sensor.power")
assert state is not None
assert round(float(state.state), config["sensor"]["round"]) == expected_state
return state
async def test_dataSet1(hass):
"""Test derivative sensor state."""
await setup_tests(
hass,
{"unit_time": TIME_SECONDS},
times=[20, 30, 40, 50],
values=[10, 30, 5, 0],
expected_state=-0.5,
)
async def test_dataSet2(hass):
"""Test derivative sensor state."""
await setup_tests(
hass,
{"unit_time": TIME_SECONDS},
times=[20, 30],
values=[5, 0],
expected_state=-0.5,
)
async def test_dataSet3(hass):
"""Test derivative sensor state."""
state = await setup_tests(
hass,
{"unit_time": TIME_SECONDS},
times=[20, 30],
values=[5, 10],
expected_state=0.5,
)
assert state.attributes.get("unit_of_measurement") == f"/{TIME_SECONDS}"
async def test_dataSet4(hass):
"""Test derivative sensor state."""
await setup_tests(
hass,
{"unit_time": TIME_SECONDS},
times=[20, 30],
values=[5, 5],
expected_state=0,
)
async def test_dataSet5(hass):
"""Test derivative sensor state."""
await setup_tests(
hass,
{"unit_time": TIME_SECONDS},
times=[20, 30],
values=[10, -10],
expected_state=-2,
)
async def test_dataSet6(hass):
"""Test derivative sensor state."""
await setup_tests(hass, {}, times=[0, 60], values=[0, 1 / 60], expected_state=1)
async def test_data_moving_average_for_discrete_sensor(hass):
"""Test derivative sensor state."""
    # We simulate the following situation:
    # The temperature rises 1 °C per minute for 30 minutes.
    # There is a data point every 30 seconds, however, the sensor returns
    # the temperature rounded down to an integer value.
    # We use a time window of 10 minutes and can therefore expect
    # (because the true derivative is 1 °C/min) an error of less than 10%.
temperature_values = []
for temperature in range(30):
temperature_values += [temperature] * 2 # two values per minute
time_window = 600
times = list(range(0, 1800 + 30, 30))
config, entity_id = await _setup_sensor(
hass,
{
"time_window": {"seconds": time_window},
"unit_time": TIME_MINUTES,
"round": 1,
},
    )  # ten-minute window
for time, value in zip(times, temperature_values):
now = dt_util.utcnow() + timedelta(seconds=time)
with patch("homeassistant.util.dt.utcnow", return_value=now):
hass.states.async_set(entity_id, value, {}, force_update=True)
await hass.async_block_till_done()
if time_window < time < times[-1] - time_window:
state = hass.states.get("sensor.power")
derivative = round(float(state.state), config["sensor"]["round"])
                # The sensor rounds down, so each reading is off by at most
                # 1 °C. Over the 10-minute window the temperature changes by
                # 10 °C at the true derivative of 1 °C/min, so the relative
                # error is bounded by 1/10 = 10% (plus a numerical epsilon).
                assert abs(1 - derivative) <= 0.1 + 1e-6
async def test_prefix(hass):
"""Test derivative sensor state using a power source."""
config = {
"sensor": {
"platform": "derivative",
"name": "derivative",
"source": "sensor.power",
"round": 2,
"unit_prefix": "k",
}
}
assert await async_setup_component(hass, "sensor", config)
entity_id = config["sensor"]["source"]
hass.states.async_set(
entity_id, 1000, {"unit_of_measurement": POWER_WATT}, force_update=True
)
await hass.async_block_till_done()
now = dt_util.utcnow() + timedelta(seconds=3600)
with patch("homeassistant.util.dt.utcnow", return_value=now):
hass.states.async_set(
entity_id, 1000, {"unit_of_measurement": POWER_WATT}, force_update=True
)
await hass.async_block_till_done()
state = hass.states.get("sensor.derivative")
assert state is not None
    # Testing a power sensor constant at 1000 W for 1 hour = 0 kW/h
assert round(float(state.state), config["sensor"]["round"]) == 0.0
assert state.attributes.get("unit_of_measurement") == f"kW/{TIME_HOURS}"
async def test_suffix(hass):
"""Test derivative sensor state using a network counter source."""
config = {
"sensor": {
"platform": "derivative",
"name": "derivative",
"source": "sensor.bytes_per_second",
"round": 2,
"unit_prefix": "k",
"unit_time": TIME_SECONDS,
}
}
assert await async_setup_component(hass, "sensor", config)
entity_id = config["sensor"]["source"]
hass.states.async_set(entity_id, 1000, {})
await hass.async_block_till_done()
now = dt_util.utcnow() + timedelta(seconds=10)
with patch("homeassistant.util.dt.utcnow", return_value=now):
hass.states.async_set(entity_id, 1000, {}, force_update=True)
await hass.async_block_till_done()
state = hass.states.get("sensor.derivative")
assert state is not None
    # The counter is constant at 1000 bytes/s over 10 s, so the derivative is 0 kbytes/s²
assert round(float(state.state), config["sensor"]["round"]) == 0.0
|
import logging
from homeassistant.components.switch import DOMAIN, SwitchEntity
from . import DOMAIN as CASETA_DOMAIN, LutronCasetaDevice
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Lutron Caseta switch platform.
Adds switches from the Caseta bridge associated with the config_entry as
switch entities.
"""
entities = []
bridge = hass.data[CASETA_DOMAIN][config_entry.entry_id]
switch_devices = bridge.get_devices_by_domain(DOMAIN)
for switch_device in switch_devices:
        entity = LutronCasetaSwitch(switch_device, bridge)
entities.append(entity)
async_add_entities(entities, True)
return True
class LutronCasetaSwitch(LutronCasetaDevice, SwitchEntity):
"""Representation of a Lutron Caseta switch."""
async def async_turn_on(self, **kwargs):
"""Turn the switch on."""
await self._smartbridge.turn_on(self.device_id)
async def async_turn_off(self, **kwargs):
"""Turn the switch off."""
await self._smartbridge.turn_off(self.device_id)
@property
def is_on(self):
"""Return true if device is on."""
return self._device["current_state"] > 0
async def async_update(self):
"""Update when forcing a refresh of the device."""
self._device = self._smartbridge.get_device_by_id(self.device_id)
_LOGGER.debug(self._device)
|
import voluptuous as vol
from homeassistant.components.switch import (
ENTITY_ID_FORMAT,
PLATFORM_SCHEMA,
SwitchEntity,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_FRIENDLY_NAME,
CONF_ENTITY_PICTURE_TEMPLATE,
CONF_ICON_TEMPLATE,
CONF_SWITCHES,
CONF_UNIQUE_ID,
CONF_VALUE_TEMPLATE,
STATE_OFF,
STATE_ON,
)
from homeassistant.core import callback
from homeassistant.exceptions import TemplateError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import async_generate_entity_id
from homeassistant.helpers.reload import async_setup_reload_service
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.script import Script
from .const import CONF_AVAILABILITY_TEMPLATE, DOMAIN, PLATFORMS
from .template_entity import TemplateEntity
_VALID_STATES = [STATE_ON, STATE_OFF, "true", "false"]
ON_ACTION = "turn_on"
OFF_ACTION = "turn_off"
SWITCH_SCHEMA = vol.All(
cv.deprecated(ATTR_ENTITY_ID),
vol.Schema(
{
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_ICON_TEMPLATE): cv.template,
vol.Optional(CONF_ENTITY_PICTURE_TEMPLATE): cv.template,
vol.Optional(CONF_AVAILABILITY_TEMPLATE): cv.template,
vol.Required(ON_ACTION): cv.SCRIPT_SCHEMA,
vol.Required(OFF_ACTION): cv.SCRIPT_SCHEMA,
vol.Optional(ATTR_FRIENDLY_NAME): cv.string,
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
vol.Optional(CONF_UNIQUE_ID): cv.string,
}
),
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_SWITCHES): cv.schema_with_slug_keys(SWITCH_SCHEMA)}
)
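# An illustrative configuration.yaml snippet accepted by the schema above
# (entity and service names are hypothetical):
#
#   switch:
#     - platform: template
#       switches:
#         skylight:
#           friendly_name: "Skylight"
#           value_template: "{{ is_state('sensor.skylight', 'on') }}"
#           turn_on:
#             service: switch.turn_on
#             data:
#               entity_id: switch.skylight_relay
#           turn_off:
#             service: switch.turn_off
#             data:
#               entity_id: switch.skylight_relay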
async def _async_create_entities(hass, config):
"""Create the Template switches."""
switches = []
for device, device_config in config[CONF_SWITCHES].items():
friendly_name = device_config.get(ATTR_FRIENDLY_NAME, device)
state_template = device_config.get(CONF_VALUE_TEMPLATE)
icon_template = device_config.get(CONF_ICON_TEMPLATE)
entity_picture_template = device_config.get(CONF_ENTITY_PICTURE_TEMPLATE)
availability_template = device_config.get(CONF_AVAILABILITY_TEMPLATE)
on_action = device_config[ON_ACTION]
off_action = device_config[OFF_ACTION]
unique_id = device_config.get(CONF_UNIQUE_ID)
switches.append(
SwitchTemplate(
hass,
device,
friendly_name,
state_template,
icon_template,
entity_picture_template,
availability_template,
on_action,
off_action,
unique_id,
)
)
return switches
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the template switches."""
await async_setup_reload_service(hass, DOMAIN, PLATFORMS)
async_add_entities(await _async_create_entities(hass, config))
class SwitchTemplate(TemplateEntity, SwitchEntity, RestoreEntity):
"""Representation of a Template switch."""
def __init__(
self,
hass,
device_id,
friendly_name,
state_template,
icon_template,
entity_picture_template,
availability_template,
on_action,
off_action,
unique_id,
):
"""Initialize the Template switch."""
super().__init__(
availability_template=availability_template,
icon_template=icon_template,
entity_picture_template=entity_picture_template,
)
self.entity_id = async_generate_entity_id(
ENTITY_ID_FORMAT, device_id, hass=hass
)
self._name = friendly_name
self._template = state_template
domain = __name__.split(".")[-2]
self._on_script = Script(hass, on_action, friendly_name, domain)
self._off_script = Script(hass, off_action, friendly_name, domain)
self._state = False
self._unique_id = unique_id
@callback
def _update_state(self, result):
super()._update_state(result)
if isinstance(result, TemplateError):
self._state = None
return
if isinstance(result, bool):
self._state = result
return
if isinstance(result, str):
self._state = result.lower() in ("true", STATE_ON)
return
self._state = False
async def async_added_to_hass(self):
"""Register callbacks."""
if self._template is None:
# restore state after startup
await super().async_added_to_hass()
state = await self.async_get_last_state()
if state:
self._state = state.state == STATE_ON
# no need to listen for events
else:
self.add_template_attribute(
"_state", self._template, None, self._update_state
)
await super().async_added_to_hass()
@property
def name(self):
"""Return the name of the switch."""
return self._name
@property
def unique_id(self):
"""Return the unique id of this switch."""
return self._unique_id
@property
def is_on(self):
"""Return true if device is on."""
return self._state
@property
def should_poll(self):
"""Return the polling state."""
return False
async def async_turn_on(self, **kwargs):
"""Fire the on action."""
await self._on_script.async_run(context=self._context)
if self._template is None:
self._state = True
self.async_write_ha_state()
async def async_turn_off(self, **kwargs):
"""Fire the off action."""
await self._off_script.async_run(context=self._context)
if self._template is None:
self._state = False
self.async_write_ha_state()
@property
def assumed_state(self):
"""State is assumed, if no template given."""
return self._template is None
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import subprocess
from absl import logging
from absl.testing import _bazelize_command
from absl.testing import absltest
class TestFailFastTest(absltest.TestCase):
"""Integration tests: Runs a test binary with fail fast.
This is done by setting the fail fast environment variable
"""
def setUp(self):
self._test_name = 'absl/testing/tests/absltest_fail_fast_test_helper'
def _run_fail_fast(self, fail_fast):
"""Runs the py_test binary in a subprocess.
Args:
fail_fast: string, the fail fast value.
Returns:
(stdout, exit_code) tuple of (string, int).
"""
env = {}
if 'SYSTEMROOT' in os.environ:
# This is used by the random module on Windows to locate crypto
# libraries.
env['SYSTEMROOT'] = os.environ['SYSTEMROOT']
additional_args = []
if fail_fast is not None:
env['TESTBRIDGE_TEST_RUNNER_FAIL_FAST'] = fail_fast
proc = subprocess.Popen(
args=([_bazelize_command.get_executable_path(self._test_name)]
+ additional_args),
env=env,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
universal_newlines=True)
stdout = proc.communicate()[0]
logging.info('output: %s', stdout)
return stdout, proc.wait()
def test_no_fail_fast(self):
out, exit_code = self._run_fail_fast(None)
self.assertEqual(1, exit_code)
self.assertIn('class A test A', out)
self.assertIn('class A test B', out)
self.assertIn('class A test C', out)
self.assertIn('class A test D', out)
self.assertIn('class A test E', out)
def test_empty_fail_fast(self):
out, exit_code = self._run_fail_fast('')
self.assertEqual(1, exit_code)
self.assertIn('class A test A', out)
self.assertIn('class A test B', out)
self.assertIn('class A test C', out)
self.assertIn('class A test D', out)
self.assertIn('class A test E', out)
def test_fail_fast_1(self):
out, exit_code = self._run_fail_fast('1')
self.assertEqual(1, exit_code)
self.assertIn('class A test A', out)
self.assertIn('class A test B', out)
self.assertIn('class A test C', out)
self.assertNotIn('class A test D', out)
self.assertNotIn('class A test E', out)
def test_fail_fast_0(self):
out, exit_code = self._run_fail_fast('0')
self.assertEqual(1, exit_code)
self.assertIn('class A test A', out)
self.assertIn('class A test B', out)
self.assertIn('class A test C', out)
self.assertIn('class A test D', out)
self.assertIn('class A test E', out)
if __name__ == '__main__':
absltest.main()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import itertools
import re
import types
import unittest
from absl._collections_abc import abc
from absl.testing import absltest
import six
try:
from absl.testing import _parameterized_async
except (ImportError, SyntaxError):
_parameterized_async = None
_ADDR_RE = re.compile(r'\<([a-zA-Z0-9_\-\.]+) object at 0x[a-fA-F0-9]+\>')
_NAMED = object()
_ARGUMENT_REPR = object()
_NAMED_DICT_KEY = 'testcase_name'
class NoTestsError(Exception):
"""Raised when parameterized decorators do not generate any tests."""
class DuplicateTestNameError(Exception):
"""Raised when a parameterized test has the same test name multiple times."""
def __init__(self, test_class_name, new_test_name, original_test_name):
super(DuplicateTestNameError, self).__init__(
'Duplicate parameterized test name in {}: generated test name {!r} '
'(generated from {!r}) already exists. Consider using '
'named_parameters() to give your tests unique names and/or renaming '
'the conflicting test method.'.format(
test_class_name, new_test_name, original_test_name))
def _clean_repr(obj):
return _ADDR_RE.sub(r'<\1>', repr(obj))
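# For example, repr strings like '<foo.Bar object at 0x7f3a2c>' are rewritten
# to '<foo.Bar>' so that generated test names stay stable across runs.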
def _non_string_or_bytes_iterable(obj):
return (isinstance(obj, abc.Iterable) and
not isinstance(obj, six.text_type) and
not isinstance(obj, six.binary_type))
def _format_parameter_list(testcase_params):
if isinstance(testcase_params, abc.Mapping):
return ', '.join('%s=%s' % (argname, _clean_repr(value))
for argname, value in six.iteritems(testcase_params))
elif _non_string_or_bytes_iterable(testcase_params):
return ', '.join(map(_clean_repr, testcase_params))
else:
return _format_parameter_list((testcase_params,))
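# For example, _format_parameter_list({'a': 1}) yields "a=1" and
# _format_parameter_list((1, 'x')) yields "1, 'x'".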
class _ParameterizedTestIter(object):
"""Callable and iterable class for producing new test cases."""
def __init__(self, test_method, testcases, naming_type, original_name=None):
"""Returns concrete test functions for a test and a list of parameters.
The naming_type is used to determine the name of the concrete
functions as reported by the unittest framework. If naming_type is
_FIRST_ARG, the testcases must be tuples, and the first element must
have a string representation that is a valid Python identifier.
Args:
test_method: The decorated test method.
testcases: (list of tuple/dict) A list of parameter tuples/dicts for
individual test invocations.
naming_type: The test naming type, either _NAMED or _ARGUMENT_REPR.
original_name: The original test method name. When decorated on a test
method, None is passed to __init__ and test_method.__name__ is used.
Note test_method.__name__ might be different than the original defined
test method because of the use of other decorators. A more accurate
value is set by TestGeneratorMetaclass.__new__ later.
"""
self._test_method = test_method
self.testcases = testcases
self._naming_type = naming_type
if original_name is None:
original_name = test_method.__name__
self._original_name = original_name
self.__name__ = _ParameterizedTestIter.__name__
def __call__(self, *args, **kwargs):
raise RuntimeError('You appear to be running a parameterized test case '
'without having inherited from parameterized.'
'TestCase. This is bad because none of '
'your test cases are actually being run. You may also '
'be using another decorator before the parameterized '
'one, in which case you should reverse the order.')
def __iter__(self):
test_method = self._test_method
naming_type = self._naming_type
def make_bound_param_test(testcase_params):
@functools.wraps(test_method)
def bound_param_test(self):
if isinstance(testcase_params, abc.Mapping):
return test_method(self, **testcase_params)
elif _non_string_or_bytes_iterable(testcase_params):
return test_method(self, *testcase_params)
else:
return test_method(self, testcase_params)
if naming_type is _NAMED:
# Signal the metaclass that the name of the test function is unique
# and descriptive.
bound_param_test.__x_use_name__ = True
testcase_name = None
if isinstance(testcase_params, abc.Mapping):
if _NAMED_DICT_KEY not in testcase_params:
raise RuntimeError(
'Dict for named tests must contain key "%s"' % _NAMED_DICT_KEY)
# Create a new dict to avoid modifying the supplied testcase_params.
testcase_name = testcase_params[_NAMED_DICT_KEY]
testcase_params = {k: v for k, v in six.iteritems(testcase_params)
if k != _NAMED_DICT_KEY}
elif _non_string_or_bytes_iterable(testcase_params):
if not isinstance(testcase_params[0], six.string_types):
raise RuntimeError(
'The first element of named test parameters is the test name '
'suffix and must be a string')
testcase_name = testcase_params[0]
testcase_params = testcase_params[1:]
else:
raise RuntimeError(
'Named tests must be passed a dict or non-string iterable.')
test_method_name = self._original_name
# Support PEP-8 underscore style for test naming if used.
if (test_method_name.startswith('test_')
and testcase_name
and not testcase_name.startswith('_')):
test_method_name += '_'
bound_param_test.__name__ = test_method_name + str(testcase_name)
elif naming_type is _ARGUMENT_REPR:
# If it's a generator, convert it to a tuple and treat them as
# parameters.
if isinstance(testcase_params, types.GeneratorType):
testcase_params = tuple(testcase_params)
# The metaclass creates a unique, but non-descriptive method name for
# _ARGUMENT_REPR tests using an indexed suffix.
# To keep test names descriptive, only the original method name is used.
# To make sure test names are unique, we add a unique descriptive suffix
# __x_params_repr__ for every test.
params_repr = '(%s)' % (_format_parameter_list(testcase_params),)
bound_param_test.__x_params_repr__ = params_repr
else:
raise RuntimeError('%s is not a valid naming type.' % (naming_type,))
bound_param_test.__doc__ = '%s(%s)' % (
bound_param_test.__name__, _format_parameter_list(testcase_params))
if test_method.__doc__:
bound_param_test.__doc__ += '\n%s' % (test_method.__doc__,)
if (_parameterized_async and
_parameterized_async.iscoroutinefunction(test_method)):
return _parameterized_async.async_wrapped(bound_param_test)
return bound_param_test
return (make_bound_param_test(c) for c in self.testcases)
def _modify_class(class_object, testcases, naming_type):
assert not getattr(class_object, '_test_params_reprs', None), (
'Cannot add parameters to %s. Either it already has parameterized '
'methods, or its super class is also a parameterized class.' % (
class_object,))
  # NOTE: _test_params_reprs is private to parameterized.TestCase and its
  # metaclass; do not use it outside of those classes.
class_object._test_params_reprs = test_params_reprs = {}
for name, obj in six.iteritems(class_object.__dict__.copy()):
if (name.startswith(unittest.TestLoader.testMethodPrefix)
and isinstance(obj, types.FunctionType)):
delattr(class_object, name)
methods = {}
_update_class_dict_for_param_test_case(
class_object.__name__, methods, test_params_reprs, name,
_ParameterizedTestIter(obj, testcases, naming_type, name))
for meth_name, meth in six.iteritems(methods):
setattr(class_object, meth_name, meth)
def _parameter_decorator(naming_type, testcases):
"""Implementation of the parameterization decorators.
Args:
naming_type: The naming type.
testcases: Testcase parameters.
Raises:
NoTestsError: Raised when the decorator generates no tests.
Returns:
A function for modifying the decorated object.
"""
def _apply(obj):
if isinstance(obj, type):
_modify_class(obj, testcases, naming_type)
return obj
else:
return _ParameterizedTestIter(obj, testcases, naming_type)
if (len(testcases) == 1 and
not isinstance(testcases[0], tuple) and
not isinstance(testcases[0], abc.Mapping)):
# Support using a single non-tuple parameter as a list of test cases.
# Note that the single non-tuple parameter can't be Mapping either, which
# means a single dict parameter case.
assert _non_string_or_bytes_iterable(testcases[0]), (
'Single parameter argument must be a non-string non-Mapping iterable')
testcases = testcases[0]
if not isinstance(testcases, abc.Sequence):
testcases = list(testcases)
if not testcases:
raise NoTestsError(
'parameterized test decorators did not generate any tests. '
'Make sure you specify non-empty parameters, '
'and do not reuse generators more than once.')
return _apply
def parameters(*testcases):
"""A decorator for creating parameterized tests.
See the module docstring for a usage example.
Args:
*testcases: Parameters for the decorated method, either a single
iterable, or a list of tuples/dicts/objects (for tests with only one
argument).
Raises:
NoTestsError: Raised when the decorator generates no tests.
Returns:
A test generator to be handled by TestGeneratorMetaclass.
"""
return _parameter_decorator(_ARGUMENT_REPR, testcases)
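# A minimal illustrative sketch of parameters() usage, as seen from client
# code via `from absl.testing import parameterized` (the test class is
# hypothetical, not part of this module):
#
#   class AdditionTest(parameterized.TestCase):
#
#     @parameterized.parameters((1, 2, 3), (4, 5, 9))
#     def test_addition(self, op1, op2, expected):
#       self.assertEqual(op1 + op2, expected)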
def named_parameters(*testcases):
"""A decorator for creating parameterized tests.
See the module docstring for a usage example. For every parameter tuple
passed, the first element of the tuple should be a string and will be appended
to the name of the test method. Each parameter dict passed must have a value
for the key "testcase_name", the string representation of that value will be
appended to the name of the test method.
Args:
*testcases: Parameters for the decorated method, either a single iterable,
or a list of tuples or dicts.
Raises:
NoTestsError: Raised when the decorator generates no tests.
Returns:
A test generator to be handled by TestGeneratorMetaclass.
"""
return _parameter_decorator(_NAMED, testcases)
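# A minimal illustrative sketch of named_parameters() usage (hypothetical
# test class, not part of this module); the generated tests are named
# test_abs_positive and test_abs_negative:
#
#   class AbsTest(parameterized.TestCase):
#
#     @parameterized.named_parameters(
#         ('positive', 1, 1),
#         ('negative', -1, 1),
#     )
#     def test_abs(self, value, expected):
#       self.assertEqual(abs(value), expected)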
def product(**testgrid):
"""A decorator for running tests over cartesian product of parameters values.
See the module docstring for a usage example. The test will be run for every
possible combination of the parameters.
Args:
**testgrid: A mapping of parameter names and their possible values.
      Possible values should be given as either a list or a tuple.
Raises:
NoTestsError: Raised when the decorator generates no tests.
Returns:
A test generator to be handled by TestGeneratorMetaclass.
"""
for name, values in testgrid.items():
assert isinstance(values, list) or isinstance(values, tuple), (
'Values of {} must be given as list or tuple, found {}'.format(
name, type(values)))
# Create all possible combinations of parameters as a cartesian product
# of parameter values.
  testcases = [
      dict(zip(testgrid.keys(), combination))
      for combination in itertools.product(*testgrid.values())
  ]
return _parameter_decorator(_ARGUMENT_REPR, testcases)
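# A minimal illustrative sketch of product() usage (hypothetical test class,
# not part of this module):
#
#   class ModuloTest(parameterized.TestCase):
#
#     @parameterized.product(num=[0, 20, 80], modulo=[2, 4])
#     def test_modulo(self, num, modulo):
#       self.assertLess(num % modulo, modulo)
#
# This generates one test per (num, modulo) combination, six in total.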
class TestGeneratorMetaclass(type):
"""Metaclass for adding tests generated by parameterized decorators."""
def __new__(mcs, class_name, bases, dct):
    # NOTE: _test_params_reprs is private to parameterized.TestCase and its
    # metaclass; do not use it outside of those classes.
test_params_reprs = dct.setdefault('_test_params_reprs', {})
for name, obj in six.iteritems(dct.copy()):
if (name.startswith(unittest.TestLoader.testMethodPrefix) and
_non_string_or_bytes_iterable(obj)):
# NOTE: `obj` might not be a _ParameterizedTestIter in two cases:
# 1. a class-level iterable named test* that isn't a test, such as
# a list of something. Such attributes get deleted from the class.
#
# 2. If a decorator is applied to the parameterized test, e.g.
# @morestuff
# @parameterized.parameters(...)
# def test_foo(...): ...
#
        # This is OK so long as the underlying parameterized function state
        # is forwarded (e.g. by using functools.wraps() and without
        # explicitly accessing the internal attributes).
if isinstance(obj, _ParameterizedTestIter):
# Update the original test method name so it's more accurate.
# The mismatch might happen when another decorator is used inside
          # the parameterized decorators, and the inner decorator doesn't
# preserve its __name__.
obj._original_name = name
iterator = iter(obj)
dct.pop(name)
_update_class_dict_for_param_test_case(
class_name, dct, test_params_reprs, name, iterator)
# If the base class is a subclass of parameterized.TestCase, inherit its
# _test_params_reprs too.
for base in bases:
      # Check if the base has _test_params_reprs first, then check if it's a
      # subclass of parameterized.TestCase. Otherwise when this is called for
      # the parameterized.TestCase definition itself, this would raise because
      # TestCase is not defined yet. This works as long as absltest.TestCase
      # does not define _test_params_reprs.
base_test_params_reprs = getattr(base, '_test_params_reprs', None)
if base_test_params_reprs and issubclass(base, TestCase):
for test_method, test_method_id in base_test_params_reprs.items():
          # test_method may exist in both the base class and this class.
          # This class's method overrides the base class's, which is why a
          # repr is only inherited if it does not already exist.
test_params_reprs.setdefault(test_method, test_method_id)
return type.__new__(mcs, class_name, bases, dct)
def _update_class_dict_for_param_test_case(
test_class_name, dct, test_params_reprs, name, iterator):
"""Adds individual test cases to a dictionary.
Args:
test_class_name: The name of the class tests are added to.
dct: The target dictionary.
test_params_reprs: The dictionary for mapping names to test IDs.
name: The original name of the test case.
iterator: The iterator generating the individual test cases.
Raises:
DuplicateTestNameError: Raised when a test name occurs multiple times.
RuntimeError: If non-parameterized functions are generated.
"""
for idx, func in enumerate(iterator):
assert callable(func), 'Test generators must yield callables, got %r' % (
func,)
if not (getattr(func, '__x_use_name__', None) or
getattr(func, '__x_params_repr__', None)):
raise RuntimeError(
'{}.{} generated a test function without using the parameterized '
'decorators. Only tests generated using the decorators are '
'supported.'.format(test_class_name, name))
if getattr(func, '__x_use_name__', False):
original_name = func.__name__
new_name = original_name
else:
original_name = name
new_name = '%s%d' % (original_name, idx)
if new_name in dct:
raise DuplicateTestNameError(test_class_name, new_name, original_name)
dct[new_name] = func
test_params_reprs[new_name] = getattr(func, '__x_params_repr__', '')
@six.add_metaclass(TestGeneratorMetaclass)
class TestCase(absltest.TestCase):
"""Base class for test cases using the parameters decorator."""
# visibility: private; do not call outside this class.
def _get_params_repr(self):
return self._test_params_reprs.get(self._testMethodName, '')
def __str__(self):
params_repr = self._get_params_repr()
if params_repr:
params_repr = ' ' + params_repr
return '{}{} ({})'.format(
self._testMethodName, params_repr,
unittest.util.strclass(self.__class__))
def id(self):
"""Returns the descriptive ID of the test.
This is used internally by the unittesting framework to get a name
for the test to be used in reports.
Returns:
The test id.
"""
base = super(TestCase, self).id()
params_repr = self._get_params_repr()
if params_repr:
# We include the params in the id so that, when reported in the
# test.xml file, the value is more informative than just "test_foo0".
# Use a space to separate them so that it's copy/paste friendly and
# easy to identify the actual test id.
return '{} {}'.format(base, params_repr)
else:
return base
# This function is kept CamelCase because it's used as a class's base class.
def CoopTestCase(other_base_class): # pylint: disable=invalid-name
"""Returns a new base class with a cooperative metaclass base.
This enables the TestCase to be used in combination
with other base classes that have custom metaclasses, such as
mox.MoxTestBase.
Only works with metaclasses that do not override type.__new__.
Example:
from absl.testing import parameterized
class ExampleTest(parameterized.CoopTestCase(OtherTestCase)):
...
Args:
other_base_class: (class) A test case base class.
Returns:
A new class object.
"""
metaclass = type(
'CoopMetaclass',
(other_base_class.__metaclass__,
TestGeneratorMetaclass), {})
return metaclass(
'CoopTestCase',
(other_base_class, TestCase), {})
|
from homeassistant.components.switch import SwitchEntity
from .const import ATTR_DISCOVER_DEVICES
from .entity import HMDevice
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the HomeMatic switch platform."""
if discovery_info is None:
return
devices = []
for conf in discovery_info[ATTR_DISCOVER_DEVICES]:
new_device = HMSwitch(conf)
devices.append(new_device)
add_entities(devices, True)
class HMSwitch(HMDevice, SwitchEntity):
"""Representation of a HomeMatic switch."""
@property
def is_on(self):
"""Return True if switch is on."""
try:
return self._hm_get_state() > 0
except TypeError:
return False
@property
def today_energy_kwh(self):
"""Return the current power usage in kWh."""
if "ENERGY_COUNTER" in self._data:
try:
return self._data["ENERGY_COUNTER"] / 1000
            except TypeError:
                # ENERGY_COUNTER may be None before the first update; note a
                # ZeroDivisionError can never occur when dividing by 1000.
                return 0
return None
def turn_on(self, **kwargs):
"""Turn the switch on."""
self._hmdevice.on(self._channel)
def turn_off(self, **kwargs):
"""Turn the switch off."""
self._hmdevice.off(self._channel)
def _init_data_struct(self):
"""Generate the data dictionary (self._data) from metadata."""
self._state = "STATE"
self._data.update({self._state: None})
# Need sensor values for SwitchPowermeter
for node in self._hmdevice.SENSORNODE:
self._data.update({node: None})
|
from datetime import timedelta
import logging
from typing import Tuple
import pyeverlights
import voluptuous as vol
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_EFFECT,
ATTR_HS_COLOR,
PLATFORM_SCHEMA,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_EFFECT,
LightEntity,
)
from homeassistant.const import CONF_HOSTS
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
import homeassistant.util.color as color_util
_LOGGER = logging.getLogger(__name__)
SUPPORT_EVERLIGHTS = SUPPORT_EFFECT | SUPPORT_BRIGHTNESS | SUPPORT_COLOR
SCAN_INTERVAL = timedelta(minutes=1)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_HOSTS): vol.All(cv.ensure_list, [cv.string])}
)
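# An illustrative configuration.yaml snippet for this schema (the address is
# hypothetical):
#
#   light:
#     - platform: everlights
#       hosts:
#         - 192.168.1.50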
def color_rgb_to_int(red: int, green: int, blue: int) -> int:
"""Return a RGB color as an integer."""
return red * 256 * 256 + green * 256 + blue
def color_int_to_rgb(value: int) -> Tuple[int, int, int]:
"""Return an RGB tuple from an integer."""
return (value >> 16, (value >> 8) & 0xFF, value & 0xFF)
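# For example, color_rgb_to_int(255, 0, 0) == 0xFF0000 == 16711680, and
# color_int_to_rgb(16711680) == (255, 0, 0); the two helpers are inverses.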
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the EverLights lights from configuration.yaml."""
lights = []
for ipaddr in config[CONF_HOSTS]:
api = pyeverlights.EverLights(ipaddr, async_get_clientsession(hass))
try:
status = await api.get_status()
effects = await api.get_all_patterns()
except pyeverlights.ConnectionError as err:
raise PlatformNotReady from err
else:
lights.append(EverLightsLight(api, pyeverlights.ZONE_1, status, effects))
lights.append(EverLightsLight(api, pyeverlights.ZONE_2, status, effects))
async_add_entities(lights)
class EverLightsLight(LightEntity):
"""Representation of a Flux light."""
def __init__(self, api, channel, status, effects):
"""Initialize the light."""
self._api = api
self._channel = channel
self._status = status
self._effects = effects
self._mac = status["mac"]
self._error_reported = False
self._hs_color = [255, 255]
self._brightness = 255
self._effect = None
self._available = True
@property
def unique_id(self) -> str:
"""Return a unique ID."""
return f"{self._mac}-{self._channel}"
@property
def available(self) -> bool:
"""Return True if entity is available."""
return self._available
@property
def name(self):
"""Return the name of the device."""
return f"EverLights {self._mac} Zone {self._channel}"
@property
def is_on(self):
"""Return true if device is on."""
return self._status[f"ch{self._channel}Active"] == 1
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
return self._brightness
@property
def hs_color(self):
"""Return the color property."""
return self._hs_color
@property
def effect(self):
"""Return the effect property."""
return self._effect
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_EVERLIGHTS
@property
def effect_list(self):
"""Return the list of supported effects."""
return self._effects
async def async_turn_on(self, **kwargs):
"""Turn the light on."""
hs_color = kwargs.get(ATTR_HS_COLOR, self._hs_color)
brightness = kwargs.get(ATTR_BRIGHTNESS, self._brightness)
effect = kwargs.get(ATTR_EFFECT)
if effect is not None:
colors = await self._api.set_pattern_by_id(self._channel, effect)
rgb = color_int_to_rgb(colors[0])
hsv = color_util.color_RGB_to_hsv(*rgb)
hs_color = hsv[:2]
brightness = hsv[2] / 100 * 255
else:
rgb = color_util.color_hsv_to_RGB(*hs_color, brightness / 255 * 100)
colors = [color_rgb_to_int(*rgb)]
await self._api.set_pattern(self._channel, colors)
self._hs_color = hs_color
self._brightness = brightness
self._effect = effect
async def async_turn_off(self, **kwargs):
"""Turn the light off."""
await self._api.clear_pattern(self._channel)
async def async_update(self):
"""Synchronize state with control box."""
try:
self._status = await self._api.get_status()
except pyeverlights.ConnectionError:
if self._available:
_LOGGER.warning("EverLights control box connection lost")
self._available = False
else:
if not self._available:
_LOGGER.warning("EverLights control box connection restored")
self._available = True
|
import pytest
import numpy as np
import tensorflow as tf
import jax
import torch
from tensornetwork.backends import backend_factory
#pylint: disable=line-too-long
from tensornetwork.matrixproductstates.mpo import FiniteMPO, BaseMPO, InfiniteMPO
@pytest.fixture(
name="backend_dtype_values",
params=[('numpy', np.float64), ('numpy', np.complex128),
('tensorflow', tf.float64), ('tensorflow', tf.complex128),
('pytorch', torch.float64), ('jax', np.float64),
('jax', np.complex128)])
def backend_dtype(request):
return request.param
def test_base_mpo_init(backend_dtype_values):
backend = backend_factory.get_backend(backend_dtype_values[0])
dtype = backend_dtype_values[1]
tensors = [
backend.randn((1, 5, 2, 2), dtype=dtype),
backend.randn((5, 5, 2, 2), dtype=dtype),
backend.randn((5, 1, 2, 2), dtype=dtype)
]
mpo = BaseMPO(tensors=tensors, backend=backend, name='test')
assert mpo.backend is backend
assert mpo.dtype == dtype
np.testing.assert_allclose(mpo.bond_dimensions, [1, 5, 5, 1])
def test_base_mpo_raises():
backend = backend_factory.get_backend('numpy')
tensors = [
backend.randn((1, 5, 2, 2), dtype=np.float64),
backend.randn((5, 5, 2, 2), dtype=np.float64),
backend.randn((5, 1, 2, 2), dtype=np.float32)
]
with pytest.raises(TypeError):
BaseMPO(tensors=tensors, backend=backend)
mpo = BaseMPO(tensors=[], backend=backend)
mpo.tensors = tensors
with pytest.raises(TypeError):
mpo.dtype
def test_finite_mpo_raises(backend):
tensors = [np.random.rand(2, 5, 2, 2), np.random.rand(5, 1, 2, 2)]
with pytest.raises(ValueError):
FiniteMPO(tensors=tensors, backend=backend)
tensors = [np.random.rand(1, 5, 2, 2), np.random.rand(5, 2, 2, 2)]
with pytest.raises(ValueError):
FiniteMPO(tensors=tensors, backend=backend)
def test_infinite_mpo_raises(backend):
tensors = [np.random.rand(2, 5, 2, 2), np.random.rand(5, 3, 2, 2)]
with pytest.raises(ValueError):
InfiniteMPO(tensors=tensors, backend=backend)
def test_infinite_mpo_roll(backend):
tensors = [np.random.rand(5, 5, 2, 2), np.random.rand(5, 5, 2, 2)]
mpo = InfiniteMPO(tensors=tensors, backend=backend)
mpo.roll(1)
np.testing.assert_allclose(mpo.tensors[0], tensors[1])
np.testing.assert_allclose(mpo.tensors[1], tensors[0])
mpo.roll(1)
np.testing.assert_allclose(mpo.tensors[0], tensors[0])
np.testing.assert_allclose(mpo.tensors[1], tensors[1])
def test_len(backend):
tensors = [
np.random.rand(1, 5, 2, 2),
np.random.rand(5, 5, 2, 2),
np.random.rand(5, 1, 2, 2)
]
mpo = BaseMPO(tensors=tensors, backend=backend)
assert len(mpo) == 3
|
import pytest
from homeassistant import auth, data_entry_flow
from homeassistant.auth import auth_store
from homeassistant.auth.providers import legacy_api_password
@pytest.fixture
def store(hass):
"""Mock store."""
return auth_store.AuthStore(hass)
@pytest.fixture
def provider(hass, store):
"""Mock provider."""
return legacy_api_password.LegacyApiPasswordAuthProvider(
hass, store, {"type": "legacy_api_password", "api_password": "test-password"}
)
@pytest.fixture
def manager(hass, store, provider):
"""Mock manager."""
return auth.AuthManager(hass, store, {(provider.type, provider.id): provider}, {})
async def test_create_new_credential(manager, provider):
"""Test that we create a new credential."""
credentials = await provider.async_get_or_create_credentials({})
assert credentials.is_new is True
user = await manager.async_get_or_create_user(credentials)
assert user.name == legacy_api_password.LEGACY_USER_NAME
assert user.is_active
async def test_only_one_credentials(manager, provider):
"""Call create twice will return same credential."""
credentials = await provider.async_get_or_create_credentials({})
await manager.async_get_or_create_user(credentials)
credentials2 = await provider.async_get_or_create_credentials({})
assert credentials2.id == credentials.id
assert credentials2.is_new is False
async def test_verify_login(hass, provider):
"""Test login using legacy api password auth provider."""
provider.async_validate_login("test-password")
with pytest.raises(legacy_api_password.InvalidAuthError):
provider.async_validate_login("invalid-password")
async def test_login_flow_works(hass, manager):
"""Test wrong config."""
result = await manager.login_flow.async_init(handler=("legacy_api_password", None))
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
result = await manager.login_flow.async_configure(
flow_id=result["flow_id"], user_input={"password": "not-hello"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"]["base"] == "invalid_auth"
result = await manager.login_flow.async_configure(
flow_id=result["flow_id"], user_input={"password": "test-password"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
|
import time
import arctic.serialization.numpy_records as anr
from tests.unit.serialization.serialization_test_data import _mixed_test_data as input_test_data
df_serializer = anr.DataFrameSerializer()
def _bench(rounds, input_df, fast):
fast = bool(fast)
anr.set_fast_check_df_serializable(fast)
start = time.time()
for i in range(rounds):
df_serializer.can_convert_to_records_without_objects(input_df, 'symA')
print("Time per iteration (fast={}): {}".format(fast, (time.time() - start)/rounds))
# Results suggest significant speed improvements for
# (1) large df with objects
# Time per iteration (fast=False): 0.0281402397156
# Time per iteration (fast=True): 0.00866063833237
# (2) large multi-column df
# Time per iteration (fast=False): 0.00556221961975
# Time per iteration (fast=True): 0.00276621818542
# (3) large multi-index df
# Time per iteration (fast=False): 0.00640722036362
# Time per iteration (fast=True): 0.00154552936554
def assess_speed(df_kind):
rounds = 100
input_df = input_test_data()[df_kind][0]
orig_config = anr.FAST_CHECK_DF_SERIALIZABLE
try:
_bench(rounds, input_df, fast=False)
_bench(rounds, input_df, fast=True)
finally:
anr.FAST_CHECK_DF_SERIALIZABLE = orig_config
def main():
for df_kind in ('large_with_some_objects', 'large_multi_index', 'large_multi_column'):
assess_speed(df_kind)
if __name__ == '__main__':
main()
|
import os
import pytest
import requests
from vcr import VCR
from subliminal.video import Episode, Movie
from subliminal.refiners.omdb import OMDBClient, refine
APIKEY = '00000000'
vcr = VCR(path_transformer=lambda path: path + '.yaml',
record_mode=os.environ.get('VCR_RECORD_MODE', 'once'),
cassette_library_dir=os.path.realpath(os.path.join('tests', 'cassettes', 'omdb')))
@pytest.fixture()
def client():
return OMDBClient()
def test_session():
session = requests.Session()
client = OMDBClient(session=session)
assert client.session is session
def test_headers():
client = OMDBClient(headers={'X-Test': 'Value'})
assert 'X-Test' in client.session.headers
assert client.session.headers['X-Test'] == 'Value'
@pytest.mark.integration
@vcr.use_cassette
def test_get_id(client):
data = client.get('tt0770828')
assert data['Title'] == 'Man of Steel'
@pytest.mark.integration
@vcr.use_cassette
def test_get_wrong_id(client):
data = client.get('tt9999999')
assert data is None
@pytest.mark.integration
@vcr.use_cassette
def test_get_title(client):
data = client.get(title='Man of Steel')
assert data['imdbID'] == 'tt0770828'
@pytest.mark.integration
@vcr.use_cassette
def test_get_wrong_title(client):
data = client.get(title='Meen of Stal')
assert data is None
@pytest.mark.integration
@vcr.use_cassette
def test_search(client):
data = client.search('Man of Steel')
assert data['totalResults'] == '23'
assert len(data['Search']) == 10
assert data['Search'][0]['imdbID'] == 'tt0770828'
assert data['Search'][0]['Year'] == '2013'
@pytest.mark.integration
@vcr.use_cassette
def test_search_wrong_title(client):
data = client.search('Meen of Stal')
assert data is None
@pytest.mark.integration
@vcr.use_cassette
def test_search_type(client):
data = client.search('Man of Steel', type='movie')
assert data['totalResults'] == '21'
@pytest.mark.integration
@vcr.use_cassette
def test_search_year(client):
data = client.search('Man of Steel', year=2013)
assert data['totalResults'] == '13'
@pytest.mark.integration
@vcr.use_cassette
def test_search_page(client):
data = client.search('Man of Steel', page=3)
assert data['totalResults'] == '23'
assert len(data['Search']) == 3
assert data['Search'][0]['imdbID'] == 'tt5369598'
assert data['Search'][0]['Title'] == 'BigHead Man of Steel'
@pytest.mark.integration
@vcr.use_cassette
def test_refine_episode(episodes):
episode = Episode(episodes['bbt_s07e05'].name, episodes['bbt_s07e05'].series.lower(), episodes['bbt_s07e05'].season,
episodes['bbt_s07e05'].episode)
refine(episode, apikey=APIKEY)
assert episode.series == episodes['bbt_s07e05'].series
assert episode.year == episodes['bbt_s07e05'].year
assert episode.series_imdb_id == episodes['bbt_s07e05'].series_imdb_id
@pytest.mark.integration
@vcr.use_cassette
def test_refine_episode_original_series(episodes):
episode = Episode(episodes['dallas_s01e03'].name, episodes['dallas_s01e03'].series.lower(),
episodes['dallas_s01e03'].season, episodes['dallas_s01e03'].episode)
refine(episode, apikey=APIKEY)
assert episode.series == episodes['dallas_s01e03'].series
assert episode.year == 1978
assert episode.series_imdb_id == 'tt0077000'
@pytest.mark.integration
@vcr.use_cassette
def test_refine_episode_year(episodes):
episode = Episode(episodes['dallas_2012_s01e03'].name, episodes['dallas_2012_s01e03'].series.lower(),
episodes['dallas_2012_s01e03'].season, episodes['dallas_2012_s01e03'].episode,
year=episodes['dallas_2012_s01e03'].year, original_series=False)
refine(episode, apikey=APIKEY)
assert episode.series == episodes['dallas_2012_s01e03'].series
assert episode.year == episodes['dallas_2012_s01e03'].year
assert episode.series_imdb_id == 'tt1723760'
@pytest.mark.integration
@vcr.use_cassette
def test_refine_movie(movies):
movie = Movie(movies['man_of_steel'].name, movies['man_of_steel'].title.lower())
refine(movie, apikey=APIKEY)
assert movie.title == movies['man_of_steel'].title
assert movie.year == movies['man_of_steel'].year
assert movie.imdb_id == movies['man_of_steel'].imdb_id
@pytest.mark.integration
@vcr.use_cassette
def test_refine_movie_guess_alternative_title(movies):
movie = Movie.fromname(movies['jack_reacher_never_go_back'].name)
refine(movie, apikey=APIKEY)
assert movie.title == movies['jack_reacher_never_go_back'].title
assert movie.year == movies['jack_reacher_never_go_back'].year
assert movie.imdb_id == movies['jack_reacher_never_go_back'].imdb_id
@pytest.mark.integration
@vcr.use_cassette
def test_refine_episode_with_country(episodes):
episode = Episode.fromname(episodes['shameless_us_s08e01'].name)
video_series = episode.series
refine(episode, apikey=APIKEY)
# omdb has no country info. No match
assert episode.series == video_series
assert episode.series_imdb_id is None
@pytest.mark.integration
@vcr.use_cassette
def test_refine_episode_with_country_hoc_us(episodes):
episode = Episode.fromname(episodes['house_of_cards_us_s06e01'].name)
video_series = episode.series
refine(episode, apikey=APIKEY)
# omdb has no country info. No match
assert episode.series == video_series
assert episode.series_imdb_id is None
|
import re
from absl import flags
from perfkitbenchmarker import data
from perfkitbenchmarker import linux_packages
from perfkitbenchmarker import regex_util
flags.DEFINE_integer(
'netperf_histogram_buckets', 100,
'The number of buckets per bucket array in a netperf histogram. Netperf '
'keeps one array for latencies in the single usec range, one for the '
'10-usec range, one for the 100-usec range, and so on until the 10-sec '
'range. The default value that netperf uses is 100. Using more will '
'increase the precision of the histogram samples that the netperf '
'benchmark produces.')
FLAGS = flags.FLAGS
NETPERF_TAR = 'netperf-2.7.0.tar.gz'
NETPERF_URL = 'https://github.com/HewlettPackard/netperf/archive/%s' % (
NETPERF_TAR)
NETPERF_DIR = '%s/netperf-netperf-2.7.0' % linux_packages.INSTALL_DIR
NETPERF_SRC_DIR = NETPERF_DIR + '/src'
NETSERVER_PATH = NETPERF_SRC_DIR + '/netserver'
NETPERF_PATH = NETPERF_SRC_DIR + '/netperf'
NETLIB_PATCH = NETPERF_DIR + '/netperf.patch'
NETPERF_EXAMPLE_DIR = NETPERF_DIR + '/doc/examples/'
def _Install(vm):
"""Installs the netperf package on the VM."""
vm.InstallPackages('python3-pip')
vm.RemoteCommand('sudo pip3 install absl-py')
vm.Install('build_tools')
_CopyTar(vm)
vm.RemoteCommand('cd %s && tar xvzf %s' %
(linux_packages.INSTALL_DIR, NETPERF_TAR))
# Modify netperf to print out all buckets in its histogram rather than
# aggregating, edit runemomniaggdemo script, and apply fix to
# allow it to compile with --enable-demo flag correctly
vm.PushDataFile('netperf.patch', NETLIB_PATCH)
vm.RemoteCommand('cd %s && patch -l -p1 < netperf.patch' %
NETPERF_DIR)
vm.RemoteCommand('cd %s && CFLAGS=-DHIST_NUM_OF_BUCKET=%s '
'./configure --enable-burst '
'--enable-demo --enable-histogram '
'&& make && sudo make install' %
(NETPERF_DIR, FLAGS.netperf_histogram_buckets))
vm.RemoteCommand('cd %s && chmod +x runemomniaggdemo.sh'
'&& chmod +x find_max_burst.sh'
% (NETPERF_EXAMPLE_DIR))
# Set keepalive to a low value to ensure that the control connection
# is not closed by the cloud networking infrastructure.
# This causes keepalive packets to be sent every minute on all ipv4
# tcp connections.
#
# TODO(user): Keepalive is not enabled on the netperf control socket.
# While (for unknown reasons) this hack fixes the issue with the socket
# being closed anyway, a more correct approach would be to patch netperf
# and enable keepalive on the control socket in addition to changing the
# system defaults below.
#
if vm.IS_REBOOTABLE:
vm.ApplySysctlPersistent({
'net.ipv4.tcp_keepalive_time': 60,
'net.ipv4.tcp_keepalive_intvl': 60,
})
def _CopyTar(vm):
"""Copy the tar file for installation.
Tries local data directory first, then NET_PERF_URL
"""
try:
vm.PushDataFile(NETPERF_TAR, remote_path=(linux_packages.INSTALL_DIR + '/'))
except data.ResourceNotFound:
vm.Install('curl')
vm.RemoteCommand('curl %s -L -o %s/%s' %
(NETPERF_URL, linux_packages.INSTALL_DIR, NETPERF_TAR))
def YumInstall(vm):
"""Installs the netperf package on the VM."""
_Install(vm)
def AptInstall(vm):
"""Installs the netperf package on the VM."""
_Install(vm)
def ParseHistogram(netperf_stdout):
"""Parses the histogram output from netperf.
Args:
netperf_output: string. The stdout from netperf containing a histogram.
Returns:
A dict mapping latency to sample count or None if the output did not
contain a histogram.
"""
# Here is an example of a netperf histogram:
#
# Histogram of request/response times
# UNIT_USEC : 0: 0: 0: 0: 0: 0: 0: 0: 0: 0
# TEN_USEC : 0: 0: 0: 0: 0: 0: 0: 0: 0: 0
# HUNDRED_USEC : 0: 433684: 9696: 872: 140: 56: 27: 28: 17: 10
# UNIT_MSEC : 0: 24: 57: 40: 5: 2: 0: 0: 0: 0
# TEN_MSEC : 0: 0: 0: 0: 0: 0: 0: 0: 0: 0
# HUNDRED_MSEC : 0: 0: 0: 0: 0: 0: 0: 0: 0: 0
# UNIT_SEC : 0: 0: 0: 0: 0: 0: 0: 0: 0: 0
# TEN_SEC : 0: 0: 0: 0: 0: 0: 0: 0: 0: 0
# >100_SECS: 0
# HIST_TOTAL: 444658
histogram_text = regex_util.ExtractGroup(
'(UNIT_USEC.*?)>100_SECS', netperf_stdout, flags=re.S)
# The total number of usecs that this row of the histogram represents.
row_size = 10.0
hist = {}
for l in histogram_text.splitlines():
buckets = [int(b) for b in l.split(':')[1:]]
bucket_size = row_size / len(buckets)
hist.update({(i * bucket_size): count
for i, count in enumerate(buckets) if count})
# Each row is 10x larger than the previous row.
row_size *= 10
return hist
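# For the sample output above, the UNIT_USEC row yields 1-usec-wide buckets,
# the TEN_USEC row 10-usec buckets, and so on; e.g. the 433684 samples in the
# second HUNDRED_USEC bucket are recorded under the key 100.0 (usec).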
|
import logging
from aiohttp import ClientResponseError
from pysmartthings import APIResponseError, AppOAuth, SmartThings
from pysmartthings.installedapp import format_install_url
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import (
CONF_ACCESS_TOKEN,
CONF_CLIENT_ID,
CONF_CLIENT_SECRET,
HTTP_FORBIDDEN,
HTTP_UNAUTHORIZED,
)
from homeassistant.helpers.aiohttp_client import async_get_clientsession
# pylint: disable=unused-import
from .const import (
APP_OAUTH_CLIENT_NAME,
APP_OAUTH_SCOPES,
CONF_APP_ID,
CONF_INSTALLED_APP_ID,
CONF_LOCATION_ID,
CONF_REFRESH_TOKEN,
DOMAIN,
VAL_UID_MATCHER,
)
from .smartapp import (
create_app,
find_app,
format_unique_id,
get_webhook_url,
setup_smartapp,
setup_smartapp_endpoint,
update_app,
validate_webhook_requirements,
)
_LOGGER = logging.getLogger(__name__)
class SmartThingsFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle configuration of SmartThings integrations."""
VERSION = 2
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_PUSH
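    # Flow overview (summarized from the step handlers below): user (webhook
    # confirmation) -> pat (personal access token validation) ->
    # select_location -> authorize (external browser step) -> install
    # (config entry creation).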
def __init__(self):
"""Create a new instance of the flow handler."""
self.access_token = None
self.app_id = None
self.api = None
self.oauth_client_secret = None
self.oauth_client_id = None
self.installed_app_id = None
self.refresh_token = None
self.location_id = None
async def async_step_import(self, user_input=None):
"""Occurs when a previously entry setup fails and is re-initiated."""
return await self.async_step_user(user_input)
async def async_step_user(self, user_input=None):
"""Validate and confirm webhook setup."""
await setup_smartapp_endpoint(self.hass)
webhook_url = get_webhook_url(self.hass)
# Abort if the webhook is invalid
if not validate_webhook_requirements(self.hass):
return self.async_abort(
reason="invalid_webhook_url",
description_placeholders={
"webhook_url": webhook_url,
"component_url": "https://www.home-assistant.io/integrations/smartthings/",
},
)
# Show the confirmation
if user_input is None:
return self.async_show_form(
step_id="user",
description_placeholders={"webhook_url": webhook_url},
)
# Show the next screen
return await self.async_step_pat()
async def async_step_pat(self, user_input=None):
"""Get the Personal Access Token and validate it."""
errors = {}
if user_input is None or CONF_ACCESS_TOKEN not in user_input:
return self._show_step_pat(errors)
self.access_token = user_input[CONF_ACCESS_TOKEN]
# Ensure token is a UUID
if not VAL_UID_MATCHER.match(self.access_token):
errors[CONF_ACCESS_TOKEN] = "token_invalid_format"
return self._show_step_pat(errors)
        # Set up the SmartThings API client
self.api = SmartThings(async_get_clientsession(self.hass), self.access_token)
try:
app = await find_app(self.hass, self.api)
if app:
await app.refresh() # load all attributes
await update_app(self.hass, app)
# Find an existing entry to copy the oauth client
existing = next(
(
entry
for entry in self._async_current_entries()
if entry.data[CONF_APP_ID] == app.app_id
),
None,
)
if existing:
self.oauth_client_id = existing.data[CONF_CLIENT_ID]
self.oauth_client_secret = existing.data[CONF_CLIENT_SECRET]
else:
# Get oauth client id/secret by regenerating it
app_oauth = AppOAuth(app.app_id)
app_oauth.client_name = APP_OAUTH_CLIENT_NAME
app_oauth.scope.extend(APP_OAUTH_SCOPES)
client = await self.api.generate_app_oauth(app_oauth)
self.oauth_client_secret = client.client_secret
self.oauth_client_id = client.client_id
else:
app, client = await create_app(self.hass, self.api)
self.oauth_client_secret = client.client_secret
self.oauth_client_id = client.client_id
setup_smartapp(self.hass, app)
self.app_id = app.app_id
except APIResponseError as ex:
if ex.is_target_error():
errors["base"] = "webhook_error"
else:
errors["base"] = "app_setup_error"
_LOGGER.exception(
"API error setting up the SmartApp: %s", ex.raw_error_response
)
return self._show_step_pat(errors)
except ClientResponseError as ex:
if ex.status == HTTP_UNAUTHORIZED:
errors[CONF_ACCESS_TOKEN] = "token_unauthorized"
_LOGGER.debug(
"Unauthorized error received setting up SmartApp", exc_info=True
)
elif ex.status == HTTP_FORBIDDEN:
errors[CONF_ACCESS_TOKEN] = "token_forbidden"
_LOGGER.debug(
"Forbidden error received setting up SmartApp", exc_info=True
)
else:
errors["base"] = "app_setup_error"
_LOGGER.exception("Unexpected error setting up the SmartApp")
return self._show_step_pat(errors)
except Exception: # pylint:disable=broad-except
errors["base"] = "app_setup_error"
_LOGGER.exception("Unexpected error setting up the SmartApp")
return self._show_step_pat(errors)
return await self.async_step_select_location()
async def async_step_select_location(self, user_input=None):
"""Ask user to select the location to setup."""
if user_input is None or CONF_LOCATION_ID not in user_input:
# Get available locations
existing_locations = [
entry.data[CONF_LOCATION_ID] for entry in self._async_current_entries()
]
locations = await self.api.locations()
locations_options = {
location.location_id: location.name
for location in locations
if location.location_id not in existing_locations
}
if not locations_options:
return self.async_abort(reason="no_available_locations")
return self.async_show_form(
step_id="select_location",
data_schema=vol.Schema(
{vol.Required(CONF_LOCATION_ID): vol.In(locations_options)}
),
)
self.location_id = user_input[CONF_LOCATION_ID]
await self.async_set_unique_id(format_unique_id(self.app_id, self.location_id))
return await self.async_step_authorize()
async def async_step_authorize(self, user_input=None):
"""Wait for the user to authorize the app installation."""
user_input = {} if user_input is None else user_input
self.installed_app_id = user_input.get(CONF_INSTALLED_APP_ID)
self.refresh_token = user_input.get(CONF_REFRESH_TOKEN)
if self.installed_app_id is None:
# Launch the external setup URL
url = format_install_url(self.app_id, self.location_id)
return self.async_external_step(step_id="authorize", url=url)
return self.async_external_step_done(next_step_id="install")
def _show_step_pat(self, errors):
if self.access_token is None:
            # Get the token from an existing entry to make it easier to set up multiple locations.
self.access_token = next(
(
entry.data.get(CONF_ACCESS_TOKEN)
for entry in self._async_current_entries()
),
None,
)
return self.async_show_form(
step_id="pat",
data_schema=vol.Schema(
{vol.Required(CONF_ACCESS_TOKEN, default=self.access_token): str}
),
errors=errors,
description_placeholders={
"token_url": "https://account.smartthings.com/tokens",
"component_url": "https://www.home-assistant.io/integrations/smartthings/",
},
)
async def async_step_install(self, data=None):
"""Create a config entry at completion of a flow and authorization of the app."""
data = {
CONF_ACCESS_TOKEN: self.access_token,
CONF_REFRESH_TOKEN: self.refresh_token,
CONF_CLIENT_ID: self.oauth_client_id,
CONF_CLIENT_SECRET: self.oauth_client_secret,
CONF_LOCATION_ID: self.location_id,
CONF_APP_ID: self.app_id,
CONF_INSTALLED_APP_ID: self.installed_app_id,
}
location = await self.api.location(data[CONF_LOCATION_ID])
return self.async_create_entry(title=location.name, data=data)
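# Hypothetical sketch (an assumption, not part of the integration): how the
# SmartApp installation webhook would resume the flow paused by
# async_external_step in async_step_authorize, via Home Assistant's flow
# manager API.
async def _example_resume_authorize(hass, flow_id, installed_app_id, refresh_token):
    """Re-enter the 'authorize' external step with the data it waits for."""
    await hass.config_entries.flow.async_configure(
        flow_id,
        {CONF_INSTALLED_APP_ID: installed_app_id, CONF_REFRESH_TOKEN: refresh_token},
    )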
|
import os
import unittest
from perfkitbenchmarker import test_util
from perfkitbenchmarker.linux_packages import blazemark
class BlazemarkTestCase(unittest.TestCase, test_util.SamplesTestMixin):
maxDiff = None
def setUp(self):
self.data_dir = os.path.join(os.path.dirname(__file__), '..', 'data')
def testParseResult(self):
result_path = os.path.join(self.data_dir, 'blazemark-output.txt')
with open(result_path) as result_file:
out = result_file.read()
results = blazemark._ParseResult(out, 'test')
self.assertEqual(14, len(results)) # 14 results
self.assertEqual('test_C-like_Throughput', results[0].metric)
self.assertEqual(1115.44, results[0].value)
self.assertEqual('MFlop/s', results[0].unit)
self.assertEqual({'N': 100}, results[0].metadata)
self.assertEqual('test_Eigen_Throughput', results[-1].metric)
self.assertEqual(209.899, results[-1].value)
self.assertEqual('MFlop/s', results[-1].unit)
self.assertEqual({'N': 10000000}, results[-1].metadata)
def testParseExpResult(self):
result_path = os.path.join(self.data_dir, 'blazemark-output2.txt')
with open(result_path) as result_file:
out = result_file.read()
results = blazemark._ParseResult(out, 'test')
self.assertEqual(10, len(results)) # 10 results
self.assertEqual('test_Blaze_Throughput', results[0].metric)
self.assertEqual(float('3.03424e-08'), results[0].value)
self.assertEqual('Seconds', results[0].unit)
self.assertEqual({'N': 3}, results[0].metadata)
self.assertEqual('test_Blaze_Throughput', results[-1].metric)
self.assertEqual(31.9121, results[-1].value)
self.assertEqual('Seconds', results[-1].unit)
self.assertEqual({'N': 2000}, results[-1].metadata)
if __name__ == '__main__':
unittest.main()
|
import jwt
import base64
import requests
from flask import Blueprint, current_app
from flask_restful import reqparse, Resource, Api
from flask_principal import Identity, identity_changed
from lemur.constants import SUCCESS_METRIC_STATUS, FAILURE_METRIC_STATUS
from lemur.extensions import metrics
from lemur.common.utils import get_psuedo_random_string
from lemur.users import service as user_service
from lemur.roles import service as role_service
from lemur.auth.service import create_token, fetch_token_header, get_rsa_public_key
from lemur.auth import ldap
mod = Blueprint("auth", __name__)
api = Api(mod)
def exchange_for_access_token(
code, redirect_uri, client_id, secret, access_token_url=None, verify_cert=True
):
"""
Exchanges authorization code for access token.
:param code:
:param redirect_uri:
:param client_id:
:param secret:
:param access_token_url:
:param verify_cert:
    :return: tuple of (id_token, access_token) from the provider response
"""
# take the information we have received from the provider to create a new request
params = {
"grant_type": "authorization_code",
"scope": "openid email profile address",
"code": code,
"redirect_uri": redirect_uri,
"client_id": client_id,
}
    # the secret and clientId are issued by the provider when you sign up
token = "{0}:{1}".format(client_id, secret)
basic = base64.b64encode(bytes(token, "utf-8"))
headers = {
"Content-Type": "application/x-www-form-urlencoded",
"authorization": "basic {0}".format(basic.decode("utf-8")),
}
# exchange authorization code for access token.
r = requests.post(
access_token_url, headers=headers, params=params, verify=verify_cert
)
if r.status_code == 400:
r = requests.post(
access_token_url, headers=headers, data=params, verify=verify_cert
)
id_token = r.json()["id_token"]
access_token = r.json()["access_token"]
return id_token, access_token
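# Illustrative sketch (hypothetical helper, not part of Lemur): the header
# assembled above follows the OAuth2 "client_secret_basic" client
# authentication scheme from RFC 6749 -- base64 of "client_id:secret".
def _example_basic_auth_header(client_id, secret):
    """Return the Authorization header value used for the token exchange."""
    raw = "{0}:{1}".format(client_id, secret)
    encoded = base64.b64encode(bytes(raw, "utf-8")).decode("utf-8")
    return "basic {0}".format(encoded)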
def validate_id_token(id_token, client_id, jwks_url):
"""
Ensures that the token we receive is valid.
:param id_token:
:param client_id:
:param jwks_url:
:return:
"""
# fetch token public key
header_data = fetch_token_header(id_token)
# retrieve the key material as specified by the token header
r = requests.get(jwks_url)
for key in r.json()["keys"]:
if key["kid"] == header_data["kid"]:
secret = get_rsa_public_key(key["n"], key["e"])
algo = header_data["alg"]
break
else:
return dict(message="Key not found"), 401
# validate your token based on the key it was signed with
try:
jwt.decode(
id_token, secret.decode("utf-8"), algorithms=[algo], audience=client_id
)
except jwt.DecodeError:
return dict(message="Token is invalid"), 401
except jwt.ExpiredSignatureError:
return dict(message="Token has expired"), 401
except jwt.InvalidTokenError:
return dict(message="Token is invalid"), 401
def retrieve_user(user_api_url, access_token):
"""
Fetch user information from provided user api_url.
:param user_api_url:
:param access_token:
:return:
"""
user_params = dict(access_token=access_token, schema="profile")
headers = {}
if current_app.config.get("PING_INCLUDE_BEARER_TOKEN"):
headers = {"Authorization": f"Bearer {access_token}"}
# retrieve information about the current user.
r = requests.get(user_api_url, params=user_params, headers=headers)
# Some IDPs, like "Keycloak", require a POST instead of a GET
if r.status_code == 400:
r = requests.post(user_api_url, data=user_params, headers=headers)
profile = r.json()
user = user_service.get_by_email(profile["email"])
return user, profile
def create_user_roles(profile):
"""Creates new roles based on profile information.
:param profile:
:return:
"""
roles = []
# update their google 'roles'
if "googleGroups" in profile:
for group in profile["googleGroups"]:
role = role_service.get_by_name(group)
if not role:
role = role_service.create(
group,
description="This is a google group based role created by Lemur",
third_party=True,
)
if not role.third_party:
role = role_service.set_third_party(role.id, third_party_status=True)
roles.append(role)
else:
current_app.logger.warning(
"'googleGroups' not sent by identity provider, no specific roles will assigned to the user."
)
role = role_service.get_by_name(profile["email"])
if not role:
role = role_service.create(
profile["email"],
description="This is a user specific role",
third_party=True,
)
if not role.third_party:
role = role_service.set_third_party(role.id, third_party_status=True)
roles.append(role)
# every user is an operator (tied to a default role)
if current_app.config.get("LEMUR_DEFAULT_ROLE"):
default = role_service.get_by_name(current_app.config["LEMUR_DEFAULT_ROLE"])
if not default:
default = role_service.create(
current_app.config["LEMUR_DEFAULT_ROLE"],
description="This is the default Lemur role.",
)
if not default.third_party:
role_service.set_third_party(default.id, third_party_status=True)
roles.append(default)
return roles
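# Example input shape (an assumption based on the lookups above): a profile
# from an identity provider that exposes Google group membership.
def _example_profile():
    """Return a sample IdP profile of the shape create_user_roles expects."""
    return {
        "email": "user@example.com",
        "googleGroups": ["team-security", "team-infra"],
        "thumbnailPhotoUrl": "https://idp.example.com/photo.jpg",
    }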
def update_user(user, profile, roles):
"""Updates user with current profile information and associated roles.
:param user:
:param profile:
:param roles:
"""
    # if we get an SSO user, create an account for them
if not user:
user = user_service.create(
profile["email"],
get_psuedo_random_string(),
profile["email"],
True,
profile.get("thumbnailPhotoUrl"),
roles,
)
else:
# we add 'lemur' specific roles, so they do not get marked as removed
for ur in user.roles:
if not ur.third_party:
roles.append(ur)
# update any changes to the user
user_service.update(
user.id,
profile["email"],
profile["email"],
True,
profile.get("thumbnailPhotoUrl"), # profile isn't google+ enabled
roles,
)
class Login(Resource):
"""
Provides an endpoint for Lemur's basic authentication. It takes a username and password
combination and returns a JWT token.
    This token is required for each API request and must be provided in the Authorization header of the request.
::
Authorization:Bearer <token>
    Tokens have a set expiration date. You can inspect a token's expiration by base64 decoding the token and inspecting
    its contents.
    .. note:: It is recommended that the token expiration be fairly short lived (hours, not days). This will largely \
    depend on your use cases. It is important to note that there is currently no built-in method to revoke a user's \
    token and force re-authentication.
"""
def __init__(self):
self.reqparse = reqparse.RequestParser()
super(Login, self).__init__()
def post(self):
"""
.. http:post:: /auth/login
Login with username:password
**Example request**:
.. sourcecode:: http
POST /auth/login HTTP/1.1
Host: example.com
Accept: application/json, text/javascript
{
"username": "test",
"password": "test"
}
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: text/javascript
{
"token": "12343243243"
}
:arg username: username
:arg password: password
:statuscode 401: invalid credentials
:statuscode 200: no error
"""
self.reqparse.add_argument("username", type=str, required=True, location="json")
self.reqparse.add_argument("password", type=str, required=True, location="json")
args = self.reqparse.parse_args()
if "@" in args["username"]:
user = user_service.get_by_email(args["username"])
else:
user = user_service.get_by_username(args["username"])
# default to local authentication
if user and user.check_password(args["password"]) and user.active:
# Tell Flask-Principal the identity changed
identity_changed.send(
current_app._get_current_object(), identity=Identity(user.id)
)
metrics.send(
"login", "counter", 1, metric_tags={"status": SUCCESS_METRIC_STATUS}
)
return dict(token=create_token(user))
# try ldap login
if current_app.config.get("LDAP_AUTH"):
try:
ldap_principal = ldap.LdapPrincipal(args)
user = ldap_principal.authenticate()
if user and user.active:
# Tell Flask-Principal the identity changed
identity_changed.send(
current_app._get_current_object(), identity=Identity(user.id)
)
metrics.send(
"login",
"counter",
1,
metric_tags={"status": SUCCESS_METRIC_STATUS},
)
return dict(token=create_token(user))
except Exception as e:
current_app.logger.error("ldap error: {0}".format(e))
ldap_message = "ldap error: %s" % e
metrics.send(
"login", "counter", 1, metric_tags={"status": FAILURE_METRIC_STATUS}
)
return dict(message=ldap_message), 403
# if not valid user - no certificates for you
metrics.send(
"login", "counter", 1, metric_tags={"status": FAILURE_METRIC_STATUS}
)
return dict(message="The supplied credentials are invalid"), 403
class Ping(Resource):
"""
    This class serves as an example of how one might implement an SSO provider for use with Lemur. In
    this example we use an OpenID Connect authentication flow, which is essentially OAuth2 underneath. If you have an
    OAuth2 provider you want to use with Lemur, there are two steps:
1. Define your own class that inherits from :class:`flask_restful.Resource` and create the HTTP methods the \
provider uses for its callbacks.
2. Add or change the Lemur AngularJS Configuration to point to your new provider
"""
def __init__(self):
self.reqparse = reqparse.RequestParser()
super(Ping, self).__init__()
def get(self):
return "Redirecting..."
def post(self):
self.reqparse.add_argument("clientId", type=str, required=True, location="json")
self.reqparse.add_argument(
"redirectUri", type=str, required=True, location="json"
)
self.reqparse.add_argument("code", type=str, required=True, location="json")
args = self.reqparse.parse_args()
# you can either discover these dynamically or simply configure them
access_token_url = current_app.config.get("PING_ACCESS_TOKEN_URL")
user_api_url = current_app.config.get("PING_USER_API_URL")
secret = current_app.config.get("PING_SECRET")
id_token, access_token = exchange_for_access_token(
args["code"],
args["redirectUri"],
args["clientId"],
secret,
access_token_url=access_token_url,
)
jwks_url = current_app.config.get("PING_JWKS_URL")
error_code = validate_id_token(id_token, args["clientId"], jwks_url)
if error_code:
return error_code
user, profile = retrieve_user(user_api_url, access_token)
roles = create_user_roles(profile)
update_user(user, profile, roles)
if not user or not user.active:
metrics.send(
"login", "counter", 1, metric_tags={"status": FAILURE_METRIC_STATUS}
)
return dict(message="The supplied credentials are invalid"), 403
# Tell Flask-Principal the identity changed
identity_changed.send(
current_app._get_current_object(), identity=Identity(user.id)
)
metrics.send(
"login", "counter", 1, metric_tags={"status": SUCCESS_METRIC_STATUS}
)
return dict(token=create_token(user))
class OAuth2(Resource):
def __init__(self):
self.reqparse = reqparse.RequestParser()
super(OAuth2, self).__init__()
def get(self):
return "Redirecting..."
def post(self):
self.reqparse.add_argument("clientId", type=str, required=True, location="json")
self.reqparse.add_argument(
"redirectUri", type=str, required=True, location="json"
)
self.reqparse.add_argument("code", type=str, required=True, location="json")
args = self.reqparse.parse_args()
# you can either discover these dynamically or simply configure them
access_token_url = current_app.config.get("OAUTH2_ACCESS_TOKEN_URL")
user_api_url = current_app.config.get("OAUTH2_USER_API_URL")
verify_cert = current_app.config.get("OAUTH2_VERIFY_CERT")
secret = current_app.config.get("OAUTH2_SECRET")
id_token, access_token = exchange_for_access_token(
args["code"],
args["redirectUri"],
args["clientId"],
secret,
access_token_url=access_token_url,
verify_cert=verify_cert,
)
jwks_url = current_app.config.get("OAUTH2_JWKS_URL")
error_code = validate_id_token(id_token, args["clientId"], jwks_url)
if error_code:
return error_code
user, profile = retrieve_user(user_api_url, access_token)
roles = create_user_roles(profile)
update_user(user, profile, roles)
        if not user or not user.active:
metrics.send(
"login", "counter", 1, metric_tags={"status": FAILURE_METRIC_STATUS}
)
return dict(message="The supplied credentials are invalid"), 403
# Tell Flask-Principal the identity changed
identity_changed.send(
current_app._get_current_object(), identity=Identity(user.id)
)
metrics.send(
"login", "counter", 1, metric_tags={"status": SUCCESS_METRIC_STATUS}
)
return dict(token=create_token(user))
class Google(Resource):
def __init__(self):
self.reqparse = reqparse.RequestParser()
super(Google, self).__init__()
def post(self):
access_token_url = "https://accounts.google.com/o/oauth2/token"
people_api_url = "https://www.googleapis.com/plus/v1/people/me/openIdConnect"
self.reqparse.add_argument("clientId", type=str, required=True, location="json")
self.reqparse.add_argument(
"redirectUri", type=str, required=True, location="json"
)
self.reqparse.add_argument("code", type=str, required=True, location="json")
args = self.reqparse.parse_args()
# Step 1. Exchange authorization code for access token
payload = {
"client_id": args["clientId"],
"grant_type": "authorization_code",
"redirect_uri": args["redirectUri"],
"code": args["code"],
"client_secret": current_app.config.get("GOOGLE_SECRET"),
}
r = requests.post(access_token_url, data=payload)
token = r.json()
# Step 2. Retrieve information about the current user
headers = {"Authorization": "Bearer {0}".format(token["access_token"])}
r = requests.get(people_api_url, headers=headers)
profile = r.json()
user = user_service.get_by_email(profile["email"])
if not (user and user.active):
metrics.send(
"login", "counter", 1, metric_tags={"status": FAILURE_METRIC_STATUS}
)
return dict(message="The supplied credentials are invalid."), 403
if user:
metrics.send(
"login", "counter", 1, metric_tags={"status": SUCCESS_METRIC_STATUS}
)
return dict(token=create_token(user))
metrics.send(
"login", "counter", 1, metric_tags={"status": FAILURE_METRIC_STATUS}
)
class Providers(Resource):
def get(self):
active_providers = []
for provider in current_app.config.get("ACTIVE_PROVIDERS", []):
provider = provider.lower()
if provider == "google":
active_providers.append(
{
"name": "google",
"clientId": current_app.config.get("GOOGLE_CLIENT_ID"),
"url": api.url_for(Google),
}
)
elif provider == "ping":
active_providers.append(
{
"name": current_app.config.get("PING_NAME"),
"url": current_app.config.get("PING_REDIRECT_URI"),
"redirectUri": current_app.config.get("PING_REDIRECT_URI"),
"clientId": current_app.config.get("PING_CLIENT_ID"),
"responseType": "code",
"scope": ["openid", "email", "profile", "address"],
"scopeDelimiter": " ",
"authorizationEndpoint": current_app.config.get(
"PING_AUTH_ENDPOINT"
),
"requiredUrlParams": ["scope"],
"type": "2.0",
}
)
elif provider == "oauth2":
active_providers.append(
{
"name": current_app.config.get("OAUTH2_NAME"),
"url": current_app.config.get("OAUTH2_REDIRECT_URI"),
"redirectUri": current_app.config.get("OAUTH2_REDIRECT_URI"),
"clientId": current_app.config.get("OAUTH2_CLIENT_ID"),
"responseType": "code",
"scope": ["openid", "email", "profile", "groups"],
"scopeDelimiter": " ",
"authorizationEndpoint": current_app.config.get(
"OAUTH2_AUTH_ENDPOINT"
),
"requiredUrlParams": ["scope", "state", "nonce"],
"state": "STATE",
"nonce": get_psuedo_random_string(),
"type": "2.0",
}
)
return active_providers
api.add_resource(Login, "/auth/login", endpoint="login")
api.add_resource(Ping, "/auth/ping", endpoint="ping")
api.add_resource(Google, "/auth/google", endpoint="google")
api.add_resource(OAuth2, "/auth/oauth2", endpoint="oauth2")
api.add_resource(Providers, "/auth/providers", endpoint="providers")
|
import inspect
import logging
import threading
import unittest
from perfkitbenchmarker import log_util
from perfkitbenchmarker import vm_util
from tests import pkb_common_test_case
class LogUtilTestCase(pkb_common_test_case.PkbCommonTestCase):
"""Tests exercising the utilities in log_util."""
def testThreadLogContextExtendLabel(self):
"""Verify ThreadLogContext.ExtendLabel behavior."""
context = log_util.ThreadLogContext()
self.assertEqual(context.label, '')
with context.ExtendLabel('LABEL-A'):
self.assertEqual(context.label, 'LABEL-A ')
with context.ExtendLabel('LABEL-B'):
self.assertEqual(context.label, 'LABEL-A LABEL-B ')
self.assertEqual(context.label, 'LABEL-A ')
self.assertEqual(context.label, '')
def testThreadLogContextExtendLabelEmptyStrings(self):
"""Verify ThreadLogContext.ExtendLabel behavior with empty strings."""
context = log_util.ThreadLogContext()
self.assertEqual(context.label, '')
with context.ExtendLabel(''):
self.assertEqual(context.label, '')
with context.ExtendLabel('LABEL-A'):
self.assertEqual(context.label, 'LABEL-A ')
with context.ExtendLabel(''):
self.assertEqual(context.label, 'LABEL-A ')
with context.ExtendLabel('LABEL-B'):
self.assertEqual(context.label, 'LABEL-A LABEL-B ')
self.assertEqual(context.label, 'LABEL-A ')
self.assertEqual(context.label, 'LABEL-A ')
self.assertEqual(context.label, '')
self.assertEqual(context.label, '')
def testThreadLogContextCopyConstruct(self):
"""Verify ThreadLogContext init with a reference ThreadLogContext behavior.
The label state of the first ThreadLogContext should be copied.
"""
original = log_util.ThreadLogContext()
self.assertEqual(original.label, '')
with original.ExtendLabel('LABEL-A'):
self.assertEqual(original.label, 'LABEL-A ')
copied = log_util.ThreadLogContext(original)
self.assertEqual(original.label, 'LABEL-A ')
self.assertEqual(copied.label, 'LABEL-A ')
with original.ExtendLabel('LABEL-B'):
self.assertEqual(original.label, 'LABEL-A LABEL-B ')
self.assertEqual(copied.label, 'LABEL-A ')
with copied.ExtendLabel('LABEL-C'):
self.assertEqual(original.label, 'LABEL-A LABEL-B ')
self.assertEqual(copied.label, 'LABEL-A LABEL-C ')
self.assertEqual(original.label, 'LABEL-A LABEL-B ')
self.assertEqual(copied.label, 'LABEL-A ')
self.assertEqual(original.label, 'LABEL-A ')
self.assertEqual(copied.label, 'LABEL-A ')
self.assertEqual(original.label, '')
self.assertEqual(copied.label, 'LABEL-A ')
def testRunThreadedContextCopy(self):
"""Verify that ThreadLogContext is copied to threads by vm_util.RunThreaded.
"""
original = log_util.ThreadLogContext()
log_util.SetThreadLogContext(original)
t1_list = ['T1']
t2_list = ['T2']
self.assertEqual(original.label, '')
with original.ExtendLabel('T0'):
self.assertEqual(original.label, 'T0 ')
vm_util.RunThreaded(
target=LogUtilTestCase.RunThreadedContextCopyHelper,
thread_params=[t1_list, t2_list])
self.assertEqual(original.label, 'T0 ')
self.assertEqual(t1_list, ['T1', 'T0 ', 'T0 T1 ', 'T0 '])
self.assertEqual(t2_list, ['T2', 'T0 ', 'T0 T2 ', 'T0 '])
@staticmethod
def RunThreadedContextCopyHelper(my_list):
"""Helper method used by testRunThreadedContextCopy."""
context = log_util.GetThreadLogContext()
my_list.append(context.label)
with context.ExtendLabel(my_list[0]):
my_list.append(context.label)
my_list.append(context.label)
def testPkbLogFilter(self):
"""Verify that PkbLogFilter sets the pkb_label of LogRecords it processes.
"""
logger_name = 'log_util_test.LogUtilTestCase.testPkbLogFilter'
context = log_util.ThreadLogContext()
log_util.SetThreadLogContext(context)
with context.ExtendLabel('LABEL-A'):
log_record = logging.LogRecord(
name=logger_name, level=logging.INFO, pathname=__file__,
lineno=inspect.getframeinfo(inspect.currentframe()).lineno + 1,
msg="Log message.", args=None, exc_info=None)
log_util.PkbLogFilter().filter(log_record)
self.assertEqual(log_record.pkb_label, 'LABEL-A ')
def testPkbLogFilterNoContext(self):
"""Verify that PkbLogFilter works if no context was set."""
self.completed = False
def childLog():
logger_name = 'log_util_test.LogUtilTestCase.testPkbLogFilterNoContext'
self.log_record = logging.LogRecord(
name=logger_name, level=logging.INFO, pathname=__file__,
lineno=inspect.getframeinfo(inspect.currentframe()).lineno + 1,
msg="Log message.", args=None, exc_info=None)
log_util.PkbLogFilter().filter(self.log_record)
self.completed = True
child = threading.Thread(target=childLog)
child.start()
child.join()
self.assertTrue(self.completed)
self.assertEqual(self.log_record.pkb_label, '')
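# Minimal wiring sketch (an assumption about typical usage, not PKB's actual
# configuration code): PkbLogFilter stamps each record with 'pkb_label', so a
# handler carrying the filter can include the label in its format string.
def _example_attach_pkb_filter():
  """Attach PkbLogFilter to a stream handler (illustrative only)."""
  handler = logging.StreamHandler()
  handler.addFilter(log_util.PkbLogFilter())
  handler.setFormatter(logging.Formatter('%(pkb_label)s%(message)s'))
  logging.getLogger('log_util_test.example').addHandler(handler)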
if __name__ == '__main__':
unittest.main()
|
from unittest.mock import patch
import pytest
from homeassistant.core import HomeAssistant
from .common import ComponentFactory
from tests.test_util.aiohttp import AiohttpClientMocker
@pytest.fixture()
def component_factory(
hass: HomeAssistant, aiohttp_client, aioclient_mock: AiohttpClientMocker
):
"""Return a factory for initializing the withings component."""
with patch(
"homeassistant.components.withings.common.ConfigEntryWithingsApi"
) as api_class_mock:
yield ComponentFactory(hass, api_class_mock, aiohttp_client, aioclient_mock)
|
import logging
from threading import Thread
import requests
GA_TRACKING_ID = "UA-124800612-1"
logging.basicConfig()
logger = logging.getLogger("kalliope")
logger.setLevel(logging.DEBUG)
class GoogleTracking(Thread):
"""
    Send a hit to Google Analytics.
    Allows anonymous evaluation of the global usage of the Kalliope app by its users.
"""
    def __init__(self, **kwargs):
        super(GoogleTracking, self).__init__()
        self.category = kwargs.get("category")
        self.action = kwargs.get("action")
        self.label = kwargs.get("label", None)
        self.value = kwargs.get("value", 0)
        self.kalliope_version = kwargs.get("kalliope_version", 0)
        # anonymous client id; track_event expects it as its first argument
        self.cid = kwargs.get("cid")
    def run(self):
        self.track_event(self.cid, self.kalliope_version, self.category,
                         self.action, self.label, self.value)
@staticmethod
def track_event(cid, kalliope_version, category, action, label=None, value=0):
# allowed parameters: https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters
data = {
'v': '1', # API Version.
'tid': GA_TRACKING_ID, # Tracking ID / Property ID.
'cid': cid, # unique user id
'an': "kalliope",
'av': kalliope_version,
'ds': 'api',
't': 'event', # Event hit type.
'ec': category, # Event category.
'ea': action, # Event action.
'el': label, # Event label.
'ev': value, # Event value, must be an integer
}
try:
response = requests.post(
'http://www.google-analytics.com/collect', data=data)
# If the request fails, this will raise a RequestException.
response.raise_for_status()
logger.debug("[GoogleTracking] hit sent: %s" % response.status_code)
except Exception as e:
logger.debug("[GoogleTracking] fail to send data: %s" % e)
|