from datetime import timedelta
import logging
import linode
import voluptuous as vol
from homeassistant.const import CONF_ACCESS_TOKEN
import homeassistant.helpers.config_validation as cv
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
ATTR_CREATED = "created"
ATTR_NODE_ID = "node_id"
ATTR_NODE_NAME = "node_name"
ATTR_IPV4_ADDRESS = "ipv4_address"
ATTR_IPV6_ADDRESS = "ipv6_address"
ATTR_MEMORY = "memory"
ATTR_REGION = "region"
ATTR_VCPUS = "vcpus"
CONF_NODES = "nodes"
DATA_LINODE = "data_li"
LINODE_PLATFORMS = ["binary_sensor", "switch"]
DOMAIN = "linode"
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60)
CONFIG_SCHEMA = vol.Schema(
{DOMAIN: vol.Schema({vol.Required(CONF_ACCESS_TOKEN): cv.string})},
extra=vol.ALLOW_EXTRA,
)
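# Example configuration.yaml entry accepted by CONFIG_SCHEMA above (the token
# value is a placeholder):
#
# linode:
#   access_token: YOUR_LINODE_API_TOKEN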
def setup(hass, config):
"""Set up the Linode component."""
conf = config[DOMAIN]
access_token = conf.get(CONF_ACCESS_TOKEN)
_linode = Linode(access_token)
try:
_LOGGER.info("Linode Profile %s", _linode.manager.get_profile().username)
except linode.errors.ApiError as _ex:
_LOGGER.error(_ex)
return False
hass.data[DATA_LINODE] = _linode
return True
class Linode:
"""Handle all communication with the Linode API."""
def __init__(self, access_token):
"""Initialize the Linode connection."""
self._access_token = access_token
self.data = None
self.manager = linode.LinodeClient(token=self._access_token)
def get_node_id(self, node_name):
"""Get the status of a Linode Instance."""
node_id = None
try:
all_nodes = self.manager.linode.get_instances()
for node in all_nodes:
if node_name == node.label:
node_id = node.id
except linode.errors.ApiError as _ex:
_LOGGER.error(_ex)
return node_id
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Use the data from Linode API."""
try:
self.data = self.manager.linode.get_instances()
except linode.errors.ApiError as _ex:
_LOGGER.error(_ex)
|
import logging
import os
from gmusicapi.utils import utils
from oauth2client.client import OAuth2WebServerFlow
import oauth2client.file
import webbrowser
class _Base(metaclass=utils.DocstringInheritMeta):
"""Factors out common client setup."""
_session_class = utils.NotImplementedField
num_clients = 0 # used to disambiguate loggers
def __init__(self, logger_basename, debug_logging, validate, verify_ssl):
"""
:param debug_logging: each Client has a ``logger`` member.
The logger is named ``gmusicapi.<client class><client number>`` and
          will propagate to the ``gmusicapi`` root logger.
If this param is ``True``, handlers will be configured to send
this client's debug log output to disk,
with warnings and above printed to stderr.
`Appdirs <https://pypi.python.org/pypi/appdirs>`__
``user_log_dir`` is used by default. Users can run::
from gmusicapi.utils import utils
              print(utils.log_filepath)
to see the exact location on their system.
If ``False``, no handlers will be configured;
users must create their own handlers.
Completely ignoring logging is dangerous and not recommended.
The Google Music protocol can change at any time; if
something were to go wrong, the logs would be necessary for
recovery.
        :param validate: if False, do not validate server responses against
          known schemas. Validation helps to catch protocol changes, but
          requires significant cpu work.
This arg is stored as ``self.validate`` and can be safely
modified at runtime.
:param verify_ssl: if False, exceptions will not be raised if there
are problems verifying SSL certificates.
Be wary of using this option; it's almost always better to
fix the machine's SSL configuration than to ignore errors.
"""
# this isn't correct if init is called more than once, so we log the
# client name below to avoid confusion for people reading logs
_Base.num_clients += 1
logger_name = "gmusicapi.%s%s" % (logger_basename,
_Base.num_clients)
self._cache = {}
self.logger = logging.getLogger(logger_name)
self.validate = validate
self._verify_ssl = verify_ssl
def setup_session(s):
s.verify = self._verify_ssl
self.session = self._session_class(rsession_setup=setup_session)
if debug_logging:
utils.configure_debug_log_handlers(self.logger)
self.logger.info("initialized")
self.logout()
def _make_call(self, protocol, *args, **kwargs):
"""Returns the response of a protocol.Call.
args/kwargs are passed to protocol.perform.
CallFailure may be raised."""
return protocol.perform(self.session, self.validate, *args, **kwargs)
def is_authenticated(self):
"""Returns ``True`` if the Api can make an authenticated request."""
return self.session.is_authenticated
def logout(self):
"""Forgets local authentication and cached properties in this Api instance.
Returns ``True`` on success."""
# note to clients: this will be called during __init__.
self.session.logout()
self._cache.clear() # Clear the instance of all cached properties.
self.logger.info("logged out")
return True
class _OAuthClient(_Base):
_path_sentinel = object()
# the default path for credential storage
OAUTH_FILEPATH = utils.NotImplementedField
@classmethod
def perform_oauth(cls, storage_filepath=_path_sentinel, open_browser=False):
"""Provides a series of prompts for a user to follow to authenticate.
Returns ``oauth2client.client.OAuth2Credentials`` when successful.
In most cases, this should only be run once per machine to store
credentials to disk, then never be needed again.
If the user refuses to give access,
``oauth2client.client.FlowExchangeError`` is raised.
:param storage_filepath: a filepath to write the credentials to,
or ``None``
to not write the credentials to disk (which is not recommended).
`Appdirs <https://pypi.python.org/pypi/appdirs>`__
``user_data_dir`` is used by default. Check the OAUTH_FILEPATH field
on this class to see the exact location that will be used.
:param open_browser: if True, attempt to open the auth url
in the system default web browser. The url will be printed
regardless of this param's setting.
This flow is intentionally very simple.
For complete control over the OAuth flow, pass an
``oauth2client.client.OAuth2Credentials``
to :func:`login` instead.
"""
if storage_filepath is cls._path_sentinel:
storage_filepath = cls.OAUTH_FILEPATH
flow = OAuth2WebServerFlow(**cls._session_class.oauth._asdict())
auth_uri = flow.step1_get_authorize_url()
print()
print("Visit the following url:\n %s" % auth_uri)
if open_browser:
print()
print('Opening your browser to it now...', end=' ')
webbrowser.open(auth_uri)
print('done.')
print("If you don't see your browser, you can just copy and paste the url.")
print()
code = input("Follow the prompts, then paste the auth code here and hit enter: ")
credentials = flow.step2_exchange(code)
if storage_filepath is not None:
if storage_filepath == cls.OAUTH_FILEPATH:
utils.make_sure_path_exists(os.path.dirname(cls.OAUTH_FILEPATH), 0o700)
storage = oauth2client.file.Storage(storage_filepath)
storage.put(credentials)
return credentials
def _oauth_login(self, oauth_credentials):
"""Return True on success."""
if isinstance(oauth_credentials, str):
oauth_file = oauth_credentials
if oauth_file == self.OAUTH_FILEPATH:
utils.make_sure_path_exists(os.path.dirname(self.OAUTH_FILEPATH), 0o700)
storage = oauth2client.file.Storage(oauth_file)
oauth_credentials = storage.get()
if oauth_credentials is None:
self.logger.warning("could not retrieve oauth credentials from '%r'", oauth_file)
return False
if not self.session.login(oauth_credentials):
self.logger.warning("failed to authenticate")
return False
self.logger.info("oauth successful")
return True
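# Usage sketch for the OAuth flow above, assuming a concrete client subclass
# such as gmusicapi's Mobileclient; names outside this file (oauth_login,
# FROM_MAC_ADDRESS) are not confirmed by this snippet:
#
# from gmusicapi import Mobileclient
#
# credentials = Mobileclient.perform_oauth()  # interactive, one-time setup
# client = Mobileclient()
# client.oauth_login(Mobileclient.FROM_MAC_ADDRESS, credentials)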
|
import logging
from homeassistant.components.device_tracker.config_entry import ScannerEntity
from homeassistant.components.device_tracker.const import (
DOMAIN as DEVICE_TRACKER,
SOURCE_TYPE_ROUTER,
)
from homeassistant.core import callback
from homeassistant.helpers import entity_registry
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC
from homeassistant.helpers.dispatcher import async_dispatcher_connect
import homeassistant.util.dt as dt_util
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up device tracker for Mikrotik component."""
hub = hass.data[DOMAIN][config_entry.entry_id]
tracked = {}
registry = await entity_registry.async_get_registry(hass)
    # Restore clients that are not part of the active clients list.
for entity in registry.entities.values():
if (
entity.config_entry_id == config_entry.entry_id
and entity.domain == DEVICE_TRACKER
):
if (
entity.unique_id in hub.api.devices
or entity.unique_id not in hub.api.all_devices
):
continue
hub.api.restore_device(entity.unique_id)
@callback
def update_hub():
"""Update the status of the device."""
update_items(hub, async_add_entities, tracked)
async_dispatcher_connect(hass, hub.signal_update, update_hub)
update_hub()
@callback
def update_items(hub, async_add_entities, tracked):
"""Update tracked device state from the hub."""
new_tracked = []
for mac, device in hub.api.devices.items():
if mac not in tracked:
tracked[mac] = MikrotikHubTracker(device, hub)
new_tracked.append(tracked[mac])
if new_tracked:
async_add_entities(new_tracked)
class MikrotikHubTracker(ScannerEntity):
"""Representation of network device."""
def __init__(self, device, hub):
"""Initialize the tracked device."""
self.device = device
self.hub = hub
self.unsub_dispatcher = None
@property
def is_connected(self):
"""Return true if the client is connected to the network."""
if (
self.device.last_seen
and (dt_util.utcnow() - self.device.last_seen)
< self.hub.option_detection_time
):
return True
return False
@property
def source_type(self):
"""Return the source type of the client."""
return SOURCE_TYPE_ROUTER
@property
def name(self) -> str:
"""Return the name of the client."""
return self.device.name
@property
def unique_id(self) -> str:
"""Return a unique identifier for this device."""
return self.device.mac
@property
def available(self) -> bool:
"""Return if controller is available."""
return self.hub.available
@property
def device_state_attributes(self):
"""Return the device state attributes."""
if self.is_connected:
return self.device.attrs
return None
@property
def device_info(self):
"""Return a client description for device registry."""
info = {
"connections": {(CONNECTION_NETWORK_MAC, self.device.mac)},
"identifiers": {(DOMAIN, self.device.mac)},
# We only get generic info from device discovery and so don't want
# to override API specific info that integrations can provide
"default_name": self.name,
}
return info
async def async_added_to_hass(self):
"""Client entity created."""
_LOGGER.debug("New network device tracker %s (%s)", self.name, self.unique_id)
self.unsub_dispatcher = async_dispatcher_connect(
self.hass, self.hub.signal_update, self.async_write_ha_state
)
async def async_update(self):
"""Synchronize state with hub."""
_LOGGER.debug(
"Updating Mikrotik tracked client %s (%s)", self.entity_id, self.unique_id
)
await self.hub.request_update()
    async def async_will_remove_from_hass(self):
"""Disconnect from dispatcher."""
if self.unsub_dispatcher:
self.unsub_dispatcher()
|
import glob
import os
import re
import shutil
import subprocess
import sys
from unittest_mixins import ModuleCleaner
from coverage import env
from coverage.backward import invalidate_import_caches, unicode_class
from coverage.misc import output_encoding
def run_command(cmd):
"""Run a command in a sub-process.
Returns the exit status code and the combined stdout and stderr.
"""
if env.PY2 and isinstance(cmd, unicode_class):
cmd = cmd.encode(sys.getfilesystemencoding())
# In some strange cases (PyPy3 in a virtualenv!?) the stdout encoding of
# the subprocess is set incorrectly to ascii. Use an environment variable
# to force the encoding to be the same as ours.
sub_env = dict(os.environ)
sub_env['PYTHONIOENCODING'] = output_encoding()
proc = subprocess.Popen(
cmd,
shell=True,
env=sub_env,
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT
)
output, _ = proc.communicate()
status = proc.returncode
# Get the output, and canonicalize it to strings with newlines.
if not isinstance(output, str):
output = output.decode(output_encoding())
output = output.replace('\r', '')
return status, output
class CheckUniqueFilenames(object):
"""Asserts the uniqueness of file names passed to a function."""
def __init__(self, wrapped):
self.filenames = set()
self.wrapped = wrapped
@classmethod
def hook(cls, obj, method_name):
"""Replace a method with our checking wrapper.
The method must take a string as a first argument. That argument
will be checked for uniqueness across all the calls to this method.
The values don't have to be file names actually, just strings, but
we only use it for filename arguments.
"""
method = getattr(obj, method_name)
hook = cls(method)
setattr(obj, method_name, hook.wrapper)
return hook
def wrapper(self, filename, *args, **kwargs):
"""The replacement method. Check that we don't have dupes."""
assert filename not in self.filenames, (
"File name %r passed to %r twice" % (filename, self.wrapped)
)
self.filenames.add(filename)
ret = self.wrapped(filename, *args, **kwargs)
return ret
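# Usage sketch for CheckUniqueFilenames (the object and method names here are
# hypothetical):
#
# hook = CheckUniqueFilenames.hook(reporter, 'write_file')
# reporter.write_file('a.py')   # fine
# reporter.write_file('a.py')   # AssertionError: passed twice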
def re_lines(text, pat, match=True):
"""Return the text of lines that match `pat` in the string `text`.
If `match` is false, the selection is inverted: only the non-matching
lines are included.
Returns a string, the text of only the selected lines.
"""
return "".join(l for l in text.splitlines(True) if bool(re.search(pat, l)) == match)
def re_line(text, pat):
"""Return the one line in `text` that matches regex `pat`.
Raises an AssertionError if more than one, or less than one, line matches.
"""
lines = re_lines(text, pat).splitlines()
assert len(lines) == 1
return lines[0]
def remove_files(*patterns):
"""Remove all files that match any of the patterns."""
for pattern in patterns:
for fname in glob.glob(pattern):
os.remove(fname)
class SuperModuleCleaner(ModuleCleaner):
"""Remember the state of sys.modules and restore it later."""
def clean_local_file_imports(self):
"""Clean up the results of calls to `import_local_file`.
Use this if you need to `import_local_file` the same file twice in
one test.
"""
# So that we can re-import files, clean them out first.
self.cleanup_modules()
# Also have to clean out the .pyc file, since the timestamp
# resolution is only one second, a changed file might not be
# picked up.
remove_files("*.pyc", "*$py.class")
if os.path.exists("__pycache__"):
shutil.rmtree("__pycache__")
invalidate_import_caches()
# Map chars to numbers for arcz_to_arcs
_arcz_map = {'.': -1}
_arcz_map.update(dict((c, ord(c) - ord('0')) for c in '123456789'))
_arcz_map.update(dict(
(c, 10 + ord(c) - ord('A')) for c in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
))
def arcz_to_arcs(arcz):
"""Convert a compact textual representation of arcs to a list of pairs.
The text has space-separated pairs of letters. Period is -1, 1-9 are
    1-9, A-Z are 10 through 35. The resulting list is sorted regardless of
the order of the input pairs.
".1 12 2." --> [(-1,1), (1,2), (2,-1)]
Minus signs can be included in the pairs:
"-11, 12, 2-5" --> [(-1,1), (1,2), (2,-5)]
"""
arcs = []
for pair in arcz.split():
asgn = bsgn = 1
if len(pair) == 2:
a, b = pair
else:
assert len(pair) == 3
if pair[0] == '-':
_, a, b = pair
asgn = -1
else:
assert pair[1] == '-'
a, _, b = pair
bsgn = -1
arcs.append((asgn * _arcz_map[a], bsgn * _arcz_map[b]))
return sorted(arcs)
_arcz_unmap = {val: ch for ch, val in _arcz_map.items()}
def _arcs_to_arcz_repr_one(num):
"""Return an arcz form of the number `num`, or "?" if there is none."""
if num == -1:
return "."
z = ""
if num < 0:
z += "-"
num *= -1
z += _arcz_unmap.get(num, "?")
return z
def arcs_to_arcz_repr(arcs):
"""Convert a list of arcs to a readable multi-line form for asserting.
Each pair is on its own line, with a comment showing the arcz form,
to make it easier to decode when debugging test failures.
"""
repr_list = []
for a, b in arcs:
line = repr((a, b))
line += " # "
line += _arcs_to_arcz_repr_one(a)
line += _arcs_to_arcz_repr_one(b)
repr_list.append(line)
return "\n".join(repr_list) + "\n"
|
import time
import sys
from math import floor, log10
from collections import deque
class FileProgress(object):
__slots__ = ('current', 'status', 'total')
def __init__(self, total_sz: int, current: int=0):
self.total = total_sz
self.current = current
self.status = None
def update(self, chunk):
        # len() gives the chunk's byte count; __sizeof__() would include
        # Python object overhead and overstate progress.
        self.current += len(chunk)
def reset(self):
self.current = 0
def done(self):
self.current = self.total
class MultiProgress(object):
"""Container that accumulates multiple FileProgress objects"""
def __init__(self):
self._progresses = []
self._last_inv = None
self._last_prog = 0
self._last_speeds = deque([0] * 10, 10)
def end(self):
self.print_progress()
print()
failed = sum(1 for s in self._progresses if s.status)
if failed:
print('%d file(s) failed.' % failed)
def add(self, progress: FileProgress):
self._progresses.append(progress)
def print_progress(self):
total = 0
current = 0
complete = 0
for p in self._progresses:
total += p.total
current += p.current
if p.total <= p.current:
complete += 1
if current > total:
total = current
self._print(total, current, len(self._progresses), complete)
def _print(self, total_sz: int, current_sz: int, total_items: int, done: int):
"""Prints a line that includes a progress bar, total and current transfer size,
total and done items, average speed, and ETA. Uses ANSI escape codes."""
if not self._last_inv:
self._last_inv = time.time()
t = time.time()
duration = t - self._last_inv
speed = (current_sz - self._last_prog) / duration if duration else 0
rate = float(current_sz) / total_sz if total_sz else 1
self._last_speeds.append(speed)
avg_speed = float(sum(self._last_speeds)) / len(self._last_speeds)
eta = float(total_sz - current_sz) / avg_speed if avg_speed else 0
self._last_inv, self._last_prog = t, current_sz
percentage = round(rate * 100, ndigits=2) if rate <= 1 else 100
completed = "#" * int(percentage / 4)
spaces = " " * (25 - len(completed))
        # Guard against log10(0) when no items have been added yet
        item_width = floor(log10(total_items)) if total_items else 0
sys.stdout.write('[%s%s] %s%% of %s %s/%d %s %s\x1b[K\r'
% (completed, spaces, ('%3.1f' % percentage).rjust(5),
(file_size_str(total_sz)).rjust(7),
str(done).rjust(item_width + 1), total_items,
(speed_str(avg_speed)).rjust(10), time_str(eta).rjust(7)))
sys.stdout.flush()
def speed_str(num: int, suffix='B', time_suffix='/s') -> str:
for unit in ['', 'K', 'M', 'G', 'T', 'P', 'E', 'Z']:
if abs(num) < 1000.0:
return "%3.1f%s%s%s" % (num, unit, suffix, time_suffix)
num /= 1000.0
return "%.1f%s%s%s" % (num, 'Y', suffix, time_suffix)
def file_size_str(num: int, suffix='B') -> str:
for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
if abs(num) < 1024.0:
return "%4.0f%s%s" % (num, unit, suffix)
num /= 1024.0
return "%.1f%s%s" % (num, 'Yi', suffix)
def time_str(num: float) -> str:
if num <= 0:
return '0s'
if num < 60:
return '%02ds' % num
elif num < 3600:
seconds = num % 60 // 1
minutes = (num - seconds) // 60
return '%02d:%02dm' % (minutes, seconds)
elif num <= 86400:
minutes = num % 3600 // 60
hours = (num - minutes) // 3600
return '%02d:%02dh' % (hours, minutes)
elif num <= 31536000:
hours = num % 86400 // 3600
days = (num - hours) // 86400
if days >= 100:
return '%id' % days
return '%id %02dh' % (days, hours)
else:
return '>1 year'
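# Minimal usage sketch of the progress classes above (the input file and
# chunk size are illustrative):
#
# progress = MultiProgress()
# fp = FileProgress(total_sz=1 << 20)
# progress.add(fp)
# with open('big.bin', 'rb') as f:
#     for chunk in iter(lambda: f.read(8192), b''):
#         fp.update(chunk)
#         progress.print_progress()
# fp.done()
# progress.end()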
|
from __future__ import unicode_literals
import re
import json
try:
from html.parser import HTMLParser # py3
except ImportError:
from HTMLParser import HTMLParser # py2
import warnings
from django.db import migrations
from djangocms_text_ckeditor.models import Text as TextModel
def _replace_text_body(old_body, input_pattern, output_tag, id_format):
regex = re.compile(input_pattern)
def _do_replace(match):
before_id, plugin_id, after_id = match.groups()
if not plugin_id:
return ''
bits = []
if before_id:
bits.append(before_id.strip())
bits.append(id_format.format(plugin_id))
if after_id:
bits.append(after_id.strip())
# By using .join() we ensure the correct
# amount of spaces are used to separate the different
# attributes.
tag_attrs = ' '.join(bits)
return output_tag.format(tag_attrs)
new_body, count = regex.subn(_do_replace, old_body)
return new_body, count
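# Illustrative example of the replacement performed in forwards() below
# (arguments mirror the ones passed there):
#
# >>> _replace_text_body(
# ...     '<img alt="" id="plugin_obj_42">',
# ...     input_pattern=r'<img ([^>]*)\bid="plugin_obj_(?P<pk>\d+)"([^>]*)/?>',
# ...     output_tag='<cms-plugin {}></cms-plugin>',
# ...     id_format='id="{}"')
# ('<cms-plugin alt="" id="42"></cms-plugin>', 1)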
def forwards(apps, schema_editor):
html_parser = HTMLParser()
for element in TextModel.objects.all():
if element.plugin_type != 'AcceptConditionPlugin':
continue
try:
old_body = html_parser.unescape(json.loads(element.body).get('html_content'))
except ValueError:
continue
new_body, count = _replace_text_body(
old_body,
input_pattern=r'<img ([^>]*)\bid="plugin_obj_(?P<pk>\d+)"([^>]*)/?>',
output_tag='<cms-plugin {}></cms-plugin>',
id_format='id="{}"'
)
# print("Convert Cascade Element {} -> Text:\n {}".format(element.pk, new_body))
if count >= 1:
# Only update body if there were plugins in the text
TextModel.objects.filter(pk=element.pk).update(body=new_body)
def backwards(apps, schema_editor):
warnings.warn("Backward migration is not implemented yet")
class Migration(migrations.Migration):
dependencies = [
('shop', '0003_glossary_fields'),
]
operations = [
migrations.RunSQL([("INSERT INTO djangocms_text_ckeditor_text (cmsplugin_ptr_id, body) "
"SELECT cmsplugin_ptr_id, glossary FROM cmsplugin_cascade_element "
"INNER JOIN cms_cmsplugin ON cmsplugin_ptr_id=id WHERE plugin_type='AcceptConditionFormPlugin';", None)]),
migrations.RunSQL([("UPDATE cms_cmsplugin SET plugin_type='AcceptConditionPlugin' WHERE id IN ("
"SELECT id FROM (SELECT * FROM cms_cmsplugin) AS plugins WHERE plugin_type='AcceptConditionFormPlugin');", None)]),
migrations.RunSQL([("DELETE FROM cmsplugin_cascade_element WHERE cmsplugin_ptr_id IN ("
"SELECT id FROM cms_cmsplugin WHERE plugin_type='AcceptConditionPlugin');", None)]),
migrations.RunPython(forwards, backwards),
]
|
from typing import MutableSequence
from PyQt5.QtCore import Qt, QModelIndex, QAbstractItemModel
from qutebrowser.utils import log, qtutils, utils
from qutebrowser.api import cmdutils
class CompletionModel(QAbstractItemModel):
"""A model that proxies access to one or more completion categories.
Top level indices represent categories.
Child indices represent rows of those tables.
Attributes:
column_widths: The width percentages of the columns used in the
completion view.
_categories: The sub-categories.
"""
def __init__(self, *, column_widths=(30, 70, 0), parent=None):
super().__init__(parent)
self.column_widths = column_widths
self._categories: MutableSequence[QAbstractItemModel] = []
def _cat_from_idx(self, index):
"""Return the category pointed to by the given index.
Args:
            index: A QModelIndex
Returns:
A category if the index points at one, else None
"""
# items hold an index to the parent category in their internalPointer
# categories have an empty internalPointer
if index.isValid() and not index.internalPointer():
return self._categories[index.row()]
return None
def add_category(self, cat):
"""Add a completion category to the model."""
self._categories.append(cat)
def data(self, index, role=Qt.DisplayRole):
"""Return the item data for index.
Override QAbstractItemModel::data.
Args:
            index: The QModelIndex to get item data for.
Return: The item data, or None on an invalid index.
"""
if role != Qt.DisplayRole:
return None
cat = self._cat_from_idx(index)
if cat:
# category header
if index.column() == 0:
return self._categories[index.row()].name
return None
# item
cat = self._cat_from_idx(index.parent())
if not cat:
return None
idx = cat.index(index.row(), index.column())
return cat.data(idx)
def flags(self, index):
"""Return the item flags for index.
Override QAbstractItemModel::flags.
Return: The item flags, or Qt.NoItemFlags on error.
"""
if not index.isValid():
return Qt.NoItemFlags
if index.parent().isValid():
# item
return (Qt.ItemIsEnabled | Qt.ItemIsSelectable |
Qt.ItemNeverHasChildren)
else:
# category
return Qt.NoItemFlags
def index(self, row, col, parent=QModelIndex()):
"""Get an index into the model.
Override QAbstractItemModel::index.
Return: A QModelIndex.
"""
if (row < 0 or row >= self.rowCount(parent) or
col < 0 or col >= self.columnCount(parent)):
return QModelIndex()
if parent.isValid():
if parent.column() != 0:
return QModelIndex()
# store a pointer to the parent category in internalPointer
return self.createIndex(row, col, self._categories[parent.row()])
return self.createIndex(row, col, None)
def parent(self, index):
"""Get an index to the parent of the given index.
Override QAbstractItemModel::parent.
Args:
index: The QModelIndex to get the parent index for.
"""
parent_cat = index.internalPointer()
if not parent_cat:
# categories have no parent
return QModelIndex()
row = self._categories.index(parent_cat)
return self.createIndex(row, 0, None)
def rowCount(self, parent=QModelIndex()):
"""Override QAbstractItemModel::rowCount."""
if not parent.isValid():
# top-level
return len(self._categories)
cat = self._cat_from_idx(parent)
if not cat or parent.column() != 0:
# item or nonzero category column (only first col has children)
return 0
else:
# category
return cat.rowCount()
def columnCount(self, parent=QModelIndex()):
"""Override QAbstractItemModel::columnCount."""
utils.unused(parent)
return len(self.column_widths)
def canFetchMore(self, parent):
"""Override to forward the call to the categories."""
cat = self._cat_from_idx(parent)
if cat:
return cat.canFetchMore(QModelIndex())
return False
def fetchMore(self, parent):
"""Override to forward the call to the categories."""
cat = self._cat_from_idx(parent)
if cat:
cat.fetchMore(QModelIndex())
def count(self):
"""Return the count of non-category items."""
return sum(t.rowCount() for t in self._categories)
def set_pattern(self, pattern):
"""Set the filter pattern for all categories.
Args:
pattern: The filter pattern to set.
"""
log.completion.debug("Setting completion pattern '{}'".format(pattern))
self.layoutAboutToBeChanged.emit() # type: ignore[attr-defined]
for cat in self._categories:
cat.set_pattern(pattern)
self.layoutChanged.emit() # type: ignore[attr-defined]
def first_item(self):
"""Return the index of the first child (non-category) in the model."""
for row, cat in enumerate(self._categories):
if cat.rowCount() > 0:
parent = self.index(row, 0)
index = self.index(0, 0, parent)
qtutils.ensure_valid(index)
return index
return QModelIndex()
def last_item(self):
"""Return the index of the last child (non-category) in the model."""
for row, cat in reversed(list(enumerate(self._categories))):
childcount = cat.rowCount()
if childcount > 0:
parent = self.index(row, 0)
index = self.index(childcount - 1, 0, parent)
qtutils.ensure_valid(index)
return index
return QModelIndex()
def columns_to_filter(self, index):
"""Return the column indices the filter pattern applies to.
Args:
index: index of the item to check.
Return: A list of integers.
"""
cat = self._cat_from_idx(index.parent())
return cat.columns_to_filter if cat else []
def delete_cur_item(self, index):
"""Delete the row at the given index."""
qtutils.ensure_valid(index)
parent = index.parent()
cat = self._cat_from_idx(parent)
assert cat, "CompletionView sent invalid index for deletion"
if not cat.delete_func:
raise cmdutils.CommandError("Cannot delete this item.")
data = [cat.data(cat.index(index.row(), i))
for i in range(cat.columnCount())]
cat.delete_func(data)
self.beginRemoveRows(parent, index.row(), index.row())
cat.removeRow(index.row(), QModelIndex())
self.endRemoveRows()
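# Usage sketch (category objects are assumed to be table models exposing the
# name/set_pattern/columns_to_filter API relied on above; the categories here
# are illustrative):
#
# model = CompletionModel(column_widths=(30, 70, 0))
# model.add_category(url_category)
# model.set_pattern('qute')     # filters every category
# first = model.first_item()    # QModelIndex of the first real item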
|
from pyatag import errors
from homeassistant import config_entries, data_entry_flow
from homeassistant.components.atag import DOMAIN
from homeassistant.core import HomeAssistant
from tests.async_mock import PropertyMock, patch
from tests.components.atag import (
PAIR_REPLY,
RECEIVE_REPLY,
UID,
USER_INPUT,
init_integration,
)
from tests.test_util.aiohttp import AiohttpClientMocker
async def test_show_form(hass):
"""Test that the form is served with no input."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
async def test_adding_second_device(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test that only one Atag configuration is allowed."""
await init_integration(hass, aioclient_mock)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}, data=USER_INPUT
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
with patch(
"pyatag.AtagOne.id",
new_callable=PropertyMock(return_value="secondary_device"),
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}, data=USER_INPUT
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
async def test_connection_error(hass):
"""Test we show user form on Atag connection error."""
with patch("pyatag.AtagOne.authorize", side_effect=errors.AtagException()):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_USER},
data=USER_INPUT,
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
assert result["errors"] == {"base": "cannot_connect"}
async def test_unauthorized(hass):
"""Test we show correct form when Unauthorized error is raised."""
with patch("pyatag.AtagOne.authorize", side_effect=errors.Unauthorized()):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_USER},
data=USER_INPUT,
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
assert result["errors"] == {"base": "unauthorized"}
async def test_full_flow_implementation(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test registering an integration and finishing flow works."""
aioclient_mock.post(
"http://127.0.0.1:10000/pair",
json=PAIR_REPLY,
)
aioclient_mock.post(
"http://127.0.0.1:10000/retrieve",
json=RECEIVE_REPLY,
)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_USER},
data=USER_INPUT,
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == UID
assert result["result"].unique_id == UID
|
from typing import TYPE_CHECKING, Optional
from PyQt5.QtWidgets import QTreeView, QSizePolicy, QStyleFactory, QWidget
from PyQt5.QtCore import pyqtSlot, pyqtSignal, Qt, QItemSelectionModel, QSize
from qutebrowser.config import config, stylesheet
from qutebrowser.completion import completiondelegate
from qutebrowser.utils import utils, usertypes, debug, log, qtutils
from qutebrowser.api import cmdutils
if TYPE_CHECKING:
from qutebrowser.mainwindow.statusbar import command
class CompletionView(QTreeView):
"""The view showing available completions.
Based on QTreeView but heavily customized so root elements show as category
headers, and children show as flat list.
Attributes:
pattern: Current filter pattern, used for highlighting.
_win_id: The ID of the window this CompletionView is associated with.
_height: The height to use for the CompletionView.
_height_perc: Either None or a percentage if height should be relative.
_delegate: The item delegate used.
_column_widths: A list of column widths, in percent.
_active: Whether a selection is active.
_cmd: The statusbar Command object.
Signals:
update_geometry: Emitted when the completion should be resized.
selection_changed: Emitted when the completion item selection changes.
"""
# Drawing the item foreground will be done by CompletionItemDelegate, so we
# don't define that in this stylesheet.
STYLESHEET = """
QTreeView {
font: {{ conf.fonts.completion.entry }};
background-color: {{ conf.colors.completion.even.bg }};
alternate-background-color: {{ conf.colors.completion.odd.bg }};
outline: 0;
border: 0px;
}
QTreeView::item:disabled {
background-color: {{ conf.colors.completion.category.bg }};
border-top: 1px solid
{{ conf.colors.completion.category.border.top }};
border-bottom: 1px solid
{{ conf.colors.completion.category.border.bottom }};
}
QTreeView::item:selected, QTreeView::item:selected:hover {
border-top: 1px solid
{{ conf.colors.completion.item.selected.border.top }};
border-bottom: 1px solid
{{ conf.colors.completion.item.selected.border.bottom }};
background-color: {{ conf.colors.completion.item.selected.bg }};
}
    QTreeView::item:hover {
border: 0px;
}
QTreeView QScrollBar {
width: {{ conf.completion.scrollbar.width }}px;
background: {{ conf.colors.completion.scrollbar.bg }};
}
QTreeView QScrollBar::handle {
background: {{ conf.colors.completion.scrollbar.fg }};
border: {{ conf.completion.scrollbar.padding }}px solid
{{ conf.colors.completion.scrollbar.bg }};
min-height: 10px;
}
    QTreeView QScrollBar::sub-line, QTreeView QScrollBar::add-line {
border: none;
background: none;
}
"""
update_geometry = pyqtSignal()
selection_changed = pyqtSignal(str)
def __init__(self, *,
cmd: 'command.Command',
win_id: int,
parent: QWidget = None) -> None:
super().__init__(parent)
self.pattern: Optional[str] = None
self._win_id = win_id
self._cmd = cmd
self._active = False
config.instance.changed.connect(self._on_config_changed)
self._delegate = completiondelegate.CompletionItemDelegate(self)
self.setItemDelegate(self._delegate)
self.setStyle(QStyleFactory.create('Fusion'))
stylesheet.set_register(self)
self.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Fixed)
self.setHeaderHidden(True)
self.setAlternatingRowColors(True)
self.setIndentation(0)
self.setItemsExpandable(False)
self.setExpandsOnDoubleClick(False)
self.setAnimated(False)
self.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
# WORKAROUND
# This is a workaround for weird race conditions with invalid
# item indexes leading to segfaults in Qt.
#
# Some background: http://bugs.quassel-irc.org/issues/663
# The proposed fix there was later reverted because it didn't help.
self.setUniformRowHeights(True)
self.hide()
# FIXME set elidemode
# https://github.com/qutebrowser/qutebrowser/issues/118
def __repr__(self):
return utils.get_repr(self)
@pyqtSlot(str)
def _on_config_changed(self, option):
if option in ['completion.height', 'completion.shrink']:
self.update_geometry.emit()
def _resize_columns(self):
"""Resize the completion columns based on column_widths."""
if self.model() is None:
return
width = self.size().width()
column_widths = self.model().column_widths
pixel_widths = [(width * perc // 100) for perc in column_widths]
delta = self.verticalScrollBar().sizeHint().width()
for i, width in reversed(list(enumerate(pixel_widths))):
if width > delta:
pixel_widths[i] -= delta
break
for i, w in enumerate(pixel_widths):
assert w >= 0, (i, w)
self.setColumnWidth(i, w)
def _next_idx(self, upwards):
"""Get the previous/next QModelIndex displayed in the view.
Used by tab_handler.
Args:
upwards: Get previous item, not next.
Return:
A QModelIndex.
"""
idx = self.selectionModel().currentIndex()
if not idx.isValid():
# No item selected yet
if upwards:
return self.model().last_item()
else:
return self.model().first_item()
while True:
idx = self.indexAbove(idx) if upwards else self.indexBelow(idx)
# wrap around if we arrived at beginning/end
if not idx.isValid() and upwards:
return self.model().last_item()
elif not idx.isValid() and not upwards:
idx = self.model().first_item()
self.scrollTo(idx.parent())
return idx
elif idx.parent().isValid():
# Item is a real item, not a category header -> success
return idx
raise utils.Unreachable
def _next_page(self, upwards):
"""Return the index a page away from the selected index.
Args:
upwards: Get previous item, not next.
Return:
A QModelIndex.
"""
old_idx = self.selectionModel().currentIndex()
idx = old_idx
model = self.model()
if not idx.isValid():
# No item selected yet
return model.last_item() if upwards else model.first_item()
# Find height of each CompletionView element
rect = self.visualRect(idx)
qtutils.ensure_valid(rect)
page_length = self.height() // rect.height()
# Skip one pageful, except leave one old line visible
offset = -(page_length - 1) if upwards else page_length - 1
idx = model.sibling(old_idx.row() + offset, old_idx.column(), old_idx)
# Skip category headers
while idx.isValid() and not idx.parent().isValid():
idx = self.indexAbove(idx) if upwards else self.indexBelow(idx)
if idx.isValid():
return idx
border_item = model.first_item() if upwards else model.last_item()
# Wrap around if we were already at the beginning/end
if old_idx == border_item:
return self._next_idx(upwards)
# Select the first/last item before wrapping around
if upwards:
self.scrollTo(border_item.parent())
return border_item
def _next_category_idx(self, upwards):
"""Get the index of the previous/next category.
Args:
upwards: Get previous item, not next.
Return:
A QModelIndex.
"""
idx = self.selectionModel().currentIndex()
if not idx.isValid():
return self._next_idx(upwards).sibling(0, 0)
idx = idx.parent()
direction = -1 if upwards else 1
while True:
idx = idx.sibling(idx.row() + direction, 0)
if not idx.isValid() and upwards:
# wrap around to the first item of the last category
return self.model().last_item().sibling(0, 0)
elif not idx.isValid() and not upwards:
# wrap around to the first item of the first category
idx = self.model().first_item()
self.scrollTo(idx.parent())
return idx
elif idx.isValid() and idx.child(0, 0).isValid():
# scroll to ensure the category is visible
self.scrollTo(idx)
return idx.child(0, 0)
raise utils.Unreachable
@cmdutils.register(instance='completion',
modes=[usertypes.KeyMode.command], scope='window')
@cmdutils.argument('which', choices=['next', 'prev',
'next-category', 'prev-category',
'next-page', 'prev-page'])
@cmdutils.argument('history', flag='H')
def completion_item_focus(self, which, history=False):
"""Shift the focus of the completion menu to another item.
Args:
which: 'next', 'prev',
'next-category', 'prev-category',
'next-page', or 'prev-page'.
history: Navigate through command history if no text was typed.
"""
if history:
if (self._cmd.text() == ':' or self._cmd.history.is_browsing() or
not self._active):
if which == 'next':
self._cmd.command_history_next()
return
elif which == 'prev':
self._cmd.command_history_prev()
return
else:
raise cmdutils.CommandError("Can't combine --history with "
"{}!".format(which))
if not self._active:
return
selmodel = self.selectionModel()
indices = {
'next': lambda: self._next_idx(upwards=False),
'prev': lambda: self._next_idx(upwards=True),
'next-category': lambda: self._next_category_idx(upwards=False),
'prev-category': lambda: self._next_category_idx(upwards=True),
'next-page': lambda: self._next_page(upwards=False),
'prev-page': lambda: self._next_page(upwards=True),
}
idx = indices[which]()
if not idx.isValid():
return
selmodel.setCurrentIndex(
idx,
QItemSelectionModel.ClearAndSelect | # type: ignore[arg-type]
QItemSelectionModel.Rows)
# if the last item is focused, try to fetch more
if idx.row() == self.model().rowCount(idx.parent()) - 1:
self.expandAll()
count = self.model().count()
if count == 0:
self.hide()
elif count == 1 and config.val.completion.quick:
self.hide()
elif config.val.completion.show == 'auto':
self.show()
def set_model(self, model):
"""Switch completion to a new model.
Called from on_update_completion().
Args:
model: The model to use.
"""
if self.model() is not None and model is not self.model():
self.model().deleteLater()
self.selectionModel().deleteLater()
self.setModel(model)
if model is None:
self._active = False
self.hide()
return
model.setParent(self)
self._active = True
self.pattern = None
self._maybe_show()
self._resize_columns()
for i in range(model.rowCount()):
self.expand(model.index(i, 0))
def set_pattern(self, pattern: str) -> None:
"""Set the pattern on the underlying model."""
if not self.model():
return
if self.pattern == pattern:
# no changes, abort
log.completion.debug(
"Ignoring pattern set request as pattern has not changed.")
return
self.pattern = pattern
with debug.log_time(log.completion, 'Set pattern {}'.format(pattern)):
self.model().set_pattern(pattern)
self.selectionModel().clear()
self._maybe_update_geometry()
self._maybe_show()
def _maybe_show(self):
if (config.val.completion.show == 'always' and
self.model().count() > 0):
self.show()
else:
self.hide()
def _maybe_update_geometry(self):
"""Emit the update_geometry signal if the config says so."""
if config.val.completion.shrink:
self.update_geometry.emit()
@pyqtSlot()
def on_clear_completion_selection(self):
"""Clear the selection model when an item is activated."""
self.hide()
selmod = self.selectionModel()
if selmod is not None:
selmod.clearSelection()
selmod.clearCurrentIndex()
def sizeHint(self):
"""Get the completion size according to the config."""
# Get the configured height/percentage.
confheight = str(config.val.completion.height)
if confheight.endswith('%'):
perc = int(confheight.rstrip('%'))
height = self.window().height() * perc // 100
else:
height = int(confheight)
# Shrink to content size if needed and shrinking is enabled
if config.val.completion.shrink:
contents_height = (
self.viewportSizeHint().height() +
self.horizontalScrollBar().sizeHint().height())
if contents_height <= height:
height = contents_height
# The width isn't really relevant as we're expanding anyways.
return QSize(-1, height)
def selectionChanged(self, selected, deselected):
"""Extend selectionChanged to call completers selection_changed."""
if not self._active:
return
super().selectionChanged(selected, deselected)
indexes = selected.indexes()
if not indexes:
return
data = str(self.model().data(indexes[0]))
self.selection_changed.emit(data)
def resizeEvent(self, e):
"""Extend resizeEvent to adjust column size."""
super().resizeEvent(e)
self._resize_columns()
def showEvent(self, e):
"""Adjust the completion size and scroll when it's freshly shown."""
self.update_geometry.emit()
scrollbar = self.verticalScrollBar()
if scrollbar is not None:
scrollbar.setValue(scrollbar.minimum())
super().showEvent(e)
@cmdutils.register(instance='completion',
modes=[usertypes.KeyMode.command], scope='window')
def completion_item_del(self):
"""Delete the current completion item."""
index = self.currentIndex()
if not index.isValid():
raise cmdutils.CommandError("No item selected!")
self.model().delete_cur_item(index)
@cmdutils.register(instance='completion',
modes=[usertypes.KeyMode.command], scope='window')
def completion_item_yank(self, sel=False):
"""Yank the current completion item into the clipboard.
Args:
sel: Use the primary selection instead of the clipboard.
"""
text = self._cmd.selectedText()
if not text:
index = self.currentIndex()
if not index.isValid():
raise cmdutils.CommandError("No item selected!")
text = self.model().data(index)
if not utils.supports_selection():
sel = False
utils.set_clipboard(text, selection=sel)
|
import collections
import logging
from typing import List, Mapping, Tuple
import cairo
from gi.repository import Gdk, GObject, Gtk
from meld.settings import get_meld_settings
from meld.style import get_common_theme
from meld.tree import STATE_ERROR, STATE_MODIFIED, STATE_NEW
log = logging.getLogger(__name__)
class ChunkMap(Gtk.DrawingArea):
__gtype_name__ = "ChunkMap"
adjustment = GObject.Property(
type=Gtk.Adjustment,
nick='Adjustment used for scrolling the mapped view',
flags=(
GObject.ParamFlags.READWRITE |
GObject.ParamFlags.CONSTRUCT_ONLY
),
)
handle_overdraw_alpha = GObject.Property(
type=float,
nick='Alpha of the document handle overdraw',
default=0.2,
)
handle_outline_alpha = GObject.Property(
type=float,
nick='Alpha of the document handle outline',
default=0.4,
)
@GObject.Property(
type=GObject.TYPE_PYOBJECT,
nick='Chunks defining regions in the mapped view',
)
def chunks(self):
return self._chunks
@chunks.setter
def chunks_set(self, chunks):
self._chunks = chunks
self._cached_map = None
self.queue_draw()
overdraw_padding: int = 2
def __init__(self):
super().__init__()
self.chunks = []
self._have_grab = False
self._cached_map = None
def do_realize(self):
if not self.adjustment:
log.critical(
f'{self.__gtype_name__} initialized without an adjustment')
return Gtk.DrawingArea.do_realize(self)
self.set_events(
Gdk.EventMask.POINTER_MOTION_MASK |
Gdk.EventMask.BUTTON_PRESS_MASK |
Gdk.EventMask.BUTTON_RELEASE_MASK
)
self.adjustment.connect('changed', lambda w: self.queue_draw())
self.adjustment.connect('value-changed', lambda w: self.queue_draw())
meld_settings = get_meld_settings()
meld_settings.connect('changed', self.on_setting_changed)
self.on_setting_changed(meld_settings, 'style-scheme')
return Gtk.DrawingArea.do_realize(self)
def do_size_allocate(self, *args):
self._cached_map = None
return Gtk.DrawingArea.do_size_allocate(self, *args)
def on_setting_changed(self, settings, key):
if key == 'style-scheme':
self.fill_colors, self.line_colors = get_common_theme()
self._cached_map = None
def get_height_scale(self) -> float:
return 1.0
def get_map_base_colors(
self) -> Tuple[Gdk.RGBA, Gdk.RGBA, Gdk.RGBA, Gdk.RGBA]:
raise NotImplementedError()
def _make_map_base_colors(
self, widget) -> Tuple[Gdk.RGBA, Gdk.RGBA, Gdk.RGBA, Gdk.RGBA]:
stylecontext = widget.get_style_context()
base_set, base = (
stylecontext.lookup_color('theme_base_color'))
if not base_set:
base = Gdk.RGBA(1.0, 1.0, 1.0, 1.0)
        text_set, text = (
            stylecontext.lookup_color('theme_text_color'))
        if not text_set:
            text = Gdk.RGBA(0.0, 0.0, 0.0, 1.0)
        border_set, border = (
            stylecontext.lookup_color('borders'))
        if not border_set:
            border = Gdk.RGBA(0.95, 0.95, 0.95, 1.0)
handle_overdraw = text.copy()
handle_overdraw.alpha = self.handle_overdraw_alpha
handle_outline = text.copy()
handle_outline.alpha = self.handle_outline_alpha
return base, border, handle_overdraw, handle_outline
def chunk_coords_by_tag(self) -> Mapping[str, List[Tuple[float, float]]]:
"""Map chunks to buffer offsets for drawing, ordered by tag"""
raise NotImplementedError()
def do_draw(self, context: cairo.Context) -> bool:
if not self.adjustment or self.adjustment.get_upper() <= 0:
return False
height = self.get_allocated_height()
width = self.get_allocated_width()
if width <= 0 or height <= 0:
return False
base_bg, base_outline, handle_overdraw, handle_outline = (
self.get_map_base_colors())
x0 = self.overdraw_padding + 0.5
x1 = width - 2 * x0
height_scale = height * self.get_height_scale()
if self._cached_map is None:
surface = cairo.Surface.create_similar(
context.get_target(), cairo.CONTENT_COLOR_ALPHA, width, height)
cache_ctx = cairo.Context(surface)
cache_ctx.set_line_width(1)
cache_ctx.rectangle(x0, -0.5, x1, height_scale + 0.5)
cache_ctx.set_source_rgba(*base_bg)
cache_ctx.fill()
# We get drawing coordinates by tag to minimise our source
# colour setting, and make this loop slightly cleaner.
tagged_diffs = self.chunk_coords_by_tag()
for tag, diffs in tagged_diffs.items():
cache_ctx.set_source_rgba(*self.fill_colors[tag])
for y0, y1 in diffs:
y0 = round(y0 * height_scale) + 0.5
y1 = round(y1 * height_scale) - 0.5
cache_ctx.rectangle(x0, y0, x1, y1 - y0)
cache_ctx.fill_preserve()
cache_ctx.set_source_rgba(*self.line_colors[tag])
cache_ctx.stroke()
cache_ctx.rectangle(x0, -0.5, x1, height_scale + 0.5)
cache_ctx.set_source_rgba(*base_outline)
cache_ctx.stroke()
self._cached_map = surface
context.set_source_surface(self._cached_map, 0, 0)
context.paint()
# Draw our scroll position indicator
context.set_line_width(1)
context.set_source_rgba(*handle_overdraw)
adj_y = self.adjustment.get_value() / self.adjustment.get_upper()
adj_h = self.adjustment.get_page_size() / self.adjustment.get_upper()
context.rectangle(
x0 - self.overdraw_padding, round(height_scale * adj_y) + 0.5,
x1 + 2 * self.overdraw_padding, round(height_scale * adj_h) - 1,
)
context.fill_preserve()
context.set_source_rgba(*handle_outline)
context.stroke()
return True
def _scroll_to_location(self, location: float):
raise NotImplementedError()
def _scroll_fraction(self, position: float):
"""Scroll the mapped textview to the given position
This uses GtkTextView's scrolling so that the movement is
animated.
:param position: Position to scroll to, in event coordinates
"""
if not self.adjustment:
return
height = self.get_height_scale() * self.get_allocated_height()
fraction = position / height
adj = self.adjustment
location = fraction * (adj.get_upper() - adj.get_lower())
self._scroll_to_location(location)
def do_button_press_event(self, event: Gdk.EventButton) -> bool:
if event.button == 1:
self._scroll_fraction(event.y)
self.grab_add()
self._have_grab = True
return True
return False
def do_button_release_event(self, event: Gdk.EventButton) -> bool:
if event.button == 1:
self.grab_remove()
self._have_grab = False
return True
return False
def do_motion_notify_event(self, event: Gdk.EventMotion) -> bool:
if self._have_grab:
self._scroll_fraction(event.y)
return True
class TextViewChunkMap(ChunkMap):
__gtype_name__ = 'TextViewChunkMap'
textview = GObject.Property(
type=Gtk.TextView,
nick='Textview being mapped',
flags=(
GObject.ParamFlags.READWRITE |
GObject.ParamFlags.CONSTRUCT_ONLY
),
)
paired_adjustment_1 = GObject.Property(
type=Gtk.Adjustment,
nick='Paired adjustment used for scaling the map',
flags=(
GObject.ParamFlags.READWRITE |
GObject.ParamFlags.CONSTRUCT_ONLY
),
)
paired_adjustment_2 = GObject.Property(
type=Gtk.Adjustment,
nick='Paired adjustment used for scaling the map',
flags=(
GObject.ParamFlags.READWRITE |
GObject.ParamFlags.CONSTRUCT_ONLY
),
)
def get_height_scale(self):
adjustments = [
self.props.adjustment,
self.props.paired_adjustment_1,
self.props.paired_adjustment_2,
]
heights = [
adj.get_upper() for adj in adjustments
if adj.get_upper() > 0
]
return self.props.adjustment.get_upper() / max(heights)
def get_map_base_colors(self):
return self._make_map_base_colors(self.textview)
def chunk_coords_by_tag(self):
buf = self.textview.get_buffer()
tagged_diffs: Mapping[str, List[Tuple[float, float]]]
tagged_diffs = collections.defaultdict(list)
y, h = self.textview.get_line_yrange(buf.get_end_iter())
max_y = float(y + h)
for chunk in self.chunks:
start_iter = buf.get_iter_at_line(chunk.start_a)
y0, _ = self.textview.get_line_yrange(start_iter)
if chunk.start_a == chunk.end_a:
y, h = y0, 0
else:
end_iter = buf.get_iter_at_line(chunk.end_a - 1)
y, h = self.textview.get_line_yrange(end_iter)
tagged_diffs[chunk.tag].append((y0 / max_y, (y + h) / max_y))
return tagged_diffs
def do_draw(self, context: cairo.Context) -> bool:
if not self.textview:
return False
return ChunkMap.do_draw(self, context)
def _scroll_to_location(self, location: float):
if not self.textview:
return
_, it = self.textview.get_iter_at_location(0, location)
self.textview.scroll_to_iter(it, 0.0, True, 1.0, 0.5)
class TreeViewChunkMap(ChunkMap):
__gtype_name__ = 'TreeViewChunkMap'
treeview = GObject.Property(
type=Gtk.TreeView,
nick='Treeview being mapped',
flags=(
GObject.ParamFlags.READWRITE |
GObject.ParamFlags.CONSTRUCT_ONLY
),
)
treeview_idx = GObject.Property(
type=int,
nick='Index of the Treeview within the store',
flags=(
GObject.ParamFlags.READWRITE |
GObject.ParamFlags.CONSTRUCT_ONLY
),
)
chunk_type_map = {
STATE_NEW: "insert",
STATE_ERROR: "error",
STATE_MODIFIED: "replace",
}
def __init__(self):
super().__init__()
self.model_signal_ids = []
def do_realize(self):
self.treeview.connect('row-collapsed', self.clear_cached_map)
self.treeview.connect('row-expanded', self.clear_cached_map)
self.treeview.connect('notify::model', self.connect_model)
self.connect_model()
return ChunkMap.do_realize(self)
def connect_model(self, *args):
for model, signal_id in self.model_signal_ids:
model.disconnect(signal_id)
model = self.treeview.get_model()
self.model_signal_ids = [
(model, model.connect('row-changed', self.clear_cached_map)),
(model, model.connect('row-deleted', self.clear_cached_map)),
(model, model.connect('row-inserted', self.clear_cached_map)),
(model, model.connect('rows-reordered', self.clear_cached_map)),
]
def clear_cached_map(self, *args):
self._cached_map = None
def get_map_base_colors(self):
return self._make_map_base_colors(self.treeview)
def chunk_coords_by_tag(self):
def recurse_tree_states(rowiter):
row_states.append(
model.get_state(rowiter.iter, self.treeview_idx))
if self.treeview.row_expanded(rowiter.path):
for row in rowiter.iterchildren():
recurse_tree_states(row)
row_states = []
model = self.treeview.get_model()
recurse_tree_states(next(iter(model)))
# Terminating mark to force the last chunk to be added
row_states.append(None)
tagged_diffs: Mapping[str, List[Tuple[float, float]]]
tagged_diffs = collections.defaultdict(list)
numlines = len(row_states) - 1
chunkstart, laststate = 0, row_states[0]
for index, state in enumerate(row_states):
if state != laststate:
action = self.chunk_type_map.get(laststate)
if action is not None:
chunk = (chunkstart / numlines, index / numlines)
tagged_diffs[action].append(chunk)
chunkstart, laststate = index, state
return tagged_diffs
def do_draw(self, context: cairo.Context) -> bool:
if not self.treeview:
return False
return ChunkMap.do_draw(self, context)
def _scroll_to_location(self, location: float):
if not self.treeview or self.adjustment.get_upper() <= 0:
return
location -= self.adjustment.get_page_size() / 2
self.treeview.scroll_to_point(-1, location)
|
import socket
import mock
import pytest
from paasta_tools import generate_services_file
from paasta_tools import generate_services_yaml
MOCK_NAMESPACES = [
("foo.main", {"proxy_port": 1024}),
("bar.canary", {"proxy_port": 1025}),
]
@pytest.fixture
def mock_namespaces():
with mock.patch(
"paasta_tools.generate_services_file.get_all_namespaces",
autospec=True,
return_value=MOCK_NAMESPACES,
):
yield
def test_main(tmpdir, mock_namespaces):
services_yaml = tmpdir.join("services.yaml")
with mock.patch.object(generate_services_file, "datetime") as m, mock.patch.object(
socket, "getfqdn", return_value="somehost.yelp"
):
m.now().isoformat.return_value = "$TIME"
generate_services_yaml.main((services_yaml.strpath,))
assert services_yaml.read() == (
"# This file is automatically generated by paasta_tools.\n"
"# It was automatically generated at $TIME on somehost.yelp.\n"
"---\n"
"bar.canary:\n"
" host: 169.254.255.254\n"
" port: 1025\n"
"foo.main:\n"
" host: 169.254.255.254\n"
" port: 1024\n"
)
|
import collections
import itertools
import os
import re
import shutil
import subprocess
import tempfile
from typing import ClassVar
from gi.repository import Gio, GLib
from meld.conf import _
# ignored, unversioned, normal, ignored changes,
# error, placeholder, vc added,
# vc modified, vc renamed, vc conflict, vc removed,
# locally missing, not present, end
(STATE_IGNORED, STATE_NONE, STATE_NORMAL, STATE_NOCHANGE,
STATE_ERROR, STATE_EMPTY, STATE_NEW,
STATE_MODIFIED, STATE_RENAMED, STATE_CONFLICT, STATE_REMOVED,
STATE_MISSING, STATE_NONEXIST, STATE_MAX) = list(range(14))
# VC conflict types
(CONFLICT_MERGED, CONFLICT_BASE, CONFLICT_LOCAL,
CONFLICT_REMOTE, CONFLICT_MAX) = list(range(5))
# These names are used by BZR, and are logically identical.
CONFLICT_OTHER = CONFLICT_REMOTE
CONFLICT_THIS = CONFLICT_LOCAL
conflicts = [_("Merged"), _("Base"), _("Local"), _("Remote")]
assert len(conflicts) == CONFLICT_MAX
# Lifted from the itertools recipes section
def partition(pred, iterable):
t1, t2 = itertools.tee(iterable)
    return (list(itertools.filterfalse(pred, t1)),
            list(filter(pred, t2)))
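# Example for the fixed partition() above:
#
# >>> partition(lambda n: n % 2, [1, 2, 3, 4])
# ([2, 4], [1, 3])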
class Entry:
# These are labels for possible states of version controlled files;
# not all states have a label to avoid visual clutter.
state_names = {
STATE_IGNORED: _("Ignored"),
STATE_NONE: _("Unversioned"),
STATE_NORMAL: "",
STATE_NOCHANGE: "",
STATE_ERROR: _("Error"),
STATE_EMPTY: "",
STATE_NEW: _("Newly added"),
STATE_MODIFIED: _("Modified"),
STATE_RENAMED: _("Renamed"),
STATE_CONFLICT: "<b>%s</b>" % _("Conflict"),
STATE_REMOVED: _("Removed"),
STATE_MISSING: _("Missing"),
STATE_NONEXIST: _("Not present"),
}
def __init__(self, path, name, state, isdir, options=None):
self.path = path
self.name = name
self.state = state
self.isdir = isdir
if isinstance(options, list):
options = ','.join(options)
self.options = options
def __str__(self):
return "<%s:%s %s>" % (self.__class__.__name__, self.path,
self.get_status() or "Normal")
def __repr__(self):
return "%s(%r, %r, %r)" % (self.__class__.__name__, self.name,
self.path, self.state)
def get_status(self):
return self.state_names[self.state]
def is_present(self):
"""Should this Entry actually be present on the file system"""
return self.state not in (STATE_REMOVED, STATE_MISSING)
@staticmethod
def is_modified(entry):
return entry.state >= STATE_NEW or (
entry.isdir and (entry.state > STATE_NONE))
@staticmethod
def is_normal(entry):
return entry.state == STATE_NORMAL
@staticmethod
def is_nonvc(entry):
return entry.state == STATE_NONE or (
entry.isdir and (entry.state > STATE_IGNORED))
@staticmethod
def is_ignored(entry):
return entry.state == STATE_IGNORED or entry.isdir
class Vc:
VC_DIR: ClassVar[str]
#: Whether to walk the current location's parents to find a
#: repository root. Only used in legacy version control systems
#: (e.g., old SVN, CVS, RCS).
VC_ROOT_WALK: ClassVar[bool] = True
def __init__(self, path):
# Save the requested comparison location. The location may be a
# sub-directory of the repository we are diffing and can be useful in
# limiting meld's output to the requested location.
#
# If the location requested is a file (e.g., a single-file command line
# comparison) then the location is set to the containing directory.
self.root, self.location = self.is_in_repo(path)
if not self.root:
raise ValueError
self._tree_cache = {}
self._tree_meta_cache = {}
self._tree_missing_cache = collections.defaultdict(set)
def run(self, *args, use_locale_encoding=True):
"""Return subprocess running VC with `args` at VC's location
For example, `git_vc.run('log', '-p')` will run `git log -p`
and return the subprocess object.
If use_locale_encoding is True, the return value is a unicode
text stream with universal newlines. If use_locale_encoding is
False, the return value is a binary stream.
Note that this runs at the *location*, not at the *root*.
"""
cmd = (self.CMD,) + args
return subprocess.Popen(
cmd, cwd=self.location, stdout=subprocess.PIPE,
universal_newlines=use_locale_encoding)
def get_files_to_commit(self, paths):
"""Get a list of files that will be committed from paths
paths is a list of paths under the version control system root,
which may include directories. The return value must be a list
of file paths that would actually be committed given the path
argument; specifically this should exclude unchanged files and
recursively list files in directories.
"""
raise NotImplementedError()
def get_commit_message_prefill(self):
"""Get a version-control defined pre-filled commit message
This will return a unicode message in situations where the
version control system has a (possibly partial) pre-filled
message, or None if no such message exists.
This method should use pre-filled commit messages wherever
provided by the version control system, most commonly these are
given in merging, revert or cherry-picking scenarios.
"""
return None
def get_commits_to_push_summary(self):
"""Return a one-line readable description of unpushed commits
This provides a one-line description of what would be pushed by the
version control's push action, e.g., "2 unpushed commits in 3
branches". Version control systems that always only push the current
branch should not show branch information.
"""
raise NotImplementedError()
def get_valid_actions(self, path_states):
"""Get the set of valid actions for paths with version states
path_states is a list of (path, state) tuples describing paths
in the version control system. This will return all valid
version control actions that could reasonably be taken on *all*
of the paths in path_states.
If an individual plugin needs special handling, or doesn't
implement all standard actions, this should be overridden.
"""
valid_actions = set()
states = path_states.values()
if bool(path_states):
valid_actions.add('compare')
valid_actions.add('update')
# TODO: We can't do this; this shells out for each selection change...
# if bool(self.get_commits_to_push()):
valid_actions.add('push')
non_removeable_states = (STATE_NONE, STATE_IGNORED, STATE_REMOVED)
non_revertable_states = (STATE_NONE, STATE_NORMAL, STATE_IGNORED)
# TODO: We can't disable this for NORMAL, because folders don't
# inherit any state from their children, but committing a folder with
# modified children is expected behaviour.
if all(s not in (STATE_NONE, STATE_IGNORED) for s in states):
valid_actions.add('commit')
if all(s not in (STATE_NORMAL, STATE_REMOVED) for s in states):
valid_actions.add('add')
if all(s == STATE_CONFLICT for s in states):
valid_actions.add('resolve')
if (all(s not in non_removeable_states for s in states)
and self.root not in path_states.keys()):
valid_actions.add('remove')
if all(s not in non_revertable_states for s in states):
valid_actions.add('revert')
return valid_actions
def get_path_for_repo_file(self, path, commit=None):
"""Returns a file path for the repository path at commit
If *commit* is given, the path returned will point to a copy of
the file at *path* at the given commit, as interpreted by the
VCS. If *commit* is **None**, the current revision is used.
Even if the VCS maintains an on-disk copy of the given path, a
temp file with file-at-commit content must be created and its
path returned, to avoid destructive editing. The VCS plugin
must **not** delete temp files it creates.
"""
raise NotImplementedError()
def get_path_for_conflict(self, path, conflict):
"""Returns a file path for the conflicted repository path
*conflict* is the side of the conflict to be retrieved, and
must be one of the CONFLICT_* constants.
"""
raise NotImplementedError()
def refresh_vc_state(self, path=None):
"""Update cached version control state
If a path is provided, for example when a file has been modified
and saved in the file comparison view and needs its state
refreshed, then only that path will be updated.
If no path is provided then the version control tree rooted at
its `location` will be recursively refreshed.
"""
if path is None:
self._tree_cache = {}
self._tree_missing_cache = collections.defaultdict(set)
path = './'
self._update_tree_state_cache(path)
def get_entries(self, base):
parent = Gio.File.new_for_path(base)
enumerator = parent.enumerate_children(
'standard::name,standard::display-name,standard::type',
Gio.FileQueryInfoFlags.NOFOLLOW_SYMLINKS, None)
for file_info in enumerator:
if file_info.get_name() == self.VC_DIR:
continue
gfile = enumerator.get_child(file_info)
path = gfile.get_path()
name = file_info.get_display_name()
state = self._tree_cache.get(path, STATE_NORMAL)
meta = self._tree_meta_cache.get(path, "")
isdir = file_info.get_file_type() == Gio.FileType.DIRECTORY
yield Entry(path, name, state, isdir, options=meta)
# Removed entries are not in the filesystem, so must be added here
for name in self._tree_missing_cache[base]:
path = os.path.join(base, name)
state = self._tree_cache.get(path, STATE_NORMAL)
# TODO: Ideally we'd know whether this was a folder
# or a file. Since it's gone however, only the VC
# knows, and may or may not tell us.
meta = self._tree_meta_cache.get(path, "")
yield Entry(path, name, state, isdir=False, options=meta)
def _add_missing_cache_entry(self, path, state):
if state in (STATE_REMOVED, STATE_MISSING):
folder, name = os.path.split(path)
self._tree_missing_cache[folder].add(name)
def get_entry(self, path):
"""Return the entry associated with the given path in this VC
If the given path does not correspond to an entry in the VC,
this method returns an Entry with the appropriate REMOVED or
MISSING state.
"""
gfile = Gio.File.new_for_path(path)
try:
file_info = gfile.query_info(
'standard::*', Gio.FileQueryInfoFlags.NOFOLLOW_SYMLINKS, None)
name = file_info.get_display_name()
isdir = file_info.get_file_type() == Gio.FileType.DIRECTORY
except GLib.Error as e:
if e.domain != 'g-io-error-quark':
raise
# Handling for non-existent files (or other IO errors)
name = path
isdir = False
path = gfile.get_path()
state = self._tree_cache.get(path, STATE_NORMAL)
meta = self._tree_meta_cache.get(path, "")
return Entry(path, name, state, isdir, options=meta)
@classmethod
def is_installed(cls):
try:
call([cls.CMD])
return True
except Exception:
return False
@classmethod
def is_in_repo(cls, path):
root = None
location = path if os.path.isdir(path) else os.path.dirname(path)
if cls.VC_ROOT_WALK:
root = cls.find_repo_root(location)
elif cls.check_repo_root(location):
root = location
return root, location
@classmethod
def check_repo_root(cls, location):
return os.path.isdir(os.path.join(location, cls.VC_DIR))
@classmethod
def find_repo_root(cls, location):
while location:
if cls.check_repo_root(location):
return location
location, old = os.path.dirname(location), location
if location == old:
break
@classmethod
def valid_repo(cls, path):
"""Determine if a directory is a valid repository for this class"""
raise NotImplementedError
class InvalidVCPath(ValueError):
"""Raised when a VC module is passed an invalid (or not present) path."""
def __init__(self, vc, path, err):
self.vc = vc
self.path = path
self.error = err
def __str__(self):
return "%s: Path %s is invalid or not present\nError: %s\n" % \
(self.vc.NAME, self.path, self.error)
class InvalidVCRevision(ValueError):
"""Raised when a VC module is passed a revision spec it can't handle."""
def __init__(self, vc, rev, err):
self.vc = vc
self.revision = rev
self.error = err
def __str__(self):
return "%s: Doesn't understand or have revision %s\nError: %s\n" % \
(self.vc.NAME, self.revision, self.error)
def popen(cmd, cwd=None, use_locale_encoding=True):
"""Return the stdout output of a given command as a stream.
If use_locale_encoding is True, the output is parsed to unicode
text stream with universal newlines.
If use_locale_encoding is False output is treated as binary stream.
"""
process = subprocess.Popen(
cmd, cwd=cwd, stdout=subprocess.PIPE,
universal_newlines=use_locale_encoding)
return process.stdout
def call_temp_output(cmd, cwd, file_id=''):
"""Call `cmd` in `cwd` and write the output to a temporary file
This returns the name of the temporary file used. It is the
caller's responsibility to delete this file.
If `file_id` is provided, it is used as part of the
temporary file's name, for ease of identification.
"""
process = subprocess.Popen(
cmd, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
vc_file = process.stdout
# Error handling here involves doing nothing; in most cases, the only
# sane response is to return an empty temp file.
prefix = 'meld-tmp' + ('-' + file_id if file_id else '')
with tempfile.NamedTemporaryFile(prefix=prefix, delete=False) as f:
shutil.copyfileobj(vc_file, f)
return f.name
# Return the return value of a given command
def call(cmd, cwd=None):
devnull = open(os.devnull, "wb")
return subprocess.call(cmd, cwd=cwd, stdout=devnull, stderr=devnull)
base_re = re.compile(
br"^<{7}.*?$\r?\n(?P<local>.*?)"
br"^\|{7}.*?$\r?\n(?P<base>.*?)"
br"^={7}.*?$\r?\n(?P<remote>.*?)"
br"^>{7}.*?$\r?\n", flags=re.DOTALL | re.MULTILINE)
def base_from_diff3(merged):
return base_re.sub(br"==== BASE ====\n\g<base>==== BASE ====\n", merged)
|
from typing import Dict, Sequence
from pyheos import Dispatcher, Heos, HeosPlayer, HeosSource, InputSource, const
import pytest
from homeassistant.components import ssdp
from homeassistant.components.heos import DOMAIN
from homeassistant.const import CONF_HOST
from tests.async_mock import Mock, patch
from tests.common import MockConfigEntry
@pytest.fixture(name="config_entry")
def config_entry_fixture():
"""Create a mock HEOS config entry."""
return MockConfigEntry(
domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, title="Controller (127.0.0.1)"
)
@pytest.fixture(name="controller")
def controller_fixture(
players, favorites, input_sources, playlists, change_data, dispatcher
):
"""Create a mock Heos controller fixture."""
mock_heos = Mock(Heos)
for player in players.values():
player.heos = mock_heos
mock_heos.dispatcher = dispatcher
mock_heos.get_players.return_value = players
mock_heos.players = players
mock_heos.get_favorites.return_value = favorites
mock_heos.get_input_sources.return_value = input_sources
mock_heos.get_playlists.return_value = playlists
mock_heos.load_players.return_value = change_data
mock_heos.is_signed_in = True
mock_heos.signed_in_username = "[email protected]"
mock_heos.connection_state = const.STATE_CONNECTED
mock = Mock(return_value=mock_heos)
with patch("homeassistant.components.heos.Heos", new=mock), patch(
"homeassistant.components.heos.config_flow.Heos", new=mock
):
yield mock_heos
@pytest.fixture(name="config")
def config_fixture():
"""Create hass config fixture."""
return {DOMAIN: {CONF_HOST: "127.0.0.1"}}
@pytest.fixture(name="players")
def player_fixture(quick_selects):
"""Create a mock HeosPlayer."""
player = Mock(HeosPlayer)
player.player_id = 1
player.name = "Test Player"
player.model = "Test Model"
player.version = "1.0.0"
player.is_muted = False
player.available = True
player.state = const.PLAY_STATE_STOP
player.ip_address = "127.0.0.1"
player.network = "wired"
player.shuffle = False
player.repeat = const.REPEAT_OFF
player.volume = 25
player.now_playing_media.supported_controls = const.CONTROLS_ALL
player.now_playing_media.album_id = 1
player.now_playing_media.queue_id = 1
player.now_playing_media.source_id = 1
player.now_playing_media.station = "Station Name"
player.now_playing_media.type = "Station"
player.now_playing_media.album = "Album"
player.now_playing_media.artist = "Artist"
player.now_playing_media.media_id = "1"
player.now_playing_media.duration = None
player.now_playing_media.current_position = None
player.now_playing_media.image_url = "http://"
player.now_playing_media.song = "Song"
player.get_quick_selects.return_value = quick_selects
return {player.player_id: player}
@pytest.fixture(name="favorites")
def favorites_fixture() -> Dict[int, HeosSource]:
"""Create favorites fixture."""
station = Mock(HeosSource)
station.type = const.TYPE_STATION
station.name = "Today's Hits Radio"
station.media_id = "123456789"
radio = Mock(HeosSource)
radio.type = const.TYPE_STATION
radio.name = "Classical MPR (Classical Music)"
radio.media_id = "s1234"
return {1: station, 2: radio}
@pytest.fixture(name="input_sources")
def input_sources_fixture() -> Sequence[InputSource]:
"""Create a set of input sources for testing."""
source = Mock(InputSource)
source.player_id = 1
source.input_name = const.INPUT_AUX_IN_1
source.name = "HEOS Drive - Line In 1"
return [source]
@pytest.fixture(name="dispatcher")
def dispatcher_fixture() -> Dispatcher:
"""Create a dispatcher for testing."""
return Dispatcher()
@pytest.fixture(name="discovery_data")
def discovery_data_fixture() -> dict:
"""Return mock discovery data for testing."""
return {
ssdp.ATTR_SSDP_LOCATION: "http://127.0.0.1:60006/upnp/desc/aios_device/aios_device.xml",
ssdp.ATTR_UPNP_DEVICE_TYPE: "urn:schemas-denon-com:device:AiosDevice:1",
ssdp.ATTR_UPNP_FRIENDLY_NAME: "Office",
ssdp.ATTR_UPNP_MANUFACTURER: "Denon",
ssdp.ATTR_UPNP_MODEL_NAME: "HEOS Drive",
ssdp.ATTR_UPNP_MODEL_NUMBER: "DWSA-10 4.0",
ssdp.ATTR_UPNP_SERIAL: None,
ssdp.ATTR_UPNP_UDN: "uuid:e61de70c-2250-1c22-0080-0005cdf512be",
}
@pytest.fixture(name="quick_selects")
def quick_selects_fixture() -> Dict[int, str]:
"""Create a dict of quick selects for testing."""
return {
1: "Quick Select 1",
2: "Quick Select 2",
3: "Quick Select 3",
4: "Quick Select 4",
5: "Quick Select 5",
6: "Quick Select 6",
}
@pytest.fixture(name="playlists")
def playlists_fixture() -> Sequence[HeosSource]:
"""Create favorites fixture."""
playlist = Mock(HeosSource)
playlist.type = const.TYPE_PLAYLIST
playlist.name = "Awesome Music"
return [playlist]
@pytest.fixture(name="change_data")
def change_data_fixture() -> Dict:
"""Create player change data for testing."""
return {const.DATA_MAPPED_IDS: {}, const.DATA_NEW: []}
@pytest.fixture(name="change_data_mapped_ids")
def change_data_mapped_ids_fixture() -> Dict:
"""Create player change data for testing."""
return {const.DATA_MAPPED_IDS: {101: 1}, const.DATA_NEW: []}
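# A minimal sketch of how these fixtures compose in a test (hypothetical;
# assumes Home Assistant's standard `hass` test fixture):
#
#     async def test_controller_connected(hass, config_entry, controller):
#         config_entry.add_to_hass(hass)
#         assert await hass.config_entries.async_setup(config_entry.entry_id)
#         assert controller.connection_state == const.STATE_CONNECTED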
|
import logging
import time
from collections import deque
class ShIO(object):
"""
The ShIO object is the read/write interface to users and running scripts.
It acts as a staging area so that the UI Delegate calls can return without
waiting for user read/write (no blocking on main thread).
"""
def __init__(self, stash, debug=False):
self.stash = stash
self.debug = debug
self.logger = logging.getLogger('StaSh.IO')
self.tell_pos = 0
# The input buffer, push from the Left end, read from the right end
self._buffer = deque()
self.chunk_size = 4096
        # When the buffer is empty, hold back for a short interval before
        # reading again. This lowers the CPU usage of the reading thread so
        # it does not affect the UI thread by a noticeable amount.
self.holdback = 0.2
self.encoding = 'utf8'
def push(self, s):
self._buffer.extendleft(s)
# Following methods to provide file like object interface
@property
def closed(self):
return False
def isatty(self):
return True
def close(self):
"""
This IO object cannot be closed.
"""
pass
def seek(self, offset):
"""
        Seek on stdout is not a real file seek; it merely records the
        given offset as the current position.
:param offset:
:return:
"""
self.tell_pos = offset
def tell(self):
return self.tell_pos
def truncate(self, size=None):
"""do nothing"""
def read(self, size=-1):
size = size if size != 0 else 1
if size == -1:
            return ''.join(self._buffer.pop() for _ in range(len(self._buffer)))
else:
ret = []
while len(ret) < size:
try:
ret.append(self._buffer.pop())
except IndexError:
# Wait briefly when the buffer is empty to avoid taxing the CPU
time.sleep(self.holdback)
return ''.join(ret)
def readline(self, size=-1):
ret = []
while True:
try:
ret.append(self._buffer.pop())
if ret[-1] in ['\n', '\0']:
break
except IndexError:
time.sleep(self.holdback)
if ret[-1] == '\0':
del ret[-1]
line = ''.join(ret)
# localized history for running scripts
# TODO: Adding to history for read as well?
self.stash.runtime.history.add(line)
return line
def readlines(self, size=-1):
ret = []
while True:
try:
ret.append(self._buffer.pop())
if ret[-1] == '\0':
break
except IndexError:
time.sleep(self.holdback)
ret = ''.join(ret[:-1]) # do not include the EOF
if size != -1:
ret = ret[:size]
for line in ret.splitlines():
self.stash.runtime.history.add(line)
return ret.splitlines(True)
def read1(self):
"""
Put MiniBuffer in cbreak mode to process character by character.
Normally the MiniBuffer only sends out its reading after a LF.
With this method, MiniBuffer sends out its reading after every
single char.
        The caller is responsible for breaking out of this reading explicitly.
"""
# TODO: Currently not supported by ShMiniBuffer
try:
self.stash.mini_buffer.cbreak = True
while True:
try:
yield self._buffer.pop()
except IndexError:
time.sleep(self.holdback)
finally:
self.stash.mini_buffer.cbreak = False
def readline_no_block(self):
"""
        Read lines from the buffer but do NOT wait for lines to complete.
        If no complete line can be read, push any partial input back onto
        the buffer and stop the generator.
This is useful for runtime to process multiple commands from user. The
generator form also helps the program to keep reading and processing
user command when a program is running at the same time.
:return: str:
"""
ret = []
while True:
try:
ret.append(self._buffer.pop())
if ret[-1] == '\n':
yield ''.join(ret)
ret = []
except IndexError:
                # Push the partial line back in its original order; the
                # buffer is read from the right, so re-append reversed
                self._buffer.extend(reversed(ret))
break
def write(self, s, no_wait=False):
if len(s) == 0: # skip empty string
return
idx = 0
while True:
self.stash.stream.feed(s[idx:idx + self.chunk_size], no_wait=no_wait) # main screen only
idx += self.chunk_size
if idx >= len(s):
break
def writelines(self, s_list):
self.write(''.join(s_list))
def flush(self):
pass
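# A minimal sketch of the producer/consumer flow, using stub stash and stream
# objects (hypothetical; the real objects are provided by StaSh itself):
if __name__ == '__main__':
    from types import SimpleNamespace

    _stream = SimpleNamespace(feed=lambda s, no_wait=False: None)
    _history = SimpleNamespace(add=lambda line: None)
    _stash_stub = SimpleNamespace(stream=_stream,
                                  runtime=SimpleNamespace(history=_history))
    io = ShIO(_stash_stub)
    io.push('hello\n')          # the UI side pushes keystrokes
    print(repr(io.readline()))  # a running script reads a complete line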
|
import logging
import attr
import eternalegypt
from homeassistant.components.notify import ATTR_TARGET, DOMAIN, BaseNotificationService
from . import CONF_RECIPIENT, DATA_KEY
_LOGGER = logging.getLogger(__name__)
async def async_get_service(hass, config, discovery_info=None):
"""Get the notification service."""
if discovery_info is None:
return
return NetgearNotifyService(hass, discovery_info)
@attr.s
class NetgearNotifyService(BaseNotificationService):
"""Implementation of a notification service."""
hass = attr.ib()
config = attr.ib()
async def async_send_message(self, message="", **kwargs):
"""Send a message to a user."""
modem_data = self.hass.data[DATA_KEY].get_modem_data(self.config)
if not modem_data:
_LOGGER.error("Modem not ready")
return
targets = kwargs.get(ATTR_TARGET, self.config[DOMAIN][CONF_RECIPIENT])
if not targets:
_LOGGER.warning("No recipients")
return
if not message:
return
for target in targets:
try:
await modem_data.modem.sms(target, message)
except eternalegypt.Error:
_LOGGER.error("Unable to send to %s", target)
|
import os
import sys
import warnings
import pathlib
import pytest
import hypothesis
from PyQt5.QtCore import PYQT_VERSION
pytest.register_assert_rewrite('helpers')
from helpers import logfail
from helpers.logfail import fail_on_logging
from helpers.messagemock import message_mock
from helpers.fixtures import * # noqa: F403
from helpers import utils as testutils
from qutebrowser.utils import qtutils, standarddir, usertypes, utils, version
from qutebrowser.misc import objects, earlyinit
from qutebrowser.qt import sip
import qutebrowser.app # To register commands
_qute_scheme_handler = None
# Set hypothesis settings
hypothesis.settings.register_profile(
'default', hypothesis.settings(deadline=600))
hypothesis.settings.register_profile(
'ci', hypothesis.settings(
deadline=None,
suppress_health_check=[hypothesis.HealthCheck.too_slow]))
hypothesis.settings.load_profile('ci' if testutils.ON_CI else 'default')
def _apply_platform_markers(config, item):
"""Apply a skip marker to a given item."""
markers = [
('posix',
pytest.mark.skipif,
not utils.is_posix,
"Requires a POSIX os"),
('windows',
pytest.mark.skipif,
not utils.is_windows,
"Requires Windows"),
('linux',
pytest.mark.skipif,
not utils.is_linux,
"Requires Linux"),
('mac',
pytest.mark.skipif,
not utils.is_mac,
"Requires macOS"),
('not_mac',
pytest.mark.skipif,
utils.is_mac,
"Skipped on macOS"),
('not_frozen',
pytest.mark.skipif,
getattr(sys, 'frozen', False),
"Can't be run when frozen"),
('frozen',
pytest.mark.skipif,
not getattr(sys, 'frozen', False),
"Can only run when frozen"),
('ci',
pytest.mark.skipif,
not testutils.ON_CI,
"Only runs on CI."),
('no_ci',
pytest.mark.skipif,
testutils.ON_CI,
"Skipped on CI."),
('unicode_locale',
pytest.mark.skipif,
sys.getfilesystemencoding() == 'ascii',
"Skipped because of ASCII locale"),
('qtwebkit6021_xfail',
pytest.mark.xfail,
version.qWebKitVersion and # type: ignore[unreachable]
version.qWebKitVersion() == '602.1',
"Broken on WebKit 602.1")
]
for searched_marker, new_marker_kind, condition, default_reason in markers:
marker = item.get_closest_marker(searched_marker)
if not marker or not condition:
continue
if 'reason' in marker.kwargs:
reason = '{}: {}'.format(default_reason, marker.kwargs['reason'])
del marker.kwargs['reason']
else:
reason = default_reason + '.'
new_marker = new_marker_kind(condition, *marker.args,
reason=reason, **marker.kwargs)
item.add_marker(new_marker)
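# For example, a test module might mark a test like this (hypothetical test):
#
#     @pytest.mark.linux(reason="reads /proc")
#     def test_proc_parsing():
#         ...
#
# which _apply_platform_markers translates into a skipif marker whose
# combined reason reads "Requires Linux: reads /proc".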
def pytest_collection_modifyitems(config, items):
"""Handle custom markers.
pytest hook called after collection has been performed.
Adds a marker named "gui" which can be used to filter gui tests from the
command line.
For example:
pytest -m "not gui" # run all tests except gui tests
pytest -m "gui" # run only gui tests
It also handles the platform specific markers by translating them to skipif
markers.
Args:
items: list of _pytest.main.Node items, where each item represents
a python test that will be executed.
Reference:
http://pytest.org/latest/plugins.html
"""
remaining_items = []
deselected_items = []
for item in items:
deselected = False
if 'qapp' in getattr(item, 'fixturenames', ()):
item.add_marker('gui')
if hasattr(item, 'module'):
test_basedir = pathlib.Path(__file__).parent
module_path = pathlib.Path(item.module.__file__)
module_root_dir = module_path.relative_to(test_basedir).parts[0]
assert module_root_dir in ['end2end', 'unit', 'helpers',
'test_conftest.py']
if module_root_dir == 'end2end':
item.add_marker(pytest.mark.end2end)
_apply_platform_markers(config, item)
if list(item.iter_markers('xfail_norun')):
item.add_marker(pytest.mark.xfail(run=False))
if deselected:
deselected_items.append(item)
else:
remaining_items.append(item)
config.hook.pytest_deselected(items=deselected_items)
items[:] = remaining_items
def pytest_ignore_collect(path):
"""Ignore BDD tests if we're unable to run them."""
skip_bdd = hasattr(sys, 'frozen')
rel_path = path.relto(os.path.dirname(__file__))
return rel_path == os.path.join('end2end', 'features') and skip_bdd
@pytest.fixture(scope='session')
def qapp_args():
"""Make QtWebEngine unit tests run on older Qt versions + newer kernels."""
seccomp_args = testutils.seccomp_args(qt_flag=False)
if seccomp_args:
return [sys.argv[0]] + seccomp_args
return []
@pytest.fixture(scope='session')
def qapp(qapp):
"""Change the name of the QApplication instance."""
qapp.setApplicationName('qute_test')
return qapp
def pytest_addoption(parser):
parser.addoption('--qute-delay', action='store', default=0, type=int,
help="Delay between qutebrowser commands.")
parser.addoption('--qute-profile-subprocs', action='store_true',
default=False, help="Run cProfile for subprocesses.")
parser.addoption('--qute-bdd-webengine', action='store_true',
help='Use QtWebEngine for BDD tests')
def pytest_configure(config):
webengine_arg = config.getoption('--qute-bdd-webengine')
webengine_env = os.environ.get('QUTE_BDD_WEBENGINE', 'false')
config.webengine = webengine_arg or webengine_env == 'true'
# Fail early if QtWebEngine is not available
if config.webengine:
import PyQt5.QtWebEngineWidgets
earlyinit.configure_pyqt()
@pytest.fixture(scope='session', autouse=True)
def check_display(request):
if utils.is_linux and not os.environ.get('DISPLAY', ''):
raise Exception("No display and no Xvfb available!")
@pytest.fixture(autouse=True)
def set_backend(monkeypatch, request):
"""Make sure the backend global is set."""
if not request.config.webengine and version.qWebKitVersion:
backend = usertypes.Backend.QtWebKit
else:
backend = usertypes.Backend.QtWebEngine
monkeypatch.setattr(objects, 'backend', backend)
@pytest.fixture(autouse=True, scope='session')
def apply_libgl_workaround():
"""Make sure we load libGL early so QtWebEngine tests run properly."""
utils.libgl_workaround()
@pytest.fixture(autouse=True)
def apply_fake_os(monkeypatch, request):
fake_os = request.node.get_closest_marker('fake_os')
if not fake_os:
return
name = fake_os.args[0]
mac = False
windows = False
linux = False
posix = False
if name == 'unknown':
pass
elif name == 'mac':
mac = True
posix = True
elif name == 'windows':
windows = True
elif name == 'linux':
linux = True
posix = True
elif name == 'posix':
posix = True
else:
raise ValueError("Invalid fake_os {}".format(name))
monkeypatch.setattr(utils, 'is_mac', mac)
monkeypatch.setattr(utils, 'is_linux', linux)
monkeypatch.setattr(utils, 'is_windows', windows)
monkeypatch.setattr(utils, 'is_posix', posix)
@pytest.fixture(scope='session', autouse=True)
def check_yaml_c_exts():
"""Make sure PyYAML C extensions are available on CI.
Not available yet with a nightly Python, see:
https://github.com/yaml/pyyaml/issues/416
"""
if 'CI' in os.environ and sys.version_info[:2] != (3, 10):
from yaml import CLoader
@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item, call):
"""Make test information available in fixtures.
See http://pytest.org/latest/example/simple.html#making-test-result-information-available-in-fixtures
"""
outcome = yield
rep = outcome.get_result()
setattr(item, "rep_" + rep.when, rep)
@pytest.hookimpl(hookwrapper=True)
def pytest_terminal_summary(terminalreporter):
"""Group benchmark results on CI."""
if testutils.ON_CI:
terminalreporter.write_line(
testutils.gha_group_begin('Benchmark results'))
yield
terminalreporter.write_line(testutils.gha_group_end())
else:
yield
|
from . import const
# Manufacturers
FIBARO = 0x010F
GE = 0x0063
PHILIO = 0x013C
SOMFY = 0x0047
WENZHOU = 0x0118
LEVITON = 0x001D
# Product IDs
GE_FAN_CONTROLLER_12730 = 0x3034
GE_FAN_CONTROLLER_14287 = 0x3131
JASCO_FAN_CONTROLLER_14314 = 0x3138
PHILIO_SLIM_SENSOR = 0x0002
PHILIO_3_IN_1_SENSOR_GEN_4 = 0x000D
PHILIO_PAN07 = 0x0005
VIZIA_FAN_CONTROLLER_VRF01 = 0x0334
LEVITON_DECORA_FAN_CONTROLLER_ZW4SF = 0x0002
# Product Types
FGFS101_FLOOD_SENSOR_TYPE = 0x0B00
FGRM222_SHUTTER2 = 0x0301
FGR222_SHUTTER2 = 0x0302
GE_DIMMER = 0x4944
PHILIO_SWITCH = 0x0001
PHILIO_SENSOR = 0x0002
SOMFY_ZRTSI = 0x5A52
VIZIA_DIMMER = 0x1001
LEVITON_DECORA_FAN_CONTROLLER = 0x0038
# Mapping devices
PHILIO_SLIM_SENSOR_MOTION_MTII = (PHILIO, PHILIO_SENSOR, PHILIO_SLIM_SENSOR, 0)
PHILIO_3_IN_1_SENSOR_GEN_4_MOTION_MTII = (
PHILIO,
PHILIO_SENSOR,
PHILIO_3_IN_1_SENSOR_GEN_4,
0,
)
PHILIO_PAN07_MTI_INSTANCE = (PHILIO, PHILIO_SWITCH, PHILIO_PAN07, 1)
WENZHOU_SLIM_SENSOR_MOTION_MTII = (WENZHOU, PHILIO_SENSOR, PHILIO_SLIM_SENSOR, 0)
# Workarounds
WORKAROUND_NO_OFF_EVENT = "trigger_no_off_event"
WORKAROUND_NO_POSITION = "workaround_no_position"
WORKAROUND_REFRESH_NODE_ON_UPDATE = "refresh_node_on_update"
WORKAROUND_IGNORE = "workaround_ignore"
# List of workarounds by (manufacturer_id, product_type, product_id, index)
DEVICE_MAPPINGS_MTII = {
PHILIO_SLIM_SENSOR_MOTION_MTII: WORKAROUND_NO_OFF_EVENT,
PHILIO_3_IN_1_SENSOR_GEN_4_MOTION_MTII: WORKAROUND_NO_OFF_EVENT,
WENZHOU_SLIM_SENSOR_MOTION_MTII: WORKAROUND_NO_OFF_EVENT,
}
# List of workarounds by (manufacturer_id, product_type, product_id, instance)
DEVICE_MAPPINGS_MTI_INSTANCE = {
PHILIO_PAN07_MTI_INSTANCE: WORKAROUND_REFRESH_NODE_ON_UPDATE
}
SOMFY_ZRTSI_CONTROLLER_MT = (SOMFY, SOMFY_ZRTSI)
# List of workarounds by (manufacturer_id, product_type)
DEVICE_MAPPINGS_MT = {SOMFY_ZRTSI_CONTROLLER_MT: WORKAROUND_NO_POSITION}
# Component mapping devices
FIBARO_FGFS101_SENSOR_ALARM = (
FIBARO,
FGFS101_FLOOD_SENSOR_TYPE,
const.COMMAND_CLASS_SENSOR_ALARM,
)
FIBARO_FGRM222_BINARY = (FIBARO, FGRM222_SHUTTER2, const.COMMAND_CLASS_SWITCH_BINARY)
FIBARO_FGR222_BINARY = (FIBARO, FGR222_SHUTTER2, const.COMMAND_CLASS_SWITCH_BINARY)
GE_FAN_CONTROLLER_12730_MULTILEVEL = (
GE,
GE_DIMMER,
GE_FAN_CONTROLLER_12730,
const.COMMAND_CLASS_SWITCH_MULTILEVEL,
)
GE_FAN_CONTROLLER_14287_MULTILEVEL = (
GE,
GE_DIMMER,
GE_FAN_CONTROLLER_14287,
const.COMMAND_CLASS_SWITCH_MULTILEVEL,
)
JASCO_FAN_CONTROLLER_14314_MULTILEVEL = (
GE,
GE_DIMMER,
JASCO_FAN_CONTROLLER_14314,
const.COMMAND_CLASS_SWITCH_MULTILEVEL,
)
VIZIA_FAN_CONTROLLER_VRF01_MULTILEVEL = (
LEVITON,
VIZIA_DIMMER,
VIZIA_FAN_CONTROLLER_VRF01,
const.COMMAND_CLASS_SWITCH_MULTILEVEL,
)
LEVITON_FAN_CONTROLLER_ZW4SF_MULTILEVEL = (
LEVITON,
LEVITON_DECORA_FAN_CONTROLLER,
LEVITON_DECORA_FAN_CONTROLLER_ZW4SF,
const.COMMAND_CLASS_SWITCH_MULTILEVEL,
)
# List of component workarounds by
# (manufacturer_id, product_type, command_class)
DEVICE_COMPONENT_MAPPING = {
FIBARO_FGFS101_SENSOR_ALARM: "binary_sensor",
FIBARO_FGRM222_BINARY: WORKAROUND_IGNORE,
FIBARO_FGR222_BINARY: WORKAROUND_IGNORE,
}
# List of component workarounds by
# (manufacturer_id, product_type, product_id, command_class)
DEVICE_COMPONENT_MAPPING_MTI = {
GE_FAN_CONTROLLER_12730_MULTILEVEL: "fan",
GE_FAN_CONTROLLER_14287_MULTILEVEL: "fan",
JASCO_FAN_CONTROLLER_14314_MULTILEVEL: "fan",
VIZIA_FAN_CONTROLLER_VRF01_MULTILEVEL: "fan",
LEVITON_FAN_CONTROLLER_ZW4SF_MULTILEVEL: "fan",
}
def get_device_component_mapping(value):
"""Get mapping of value to another component."""
if value.node.manufacturer_id.strip() and value.node.product_type.strip():
manufacturer_id = int(value.node.manufacturer_id, 16)
product_type = int(value.node.product_type, 16)
product_id = int(value.node.product_id, 16)
result = DEVICE_COMPONENT_MAPPING.get(
(manufacturer_id, product_type, value.command_class)
)
if result:
return result
result = DEVICE_COMPONENT_MAPPING_MTI.get(
(manufacturer_id, product_type, product_id, value.command_class)
)
if result:
return result
return None
def get_device_mapping(value):
"""Get mapping of value to a workaround."""
if (
value.node.manufacturer_id.strip()
and value.node.product_id.strip()
and value.node.product_type.strip()
):
manufacturer_id = int(value.node.manufacturer_id, 16)
product_type = int(value.node.product_type, 16)
product_id = int(value.node.product_id, 16)
result = DEVICE_MAPPINGS_MTII.get(
(manufacturer_id, product_type, product_id, value.index)
)
if result:
return result
result = DEVICE_MAPPINGS_MTI_INSTANCE.get(
(manufacturer_id, product_type, product_id, value.instance)
)
if result:
return result
return DEVICE_MAPPINGS_MT.get((manufacturer_id, product_type))
return None
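# A minimal lookup sketch (hypothetical value object; real values come from
# the Z-Wave network layer):
#
#     from types import SimpleNamespace
#     node = SimpleNamespace(manufacturer_id="0x013c", product_type="0x0002",
#                            product_id="0x0002")
#     value = SimpleNamespace(node=node, index=0, instance=1)
#     get_device_mapping(value)  # -> "trigger_no_off_event"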
|
from __future__ import print_function
import unittest
class TestCase(unittest.TestCase):
def setUp(self):
unittest.TestCase.setUp(self)
def tearDown(self):
unittest.TestCase.tearDown(self)
def testIt(self):
self.a = 10
self.xxx()
def xxx(self):
if False:
pass
print('a')
if False:
pass
pass
if False:
pass
print('rara')
if __name__ == '__main__':
print('test2')
unittest.main()
|
import numpy as np
import six
def assert_is_instance_segmentation_link(link, n_fg_class):
"""Checks if a link satisfies instance segmentation link APIs.
This function checks if a given link satisfies instance segmentation
link APIs or not.
    If the link does not satisfy the APIs, this function raises an
:class:`AssertionError`.
Args:
link: A link to be checked.
n_fg_class (int): The number of foreground classes.
"""
imgs = [
np.random.randint(0, 256, size=(3, 480, 640)).astype(np.float32),
np.random.randint(0, 256, size=(3, 480, 320)).astype(np.float32)]
result = link.predict(imgs)
assert len(result) == 3, \
'Link must return three elements: masks, labels and scores.'
masks, labels, scores = result
assert len(masks) == len(imgs), \
'The length of masks must be same as that of imgs.'
assert len(labels) == len(imgs), \
'The length of labels must be same as that of imgs.'
assert len(scores) == len(imgs), \
'The length of scores must be same as that of imgs.'
for img, mask, label, score in six.moves.zip(imgs, masks, labels, scores):
assert isinstance(mask, np.ndarray), \
'mask must be a numpy.ndarray.'
        assert mask.dtype == np.bool_, \
            'The type of mask must be bool.'
assert mask.shape[1:] == img.shape[1:], \
'The shape of mask must be (R, H, W).'
assert isinstance(label, np.ndarray), \
'label must be a numpy.ndarray.'
assert label.dtype == np.int32, \
'The type of label must be numpy.int32.'
assert label.shape[1:] == (), \
'The shape of label must be (*,).'
assert len(label) == len(mask), \
'The length of label must be same as that of mask.'
if len(label) > 0:
assert label.min() >= 0 and label.max() < n_fg_class, \
'The value of label must be in [0, n_fg_class - 1].'
assert isinstance(score, np.ndarray), \
'score must be a numpy.ndarray.'
assert score.dtype == np.float32, \
'The type of score must be numpy.float32.'
assert score.shape[1:] == (), \
'The shape of score must be (*,).'
assert len(score) == len(mask), \
'The length of score must be same as that of mask.'
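# A minimal usage sketch (hypothetical link; any object exposing a compatible
# `predict` method will do):
#
#     class DummyLink(object):
#         def predict(self, imgs):
#             masks = [np.zeros((1,) + img.shape[1:], dtype=np.bool_)
#                      for img in imgs]
#             labels = [np.zeros((1,), dtype=np.int32) for _ in imgs]
#             scores = [np.zeros((1,), dtype=np.float32) for _ in imgs]
#             return masks, labels, scores
#
#     assert_is_instance_segmentation_link(DummyLink(), n_fg_class=20)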
|
import logging
from typing import List, Optional
from teslajsonpy.exceptions import UnknownPresetMode
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
HVAC_MODE_HEAT_COOL,
HVAC_MODE_OFF,
SUPPORT_PRESET_MODE,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS, TEMP_FAHRENHEIT
from . import DOMAIN as TESLA_DOMAIN, TeslaDevice
_LOGGER = logging.getLogger(__name__)
SUPPORT_HVAC = [HVAC_MODE_HEAT_COOL, HVAC_MODE_OFF]
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Tesla binary_sensors by config_entry."""
async_add_entities(
[
TeslaThermostat(
device,
hass.data[TESLA_DOMAIN][config_entry.entry_id]["coordinator"],
)
for device in hass.data[TESLA_DOMAIN][config_entry.entry_id]["devices"][
"climate"
]
],
True,
)
class TeslaThermostat(TeslaDevice, ClimateEntity):
"""Representation of a Tesla climate."""
@property
def supported_features(self):
"""Return the list of supported features."""
return SUPPORT_TARGET_TEMPERATURE | SUPPORT_PRESET_MODE
@property
def hvac_mode(self):
"""Return hvac operation ie. heat, cool mode.
Need to be one of HVAC_MODE_*.
"""
if self.tesla_device.is_hvac_enabled():
return HVAC_MODE_HEAT_COOL
return HVAC_MODE_OFF
@property
def hvac_modes(self):
"""Return the list of available hvac operation modes.
Need to be a subset of HVAC_MODES.
"""
return SUPPORT_HVAC
@property
def temperature_unit(self):
"""Return the unit of measurement."""
if self.tesla_device.measurement == "F":
return TEMP_FAHRENHEIT
return TEMP_CELSIUS
@property
def current_temperature(self):
"""Return the current temperature."""
return self.tesla_device.get_current_temp()
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
return self.tesla_device.get_goal_temp()
async def async_set_temperature(self, **kwargs):
"""Set new target temperatures."""
temperature = kwargs.get(ATTR_TEMPERATURE)
if temperature:
_LOGGER.debug("%s: Setting temperature to %s", self.name, temperature)
await self.tesla_device.set_temperature(temperature)
async def async_set_hvac_mode(self, hvac_mode):
"""Set new target hvac mode."""
_LOGGER.debug("%s: Setting hvac mode to %s", self.name, hvac_mode)
if hvac_mode == HVAC_MODE_OFF:
await self.tesla_device.set_status(False)
elif hvac_mode == HVAC_MODE_HEAT_COOL:
await self.tesla_device.set_status(True)
async def async_set_preset_mode(self, preset_mode: str) -> None:
"""Set new preset mode."""
_LOGGER.debug("%s: Setting preset_mode to: %s", self.name, preset_mode)
try:
await self.tesla_device.set_preset_mode(preset_mode)
except UnknownPresetMode as ex:
_LOGGER.error("%s", ex.message)
@property
def preset_mode(self) -> Optional[str]:
"""Return the current preset mode, e.g., home, away, temp.
Requires SUPPORT_PRESET_MODE.
"""
return self.tesla_device.preset_mode
@property
def preset_modes(self) -> Optional[List[str]]:
"""Return a list of available preset modes.
Requires SUPPORT_PRESET_MODE.
"""
return self.tesla_device.preset_modes
|
from random import getrandbits
import voluptuous as vol
from homeassistant.components.binary_sensor import (
DEVICE_CLASSES_SCHEMA,
PLATFORM_SCHEMA,
BinarySensorEntity,
)
from homeassistant.const import CONF_DEVICE_CLASS, CONF_NAME
import homeassistant.helpers.config_validation as cv
DEFAULT_NAME = "Random Binary Sensor"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Random binary sensor."""
name = config.get(CONF_NAME)
device_class = config.get(CONF_DEVICE_CLASS)
async_add_entities([RandomSensor(name, device_class)], True)
class RandomSensor(BinarySensorEntity):
"""Representation of a Random binary sensor."""
def __init__(self, name, device_class):
"""Initialize the Random binary sensor."""
self._name = name
self._device_class = device_class
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def is_on(self):
"""Return true if sensor is on."""
return self._state
@property
def device_class(self):
"""Return the sensor class of the sensor."""
return self._device_class
async def async_update(self):
"""Get new state and update the sensor's state."""
self._state = bool(getrandbits(1))
|
import time
from mock import patch, ANY, call
from arctic.auth import Credential
from arctic.scripts import arctic_prune_versions as mpv
from ...util import run_as_main
def test_prune_versions_symbol(mongo_host, library, library_name):
with patch('arctic.scripts.arctic_prune_versions.prune_versions', autospec=True) as prune_versions, \
patch('arctic.scripts.utils.get_auth', return_value=Credential('admin', 'adminuser', 'adminpwd')), \
patch('pymongo.database.Database.authenticate', return_value=True):
run_as_main(mpv.main, '--host', mongo_host, '--library', library_name, '--symbols', 'sym1,sym2')
prune_versions.assert_has_calls([call(ANY, ['sym1', 'sym2'], 10)])
def test_prune_versions_full(mongo_host, library, library_name):
with patch('arctic.scripts.arctic_prune_versions.do_db_auth', return_value=True):
# Write some stuff with snapshots
library.snapshot('snap')
library.write('symbol', "val1")
library.write('symbol', "val2")
library.snapshot('snap1')
library.write('symbol', "val3")
# Prune older than 10 mins - nothing deleted
run_as_main(mpv.main, '--host', mongo_host, '--library', library_name, '--keep-mins', 10)
assert [x['version'] for x in library.list_versions('symbol')] == [3, 2, 1]
# Prune older than 0 minutes, v1 deleted
run_as_main(mpv.main, '--host', mongo_host, '--library', library_name, '--keep-mins', 0)
assert [x['version'] for x in library.list_versions('symbol')] == [3, 2]
# Delete the snapshots
library.delete_snapshot('snap')
library.delete_snapshot('snap1')
run_as_main(mpv.main, '--host', mongo_host, '--library', library_name, '--keep-mins', 0)
assert [x['version'] for x in library.list_versions('symbol')] == [3]
def test_keep_recent_snapshots(library):
library.write("cherry", "blob")
half_a_day_ago = time.time() - (3600 * 12.)
with patch('time.time', return_value=half_a_day_ago):
library.snapshot("snappy")
library._snapshots.delete_one({"name": "snappy"})
mpv.prune_versions(library, ["cherry"], 10)
assert len(library._versions.find_one({"symbol": "cherry"}).get("parent", [])) == 1
def test_fix_broken_snapshot_references(library):
library.write("cherry", "blob")
    one_day_ago = time.time() - (3600 * 24.) - 10  # a few seconds past the 24-hour cutoff
with patch('time.time', return_value=one_day_ago):
library.snapshot("snappy")
library._snapshots.delete_one({"name": "snappy"})
mpv.prune_versions(library, ["cherry"], 10)
assert library._versions.find_one({"symbol": "cherry"}).get("parent", []) == []
def test_keep_only_one_version(library):
library.write("cherry", "blob")
library.write("cherry", "blob")
    one_day_ago = time.time() - (3600 * 24.) - 10  # a few seconds past the 24-hour cutoff
with patch('time.time', return_value=one_day_ago):
library.snapshot("snappy")
library._snapshots.delete_one({"name": "snappy"})
mpv.prune_versions(library, ["cherry"], 0)
assert len(list(library._versions.find({"symbol": "cherry"}))) == 1
|
from __future__ import print_function
import sys
import select
import argparse
import telnetlib
import threading
from stash.system.shcommon import K_CC, K_CD, K_HUP, K_HDN, K_CU, K_TAB, K_HIST, K_CZ, K_KB
_SYS_STDOUT = sys.__stdout__
_stash = globals()['_stash']
""":type : StaSh"""
try:
import pyte
except ImportError:
_stash('pip install pyte==0.4.10')
import pyte
class StashTelnet(object):
"""
Wrapper class for telnet client and pyte screen
"""
def __init__(self):
# Initialize the pyte screen based on the current screen size
# noinspection PyUnresolvedReferences
self.screen = pyte.screens.DiffScreen(*_stash.terminal.get_wh())
self.stream = pyte.Stream()
self.stream.attach(self.screen)
self.client = None
def connect(self, host, port=23, timeout=2):
print('Connecting...')
try:
self.client = telnetlib.Telnet(host, port, timeout)
return True
        except Exception:
            # Connection failed (bad host, refused, or timed out)
            return False
def stdout_thread(self):
while self.running:
# Get the list sockets which are readable
try:
read_sockets, write_sockets, error_sockets = select.select([self.client], [], [])
            except Exception:
                # The socket was closed or select failed; stop the reader
                break
for sock in read_sockets: # incoming message from remote server
if sock == self.client:
rcv = sock.read_very_eager()
self.feed_screen(rcv)
def feed_screen(self, data):
"""
Feed data to the screen
:param data: data to feed
:type data: str
"""
if data:
data = data.decode('utf-8', errors='ignore')
x, y = self.screen.cursor.x, self.screen.cursor.y
self.stream.feed(data)
if self.screen.dirty or x != self.screen.cursor.x or y != self.screen.cursor.y:
self.update_screen()
self.screen.dirty.clear()
def update_screen(self):
_stash.main_screen.load_pyte_screen(self.screen)
_stash.renderer.render(no_wait=True)
def interactive(self):
t1 = threading.Thread(target=self.stdout_thread)
t1.daemon = True
self.running = True
t1.start()
t1.join()
self.client.close()
print('\nconnection closed\n')
CTRL_KEY_FLAG = (1 << 18)  # UIKeyModifierControl
class SshUserActionDelegate(object):
"""
Substitute the default user actions delegates
"""
def __init__(self, telnet):
self.telnet = telnet
def send(self, s):
# self.telnet.stream.feed(s.decode('utf-8') if hasattr(s, "decode") else s)
self.telnet.feed_screen(s.decode("utf-8" ) if hasattr(s, "decode") else s)
self.telnet.client.write(s.encode('utf-8'))
class SshTvVkKcDelegate(SshUserActionDelegate):
"""
Delegate for TextView, Virtual keys and Key command
"""
def textview_did_begin_editing(self, tv):
_stash.terminal.is_editing = True
def textview_did_end_editing(self, tv):
_stash.terminal.is_editing = False
def textview_should_change(self, tv, rng, replacement):
print("SSH: tvsc: " + repr((rng, replacement)))
# _stash.mini_buffer.feed(rng, replacement)
if replacement == '': # delete
replacement = '\x08'
# self.telnet.feed_screen(replacement)
self.send(replacement)
return False # always false
def textview_did_change(self, tv):
pass
def textview_did_change_selection(self, tv):
pass
def kc_pressed(self, key, modifierFlags):
if modifierFlags == CTRL_KEY_FLAG:
if key == 'C':
self.send('\x03')
self.telnet.running = False
elif key == 'D':
self.send('\x04')
elif key == 'A':
self.send('\x01')
elif key == 'E':
self.send('\x05')
elif key == 'K':
self.send('\x0B')
elif key == 'L':
self.send('\x0C')
elif key == 'U':
self.send('\x15')
elif key == 'Z':
self.send('\x1A')
elif key == '[':
self.send('\x1B') # ESC
elif modifierFlags == 0:
if key == 'UIKeyInputUpArrow':
self.send('\x10')
elif key == 'UIKeyInputDownArrow':
self.send('\x0E')
elif key == 'UIKeyInputLeftArrow':
self.send('\033[D')
elif key == 'UIKeyInputRightArrow':
self.send('\033[C')
def vk_tapped(self, vk):
if vk == K_TAB:
self.send('\t')
elif vk == K_CC:
self.kc_pressed('C', CTRL_KEY_FLAG)
elif vk == K_CD:
self.kc_pressed('D', CTRL_KEY_FLAG)
elif vk == K_CU:
self.kc_pressed('U', CTRL_KEY_FLAG)
elif vk == K_CZ:
self.kc_pressed('Z', CTRL_KEY_FLAG)
elif vk == K_HUP:
self.kc_pressed('UIKeyInputUpArrow', 0)
elif vk == K_HDN:
self.kc_pressed('UIKeyInputDownArrow', 0)
elif vk == K_KB:
if _stash.terminal.is_editing:
_stash.terminal.end_editing()
else:
_stash.terminal.begin_editing()
class SshSVDelegate(SshUserActionDelegate):
"""
Delegate for scroll view
"""
SCROLL_PER_CHAR = 20.0 # Number of pixels to scroll to move 1 character
def scrollview_did_scroll(self, scrollview):
# integrate small scroll motions, but keep scrollview from actually moving
if not scrollview.decelerating:
scrollview.superview.dx -= scrollview.content_offset[0] / SshSVDelegate.SCROLL_PER_CHAR
scrollview.content_offset = (0.0, 0.0)
offset = int(scrollview.superview.dx)
if offset:
scrollview.superview.dx -= offset
if offset > 0:
self.send('\033[C')
else:
self.send('\033[D')
if __name__ == '__main__':
ap = argparse.ArgumentParser()
    ap.add_argument('host', help='host to connect to')
ap.add_argument('-p', '--port', action='store', default=23, type=int, help='port for telnet (default: 23)')
    ap.add_argument('--timeout', type=int, default=2, help='connection timeout in seconds (default: 2)')
args = ap.parse_args()
telnet = StashTelnet()
tv_vk_kc_delegate = SshTvVkKcDelegate(telnet)
sv_delegate = SshSVDelegate(telnet)
if telnet.connect(host=args.host, port=args.port, timeout=args.timeout):
print('Connected. Press Ctrl-C to quit.')
_stash.stream.feed(u'\u009bc', render_it=False)
with _stash.user_action_proxy.config(tv_responder=tv_vk_kc_delegate,
kc_responder=tv_vk_kc_delegate.kc_pressed,
vk_responder=tv_vk_kc_delegate.vk_tapped,
sv_responder=sv_delegate):
telnet.interactive()
else:
print('Unable to connect')
|
import pytest
from flasgger.base import Swagger
def test_init_config(monkeypatch):
def __init__(self, config=None, merge=False):
self._init_config(config, merge)
monkeypatch.setattr(Swagger, "__init__", __init__)
    # Unspecified config will be initialized to the default config
t = Swagger(config=None, merge=False)
assert t.config == Swagger.DEFAULT_CONFIG
    # An empty dict passed as config will be overridden with the default config
empty_dict = dict()
t = Swagger(config=empty_dict, merge=False)
assert t.config == Swagger.DEFAULT_CONFIG
assert t.config is not empty_dict
    # A non-empty config object is used as-is (not copied) when merge=False
d = {"a": 0}
t = Swagger(config=d, merge=False)
assert t.config is d
    # With merge=False, the given config replaces the defaults entirely
t = Swagger(config={"a": 0}, merge=False)
assert t.config == {"a": 0}
# Config will be merged
t = Swagger(config={"a": 0}, merge=True)
assert t.config.items() > {"a": 0}.items()
    assert all(t.config[k] == v for k, v in Swagger.DEFAULT_CONFIG.items())
# Config will be merged
empty_dict = dict()
t = Swagger(config=empty_dict, merge=True)
assert t.config == Swagger.DEFAULT_CONFIG
# keys in DEFAULT_CONFIG will be overridden
d = {"specs": [
{
"endpoint": "swagger",
"route": "/characteristics/swagger.json",
"rule_filter": lambda rule: True, # all in
"model_filter": lambda tag: True, # all in
}
],}
t = Swagger(config=d, merge=True)
    assert all(t.config[k] == v for k, v in d.items())
assert t.config["specs"] == d["specs"]
|
import logging
import os
import subprocess
from tempfile import TemporaryDirectory
from typing import Any
from typing import Dict
from typing import List
from typing import Optional
from typing import Set
import ruamel.yaml as yaml
from service_configuration_lib import read_extra_service_information
from paasta_tools.cli.cmds.validate import validate_schema
from paasta_tools.utils import AUTO_SOACONFIG_SUBDIR
from paasta_tools.utils import DEFAULT_SOA_DIR
log = logging.getLogger(__name__)
# Must have a schema defined
KNOWN_CONFIG_TYPES = ("marathon", "kubernetes")
HEADER_COMMENT = """
# This file contains recommended config values for your service generated by
# automated processes.
#
# Your service will use these values if they are not defined in
# {regular_filename}.
#
# If you would like to override a config value defined here, add the config
# value to {regular_filename} instead of editing this file.
# ==============================================================================
{{}}
"""
# ^ Needs an empty dict at the end for ruamel to return a non-None value when loading
# ^ Braces are doubled for escaping in call to .format
def write_auto_config_data(
service: str, extra_info: str, data: Dict[str, Any], soa_dir: str = DEFAULT_SOA_DIR
) -> Optional[str]:
"""
Replaces the contents of an automated config file for a service, or creates the file if it does not exist.
Returns the filename of the modified file, or None if no file was written.
"""
service_dir = f"{soa_dir}/{service}"
if not os.path.exists(service_dir):
log.warning(
f"Service {service} does not exist in configs, skipping auto config update"
)
return None
subdir = f"{service_dir}/{AUTO_SOACONFIG_SUBDIR}"
if not os.path.exists(subdir):
os.mkdir(subdir)
filename = f"{subdir}/{extra_info}.yaml"
with open(filename, "w") as f:
content = yaml.round_trip_load(
HEADER_COMMENT.format(regular_filename=f"{service}/{extra_info}.yaml")
)
content.update(data)
f.write(yaml.round_trip_dump(content))
return filename
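# For example (hypothetical call),
#     write_auto_config_data("service_foo", "kubernetes-norcal-devc",
#                            {"main": {"cpus": 0.1}})
# would write <soa_dir>/service_foo/<AUTO_SOACONFIG_SUBDIR>/kubernetes-norcal-devc.yaml,
# containing the header comment above followed by the serialized data.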
def _commit_files(files: List[str], message: str) -> bool:
"""
Stages the given files and creates a commit with the given message.
Returns True if a new commit was created, False if the files are unchanged.
"""
subprocess.check_call(["git", "add"] + files)
# Skip commit if no changes are staged
result_code = subprocess.call(["git", "diff-index", "--quiet", "--cached", "HEAD"])
if result_code == 0:
return False
else:
subprocess.check_call(["git", "commit", "--no-verify", "--message", message])
return True
class PushNotFastForwardError(Exception):
pass
class ValidationError(Exception):
pass
def _push_to_remote(branch: str) -> None:
try:
subprocess.check_output(
("git", "push", "origin", branch), stderr=subprocess.STDOUT
)
except subprocess.CalledProcessError as e:
if "Updates were rejected" in str(e.stdout):
raise PushNotFastForwardError()
else:
log.error(f"Push to {branch} failed with:\n {e.stdout}")
raise
def validate_auto_config_file(filepath: str) -> bool:
basename = os.path.basename(filepath)
for file_type in KNOWN_CONFIG_TYPES:
if basename.startswith(file_type):
return bool(
validate_schema(filepath, f"{AUTO_SOACONFIG_SUBDIR}/{file_type}")
)
else:
logging.info(f"{filepath} is invalid because it has no validator defined")
return False
class AutoConfigUpdater:
"""
Helper class for updating automated paasta configs.
Usage:
    updater = AutoConfigUpdater('about_me', 'git@example.com:my_configs', branch='test')
# The context manager clones the repo into a local temp directory, then
# cleans up afterwards.
with updater:
# The updater replaces the content of files, so get the existing data
# first if you want to update it
data = updater.get_existing_configs('service_foo', 'conf_file')
data["new_key"] = "g_minor"
# Now write the new data
updater.write_configs('service_foo', 'conf_file', data)
# Edit more files...
# Once you're done editing files, commit. If all files pass validation,
# the updater will commit the changes and push them to the desired branch
# on the remote.
updater.commit_to_remote(extra_message="Adding some extra context.")
Raises PushNotFastForwardError if the updated branch does not include changes in the
remote branch.
"""
def __init__(
self,
config_source: str,
git_remote: str,
branch: str = "master",
working_dir: Optional[str] = None,
do_clone: bool = True,
):
self.config_source = config_source
self.git_remote = git_remote
self.branch = branch
self.working_dir = working_dir
self.do_clone = do_clone
self.files_changed: Set[str] = set()
self.tmp_dir = None
def __enter__(self):
if self.do_clone:
self.tmp_dir = TemporaryDirectory(dir=self.working_dir)
self.working_dir = self.tmp_dir.name
subprocess.check_call(["git", "clone", self.git_remote, self.working_dir])
self.pwd = os.getcwd()
os.chdir(self.working_dir)
if self.branch != "master":
subprocess.check_call(["git", "checkout", "-b", self.branch])
return self
def __exit__(self, type, value, traceback):
os.chdir(self.pwd)
if self.tmp_dir:
self.tmp_dir.cleanup()
def write_configs(self, service: str, extra_info: str, configs: Dict[str, Any]):
result = write_auto_config_data(
service, extra_info, configs, soa_dir=self.working_dir
)
if result:
self.files_changed.add(result)
def get_existing_configs(self, service: str, extra_info: str) -> Dict[str, Any]:
return read_extra_service_information(
service, f"{AUTO_SOACONFIG_SUBDIR}/{extra_info}", soa_dir=self.working_dir,
)
def validate(self):
return_code = True
for filepath in self.files_changed:
# We don't short circuit after a failure so the caller gets info on all the failures
return_code = validate_auto_config_file(filepath) and return_code
return return_code
def commit_to_remote(self, extra_message: str = ""):
if not self.validate():
log.error("Files failed validation, not committing changes")
raise ValidationError
# TODO: more identifying information, like hostname or paasta_tools version?
message = f"Update to {AUTO_SOACONFIG_SUBDIR} configs from {self.config_source}"
if extra_message:
message = f"{message}\n\n{extra_message}"
if _commit_files(list(self.files_changed), message):
_push_to_remote(self.branch)
else:
log.info("No files changed, no push required.")
|
import logging
import sucks
from homeassistant.components.vacuum import (
SUPPORT_BATTERY,
SUPPORT_CLEAN_SPOT,
SUPPORT_FAN_SPEED,
SUPPORT_LOCATE,
SUPPORT_RETURN_HOME,
SUPPORT_SEND_COMMAND,
SUPPORT_STATUS,
SUPPORT_STOP,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
VacuumEntity,
)
from homeassistant.helpers.icon import icon_for_battery_level
from . import ECOVACS_DEVICES
_LOGGER = logging.getLogger(__name__)
SUPPORT_ECOVACS = (
SUPPORT_BATTERY
| SUPPORT_RETURN_HOME
| SUPPORT_CLEAN_SPOT
| SUPPORT_STOP
| SUPPORT_TURN_OFF
| SUPPORT_TURN_ON
| SUPPORT_LOCATE
| SUPPORT_STATUS
| SUPPORT_SEND_COMMAND
| SUPPORT_FAN_SPEED
)
ATTR_ERROR = "error"
ATTR_COMPONENT_PREFIX = "component_"
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Ecovacs vacuums."""
vacuums = []
for device in hass.data[ECOVACS_DEVICES]:
vacuums.append(EcovacsVacuum(device))
_LOGGER.debug("Adding Ecovacs Vacuums to Home Assistant: %s", vacuums)
add_entities(vacuums, True)
class EcovacsVacuum(VacuumEntity):
"""Ecovacs Vacuums such as Deebot."""
def __init__(self, device):
"""Initialize the Ecovacs Vacuum."""
self.device = device
self.device.connect_and_wait_until_ready()
if self.device.vacuum.get("nick") is not None:
self._name = str(self.device.vacuum["nick"])
else:
# In case there is no nickname defined, use the device id
            self._name = str(self.device.vacuum["did"])
self._fan_speed = None
self._error = None
_LOGGER.debug("Vacuum initialized: %s", self.name)
async def async_added_to_hass(self) -> None:
"""Set up the event listeners now that hass is ready."""
self.device.statusEvents.subscribe(lambda _: self.schedule_update_ha_state())
self.device.batteryEvents.subscribe(lambda _: self.schedule_update_ha_state())
self.device.lifespanEvents.subscribe(lambda _: self.schedule_update_ha_state())
self.device.errorEvents.subscribe(self.on_error)
def on_error(self, error):
"""Handle an error event from the robot.
This will not change the entity's state. If the error caused the state
to change, that will come through as a separate on_status event
"""
if error == "no_error":
self._error = None
else:
self._error = error
self.hass.bus.fire(
"ecovacs_error", {"entity_id": self.entity_id, "error": error}
)
self.schedule_update_ha_state()
@property
def should_poll(self) -> bool:
"""Return True if entity has to be polled for state."""
return False
@property
def unique_id(self) -> str:
"""Return an unique ID."""
return self.device.vacuum.get("did")
@property
def is_on(self):
"""Return true if vacuum is currently cleaning."""
return self.device.is_cleaning
@property
def is_charging(self):
"""Return true if vacuum is currently charging."""
return self.device.is_charging
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def supported_features(self):
"""Flag vacuum cleaner robot features that are supported."""
return SUPPORT_ECOVACS
@property
def status(self):
"""Return the status of the vacuum cleaner."""
return self.device.vacuum_status
def return_to_base(self, **kwargs):
"""Set the vacuum cleaner to return to the dock."""
self.device.run(sucks.Charge())
@property
def battery_icon(self):
"""Return the battery icon for the vacuum cleaner."""
return icon_for_battery_level(
battery_level=self.battery_level, charging=self.is_charging
)
@property
def battery_level(self):
"""Return the battery level of the vacuum cleaner."""
if self.device.battery_status is not None:
return self.device.battery_status * 100
return super().battery_level
@property
def fan_speed(self):
"""Return the fan speed of the vacuum cleaner."""
return self.device.fan_speed
@property
def fan_speed_list(self):
"""Get the list of available fan speed steps of the vacuum cleaner."""
return [sucks.FAN_SPEED_NORMAL, sucks.FAN_SPEED_HIGH]
def turn_on(self, **kwargs):
"""Turn the vacuum on and start cleaning."""
self.device.run(sucks.Clean())
def turn_off(self, **kwargs):
"""Turn the vacuum off stopping the cleaning and returning home."""
self.return_to_base()
def stop(self, **kwargs):
"""Stop the vacuum cleaner."""
self.device.run(sucks.Stop())
def clean_spot(self, **kwargs):
"""Perform a spot clean-up."""
self.device.run(sucks.Spot())
def locate(self, **kwargs):
"""Locate the vacuum cleaner."""
self.device.run(sucks.PlaySound())
def set_fan_speed(self, fan_speed, **kwargs):
"""Set fan speed."""
if self.is_on:
self.device.run(sucks.Clean(mode=self.device.clean_status, speed=fan_speed))
def send_command(self, command, params=None, **kwargs):
"""Send a command to a vacuum cleaner."""
self.device.run(sucks.VacBotCommand(command, params))
@property
def device_state_attributes(self):
"""Return the device-specific state attributes of this vacuum."""
data = {}
data[ATTR_ERROR] = self._error
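        # component lifespan values arrive as 0..1 fractions; expose them as percentages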
for key, val in self.device.components.items():
attr_name = ATTR_COMPONENT_PREFIX + key
data[attr_name] = int(val * 100)
return data
|
import logging
from afsapi import AFSAPI
import requests
import voluptuous as vol
from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity
from homeassistant.components.media_player.const import (
MEDIA_TYPE_MUSIC,
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PLAY_MEDIA,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SEEK,
SUPPORT_SELECT_SOURCE,
SUPPORT_STOP,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
SUPPORT_VOLUME_STEP,
)
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
STATE_IDLE,
STATE_OFF,
STATE_PAUSED,
STATE_PLAYING,
STATE_UNKNOWN,
)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
SUPPORT_FRONTIER_SILICON = (
SUPPORT_PAUSE
| SUPPORT_VOLUME_SET
| SUPPORT_VOLUME_MUTE
| SUPPORT_VOLUME_STEP
| SUPPORT_PREVIOUS_TRACK
| SUPPORT_NEXT_TRACK
| SUPPORT_SEEK
| SUPPORT_PLAY_MEDIA
| SUPPORT_PLAY
| SUPPORT_STOP
| SUPPORT_TURN_ON
| SUPPORT_TURN_OFF
| SUPPORT_SELECT_SOURCE
)
DEFAULT_PORT = 80
DEFAULT_PASSWORD = "1234"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_PASSWORD, default=DEFAULT_PASSWORD): cv.string,
vol.Optional(CONF_NAME): cv.string,
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Frontier Silicon platform."""
if discovery_info is not None:
async_add_entities(
[AFSAPIDevice(discovery_info["ssdp_description"], DEFAULT_PASSWORD, None)],
True,
)
return True
host = config.get(CONF_HOST)
port = config.get(CONF_PORT)
password = config.get(CONF_PASSWORD)
name = config.get(CONF_NAME)
try:
async_add_entities(
[AFSAPIDevice(f"http://{host}:{port}/device", password, name)], True
)
_LOGGER.debug("FSAPI device %s:%s -> %s", host, port, password)
return True
except requests.exceptions.RequestException:
        _LOGGER.error("Could not add the FSAPI device at %s:%s", host, port)
return False
class AFSAPIDevice(MediaPlayerEntity):
"""Representation of a Frontier Silicon device on the network."""
def __init__(self, device_url, password, name):
"""Initialize the Frontier Silicon API device."""
self._device_url = device_url
self._password = password
self._state = None
self._name = name
self._title = None
self._artist = None
self._album_name = None
self._mute = None
self._source = None
self._source_list = None
self._media_image_url = None
self._max_volume = None
self._volume_level = None
# Properties
@property
def fs_device(self):
"""
Create a fresh fsapi session.
        A new session is created for each request in case someone else
        (e.g. the UNDOK app) connected to the device between updates and
        invalidated the existing session.
"""
return AFSAPI(self._device_url, self._password)
@property
def name(self):
"""Return the device name."""
return self._name
@property
def media_title(self):
"""Title of current playing media."""
return self._title
@property
def media_artist(self):
"""Artist of current playing media, music track only."""
return self._artist
@property
def media_album_name(self):
"""Album name of current playing media, music track only."""
return self._album_name
@property
def media_content_type(self):
"""Content type of current playing media."""
return MEDIA_TYPE_MUSIC
@property
def supported_features(self):
"""Flag of media commands that are supported."""
return SUPPORT_FRONTIER_SILICON
@property
def state(self):
"""Return the state of the player."""
return self._state
# source
@property
def source_list(self):
"""List of available input sources."""
return self._source_list
@property
def source(self):
"""Name of the current input source."""
return self._source
@property
def media_image_url(self):
"""Image url of current playing media."""
return self._media_image_url
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
return self._volume_level
async def async_update(self):
"""Get the latest date and update device state."""
fs_device = self.fs_device
if not self._name:
self._name = await fs_device.get_friendly_name()
if not self._source_list:
self._source_list = await fs_device.get_mode_list()
        # The API seems to include 'zero' in the number of steps (e.g. if the range
        # is 0-40 then get_volume_steps returns 41); subtract one to get the max volume.
        # If the get_volume_steps call fails, this stays 0 and is retried on the next update.
if not self._max_volume:
self._max_volume = int(await fs_device.get_volume_steps() or 1) - 1
if await fs_device.get_power():
status = await fs_device.get_play_status()
self._state = {
"playing": STATE_PLAYING,
"paused": STATE_PAUSED,
"stopped": STATE_IDLE,
"unknown": STATE_UNKNOWN,
None: STATE_IDLE,
}.get(status, STATE_UNKNOWN)
else:
self._state = STATE_OFF
if self._state != STATE_OFF:
info_name = await fs_device.get_play_name()
info_text = await fs_device.get_play_text()
self._title = " - ".join(filter(None, [info_name, info_text]))
self._artist = await fs_device.get_play_artist()
self._album_name = await fs_device.get_play_album()
self._source = await fs_device.get_mode()
self._mute = await fs_device.get_mute()
self._media_image_url = await fs_device.get_play_graphic()
volume = await self.fs_device.get_volume()
# Prevent division by zero if max_volume not known yet
self._volume_level = float(volume or 0) / (self._max_volume or 1)
else:
self._title = None
self._artist = None
self._album_name = None
self._source = None
self._mute = None
self._media_image_url = None
self._volume_level = None
# Management actions
# power control
async def async_turn_on(self):
"""Turn on the device."""
await self.fs_device.set_power(True)
async def async_turn_off(self):
"""Turn off the device."""
await self.fs_device.set_power(False)
async def async_media_play(self):
"""Send play command."""
await self.fs_device.play()
async def async_media_pause(self):
"""Send pause command."""
await self.fs_device.pause()
async def async_media_play_pause(self):
"""Send play/pause command."""
if "playing" in self._state:
await self.fs_device.pause()
else:
await self.fs_device.play()
async def async_media_stop(self):
"""Send play/pause command."""
await self.fs_device.pause()
async def async_media_previous_track(self):
"""Send previous track command (results in rewind)."""
await self.fs_device.rewind()
async def async_media_next_track(self):
"""Send next track command (results in fast-forward)."""
await self.fs_device.forward()
# mute
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self._mute
async def async_mute_volume(self, mute):
"""Send mute command."""
await self.fs_device.set_mute(mute)
# volume
async def async_volume_up(self):
"""Send volume up command."""
volume = await self.fs_device.get_volume()
volume = int(volume or 0) + 1
await self.fs_device.set_volume(min(volume, self._max_volume))
async def async_volume_down(self):
"""Send volume down command."""
volume = await self.fs_device.get_volume()
volume = int(volume or 0) - 1
await self.fs_device.set_volume(max(volume, 0))
async def async_set_volume_level(self, volume):
"""Set volume command."""
if self._max_volume: # Can't do anything sensible if not set
volume = int(volume * self._max_volume)
await self.fs_device.set_volume(volume)
async def async_select_source(self, source):
"""Select input source."""
await self.fs_device.set_mode(source)
|
import unittest
import numpy as np
from chainer import testing
from chainer.testing import attr
from chainercv.datasets import online_products_super_label_names
from chainercv.datasets import OnlineProductsDataset
from chainercv.utils import assert_is_label_dataset
@testing.parameterize(
{'split': 'train'},
{'split': 'test'}
)
class TestOnlineProductsDataset(unittest.TestCase):
def setUp(self):
self.dataset = OnlineProductsDataset(split=self.split)
@attr.slow
def test_online_products_dataset(self):
assert_is_label_dataset(
self.dataset, 22634, n_example=10)
for _ in range(10):
i = np.random.randint(0, len(self.dataset))
_, _, super_label = self.dataset[i]
            assert isinstance(super_label, np.int32), \
                'super_label must be a numpy.int32.'
            assert super_label.ndim == 0, 'The ndim of super_label must be 0.'
            assert (super_label >= 0 and
                    super_label < len(online_products_super_label_names)), \
                'The value of super_label must be in [0, n_class - 1].'
testing.run_module(__name__, __file__)
|
from unittest import TestCase
import numpy as np
from scattertext import CredTFIDF, OncePerDocFrequencyRanker
from scattertext.test.test_termDocMatrixFactory import build_hamlet_jz_corpus
class TestCredTFIDF(TestCase):
def test_get_score_df(self):
corpus = build_hamlet_jz_corpus()
tfidf = (CredTFIDF(corpus)
.set_term_ranker(OncePerDocFrequencyRanker)
.set_categories('hamlet'))
np.testing.assert_almost_equal(tfidf
.get_scores()[:5], [3.0757237e-05, 4.1256023e-02, 4.1256023e-02, 5.5708409e-02,
4.1256023e-02])
#print(tfidf.get_score_df().iloc[0])
self.assertEqual(list(tfidf.get_score_df().columns), ['pos_cred_tfidf', 'neg_cred_tfidf', 'delta_cred_tf_idf'])
def test_get_name(self):
corpus = build_hamlet_jz_corpus()
self.assertEqual(CredTFIDF(corpus).get_name(), 'Delta mean cred-tf-idf')
|
import itertools
import os
import pickle
from hashlib import sha256
from radicale.log import logger
class CollectionSyncMixin:
def sync(self, old_token=None):
# The sync token has the form http://radicale.org/ns/sync/TOKEN_NAME
# where TOKEN_NAME is the sha256 hash of all history etags of present
# and past items of the collection.
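        # e.g. http://radicale.org/ns/sync/<64 lowercase hex characters>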
def check_token_name(token_name):
if len(token_name) != 64:
return False
for c in token_name:
if c not in "0123456789abcdef":
return False
return True
old_token_name = None
if old_token:
# Extract the token name from the sync token
if not old_token.startswith("http://radicale.org/ns/sync/"):
raise ValueError("Malformed token: %r" % old_token)
old_token_name = old_token[len("http://radicale.org/ns/sync/"):]
if not check_token_name(old_token_name):
raise ValueError("Malformed token: %r" % old_token)
# Get the current state and sync-token of the collection.
state = {}
token_name_hash = sha256()
# Find the history of all existing and deleted items
for href, item in itertools.chain(
((item.href, item) for item in self.get_all()),
((href, None) for href in self._get_deleted_history_hrefs())):
history_etag = self._update_history_etag(href, item)
state[href] = history_etag
token_name_hash.update((href + "/" + history_etag).encode())
token_name = token_name_hash.hexdigest()
token = "http://radicale.org/ns/sync/%s" % token_name
if token_name == old_token_name:
# Nothing changed
return token, ()
token_folder = os.path.join(self._filesystem_path,
".Radicale.cache", "sync-token")
token_path = os.path.join(token_folder, token_name)
old_state = {}
if old_token_name:
# load the old token state
old_token_path = os.path.join(token_folder, old_token_name)
try:
# Race: Another process might have deleted the file.
with open(old_token_path, "rb") as f:
old_state = pickle.load(f)
except (FileNotFoundError, pickle.UnpicklingError,
ValueError) as e:
if isinstance(e, (pickle.UnpicklingError, ValueError)):
logger.warning(
"Failed to load stored sync token %r in %r: %s",
old_token_name, self.path, e, exc_info=True)
# Delete the damaged file
try:
os.remove(old_token_path)
except (FileNotFoundError, PermissionError):
pass
raise ValueError("Token not found: %r" % old_token)
# write the new token state or update the modification time of
# existing token state
if not os.path.exists(token_path):
self._storage._makedirs_synced(token_folder)
try:
# Race: Other processes might have created and locked the file.
with self._atomic_write(token_path, "wb") as f:
pickle.dump(state, f)
except PermissionError:
pass
else:
# clean up old sync tokens and item cache
self._clean_cache(token_folder, os.listdir(token_folder),
max_age=self._storage.configuration.get(
"storage", "max_sync_token_age"))
self._clean_history()
else:
# Try to update the modification time
try:
# Race: Another process might have deleted the file.
os.utime(token_path)
except FileNotFoundError:
pass
changes = []
# Find all new, changed and deleted (that are still in the item cache)
# items
for href, history_etag in state.items():
if history_etag != old_state.get(href):
changes.append(href)
# Find all deleted items that are no longer in the item cache
for href, history_etag in old_state.items():
if href not in state:
changes.append(href)
return token, changes
|
from homematicip.base.enums import DoorCommand, DoorState
from homeassistant.components.cover import (
ATTR_CURRENT_POSITION,
ATTR_CURRENT_TILT_POSITION,
DOMAIN as COVER_DOMAIN,
)
from homeassistant.components.homematicip_cloud import DOMAIN as HMIPC_DOMAIN
from homeassistant.const import STATE_CLOSED, STATE_OPEN, STATE_UNKNOWN
from homeassistant.setup import async_setup_component
from .helper import async_manipulate_test_data, get_and_check_entity_basics
async def test_manually_configured_platform(hass):
"""Test that we do not set up an access point."""
assert await async_setup_component(
hass, COVER_DOMAIN, {COVER_DOMAIN: {"platform": HMIPC_DOMAIN}}
)
assert not hass.data.get(HMIPC_DOMAIN)
async def test_hmip_cover_shutter(hass, default_mock_hap_factory):
"""Test HomematicipCoverShutte."""
entity_id = "cover.broll_1"
entity_name = "BROLL_1"
device_model = "HmIP-BROLL"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=[entity_name]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == "closed"
assert ha_state.attributes["current_position"] == 0
service_call_counter = len(hmip_device.mock_calls)
await hass.services.async_call(
"cover", "open_cover", {"entity_id": entity_id}, blocking=True
)
assert len(hmip_device.mock_calls) == service_call_counter + 1
assert hmip_device.mock_calls[-1][0] == "set_shutter_level"
assert hmip_device.mock_calls[-1][1] == (0,)
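    # HomematicIP shutterLevel is inverted vs. HA's position scale: 0 = fully open, 1 = fully closed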
await async_manipulate_test_data(hass, hmip_device, "shutterLevel", 0)
ha_state = hass.states.get(entity_id)
assert ha_state.state == STATE_OPEN
assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100
await hass.services.async_call(
"cover",
"set_cover_position",
{"entity_id": entity_id, "position": "50"},
blocking=True,
)
assert len(hmip_device.mock_calls) == service_call_counter + 3
assert hmip_device.mock_calls[-1][0] == "set_shutter_level"
assert hmip_device.mock_calls[-1][1] == (0.5,)
await async_manipulate_test_data(hass, hmip_device, "shutterLevel", 0.5)
ha_state = hass.states.get(entity_id)
assert ha_state.state == STATE_OPEN
assert ha_state.attributes[ATTR_CURRENT_POSITION] == 50
await hass.services.async_call(
"cover", "close_cover", {"entity_id": entity_id}, blocking=True
)
assert len(hmip_device.mock_calls) == service_call_counter + 5
assert hmip_device.mock_calls[-1][0] == "set_shutter_level"
assert hmip_device.mock_calls[-1][1] == (1,)
await async_manipulate_test_data(hass, hmip_device, "shutterLevel", 1)
ha_state = hass.states.get(entity_id)
assert ha_state.state == STATE_CLOSED
assert ha_state.attributes[ATTR_CURRENT_POSITION] == 0
await hass.services.async_call(
"cover", "stop_cover", {"entity_id": entity_id}, blocking=True
)
assert len(hmip_device.mock_calls) == service_call_counter + 7
assert hmip_device.mock_calls[-1][0] == "set_shutter_stop"
assert hmip_device.mock_calls[-1][1] == ()
await async_manipulate_test_data(hass, hmip_device, "shutterLevel", None)
ha_state = hass.states.get(entity_id)
assert ha_state.state == STATE_UNKNOWN
async def test_hmip_cover_slats(hass, default_mock_hap_factory):
"""Test HomematicipCoverSlats."""
entity_id = "cover.sofa_links"
entity_name = "Sofa links"
device_model = "HmIP-FBL"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=[entity_name]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == STATE_CLOSED
assert ha_state.attributes[ATTR_CURRENT_POSITION] == 0
assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 0
service_call_counter = len(hmip_device.mock_calls)
await hass.services.async_call(
"cover", "open_cover_tilt", {"entity_id": entity_id}, blocking=True
)
assert len(hmip_device.mock_calls) == service_call_counter + 1
assert hmip_device.mock_calls[-1][0] == "set_slats_level"
assert hmip_device.mock_calls[-1][1] == (0,)
await async_manipulate_test_data(hass, hmip_device, "shutterLevel", 0)
await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 0)
ha_state = hass.states.get(entity_id)
assert ha_state.state == STATE_OPEN
assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100
assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 100
await hass.services.async_call(
"cover",
"set_cover_tilt_position",
{"entity_id": entity_id, "tilt_position": "50"},
blocking=True,
)
assert len(hmip_device.mock_calls) == service_call_counter + 4
assert hmip_device.mock_calls[-1][0] == "set_slats_level"
assert hmip_device.mock_calls[-1][1] == (0.5,)
await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 0.5)
ha_state = hass.states.get(entity_id)
assert ha_state.state == STATE_OPEN
assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100
assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 50
await hass.services.async_call(
"cover", "close_cover_tilt", {"entity_id": entity_id}, blocking=True
)
assert len(hmip_device.mock_calls) == service_call_counter + 6
assert hmip_device.mock_calls[-1][0] == "set_slats_level"
assert hmip_device.mock_calls[-1][1] == (1,)
await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 1)
ha_state = hass.states.get(entity_id)
assert ha_state.state == STATE_OPEN
assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100
assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 0
await hass.services.async_call(
"cover", "stop_cover_tilt", {"entity_id": entity_id}, blocking=True
)
assert len(hmip_device.mock_calls) == service_call_counter + 8
assert hmip_device.mock_calls[-1][0] == "set_shutter_stop"
assert hmip_device.mock_calls[-1][1] == ()
await async_manipulate_test_data(hass, hmip_device, "slatsLevel", None)
ha_state = hass.states.get(entity_id)
assert not ha_state.attributes.get(ATTR_CURRENT_TILT_POSITION)
await async_manipulate_test_data(hass, hmip_device, "shutterLevel", None)
ha_state = hass.states.get(entity_id)
assert ha_state.state == STATE_UNKNOWN
async def test_hmip_garage_door_tormatic(hass, default_mock_hap_factory):
"""Test HomematicipCoverShutte."""
entity_id = "cover.garage_door_module"
entity_name = "Garage Door Module"
device_model = "HmIP-MOD-TM"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=[entity_name]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == "closed"
assert ha_state.attributes["current_position"] == 0
service_call_counter = len(hmip_device.mock_calls)
await hass.services.async_call(
"cover", "open_cover", {"entity_id": entity_id}, blocking=True
)
assert len(hmip_device.mock_calls) == service_call_counter + 1
assert hmip_device.mock_calls[-1][0] == "send_door_command"
assert hmip_device.mock_calls[-1][1] == (DoorCommand.OPEN,)
await async_manipulate_test_data(hass, hmip_device, "doorState", DoorState.OPEN)
ha_state = hass.states.get(entity_id)
assert ha_state.state == STATE_OPEN
assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100
await hass.services.async_call(
"cover", "close_cover", {"entity_id": entity_id}, blocking=True
)
assert len(hmip_device.mock_calls) == service_call_counter + 3
assert hmip_device.mock_calls[-1][0] == "send_door_command"
assert hmip_device.mock_calls[-1][1] == (DoorCommand.CLOSE,)
await async_manipulate_test_data(hass, hmip_device, "doorState", DoorState.CLOSED)
ha_state = hass.states.get(entity_id)
assert ha_state.state == STATE_CLOSED
assert ha_state.attributes[ATTR_CURRENT_POSITION] == 0
await hass.services.async_call(
"cover", "stop_cover", {"entity_id": entity_id}, blocking=True
)
assert len(hmip_device.mock_calls) == service_call_counter + 5
assert hmip_device.mock_calls[-1][0] == "send_door_command"
assert hmip_device.mock_calls[-1][1] == (DoorCommand.STOP,)
async def test_hmip_garage_door_hoermann(hass, default_mock_hap_factory):
"""Test HomematicipCoverShutte."""
entity_id = "cover.garage_door"
entity_name = "Garage door"
device_model = "HmIP-MOD-HO"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=[entity_name]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == "closed"
assert ha_state.attributes["current_position"] == 0
service_call_counter = len(hmip_device.mock_calls)
await hass.services.async_call(
"cover", "open_cover", {"entity_id": entity_id}, blocking=True
)
assert len(hmip_device.mock_calls) == service_call_counter + 1
assert hmip_device.mock_calls[-1][0] == "send_door_command"
assert hmip_device.mock_calls[-1][1] == (DoorCommand.OPEN,)
await async_manipulate_test_data(hass, hmip_device, "doorState", DoorState.OPEN)
ha_state = hass.states.get(entity_id)
assert ha_state.state == STATE_OPEN
assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100
await hass.services.async_call(
"cover", "close_cover", {"entity_id": entity_id}, blocking=True
)
assert len(hmip_device.mock_calls) == service_call_counter + 3
assert hmip_device.mock_calls[-1][0] == "send_door_command"
assert hmip_device.mock_calls[-1][1] == (DoorCommand.CLOSE,)
await async_manipulate_test_data(hass, hmip_device, "doorState", DoorState.CLOSED)
ha_state = hass.states.get(entity_id)
assert ha_state.state == STATE_CLOSED
assert ha_state.attributes[ATTR_CURRENT_POSITION] == 0
await hass.services.async_call(
"cover", "stop_cover", {"entity_id": entity_id}, blocking=True
)
assert len(hmip_device.mock_calls) == service_call_counter + 5
assert hmip_device.mock_calls[-1][0] == "send_door_command"
assert hmip_device.mock_calls[-1][1] == (DoorCommand.STOP,)
async def test_hmip_cover_shutter_group(hass, default_mock_hap_factory):
"""Test HomematicipCoverShutteGroup."""
entity_id = "cover.rollos_shuttergroup"
entity_name = "Rollos ShutterGroup"
device_model = None
mock_hap = await default_mock_hap_factory.async_get_mock_hap(test_groups=["Rollos"])
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == "closed"
assert ha_state.attributes[ATTR_CURRENT_POSITION] == 0
service_call_counter = len(hmip_device.mock_calls)
await hass.services.async_call(
"cover", "open_cover", {"entity_id": entity_id}, blocking=True
)
assert len(hmip_device.mock_calls) == service_call_counter + 1
assert hmip_device.mock_calls[-1][0] == "set_shutter_level"
assert hmip_device.mock_calls[-1][1] == (0,)
await async_manipulate_test_data(hass, hmip_device, "shutterLevel", 0)
ha_state = hass.states.get(entity_id)
assert ha_state.state == STATE_OPEN
assert ha_state.attributes[ATTR_CURRENT_POSITION] == 100
await hass.services.async_call(
"cover",
"set_cover_position",
{"entity_id": entity_id, "position": "50"},
blocking=True,
)
assert len(hmip_device.mock_calls) == service_call_counter + 3
assert hmip_device.mock_calls[-1][0] == "set_shutter_level"
assert hmip_device.mock_calls[-1][1] == (0.5,)
await async_manipulate_test_data(hass, hmip_device, "shutterLevel", 0.5)
ha_state = hass.states.get(entity_id)
assert ha_state.state == STATE_OPEN
assert ha_state.attributes[ATTR_CURRENT_POSITION] == 50
await hass.services.async_call(
"cover", "close_cover", {"entity_id": entity_id}, blocking=True
)
assert len(hmip_device.mock_calls) == service_call_counter + 5
assert hmip_device.mock_calls[-1][0] == "set_shutter_level"
assert hmip_device.mock_calls[-1][1] == (1,)
await async_manipulate_test_data(hass, hmip_device, "shutterLevel", 1)
ha_state = hass.states.get(entity_id)
assert ha_state.state == STATE_CLOSED
assert ha_state.attributes[ATTR_CURRENT_POSITION] == 0
await hass.services.async_call(
"cover", "stop_cover", {"entity_id": entity_id}, blocking=True
)
assert len(hmip_device.mock_calls) == service_call_counter + 7
assert hmip_device.mock_calls[-1][0] == "set_shutter_stop"
assert hmip_device.mock_calls[-1][1] == ()
await async_manipulate_test_data(hass, hmip_device, "shutterLevel", None)
ha_state = hass.states.get(entity_id)
assert ha_state.state == STATE_UNKNOWN
async def test_hmip_cover_slats_group(hass, default_mock_hap_factory):
"""Test slats with HomematicipCoverShutteGroup."""
entity_id = "cover.rollos_shuttergroup"
entity_name = "Rollos ShutterGroup"
device_model = None
mock_hap = await default_mock_hap_factory.async_get_mock_hap(test_groups=["Rollos"])
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 1)
ha_state = hass.states.get(entity_id)
assert ha_state.state == STATE_CLOSED
assert ha_state.attributes[ATTR_CURRENT_POSITION] == 0
assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 0
service_call_counter = len(hmip_device.mock_calls)
await hass.services.async_call(
"cover",
"set_cover_position",
{"entity_id": entity_id, "position": "50"},
blocking=True,
)
await hass.services.async_call(
"cover", "open_cover_tilt", {"entity_id": entity_id}, blocking=True
)
assert len(hmip_device.mock_calls) == service_call_counter + 2
assert hmip_device.mock_calls[-1][0] == "set_slats_level"
assert hmip_device.mock_calls[-1][1] == (0,)
await async_manipulate_test_data(hass, hmip_device, "shutterLevel", 0.5)
await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 0)
ha_state = hass.states.get(entity_id)
assert ha_state.state == STATE_OPEN
assert ha_state.attributes[ATTR_CURRENT_POSITION] == 50
assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 100
await hass.services.async_call(
"cover",
"set_cover_tilt_position",
{"entity_id": entity_id, "tilt_position": "50"},
blocking=True,
)
assert len(hmip_device.mock_calls) == service_call_counter + 5
assert hmip_device.mock_calls[-1][0] == "set_slats_level"
assert hmip_device.mock_calls[-1][1] == (0.5,)
await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 0.5)
ha_state = hass.states.get(entity_id)
assert ha_state.state == STATE_OPEN
assert ha_state.attributes[ATTR_CURRENT_POSITION] == 50
assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 50
await hass.services.async_call(
"cover", "close_cover_tilt", {"entity_id": entity_id}, blocking=True
)
assert len(hmip_device.mock_calls) == service_call_counter + 7
assert hmip_device.mock_calls[-1][0] == "set_slats_level"
assert hmip_device.mock_calls[-1][1] == (1,)
await async_manipulate_test_data(hass, hmip_device, "slatsLevel", 1)
ha_state = hass.states.get(entity_id)
assert ha_state.state == STATE_OPEN
assert ha_state.attributes[ATTR_CURRENT_POSITION] == 50
assert ha_state.attributes[ATTR_CURRENT_TILT_POSITION] == 0
await hass.services.async_call(
"cover", "stop_cover_tilt", {"entity_id": entity_id}, blocking=True
)
assert len(hmip_device.mock_calls) == service_call_counter + 9
assert hmip_device.mock_calls[-1][0] == "set_shutter_stop"
assert hmip_device.mock_calls[-1][1] == ()
|
import pytest
from homeassistant.components.alarm_control_panel import DOMAIN
import homeassistant.components.automation as automation
from homeassistant.const import (
CONF_PLATFORM,
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_DISARMED,
STATE_ALARM_TRIGGERED,
STATE_UNKNOWN,
)
from homeassistant.helpers import device_registry
from homeassistant.setup import async_setup_component
from tests.common import (
MockConfigEntry,
assert_lists_same,
async_get_device_automation_capabilities,
async_get_device_automations,
mock_device_registry,
mock_registry,
)
@pytest.fixture
def device_reg(hass):
"""Return an empty, loaded, registry."""
return mock_device_registry(hass)
@pytest.fixture
def entity_reg(hass):
"""Return an empty, loaded, registry."""
return mock_registry(hass)
async def test_get_actions(hass, device_reg, entity_reg):
"""Test we get the expected actions from a alarm_control_panel."""
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device_entry.id)
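    # supported_features 15 = ARM_HOME (1) | ARM_AWAY (2) | ARM_NIGHT (4) | TRIGGER (8);
    # disarm is always offered regardless of the feature flags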
hass.states.async_set(
"alarm_control_panel.test_5678", "attributes", {"supported_features": 15}
)
expected_actions = [
{
"domain": DOMAIN,
"type": "arm_away",
"device_id": device_entry.id,
"entity_id": "alarm_control_panel.test_5678",
},
{
"domain": DOMAIN,
"type": "arm_home",
"device_id": device_entry.id,
"entity_id": "alarm_control_panel.test_5678",
},
{
"domain": DOMAIN,
"type": "arm_night",
"device_id": device_entry.id,
"entity_id": "alarm_control_panel.test_5678",
},
{
"domain": DOMAIN,
"type": "disarm",
"device_id": device_entry.id,
"entity_id": "alarm_control_panel.test_5678",
},
{
"domain": DOMAIN,
"type": "trigger",
"device_id": device_entry.id,
"entity_id": "alarm_control_panel.test_5678",
},
]
actions = await async_get_device_automations(hass, "action", device_entry.id)
assert_lists_same(actions, expected_actions)
async def test_get_actions_arm_night_only(hass, device_reg, entity_reg):
"""Test we get the expected actions from a alarm_control_panel."""
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device_entry.id)
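    # supported_features 4 = ARM_NIGHT only, so only the arm_night and disarm actions are expected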
hass.states.async_set(
"alarm_control_panel.test_5678", "attributes", {"supported_features": 4}
)
expected_actions = [
{
"domain": DOMAIN,
"type": "arm_night",
"device_id": device_entry.id,
"entity_id": "alarm_control_panel.test_5678",
},
{
"domain": DOMAIN,
"type": "disarm",
"device_id": device_entry.id,
"entity_id": "alarm_control_panel.test_5678",
},
]
actions = await async_get_device_automations(hass, "action", device_entry.id)
assert_lists_same(actions, expected_actions)
async def test_get_action_capabilities(hass, device_reg, entity_reg):
"""Test we get the expected capabilities from a sensor trigger."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(
DOMAIN,
"test",
platform.ENTITIES["no_arm_code"].unique_id,
device_id=device_entry.id,
)
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
expected_capabilities = {
"arm_away": {"extra_fields": []},
"arm_home": {"extra_fields": []},
"arm_night": {"extra_fields": []},
"disarm": {
"extra_fields": [{"name": "code", "optional": True, "type": "string"}]
},
"trigger": {"extra_fields": []},
}
actions = await async_get_device_automations(hass, "action", device_entry.id)
assert len(actions) == 5
for action in actions:
capabilities = await async_get_device_automation_capabilities(
hass, "action", action
)
assert capabilities == expected_capabilities[action["type"]]
async def test_get_action_capabilities_arm_code(hass, device_reg, entity_reg):
"""Test we get the expected capabilities from a sensor trigger."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(
DOMAIN,
"test",
platform.ENTITIES["arm_code"].unique_id,
device_id=device_entry.id,
)
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
expected_capabilities = {
"arm_away": {
"extra_fields": [{"name": "code", "optional": True, "type": "string"}]
},
"arm_home": {
"extra_fields": [{"name": "code", "optional": True, "type": "string"}]
},
"arm_night": {
"extra_fields": [{"name": "code", "optional": True, "type": "string"}]
},
"disarm": {
"extra_fields": [{"name": "code", "optional": True, "type": "string"}]
},
"trigger": {"extra_fields": []},
}
actions = await async_get_device_automations(hass, "action", device_entry.id)
assert len(actions) == 5
for action in actions:
capabilities = await async_get_device_automation_capabilities(
hass, "action", action
)
assert capabilities == expected_capabilities[action["type"]]
async def test_action(hass):
"""Test for turn_on and turn_off actions."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {
"platform": "event",
"event_type": "test_event_arm_away",
},
"action": {
"domain": DOMAIN,
"device_id": "abcdefgh",
"entity_id": "alarm_control_panel.alarm_no_arm_code",
"type": "arm_away",
},
},
{
"trigger": {
"platform": "event",
"event_type": "test_event_arm_home",
},
"action": {
"domain": DOMAIN,
"device_id": "abcdefgh",
"entity_id": "alarm_control_panel.alarm_no_arm_code",
"type": "arm_home",
},
},
{
"trigger": {
"platform": "event",
"event_type": "test_event_arm_night",
},
"action": {
"domain": DOMAIN,
"device_id": "abcdefgh",
"entity_id": "alarm_control_panel.alarm_no_arm_code",
"type": "arm_night",
},
},
{
"trigger": {"platform": "event", "event_type": "test_event_disarm"},
"action": {
"domain": DOMAIN,
"device_id": "abcdefgh",
"entity_id": "alarm_control_panel.alarm_no_arm_code",
"type": "disarm",
"code": "1234",
},
},
{
"trigger": {
"platform": "event",
"event_type": "test_event_trigger",
},
"action": {
"domain": DOMAIN,
"device_id": "abcdefgh",
"entity_id": "alarm_control_panel.alarm_no_arm_code",
"type": "trigger",
},
},
]
},
)
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
assert (
hass.states.get("alarm_control_panel.alarm_no_arm_code").state == STATE_UNKNOWN
)
hass.bus.async_fire("test_event_arm_away")
await hass.async_block_till_done()
assert (
hass.states.get("alarm_control_panel.alarm_no_arm_code").state
== STATE_ALARM_ARMED_AWAY
)
hass.bus.async_fire("test_event_arm_home")
await hass.async_block_till_done()
assert (
hass.states.get("alarm_control_panel.alarm_no_arm_code").state
== STATE_ALARM_ARMED_HOME
)
hass.bus.async_fire("test_event_arm_night")
await hass.async_block_till_done()
assert (
hass.states.get("alarm_control_panel.alarm_no_arm_code").state
== STATE_ALARM_ARMED_NIGHT
)
hass.bus.async_fire("test_event_disarm")
await hass.async_block_till_done()
assert (
hass.states.get("alarm_control_panel.alarm_no_arm_code").state
== STATE_ALARM_DISARMED
)
hass.bus.async_fire("test_event_trigger")
await hass.async_block_till_done()
assert (
hass.states.get("alarm_control_panel.alarm_no_arm_code").state
== STATE_ALARM_TRIGGERED
)
|
import logging
from kalliope.core.Utils import Utils
from kalliope.core.HookManager import HookManager
from kalliope.core.ConfigurationManager import SettingLoader
logging.basicConfig()
logger = logging.getLogger("kalliope")
class SettingEditor(object):
"""This Static class provides methods/functions to update properties from the Settings"""
@staticmethod
def _check_name_in_list_settings_entry(name_to_check, list_settings_entry):
"""
        Manage object models (STT, TRIGGERS, TTS, PLAYERS), which all have a "name" attribute.
        :param name_to_check: name to find in the list_settings_entry (~kalliope.core.Models.settings.SettingsEntry.SettingsEntry)
:param list_settings_entry: the list of SettingsEntry to inspect
:return: True if the name_to_check corresponds to a name in the SettingsEntry list provided.
"""
found = False
for settings_entry in list_settings_entry:
if settings_entry.name == name_to_check:
found = True
break
return found
# Options
@staticmethod
def set_mute_status(mute=False):
"""
        Define the mute status.
        :param mute: Boolean. If True, Kalliope's voice output is stopped.
"""
logger.debug("[SettingEditor] mute. Switch trigger process to mute : %s" % mute)
settings = SettingLoader().settings
if mute:
Utils.print_info("Kalliope now muted, voice has been stopped.")
HookManager.on_mute()
else:
Utils.print_info("Kalliope now speaking.")
HookManager.on_unmute()
settings.options.mute = mute
@staticmethod
def set_deaf_status(trigger_instance, deaf=False):
"""
        Define whether the trigger is listening or not.
        :param trigger_instance: the trigger instance coming from the order. It will be paused or unpaused.
        :param deaf: Boolean. If True, Kalliope's trigger is paused.
"""
logger.debug("[MainController] deaf . Switch trigger process to deaf : %s" % deaf)
settings = SettingLoader().settings
if deaf:
trigger_instance.pause()
Utils.print_info("Kalliope now deaf, trigger has been paused")
HookManager.on_deaf()
else:
trigger_instance.unpause()
Utils.print_info("Kalliope now listening for trigger detection")
HookManager.on_undeaf()
settings.options.deaf = deaf
@staticmethod
def set_recognizer_multiplier(recognizer_multiplier):
"""
Set the new value of the recognizer_multiplier to the settings.
        Must be a float.
:param recognizer_multiplier: new value for the recognizer_multiplier to push into the settings
"""
if isinstance(recognizer_multiplier, float):
settings = SettingLoader().settings
settings.options.recognizer_multiplier = recognizer_multiplier
@staticmethod
def set_recognizer_energy_ratio(recognizer_energy_ratio):
"""
        Set a new value for the recognizer_energy_ratio.
        Must be a float.
:param recognizer_energy_ratio: new value to push to the recognizer_energy_ratio in the Options settings
"""
if isinstance(recognizer_energy_ratio, float):
settings = SettingLoader().settings
settings.options.recognizer_energy_ratio = recognizer_energy_ratio
@staticmethod
def set_recognizer_recording_timeout(recognizer_recording_timeout):
"""
        Set the new value of the recognizer_recording_timeout to the settings.
        Must be a float.
:param recognizer_recording_timeout: new value for the recognizer_recording_timeout to push into the settings
"""
if isinstance(recognizer_recording_timeout, float):
settings = SettingLoader().settings
settings.options.recognizer_recording_timeout = recognizer_recording_timeout
@staticmethod
def set_recognizer_recording_timeout_with_silence(recognizer_recording_timeout_with_silence):
"""
Set the new value of the recognizer_recording_timeout_with_silence to the settings.
        Must be a float.
:param recognizer_recording_timeout_with_silence: new value for the recognizer_recording_timeout_with_silence to push into the settings
"""
if isinstance(recognizer_recording_timeout_with_silence, float):
settings = SettingLoader().settings
settings.options.recognizer_recording_timeout_with_silence = recognizer_recording_timeout_with_silence
# Players
@classmethod
def set_default_player(cls, default_player_name):
"""
Set dynamically a new default_player in the settings
:param default_player_name: string value
"""
settings = SettingLoader().settings
if cls._check_name_in_list_settings_entry(default_player_name, settings.players):
settings.default_player_name = default_player_name
else:
logger.debug("[Settings] default_player %s is not defined in settings file ", default_player_name)
@staticmethod
def set_players(new_player):
"""
        Add a new Player object to the list of players in the settings.
        If the Player already exists in the settings, it is updated with the values of the provided player.
:param new_player: the new PLayer object to add in the settings.
"""
settings = SettingLoader().settings
list_no_duplicate_player = [player for player in settings.players if player.name != new_player.name]
list_no_duplicate_player.append(new_player)
settings.players = list_no_duplicate_player
# TTS
@classmethod
def set_default_tts(cls, default_tts_name):
"""
Set dynamically a new default_tts_name in the settings
:param default_tts_name: string value
"""
settings = SettingLoader().settings
# Verify that the default name exists in the settings list
if cls._check_name_in_list_settings_entry(default_tts_name, settings.ttss):
settings.default_tts_name = default_tts_name
else:
logger.debug("[SettingsEditor] default_tts %s is not defined in settings file ", default_tts_name)
@staticmethod
def set_ttss(new_tts):
"""
        Add a new TTS object to the list of TTS engines in the settings.
        If the TTS already exists in the settings, it is updated with the values of the provided TTS.
:param new_tts: the new TTS object to add in the settings.
"""
settings = SettingLoader().settings
list_no_duplicate_tts = [tts for tts in settings.ttss if tts.name != new_tts.name]
list_no_duplicate_tts.append(new_tts)
settings.ttss = list_no_duplicate_tts
# STT
@classmethod
def set_default_stt(cls, default_stt_name):
"""
Set dynamically a new default_stt_name in the settings if in the list of stts.
:param default_stt_name: string value
"""
settings = SettingLoader().settings
if cls._check_name_in_list_settings_entry(default_stt_name, settings.stts):
settings.default_stt_name = default_stt_name
else:
logger.debug("[Settings] default_stt %s is not defined in settings file ", default_stt_name)
@staticmethod
def set_stts(new_stt):
"""
        Add or update the speech-to-text list defined in the settings.
:param new_stt: The new stt instance.
"""
settings = SettingLoader().settings
list_no_duplicate_stt = [stt for stt in settings.stts if stt.name != new_stt.name]
list_no_duplicate_stt.append(new_stt)
settings.stts = list_no_duplicate_stt
# TRIGGER
@classmethod
def set_default_trigger(cls, default_trigger):
"""
        Set dynamically a new default_trigger in the settings
:param default_trigger: string value
"""
settings = SettingLoader().settings
if cls._check_name_in_list_settings_entry(default_trigger, settings.triggers):
settings.default_trigger_name = default_trigger
else:
logger.debug("[Settings] default_trigger %s is not defined in settings file ", default_trigger)
@staticmethod
def set_trigger(new_trigger):
"""
Update the list of triggers with a new trigger instance.
If the trigger name already exists then it will be updated otherwise it will be added.
:param new_trigger: the new trigger instance
"""
settings = SettingLoader().settings
list_no_duplicate_triggers = [trigger for trigger in settings.triggers if trigger.name != new_trigger.name]
list_no_duplicate_triggers.append(new_trigger)
settings.triggers = list_no_duplicate_triggers
# HOOKS
@staticmethod
def set_hooks(hooks):
"""
Update the hooks dictionary defined in the settings with the new dictionary in param.
:param hooks: the dictionary containing hooks to update.
:type hooks : dict
"""
settings = SettingLoader().settings
settings.hooks.update(hooks)
# Variables
@staticmethod
def set_variables(variables):
"""
Update the settings variables dictionary.
:param variables: The dict of variables with the new values.
"""
settings = SettingLoader().settings
settings.variables.update(variables)
|
import logging
from urllib.parse import urlparse
from pyisy.configuration import Configuration
from pyisy.connection import Connection
import voluptuous as vol
from homeassistant import config_entries, core, exceptions
from homeassistant.components import ssdp
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import callback
from .const import (
CONF_IGNORE_STRING,
CONF_RESTORE_LIGHT_STATE,
CONF_SENSOR_STRING,
CONF_TLS_VER,
CONF_VAR_SENSOR_STRING,
DEFAULT_IGNORE_STRING,
DEFAULT_RESTORE_LIGHT_STATE,
DEFAULT_SENSOR_STRING,
DEFAULT_TLS_VERSION,
DEFAULT_VAR_SENSOR_STRING,
ISY_URL_POSTFIX,
UDN_UUID_PREFIX,
)
from .const import DOMAIN # pylint:disable=unused-import
_LOGGER = logging.getLogger(__name__)
def _data_schema(schema_input):
"""Generate schema with defaults."""
return vol.Schema(
{
vol.Required(CONF_HOST, default=schema_input.get(CONF_HOST, "")): str,
vol.Required(CONF_USERNAME): str,
vol.Required(CONF_PASSWORD): str,
vol.Optional(CONF_TLS_VER, default=DEFAULT_TLS_VERSION): vol.In([1.1, 1.2]),
},
extra=vol.ALLOW_EXTRA,
)
async def validate_input(hass: core.HomeAssistant, data):
"""Validate the user input allows us to connect.
Data has the keys from DATA_SCHEMA with values provided by the user.
"""
user = data[CONF_USERNAME]
password = data[CONF_PASSWORD]
host = urlparse(data[CONF_HOST])
tls_version = data.get(CONF_TLS_VER)
if host.scheme == "http":
https = False
port = host.port or 80
elif host.scheme == "https":
https = True
port = host.port or 443
else:
_LOGGER.error("isy994 host value in configuration is invalid")
raise InvalidHost
# Connect to ISY controller.
isy_conf = await hass.async_add_executor_job(
_fetch_isy_configuration,
host.hostname,
port,
user,
password,
https,
tls_version,
host.path,
)
if not isy_conf or "name" not in isy_conf or not isy_conf["name"]:
raise CannotConnect
# Return info that you want to store in the config entry.
return {"title": f"{isy_conf['name']} ({host.hostname})", "uuid": isy_conf["uuid"]}
def _fetch_isy_configuration(
address, port, username, password, use_https, tls_ver, webroot
):
"""Validate and fetch the configuration from the ISY."""
try:
isy_conn = Connection(
address,
port,
username,
password,
use_https,
tls_ver,
webroot=webroot,
)
except ValueError as err:
raise InvalidAuth(err.args[0]) from err
return Configuration(xml=isy_conn.get_config())
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Universal Devices ISY994."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_PUSH
def __init__(self):
"""Initialize the isy994 config flow."""
self.discovered_conf = {}
@staticmethod
@callback
def async_get_options_flow(config_entry):
"""Get the options flow for this handler."""
return OptionsFlowHandler(config_entry)
async def async_step_user(self, user_input=None):
"""Handle the initial step."""
errors = {}
info = None
if user_input is not None:
try:
info = await validate_input(self.hass, user_input)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidHost:
errors["base"] = "invalid_host"
except InvalidAuth:
errors["base"] = "invalid_auth"
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
if not errors:
await self.async_set_unique_id(info["uuid"], raise_on_progress=False)
self._abort_if_unique_id_configured()
return self.async_create_entry(title=info["title"], data=user_input)
return self.async_show_form(
step_id="user",
data_schema=_data_schema(self.discovered_conf),
errors=errors,
)
async def async_step_import(self, user_input):
"""Handle import."""
return await self.async_step_user(user_input)
async def async_step_ssdp(self, discovery_info):
"""Handle a discovered isy994."""
friendly_name = discovery_info[ssdp.ATTR_UPNP_FRIENDLY_NAME]
url = discovery_info[ssdp.ATTR_SSDP_LOCATION]
mac = discovery_info[ssdp.ATTR_UPNP_UDN]
if mac.startswith(UDN_UUID_PREFIX):
mac = mac[len(UDN_UUID_PREFIX) :]
if url.endswith(ISY_URL_POSTFIX):
url = url[: -len(ISY_URL_POSTFIX)]
await self.async_set_unique_id(mac)
self._abort_if_unique_id_configured()
self.discovered_conf = {
CONF_NAME: friendly_name,
CONF_HOST: url,
}
# pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167
self.context["title_placeholders"] = self.discovered_conf
return await self.async_step_user()
class OptionsFlowHandler(config_entries.OptionsFlow):
"""Handle a option flow for isy994."""
def __init__(self, config_entry: config_entries.ConfigEntry):
"""Initialize options flow."""
self.config_entry = config_entry
async def async_step_init(self, user_input=None):
"""Handle options flow."""
if user_input is not None:
return self.async_create_entry(title="", data=user_input)
options = self.config_entry.options
restore_light_state = options.get(
CONF_RESTORE_LIGHT_STATE, DEFAULT_RESTORE_LIGHT_STATE
)
ignore_string = options.get(CONF_IGNORE_STRING, DEFAULT_IGNORE_STRING)
sensor_string = options.get(CONF_SENSOR_STRING, DEFAULT_SENSOR_STRING)
var_sensor_string = options.get(
CONF_VAR_SENSOR_STRING, DEFAULT_VAR_SENSOR_STRING
)
options_schema = vol.Schema(
{
vol.Optional(CONF_IGNORE_STRING, default=ignore_string): str,
vol.Optional(CONF_SENSOR_STRING, default=sensor_string): str,
vol.Optional(CONF_VAR_SENSOR_STRING, default=var_sensor_string): str,
vol.Required(
CONF_RESTORE_LIGHT_STATE, default=restore_light_state
): bool,
}
)
return self.async_show_form(step_id="init", data_schema=options_schema)
class InvalidHost(exceptions.HomeAssistantError):
"""Error to indicate the host value is invalid."""
class CannotConnect(exceptions.HomeAssistantError):
"""Error to indicate we cannot connect."""
class InvalidAuth(exceptions.HomeAssistantError):
"""Error to indicate there is invalid auth."""
|
import numpy as np
from scipy import linalg
from ..defaults import _handle_default
from ..io.pick import _picks_to_idx, _picks_by_type, pick_info
from ..utils import verbose, _apply_scaling_array
def _yule_walker(X, order=1):
"""Compute Yule-Walker (adapted from statsmodels).
Operates in-place.
"""
assert X.ndim == 2
denom = X.shape[-1] - np.arange(order + 1)
r = np.zeros(order + 1, np.float64)
for di, d in enumerate(X):
d -= d.mean()
r[0] += np.dot(d, d)
for k in range(1, order + 1):
r[k] += np.dot(d[0:-k], d[k:])
r /= denom * len(X)
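    # solve the Toeplitz Yule-Walker system R @ rho = r for the AR coefficients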
rho = linalg.solve(linalg.toeplitz(r[:-1]), r[1:])
sigmasq = r[0] - (r[1:] * rho).sum()
return rho, np.sqrt(sigmasq)
@verbose
def fit_iir_model_raw(raw, order=2, picks=None, tmin=None, tmax=None,
verbose=None):
r"""Fit an AR model to raw data and creates the corresponding IIR filter.
The computed filter is fitted to data from all of the picked channels,
with frequency response given by the standard IIR formula:
.. math::
H(e^{jw}) = \frac{1}{a[0] + a[1]e^{-jw} + ... + a[n]e^{-jnw}}
Parameters
----------
raw : Raw object
An instance of Raw.
order : int
        Order of the AR model.
%(picks_good_data)s
tmin : float
The beginning of time interval in seconds.
tmax : float
The end of time interval in seconds.
%(verbose)s
Returns
-------
b : ndarray
Numerator filter coefficients.
a : ndarray
Denominator filter coefficients.
"""
start, stop = None, None
if tmin is not None:
start = raw.time_as_index(tmin)[0]
if tmax is not None:
stop = raw.time_as_index(tmax)[0] + 1
picks = _picks_to_idx(raw.info, picks)
data = raw[picks, start:stop][0]
# rescale data to similar levels
picks_list = _picks_by_type(pick_info(raw.info, picks))
scalings = _handle_default('scalings_cov_rank', None)
_apply_scaling_array(data, picks_list=picks_list, scalings=scalings)
# do the fitting
coeffs, _ = _yule_walker(data, order=order)
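    # An AR(p) fit x[n] = sum_k rho[k] * x[n-k] + e[n] yields the IIR
    # denominator a = [1, -rho_1, ..., -rho_p] with numerator b = [1]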
return np.array([1.]), np.concatenate(([1.], -coeffs))
|
from weblate.trans.models import Variant
from weblate.trans.tests.test_views import ViewTestCase
class VariantTest(ViewTestCase):
def create_component(self):
return self.create_android()
def add_variants(self, suffix: str = ""):
request = self.get_request()
translation = self.component.source_translation
translation.add_units(request, {f"bar{suffix}": "Default string"})
translation.add_units(request, {"barMin": "Min string"})
translation.add_units(request, {"barShort": "Short string"})
def test_edit_component(self, suffix: str = ""):
        self.add_variants(suffix)
self.assertEqual(Variant.objects.count(), 0)
self.component.variant_regex = "(Min|Short|Max)$"
self.component.save()
self.assertEqual(Variant.objects.count(), 1)
self.assertEqual(Variant.objects.get().unit_set.count(), 6)
self.component.variant_regex = ""
self.component.save()
self.assertEqual(Variant.objects.count(), 0)
def test_add_units(self, suffix: str = ""):
self.component.variant_regex = "(Min|Short|Max)$"
self.component.save()
self.assertEqual(Variant.objects.count(), 0)
self.add_variants(suffix)
self.assertEqual(Variant.objects.count(), 1)
self.assertEqual(Variant.objects.get().unit_set.count(), 6)
def test_edit_component_suffix(self):
self.test_edit_component("Max")
def test_add_units_suffix(self):
self.test_add_units("Max")
def test_variants_inner(self):
self.component.variant_regex = (
"//(SCRTEXT_S|SCRTEXT_M|SCRTEXT_L|REPTEXT|DDTEXT)"
)
self.component.save()
units = (
"DTEL///ABSD/DE_INTEND_POSTBACKGR//SCRTEXT_M 00001",
"DTEL///ABSD/DE_INTEND_POSTBACKGR//REPTEXT 00001",
"DTEL///ABSD/DE_INTEND_POSTBACKGR//SCRTEXT_L 00001",
"DTEL///ABSD/DE_INTEND_POSTBACKGR//SCRTEXT_S 00001",
"DTEL///ABSD/DE_INTEND_POSTBACKGR//DDTEXT 00001",
)
request = self.get_request()
translation = self.component.source_translation
translation.add_units(request, {key: "Test string" for key in units})
self.assertEqual(Variant.objects.count(), 1)
self.assertEqual(Variant.objects.get().unit_set.count(), 10)
|
from __future__ import unicode_literals
from core.EXTEND import wordsharker
from lib.data.data import pyoptions
from lib.fun.fun import unique
def simplejoin(first, second):
    # ff and ss must be distinct lists; aliasing them (ff = ss = []) and
    # storing both inputs in ff would pair the combined list with itself.
    ff = []
    ss = []
    if type(first) is list:
        ff.extend(first)
    else:
        ff.append(first)
    if type(second) is list:
        ss.extend(second)
    else:
        ss.append(second)
    for f in ff:
        for s in ss:
            yield f + s
def middlejoin(firstlist, secondlist, midstr):
for f in firstlist:
for s in secondlist:
yield f + midstr + s
def headjoins(firstlist, secondlist, head):
for f in firstlist:
for s in secondlist:
yield head + f + s
def tailjoins(firstlist, secondlist, tail):
for f in firstlist:
for s in secondlist:
yield f + s + tail
def numjoinum(num1, num2):
yield num1 + num2
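    # at level <= 1, also yield variants with all '0' digits removed (e.g. "06" -> "6")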
if pyoptions.level <= 1:
yield num1.replace('0', '') + num2
yield num1 + num2.replace('0', '')
yield num1.replace('0', '') + num2.replace('0', '')
for mid in pyoptions.sedb_trick_mid:
yield num1 + mid + num2
yield num2 + mid + num1
if pyoptions.level <= 1:
yield num1.replace('0', '') + mid + num2
yield num1 + mid + num2.replace('0', '')
yield num1.replace('0', '') + mid + num2.replace('0', '')
yield num2.replace('0', '') + mid + num1
yield num2 + mid + num1.replace('0', '')
yield num2.replace('0', '') + mid + num1.replace('0', '')
for suf in pyoptions.sedb_trick_suf:
yield num1 + num2 + suf
yield num2 + num1 + suf
if pyoptions.level <= 1:
yield num1.replace('0', '') + num2 + suf
yield num1 + num2.replace('0', '') + suf
yield num1.replace('0', '') + num2.replace('0', '') + suf
yield num2.replace('0', '') + num1 + suf
yield num2 + num1.replace('0', '') + suf
yield num2.replace('0', '') + num1.replace('0', '') + suf
for pre in pyoptions.sedb_trick_pre:
yield pre + num1 + num2
yield pre + num2 + num1
if pyoptions.level <= 1:
yield pre + num1.replace('0', '') + num2
yield pre + num1 + num2.replace('0', '')
yield pre + num1.replace('0', '') + num2.replace('0', '')
yield pre + num2.replace('0', '') + num1
yield pre + num2 + num1.replace('0', '')
yield pre + num2.replace('0', '') + num1.replace('0', '')
def strnumjoin(str1, num1):
yield str1 + num1
if pyoptions.level <= 1:
yield str1 + num1.replace('0', '')
for mid in pyoptions.sedb_trick_mid:
yield num1 + mid + str1
yield str1 + mid + num1
if pyoptions.level <= 1:
yield num1.replace('0', '') + mid + str1
yield str1 + mid + num1.replace('0', '')
for suf in pyoptions.sedb_trick_suf:
yield num1 + str1 + suf
yield str1 + num1 + suf
if pyoptions.level <= 1:
yield num1.replace('0', '') + str1 + suf
yield str1 + num1.replace('0', '') + suf
for pre in pyoptions.sedb_trick_pre:
yield pre + num1 + str1
yield pre + str1 + num1
if pyoptions.level <= 1:
yield pre + num1.replace('0', '') + str1
yield pre + str1 + num1.replace('0', '')
def mailshaper(mail):
shapes = []
part = mail.partition('@')
shapes.append(mail)
if part[2]:
shapes.append(part[0])
shapes.append(part[2])
shapes.append(part[0] + part[1])
shapes.append(part[1] + part[2])
return shapes
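# Usage sketch: mailshaper splits a full address around "@" (hypothetical
# address shown):
#   mailshaper("user@example.com")
#     -> ["user@example.com", "user", "example.com",
#         "user@", "@example.com"]
# A string without "@" is returned as a single-element list.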
# ymd format: yyyyMMdd; dmy format: ddMMyyyy
def dateshaper(date):
shapes = []
# 20150806 or 06082015
shapes.append(date)
if pyoptions.ymd_format:
# 150806
shapes.append(date[2:8])
# 201586
shapes.append(date[0:4] + date[4].replace("0", "") + date[5:6] + date[6].replace("0", "") + date[7:8])
if pyoptions.level <= 1:
# 15086
shapes.append(date[2:6] + date[6].replace("0", "") + date[7:8])
# 15806
shapes.append(date[2:4] + date[4].replace("0", "") + date[5:8])
# 2015086
shapes.append(date[0:6] + date[6].replace("0", "") + date[7:8])
# 2015806
shapes.append(date[0:4] + date[4].replace("0", "") + date[5:8])
# 086
shapes.append(date[4:6] + date[6].replace("0", "") + date[7:8])
if pyoptions.level <= 2:
# 806
shapes.append(date[4].replace("0", "") + date[5:8])
# 86
shapes.append(date[4].replace("0", "") + date[5:6] + date[6].replace("0", "") + date[7:8])
if pyoptions.level <= 3:
# 2015
shapes.append(date[0:4])
# 0806
shapes.append(date[4:8])
# 1586
shapes.append(date[2:4] + date[4].replace("0", "") + date[5:6] + date[6].replace("0", "") + date[7:8])
else:
# 20150806
shapes.append(date[4:8] + date[2:4] + date[0:2])
# 060815
shapes.append(date[0:4] + date[6:8])
# 682015
shapes.append(date[0].replace("0", "") + date[1] + date[2].replace("0", "") + date[3:8])
if pyoptions.level <= 3:
# 0608
shapes.append(date[0:4])
# 2015
shapes.append(date[4:8])
# 6815
shapes.append(date[0].replace("0", "") + date[1] + date[2].replace("0", "") + date[3] + date[6:8])
# 20150608
shapes.append(date[4:8] + date[0:4])
return shapes
def wordshaper(word, *args):
shapes = []
if not args:
shapes.extend(wordsharker(word, pyoptions.sedb_leet))
else:
if not type(word) is list:
shapes.extend(wordsharker(word, pyoptions.sedb_leet))
else:
for w in word:
shapes.extend(wordsharker(w, pyoptions.sedb_leet))
for arg in args:
if not type(arg) is list:
shapes.extend(wordsharker(arg, pyoptions.sedb_leet))
else:
for a in arg:
shapes.extend(wordsharker(a, pyoptions.sedb_leet))
return unique(shapes)
|
import os
import sys
import argparse
_stash = globals()['_stash']
def main(args):
ap = argparse.ArgumentParser()
ap.add_argument(
'-n',
nargs='?',
metavar='number',
type=int,
help='maximum number of arguments taken from standard input for each invocation of utility'
)
ap.add_argument('-I', dest='replstr', nargs='?', help='replacement string')
ap.add_argument('utility', nargs='?', default='echo', help='utility to invoke')
ap.add_argument('args_to_pass', metavar='arguments', nargs=argparse.REMAINDER, help='arguments to the utility')
ns = ap.parse_args(args)
lines = [line.strip() for line in sys.stdin.readlines()]
n = ns.n if ns.n else len(lines)
if ns.replstr:
n = 1
while lines:
rest = ' '.join(lines[:n])
lines = lines[n:]
args_to_pass = ' '.join(ns.args_to_pass)
if rest.strip():
if ns.replstr:
args_to_pass = args_to_pass.replace(ns.replstr, rest)
rest = ''
cmdline = '%s %s %s' % (ns.utility, args_to_pass, rest)
_stash(cmdline)
if __name__ == "__main__":
main(sys.argv[1:])
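# Usage sketch (inside StaSh, which injects the _stash callable used above):
#   echo -e "a\nb\nc" | xargs -n 2 echo   # invokes echo with "a b", then "c"
#   ls | xargs -I {} cp {} /tmp           # invokes cp once per input line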
|
import copy
import sys
from flask import current_app
from flask_script import Manager
from lemur.authorities.service import get as get_authority
from lemur.notifications.messaging import send_pending_failure_notification
from lemur.pending_certificates import service as pending_certificate_service
from lemur.plugins.base import plugins
manager = Manager(usage="Handles pending certificate related tasks.")
@manager.option(
"-i", dest="ids", action="append", help="IDs of pending certificates to fetch"
)
def fetch(ids):
"""
Attempt to get full certificate for each pending certificate listed.
Args:
ids: a list of ids of PendingCertificates (passed in by manager options when run as CLI)
`python manager.py pending_certs fetch -i 123 321 all`
"""
pending_certs = pending_certificate_service.get_pending_certs(ids)
new = 0
failed = 0
for cert in pending_certs:
authority = plugins.get(cert.authority.plugin_name)
real_cert = authority.get_ordered_certificate(cert)
if real_cert:
# If a real certificate was returned from issuer, then create it in Lemur and mark
# the pending certificate as resolved
final_cert = pending_certificate_service.create_certificate(
cert, real_cert, cert.user
)
pending_certificate_service.update(cert.id, resolved_cert_id=final_cert.id)
pending_certificate_service.update(cert.id, resolved=True)
# add metrics to metrics extension
new += 1
else:
pending_certificate_service.increment_attempt(cert)
failed += 1
print(
"[+] Certificates: New: {new} Failed: {failed}".format(new=new, failed=failed)
)
@manager.command
def fetch_all_acme():
"""
Attempt to get full certificates for each pending certificate listed with the acme-issuer. This is more efficient
for acme-issued certificates because it will configure all of the DNS challenges prior to resolving any
certificates.
"""
log_data = {"function": "{}.{}".format(__name__, sys._getframe().f_code.co_name)}
pending_certs = pending_certificate_service.get_unresolved_pending_certs()
new = 0
failed = 0
wrong_issuer = 0
acme_certs = []
# We only care about certs using the acme-issuer plugin
for cert in pending_certs:
cert_authority = get_authority(cert.authority_id)
if cert_authority.plugin_name == "acme-issuer":
acme_certs.append(cert)
else:
wrong_issuer += 1
authority = plugins.get("acme-issuer")
resolved_certs = authority.get_ordered_certificates(acme_certs)
for cert in resolved_certs:
real_cert = cert.get("cert")
# It's necessary to reload the pending cert due to detached instance: http://sqlalche.me/e/bhk3
pending_cert = pending_certificate_service.get(cert.get("pending_cert").id)
if real_cert:
# If a real certificate was returned from issuer, then create it in Lemur and mark
# the pending certificate as resolved
final_cert = pending_certificate_service.create_certificate(
pending_cert, real_cert, pending_cert.user
)
pending_certificate_service.update(
pending_cert.id, resolved_cert_id=final_cert.id
)
pending_certificate_service.update(pending_cert.id, resolved=True)
# add metrics to metrics extension
new += 1
else:
failed += 1
error_log = copy.deepcopy(log_data)
error_log["message"] = "Pending certificate creation failure"
error_log["pending_cert_id"] = pending_cert.id
error_log["last_error"] = cert.get("last_error")
error_log["cn"] = pending_cert.cn
if pending_cert.number_attempts > 4:
error_log["message"] = "Marking pending certificate as resolved"
send_pending_failure_notification(
pending_cert, notify_owner=pending_cert.notify
)
# Mark "resolved" as True
                pending_certificate_service.update(pending_cert.id, resolved=True)
else:
pending_certificate_service.increment_attempt(pending_cert)
pending_certificate_service.update(
cert.get("pending_cert").id, status=str(cert.get("last_error"))
)
current_app.logger.error(error_log)
log_data["message"] = "Complete"
log_data["new"] = new
log_data["failed"] = failed
log_data["wrong_issuer"] = wrong_issuer
current_app.logger.debug(log_data)
print(
"[+] Certificates: New: {new} Failed: {failed} Not using ACME: {wrong_issuer}".format(
new=new, failed=failed, wrong_issuer=wrong_issuer
)
)
|
import unittest
from absl import flags
import mock
from perfkitbenchmarker import benchmark_spec
from perfkitbenchmarker import configs
from perfkitbenchmarker import errors
from perfkitbenchmarker import linux_benchmarks
from perfkitbenchmarker.configs import benchmark_config_spec
from perfkitbenchmarker.providers.openstack import os_virtual_machine
from tests import pkb_common_test_case
FLAGS = flags.FLAGS
_BENCHMARK_NAME = 'iperf'
_URI = 'uri45678'
_CFG_DEFAULT_DEFAULT = """
iperf:
vm_groups:
vm_1:
cloud: OpenStack
vm_spec:
OpenStack:
image: test-image
machine_type: test_machine_type
disable_interrupt_moderation: False
"""
_network_true = {'router:external': True}
_network_external = {'router:external': 'External'}
_network_fail = {'router:external': 'Fail'}
class TestOpenStackVirtualMachine(pkb_common_test_case.TestOsMixin,
os_virtual_machine.OpenStackVirtualMachine):
pass
class BaseOpenStackNetworkTest(pkb_common_test_case.PkbCommonTestCase):
def _CreateBenchmarkSpecFromYaml(self, yaml_string,
benchmark_name=_BENCHMARK_NAME):
config = configs.LoadConfig(yaml_string, {}, benchmark_name)
spec = self._CreateBenchmarkSpecFromConfigDict(config, benchmark_name)
spec.disable_interrupt_moderation = False
spec.disable_rss = False
spec.zone = 'test-zone'
spec.cidr = '192.164.1.0/24'
spec.machine_type = 'Test_machine_type'
spec.gpu_count = '1'
spec.gpu_type = 'test-gpu-type'
spec.image = 'test-image'
spec.install_packages = 'None'
spec.background_cpu_threads = 'None'
spec.background_network_mbits_per_sec = '1'
spec.background_network_ip_type = 'None'
spec.vm_metadata = {}
return spec
def _CreateBenchmarkSpecFromConfigDict(self, config_dict, benchmark_name):
config_spec = benchmark_config_spec.BenchmarkConfigSpec(
benchmark_name,
flag_values=FLAGS,
**config_dict)
benchmark_module = next((b for b in linux_benchmarks.BENCHMARKS
if b.BENCHMARK_NAME == benchmark_name))
return benchmark_spec.BenchmarkSpec(benchmark_module, config_spec, _URI)
def _CreateTestOpenStackVm(self):
spec = self._CreateBenchmarkSpecFromYaml(_CFG_DEFAULT_DEFAULT)
return TestOpenStackVirtualMachine(spec)
class OpenStackVirtualMachineTest(BaseOpenStackNetworkTest):
def setUp(self):
super(OpenStackVirtualMachineTest, self).setUp()
self.mock_check_network_exists = self.enter_context(mock.patch.object(
os_virtual_machine.OpenStackVirtualMachine,
'_CheckNetworkExists'))
FLAGS.ignore_package_requirements = True
self.openstack_vm = self._CreateTestOpenStackVm()
def test_CheckFloatingIPNetworkExistsWithTrue(self):
self.mock_check_network_exists.return_value = _network_true
network = self.openstack_vm._CheckFloatingIPNetworkExists('External')
self.assertEqual(_network_true, network)
def test_CheckFloatingIPNetworkExistsWithExternal(self):
self.mock_check_network_exists.return_value = _network_external
network = self.openstack_vm._CheckFloatingIPNetworkExists('External')
self.assertEqual(_network_external, network)
def test_CheckFloatingIPNetworkExistsWithFail(self):
self.mock_check_network_exists.return_value = _network_fail
with self.assertRaises(errors.Config.InvalidValue):
self.openstack_vm._CheckFloatingIPNetworkExists('External')
if __name__ == '__main__':
unittest.main()
|
import sys
import time
from tabulate import tabulate
from flask_script import Manager
from flask import current_app
from lemur.constants import SUCCESS_METRIC_STATUS, FAILURE_METRIC_STATUS
from lemur.extensions import metrics, sentry
from lemur.plugins.base import plugins
from lemur.sources import service as source_service
from lemur.users import service as user_service
from lemur.certificates import service as certificate_service
manager = Manager(usage="Handles all source related tasks.")
def validate_sources(source_strings):
sources = []
if not source_strings:
table = []
for source in source_service.get_all():
table.append([source.label, source.active, source.description])
print("No source specified choose from below:")
print(tabulate(table, headers=["Label", "Active", "Description"]))
sys.exit(1)
if "all" in source_strings:
sources = source_service.get_all()
else:
for source_str in source_strings:
source = source_service.get_by_label(source_str)
if not source:
print(
"Unable to find specified source with label: {0}".format(source_str)
)
sys.exit(1)
sources.append(source)
return sources
def execute_clean(plugin, certificate, source):
try:
plugin.clean(certificate, source.options)
certificate.sources.remove(source)
# If we want to remove the source from the certificate, we also need to clear any equivalent destinations to
# prevent Lemur from re-uploading the certificate.
for destination in certificate.destinations:
if destination.label == source.label:
certificate.destinations.remove(destination)
certificate_service.database.update(certificate)
return SUCCESS_METRIC_STATUS
    except Exception as e:
        current_app.logger.exception(e)
        sentry.captureException()
        return FAILURE_METRIC_STATUS
@manager.option(
"-s",
"--sources",
dest="source_strings",
action="append",
help="Sources to operate on.",
)
def sync(source_strings):
sources = validate_sources(source_strings)
for source in sources:
status = FAILURE_METRIC_STATUS
start_time = time.time()
print("[+] Staring to sync source: {label}!\n".format(label=source.label))
user = user_service.get_by_username("lemur")
try:
data = source_service.sync(source, user)
print(
"[+] Certificates: New: {new} Updated: {updated}".format(
new=data["certificates"][0], updated=data["certificates"][1]
)
)
print(
"[+] Endpoints: New: {new} Updated: {updated}".format(
new=data["endpoints"][0], updated=data["endpoints"][1]
)
)
print(
"[+] Finished syncing source: {label}. Run Time: {time}".format(
label=source.label, time=(time.time() - start_time)
)
)
status = SUCCESS_METRIC_STATUS
except Exception as e:
current_app.logger.exception(e)
print("[X] Failed syncing source {label}!\n".format(label=source.label))
sentry.captureException()
metrics.send(
"source_sync_fail",
"counter",
1,
metric_tags={"source": source.label, "status": status},
)
metrics.send(
"source_sync",
"counter",
1,
metric_tags={"source": source.label, "status": status},
)
@manager.option(
"-s",
"--sources",
dest="source_strings",
action="append",
help="Sources to operate on.",
)
@manager.option(
"-c",
"--commit",
dest="commit",
action="store_true",
default=False,
help="Persist changes.",
)
def clean(source_strings, commit):
sources = validate_sources(source_strings)
for source in sources:
s = plugins.get(source.plugin_name)
if not hasattr(s, "clean"):
info_text = f"Cannot clean source: {source.label}, source plugin does not implement 'clean()'"
current_app.logger.warning(info_text)
print(info_text)
continue
start_time = time.time()
print("[+] Staring to clean source: {label}!\n".format(label=source.label))
cleaned = 0
certificates = certificate_service.get_all_pending_cleaning_expired(source)
for certificate in certificates:
status = FAILURE_METRIC_STATUS
if commit:
status = execute_clean(s, certificate, source)
metrics.send(
"certificate_clean",
"counter",
1,
metric_tags={"status": status, "source": source.label, "certificate": certificate.name},
)
current_app.logger.warning(f"Removed {certificate.name} from source {source.label} during cleaning")
cleaned += 1
info_text = f"[+] Finished cleaning source: {source.label}. " \
f"Removed {cleaned} certificates from source. " \
f"Run Time: {(time.time() - start_time)}\n"
print(info_text)
current_app.logger.warning(info_text)
@manager.option(
"-s",
"--sources",
dest="source_strings",
action="append",
help="Sources to operate on.",
)
@manager.option(
"-d",
"--days",
dest="days_to_expire",
type=int,
action="store",
required=True,
help="The expiry range within days.",
)
@manager.option(
"-c",
"--commit",
dest="commit",
action="store_true",
default=False,
help="Persist changes.",
)
def clean_unused_and_expiring_within_days(source_strings, days_to_expire, commit):
sources = validate_sources(source_strings)
for source in sources:
s = plugins.get(source.plugin_name)
if not hasattr(s, "clean"):
info_text = f"Cannot clean source: {source.label}, source plugin does not implement 'clean()'"
current_app.logger.warning(info_text)
print(info_text)
continue
start_time = time.time()
print("[+] Staring to clean source: {label}!\n".format(label=source.label))
cleaned = 0
certificates = certificate_service.get_all_pending_cleaning_expiring_in_days(source, days_to_expire)
for certificate in certificates:
status = FAILURE_METRIC_STATUS
if commit:
status = execute_clean(s, certificate, source)
metrics.send(
"certificate_clean",
"counter",
1,
metric_tags={"status": status, "source": source.label, "certificate": certificate.name},
)
current_app.logger.warning(f"Removed {certificate.name} from source {source.label} during cleaning")
cleaned += 1
info_text = f"[+] Finished cleaning source: {source.label}. " \
f"Removed {cleaned} certificates from source. " \
f"Run Time: {(time.time() - start_time)}\n"
print(info_text)
current_app.logger.warning(info_text)
@manager.option(
"-s",
"--sources",
dest="source_strings",
action="append",
help="Sources to operate on.",
)
@manager.option(
"-d",
"--days",
dest="days_since_issuance",
type=int,
action="store",
required=True,
help="Days since issuance.",
)
@manager.option(
"-c",
"--commit",
dest="commit",
action="store_true",
default=False,
help="Persist changes.",
)
def clean_unused_and_issued_since_days(source_strings, days_since_issuance, commit):
sources = validate_sources(source_strings)
for source in sources:
s = plugins.get(source.plugin_name)
if not hasattr(s, "clean"):
info_text = f"Cannot clean source: {source.label}, source plugin does not implement 'clean()'"
current_app.logger.warning(info_text)
print(info_text)
continue
start_time = time.time()
print("[+] Staring to clean source: {label}!\n".format(label=source.label))
cleaned = 0
certificates = certificate_service.get_all_pending_cleaning_issued_since_days(source, days_since_issuance)
for certificate in certificates:
status = FAILURE_METRIC_STATUS
if commit:
status = execute_clean(s, certificate, source)
metrics.send(
"certificate_clean",
"counter",
1,
metric_tags={"status": status, "source": source.label, "certificate": certificate.name},
)
current_app.logger.warning(f"Removed {certificate.name} from source {source.label} during cleaning")
cleaned += 1
info_text = f"[+] Finished cleaning source: {source.label}. " \
f"Removed {cleaned} certificates from source. " \
f"Run Time: {(time.time() - start_time)}\n"
print(info_text)
current_app.logger.warning(info_text)
|
from typing import Any, Dict
import voluptuous as vol
from homeassistant.core import HomeAssistant
from . import (
MULTI_FACTOR_AUTH_MODULE_SCHEMA,
MULTI_FACTOR_AUTH_MODULES,
MultiFactorAuthModule,
SetupFlow,
)
CONFIG_SCHEMA = MULTI_FACTOR_AUTH_MODULE_SCHEMA.extend(
{
vol.Required("data"): [
vol.Schema({vol.Required("user_id"): str, vol.Required("pin"): str})
]
},
extra=vol.PREVENT_EXTRA,
)
@MULTI_FACTOR_AUTH_MODULES.register("insecure_example")
class InsecureExampleModule(MultiFactorAuthModule):
"""Example auth module validate pin."""
DEFAULT_TITLE = "Insecure Personal Identify Number"
def __init__(self, hass: HomeAssistant, config: Dict[str, Any]) -> None:
"""Initialize the user data store."""
super().__init__(hass, config)
self._data = config["data"]
@property
def input_schema(self) -> vol.Schema:
"""Validate login flow input data."""
return vol.Schema({"pin": str})
@property
def setup_schema(self) -> vol.Schema:
"""Validate async_setup_user input data."""
return vol.Schema({"pin": str})
async def async_setup_flow(self, user_id: str) -> SetupFlow:
"""Return a data entry flow handler for setup module.
Mfa module should extend SetupFlow
"""
return SetupFlow(self, self.setup_schema, user_id)
async def async_setup_user(self, user_id: str, setup_data: Any) -> Any:
"""Set up user to use mfa module."""
        # data should have been validated by the caller
pin = setup_data["pin"]
for data in self._data:
if data["user_id"] == user_id:
# already setup, override
data["pin"] = pin
return
self._data.append({"user_id": user_id, "pin": pin})
async def async_depose_user(self, user_id: str) -> None:
"""Remove user from mfa module."""
found = None
for data in self._data:
if data["user_id"] == user_id:
found = data
break
if found:
self._data.remove(found)
async def async_is_user_setup(self, user_id: str) -> bool:
"""Return whether user is setup."""
for data in self._data:
if data["user_id"] == user_id:
return True
return False
async def async_validate(self, user_id: str, user_input: Dict[str, Any]) -> bool:
"""Return True if validation passed."""
for data in self._data:
if data["user_id"] == user_id:
                # user_input has been validated by the caller
if data["pin"] == user_input["pin"]:
return True
return False
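# Minimal usage sketch of this module's config payload (hypothetical
# user id and pin), as accepted by CONFIG_SCHEMA above:
#   {"type": "insecure_example",
#    "data": [{"user_id": "user-1", "pin": "1234"}]}
# With that data loaded, async_validate("user-1", {"pin": "1234"})
# resolves to True.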
|
import logging
import sharp_aquos_rc
import voluptuous as vol
from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity
from homeassistant.components.media_player.const import (
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SELECT_SOURCE,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
SUPPORT_VOLUME_STEP,
)
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_TIMEOUT,
CONF_USERNAME,
STATE_OFF,
STATE_ON,
)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "Sharp Aquos TV"
DEFAULT_PORT = 10002
DEFAULT_USERNAME = "admin"
DEFAULT_PASSWORD = "password"
DEFAULT_TIMEOUT = 0.5
DEFAULT_RETRIES = 2
SUPPORT_SHARPTV = (
SUPPORT_TURN_OFF
| SUPPORT_NEXT_TRACK
| SUPPORT_PAUSE
| SUPPORT_PREVIOUS_TRACK
| SUPPORT_SELECT_SOURCE
| SUPPORT_VOLUME_MUTE
| SUPPORT_VOLUME_STEP
| SUPPORT_VOLUME_SET
| SUPPORT_PLAY
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_USERNAME, default=DEFAULT_USERNAME): cv.string,
vol.Optional(CONF_PASSWORD, default=DEFAULT_PASSWORD): cv.string,
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.string,
vol.Optional("retries", default=DEFAULT_RETRIES): cv.string,
vol.Optional("power_on_enabled", default=False): cv.boolean,
}
)
SOURCES = {
0: "TV / Antenna",
1: "HDMI_IN_1",
2: "HDMI_IN_2",
3: "HDMI_IN_3",
4: "HDMI_IN_4",
5: "COMPONENT IN",
6: "VIDEO_IN_1",
7: "VIDEO_IN_2",
8: "PC_IN",
}
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Sharp Aquos TV platform."""
name = config[CONF_NAME]
port = config[CONF_PORT]
username = config[CONF_USERNAME]
password = config[CONF_PASSWORD]
power_on_enabled = config["power_on_enabled"]
if discovery_info:
_LOGGER.debug("%s", discovery_info)
vals = discovery_info.split(":")
if len(vals) > 1:
port = vals[1]
host = vals[0]
remote = sharp_aquos_rc.TV(host, port, username, password, timeout=20)
add_entities([SharpAquosTVDevice(name, remote, power_on_enabled)])
return True
host = config[CONF_HOST]
remote = sharp_aquos_rc.TV(host, port, username, password, 15, 1)
add_entities([SharpAquosTVDevice(name, remote, power_on_enabled)])
return True
def _retry(func):
"""Handle query retries."""
def wrapper(obj, *args, **kwargs):
"""Wrap all query functions."""
update_retries = 5
while update_retries > 0:
try:
func(obj, *args, **kwargs)
break
except (OSError, TypeError, ValueError):
update_retries -= 1
if update_retries == 0:
obj.set_state(STATE_OFF)
return wrapper
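# Note: methods decorated with @_retry are attempted up to five times on
# OSError/TypeError/ValueError; if every attempt fails, the entity state
# is forced to STATE_OFF via obj.set_state(). Example:
#   @_retry
#   def update(self): ...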
class SharpAquosTVDevice(MediaPlayerEntity):
"""Representation of a Aquos TV."""
def __init__(self, name, remote, power_on_enabled=False):
"""Initialize the aquos device."""
self._supported_features = SUPPORT_SHARPTV
self._power_on_enabled = power_on_enabled
if self._power_on_enabled:
self._supported_features |= SUPPORT_TURN_ON
# Save a reference to the imported class
self._name = name
# Assume that the TV is not muted
self._muted = False
self._state = None
self._remote = remote
self._volume = 0
self._source = None
self._source_list = list(SOURCES.values())
def set_state(self, state):
"""Set TV state."""
self._state = state
@_retry
def update(self):
"""Retrieve the latest data."""
if self._remote.power() == 1:
self._state = STATE_ON
else:
self._state = STATE_OFF
# Set TV to be able to remotely power on
if self._power_on_enabled:
self._remote.power_on_command_settings(2)
else:
self._remote.power_on_command_settings(0)
# Get mute state
if self._remote.mute() == 2:
self._muted = False
else:
self._muted = True
# Get source
self._source = SOURCES.get(self._remote.input())
# Get volume
self._volume = self._remote.volume() / 60
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def source(self):
"""Return the current source."""
return self._source
@property
def source_list(self):
"""Return the source list."""
return self._source_list
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
return self._volume
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self._muted
@property
def supported_features(self):
"""Flag media player features that are supported."""
return self._supported_features
@_retry
def turn_off(self):
"""Turn off tvplayer."""
self._remote.power(0)
@_retry
def volume_up(self):
"""Volume up the media player."""
self._remote.volume(int(self._volume * 60) + 2)
@_retry
def volume_down(self):
"""Volume down media player."""
self._remote.volume(int(self._volume * 60) - 2)
@_retry
def set_volume_level(self, volume):
"""Set Volume media player."""
self._remote.volume(int(volume * 60))
@_retry
def mute_volume(self, mute):
"""Send mute command."""
self._remote.mute(0)
@_retry
def turn_on(self):
"""Turn the media player on."""
self._remote.power(1)
@_retry
def media_play_pause(self):
"""Simulate play pause media player."""
self._remote.remote_button(40)
@_retry
def media_play(self):
"""Send play command."""
self._remote.remote_button(16)
@_retry
def media_pause(self):
"""Send pause command."""
self._remote.remote_button(16)
@_retry
def media_next_track(self):
"""Send next track command."""
self._remote.remote_button(21)
@_retry
def media_previous_track(self):
"""Send the previous track command."""
self._remote.remote_button(19)
def select_source(self, source):
"""Set the input source."""
for key, value in SOURCES.items():
if source == value:
self._remote.input(key)
|
import argparse
import chainer
import matplotlib.pyplot as plt
from chainercv.datasets import coco_instance_segmentation_label_names
from chainercv.datasets import sbd_instance_segmentation_label_names
from chainercv.experimental.links import FCISResNet101
from chainercv.utils import mask_to_bbox
from chainercv.utils import read_image
from chainercv.visualizations.colormap import voc_colormap
from chainercv.visualizations import vis_bbox
from chainercv.visualizations import vis_instance_segmentation
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--gpu', type=int, default=-1)
parser.add_argument('--pretrained-model', default=None)
parser.add_argument(
'--dataset', choices=('sbd', 'coco'), default='sbd')
parser.add_argument('image')
args = parser.parse_args()
if args.dataset == 'sbd':
if args.pretrained_model is None:
args.pretrained_model = 'sbd'
label_names = sbd_instance_segmentation_label_names
model = FCISResNet101(
n_fg_class=len(label_names),
pretrained_model=args.pretrained_model)
elif args.dataset == 'coco':
if args.pretrained_model is None:
args.pretrained_model = 'coco'
label_names = coco_instance_segmentation_label_names
proposal_creator_params = FCISResNet101.proposal_creator_params
proposal_creator_params['min_size'] = 2
model = FCISResNet101(
n_fg_class=len(label_names),
anchor_scales=(4, 8, 16, 32),
pretrained_model=args.pretrained_model,
proposal_creator_params=proposal_creator_params)
if args.gpu >= 0:
chainer.cuda.get_device_from_id(args.gpu).use()
model.to_gpu()
img = read_image(args.image, color=True)
masks, labels, scores = model.predict([img])
mask, label, score = masks[0], labels[0], scores[0]
bbox = mask_to_bbox(mask)
colors = voc_colormap(list(range(1, len(mask) + 1)))
ax = vis_bbox(
img, bbox, instance_colors=colors, alpha=0.5, linewidth=1.5)
vis_instance_segmentation(
None, mask, label, score, label_names=label_names,
instance_colors=colors, alpha=0.7, ax=ax)
plt.show()
if __name__ == '__main__':
main()
|
import codecs
import shutil
from io import BytesIO
from typing import List, Optional, Union
from zipfile import ZipFile
from django.utils.functional import cached_property
from django.utils.translation import gettext_lazy as _
from translate.convert.po2html import po2html
from translate.convert.po2idml import translate_idml, write_idml
from translate.convert.po2rc import rerc
from translate.convert.rc2po import rc2po
from translate.convert.xliff2odf import translate_odf, write_odf
from translate.storage.html import htmlfile
from translate.storage.idml import INLINE_ELEMENTS, NO_TRANSLATE_ELEMENTS, open_idml
from translate.storage.odf_io import open_odf
from translate.storage.odf_shared import inline_elements, no_translate_content_elements
from translate.storage.po import pofile
from translate.storage.rc import rcfile
from translate.storage.xliff import xlifffile
from translate.storage.xml_extract.extract import (
IdMaker,
ParseState,
build_idml_store,
build_store,
make_postore_adder,
)
from weblate.formats.base import TranslationFormat
from weblate.formats.helpers import BytesIOMode
from weblate.formats.ttkit import TTKitUnit, XliffUnit
from weblate.utils.errors import report_error
class ConvertUnit(TTKitUnit):
def is_translated(self):
"""Check whether unit is translated."""
return self.unit is not None
def is_fuzzy(self, fallback=False):
"""Check whether unit needs editing."""
return fallback
@cached_property
def locations(self):
return ""
@cached_property
def context(self):
"""Return context of message."""
return "".join(self.mainunit.getlocations())
class ConvertFormat(TranslationFormat):
"""
    Base class for convert-based formats.
    This always uses an intermediate representation.
"""
monolingual = True
can_add_unit = False
needs_target_sync = True
unit_class = ConvertUnit
autoaddon = {"weblate.flags.same_edit": {}}
def save_content(self, handle):
"""Store content to file."""
raise NotImplementedError()
def save(self):
"""Save underlaying store to disk."""
self.save_atomic(self.storefile, self.save_content)
@staticmethod
def convertfile(storefile, template_store):
raise NotImplementedError()
@classmethod
def load(cls, storefile, template_store):
# Did we get file or filename?
if not hasattr(storefile, "read"):
storefile = open(storefile, "rb")
# Adjust store to have translations
store = cls.convertfile(storefile, template_store)
for unit in store.units:
if unit.isheader():
continue
# HTML does this properly on loading, others need it
if cls.needs_target_sync:
unit.target = unit.source
unit.rich_target = unit.rich_source
return store
@classmethod
def create_new_file(cls, filename, language, base):
"""Handle creation of new translation file."""
if not base:
raise ValueError("Not supported")
# Copy file
shutil.copy(base, filename)
@classmethod
def is_valid_base_for_new(cls, base, monolingual, errors: Optional[List] = None):
"""Check whether base is valid."""
if not base:
return False
try:
cls.load(base, None)
return True
except Exception:
report_error(cause="File parse error")
return False
def add_unit(self, ttkit_unit):
self.store.addunit(ttkit_unit)
@classmethod
def get_class(cls):
return None
def create_unit(self, key: str, source: Union[str, List[str]]):
raise ValueError("Not supported")
def cleanup_unused(self) -> List[str]:
"""
Bring target in sync with the source.
This is done automatically on save as it reshapes translations
based on the template.
"""
self.save()
return []
class HTMLFormat(ConvertFormat):
name = _("HTML file")
autoload = ("*.htm", "*.html")
format_id = "html"
check_flags = ("safe-html", "strict-same")
needs_target_sync = False
@staticmethod
def convertfile(storefile, template_store):
store = pofile()
# Fake input file with a blank filename
htmlparser = htmlfile(inputfile=BytesIOMode("", storefile.read()))
for htmlunit in htmlparser.units:
locations = htmlunit.getlocations()
if template_store:
                # Translation
template = template_store.find_unit_mono("".join(locations))
if template is None:
# Skip locations not present in the source HTML file
continue
# Create unit with matching source
thepo = store.addsourceunit(template.source)
thepo.target = htmlunit.source
else:
# Source file
thepo = store.addsourceunit(htmlunit.source)
thepo.target = htmlunit.source
thepo.addlocations(htmlunit.getlocations())
thepo.addnote(htmlunit.getnotes(), "developer")
store.removeduplicates("msgctxt")
return store
def save_content(self, handle):
"""Store content to file."""
convertor = po2html()
templatename = self.template_store.storefile
if hasattr(templatename, "name"):
templatename = templatename.name
with open(templatename, "rb") as templatefile:
outputstring = convertor.mergestore(
self.store, templatefile, includefuzzy=False
)
handle.write(outputstring.encode("utf-8"))
@staticmethod
def mimetype():
"""Return most common mime type for format."""
return "text/html"
@staticmethod
def extension():
"""Return most common file extension for format."""
return "html"
class OpenDocumentFormat(ConvertFormat):
name = _("OpenDocument file")
autoload = (
"*.sxw",
"*.odt",
"*.ods",
"*.odp",
"*.odg",
"*.odc",
"*.odf",
"*.odi",
"*.odm",
"*.ott",
"*.ots",
"*.otp",
"*.otg",
"*.otc",
"*.otf",
"*.oti",
"*.oth",
)
format_id = "odf"
check_flags = ("strict-same",)
unit_class = XliffUnit
@staticmethod
def convertfile(storefile, template_store):
store = xlifffile()
store.setfilename(store.getfilenode("NoName"), "odf")
contents = open_odf(storefile)
for data in contents.values():
parse_state = ParseState(no_translate_content_elements, inline_elements)
build_store(BytesIO(data), store, parse_state)
return store
def save_content(self, handle):
"""Store content to file."""
templatename = self.template_store.storefile
if hasattr(templatename, "name"):
templatename = templatename.name
# This is workaround for weird fuzzy handling in translate-toolkit
for unit in self.all_units:
if unit.xliff_state == "translated":
unit.mark_approved(True)
with open(templatename, "rb") as templatefile:
dom_trees = translate_odf(templatefile, self.store)
write_odf(templatefile, handle, dom_trees)
@staticmethod
def mimetype():
"""Return most common mime type for format."""
return "application/vnd.oasis.opendocument.text"
@staticmethod
def extension():
"""Return most common file extension for format."""
return "odt"
class IDMLFormat(ConvertFormat):
name = _("IDML file")
autoload = ("*.idml", "*.idms")
format_id = "idml"
check_flags = ("strict-same",)
@staticmethod
def convertfile(storefile, template_store):
store = pofile()
contents = open_idml(storefile)
# Create it here to avoid having repeated ids.
id_maker = IdMaker()
for filename, translatable_file in contents.items():
parse_state = ParseState(NO_TRANSLATE_ELEMENTS, INLINE_ELEMENTS)
po_store_adder = make_postore_adder(store, id_maker, filename)
build_idml_store(
BytesIO(translatable_file),
store,
parse_state,
store_adder=po_store_adder,
)
return store
def save_content(self, handle):
"""Store content to file."""
templatename = self.template_store.storefile
if hasattr(templatename, "name"):
templatename = templatename.name
with ZipFile(templatename, "r") as template_zip:
translatable_files = [
filename
for filename in template_zip.namelist()
if filename.startswith("Stories/")
]
dom_trees = translate_idml(templatename, self.store, translatable_files)
write_idml(template_zip, handle, dom_trees)
@staticmethod
def mimetype():
"""Return most common mime type for format."""
return "application/octet-stream"
@staticmethod
def extension():
"""Return most common file extension for format."""
return "idml"
class WindowsRCFormat(ConvertFormat):
name = _("RC file")
format_id = "rc"
autoload = ("*.rc",)
language_format = "bcp"
@staticmethod
def mimetype():
"""Return most common media type for format."""
return "text/plain"
@staticmethod
def extension():
"""Return most common file extension for format."""
return "rc"
@staticmethod
def convertfile(storefile, template_store):
input_store = rcfile(storefile)
convertor = rc2po()
store = convertor.convert_store(input_store)
store.rcfile = input_store
return store
def save_content(self, handle):
"""Store content to file."""
# Fallback language
lang = "LANG_ENGLISH"
sublang = "SUBLANG_DEFAULT"
# Keep existing language tags
storage = self.store.rcfile
if storage.lang:
lang = storage.lang
if storage.sublang:
sublang = storage.sublang
templatename = self.template_store.storefile
if hasattr(templatename, "name"):
templatename = templatename.name
encoding = "utf-8"
with open(templatename, "rb") as templatefile:
bom = templatefile.read(2)
if bom == codecs.BOM_UTF16_LE:
encoding = "utf-16-le"
templatefile.seek(0)
convertor = rerc(
templatefile,
lang=lang,
sublang=sublang,
charset=encoding,
)
outputrclines = convertor.convertstore(self.store)
try:
handle.write(outputrclines.encode(encoding))
except UnicodeEncodeError:
handle.write(codecs.BOM_UTF16_LE)
handle.write(outputrclines.encode("utf-16-le"))
|
import os
import logging
import stat
import tempfile
import dropbox
from stashutils.fsi.errors import OperationFailure, IsDir, IsFile
from stashutils.fsi.errors import AlreadyExists
from stashutils.fsi.base import BaseFSI, make_stat, calc_mode
from stashutils.dbutils import get_dropbox_client
# turn down requests log verbosity
logging.getLogger('requests').setLevel(logging.CRITICAL)
OVERWRITE = dropbox.files.WriteMode("overwrite", None)
class DropboxFSI(BaseFSI):
"""A FSI for accessing dropbox."""
def __init__(self, logger):
self.logger = logger
self.path = "/"
self.client = None
def abspath(self, path):
"""returns thr absolute path for path."""
p = os.path.join(self.path, path)
if p == "/":
return ""
else:
return p
def connect(self, *args):
"""connects to the dropbox. args[0] is the username."""
if len(args) != 1:
return "expected one argument!"
try:
dbci = get_dropbox_client(args[0], False, None, None)
except Exception as e:
return e.message
else:
if dbci is None:
return "No Dropbox configured for '{u}'.".format(u=args[0])
else:
self.client = dbci
return True
def get_path(self):
return self.path
def repr(self):
return "Dropbox [CWD: {p}]".format(p=self.path)
def close(self):
pass
def cd(self, name):
path = self.abspath(name)
if name == "..":
self.path = "/".join(self.path.split("/")[:-1])
if self.path == "":
self.path = "/"
return
try:
# test
self.client.files_list_folder(path, recursive=False)
except dropbox.exceptions.ApiError as api_e:
e = api_e.reason
if e.is_other():
raise OperationFailure(repr(e))
elif e.is_path():
pe = e.get_path()
if pe.is_not_folder():
raise IsFile()
elif pe.is_not_found():
raise OperationFailure("Not Found!")
else:
raise OperationFailure(repr(e))
else:
raise OperationFailure("Not found!")
else:
self.path = path
def listdir(self, path="."):
p = self.abspath(path)
e = []
try:
c = self.client.files_list_folder(p, recursive=False)
e += c.entries
while True:
if c.has_more:
c = self.client.files_list_folder_continue(p)
e += c.entries
else:
break
except dropbox.exceptions.ApiError as e:
raise OperationFailure(e.message)
return [str(m.name) for m in e]
def mkdir(self, name):
path = self.abspath(name)
try:
self.client.files_create_folder(path)
except dropbox.exceptions.ApiError as api_e:
e = api_e.reason
if e.is_path():
pe = e.get_path()
if pe.is_conflict():
raise AlreadyExists("Already exists!")
elif pe.is_insufficient_space():
raise OperationFailure("Not enough Space available!")
elif pe.is_disallowed_name():
raise OperationFailure("Disallowed name!")
elif pe.is_no_write_permission():
raise OperationFailure("Permission denied!")
else:
raise OperationFailure(api_e.message)
else:
raise OperationFailure("Can not create dir!")
def remove(self, name):
path = self.abspath(name)
try:
self.client.files_delete(path)
except dropbox.exceptions.ApiError:
raise OperationFailure("Can not delete target!")
def isdir(self, name):
path = self.abspath(name)
try:
self.client.files_list_folder(path, recursive=False)
return True
except dropbox.exceptions.ApiError:
return False
def isfile(self, name):
return not self.isdir(name)
def open(self, name, mode="rb", buffering=0):
mode = mode.replace("+", "")
ap = self.abspath(name)
if mode in ("r", "rb", "rU"):
try:
response = self.client.files_download(ap)[1]
                # unfortunately, we can't return response.raw because it does not
# support seek(), which is required by tarfile (used in ls)
return Dropbox_Download(
self.client,
name,
mode,
buffering,
response,
)
except dropbox.exceptions.ApiError as api_e:
e = api_e.reason
if e.is_path():
pe = e.get_path()
if pe.is_not_file():
raise IsDir()
raise OperationFailure(api_e.message)
elif "w" in mode:
return Dropbox_Upload(self.client, ap, mode)
else:
raise OperationFailure("Mode not supported!")
def stat(self, name):
ap = self.abspath(name)
if ap in ("/", "/.", "./", "//", ""):
bytes = 0
isdir = True
else:
try:
meta = self.client.files_get_metadata(ap)
except dropbox.exceptions.ApiError as e:
raise OperationFailure(e.message)
if isinstance(meta, (dropbox.files.FolderMetadata, dropbox.sharing.SharedFolderMetadata)):
bytes = 0
isdir = True
else:
bytes = meta.size
isdir = False
type_ = (stat.S_IFDIR if isdir else stat.S_IFREG)
m = calc_mode(type=type_)
s = make_stat(size=bytes, mode=m)
return s
class Dropbox_Upload(object):
"""utility file-like class used for Dropbox-uploads."""
def __init__(self, client, path, mode):
self.client = client
self.path = path
self.mode = mode
self.session = None
self.cursor = None
self.closed = False
def write(self, data):
"""writes some data to the file."""
if self.closed:
raise ValueError("I/O operation on closed file")
if self.session is None:
# first call
self.session = self.client.files_upload_session_start(
data,
close=False,
)
self.cursor = dropbox.files.UploadSessionCursor(self.session.session_id, offset=0)
else:
self.client.files_upload_session_append_v2(data, self.cursor, close=False)
self.cursor.offset += len(data)
def close(self):
"""closes the file"""
if self.closed:
return
if self.session is None:
self.client.files_upload("", self.path, mute=True)
else:
commit = dropbox.files.CommitInfo(self.path, mode=OVERWRITE)
self.client.files_upload_session_finish("", self.cursor, commit)
self.session = None
self.closed = True
def __del__(self):
"""called on deletion"""
self.close()
def __enter__(self):
"""called when entering a 'with'-context."""
return self
def __exit__(self, exc_type, exc_value, traceback):
"""called when exiting a 'with'-context."""
self.close()
def flush(self):
"""no-op"""
pass
def truncate(self, size=-1):
"""no-op"""
pass
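# Usage sketch (assuming an authenticated dropbox.Dropbox client `dbc`):
#   with Dropbox_Upload(dbc, "/notes.txt", "w") as f:
#       f.write(b"first chunk")
#       f.write(b"second chunk")
# The first write() starts an upload session, later writes append to it,
# and close() commits the session with overwrite semantics.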
class Dropbox_Download(object):
"""
utility file-like class used for Dropbox-downloads.
There are two reasons to use this class:
1. requests.Response.raw does not support seek() and tell()
    2. the 'ls' command checks for file types. Due to this, each
    file in a directory is opened. This class improves performance
    by only downloading as much as required into a temporary file.
"""
def __init__(self, client, path, mode, buffering, response):
self.client = client
self.path = path
self.mode = mode
self.buffering = buffering
self.name = path
self._response = response
self._raw = response.raw
self.closed = False
self._read = 0
if "U" in mode:
tfmode = "w+bU"
else:
tfmode = "w+b"
self._tf = tempfile.TemporaryFile(mode=tfmode)
self.newlines = None
def close(self):
"""closes the file"""
if self.closed:
return
self.closed = True
self._tf.close()
self._raw.close()
p = self._tf.name
if os.path.exists(p):
os.remove(p)
def __enter__(self):
"""called when entering a 'with'-context"""
return self
def __exit__(self, exc_type, exc_value, traceback):
"""called when exiting a 'with'-context."""
self.close()
def __del__(self):
"""called when the object will be deleted"""
self.close()
def read(self, size=-1):
"""read at most size bytes from the file"""
if self.closed:
raise ValueError("I/O operation on closed file")
if ((size + self._tf.tell()) > self._read) or (size < 0):
ccp = self._tf.tell()
if size >= 0:
tr = size - (self._read - ccp)
content = self._raw.read(tr)
else:
content = self._raw.read()
self._read += len(content)
self._tf.seek(0, os.SEEK_END)
self._tf.write(content)
self._tf.seek(ccp, os.SEEK_SET)
return self._tf.read(size)
def tell(self):
"""tells the cursor position"""
return self._tf.tell()
def seek(self, offset, whence=os.SEEK_SET):
"""sets the cursor position"""
ccp = self._tf.tell()
if whence == os.SEEK_SET:
ncp = offset
elif whence == os.SEEK_CUR:
ncp = ccp + offset
elif whence == os.SEEK_END:
size = int(self._response.headers["Content-Length"])
ncp = size + offset
else:
raise ValueError("Invalid Value")
if ncp > self._read:
toread = ncp - ccp
self.read(toread)
self.seek(ccp, os.SEEK_SET)
        # we need to seek twice to support relative seeking
self._tf.seek(offset, whence)
def readline(self, size=-1):
"""Read one entire line from the file."""
if "U" in self.mode:
ends = ("\n", "\r", "\r\n")
else:
ends = ("\n", )
buff = ""
while True:
d = self.read(1)
buff += d
if any([e in buff for e in ends]):
return buff
if (size <= len(buff)) or (not d):
return buff
def readlines(self, sizehint=None):
"""
Read until EOF using readline() and return a list containing the
lines thus read.
"""
# sizehint ignored; see the documentation of file.readlines
lines = []
while True:
line = self.readline()
if not line:
break
lines.append(line)
return lines
def xreadlines(self):
"""This method returns the same thing as iter(f)."""
return self
def __iter__(self):
if self.closed:
raise ValueError("I/O operation on closed file")
return self
def next(self):
"""returns the next line"""
line = self.readline()
if line:
return line
else:
raise StopIteration()
def flush(self):
"""no-op"""
pass
def truncate(self, size=-1):
"""no-op"""
pass
|
import hangups
from common import run_example
async def set_focus(client, args):
request = hangups.hangouts_pb2.SetFocusRequest(
request_header=client.get_request_header(),
conversation_id=hangups.hangouts_pb2.ConversationId(
id=args.conversation_id
),
type=hangups.hangouts_pb2.FOCUS_TYPE_FOCUSED,
timeout_secs=int(args.timeout_secs),
)
await client.set_focus(request)
if __name__ == '__main__':
run_example(set_focus, '--conversation-id', '--timeout-secs')
|
from collections import OrderedDict
from luftdaten import Luftdaten
from luftdaten.exceptions import LuftdatenConnectionError
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import (
CONF_MONITORED_CONDITIONS,
CONF_SCAN_INTERVAL,
CONF_SENSORS,
CONF_SHOW_ON_MAP,
)
from homeassistant.core import callback
from homeassistant.helpers import aiohttp_client
import homeassistant.helpers.config_validation as cv
from .const import CONF_SENSOR_ID, DEFAULT_SCAN_INTERVAL, DOMAIN
@callback
def configured_sensors(hass):
"""Return a set of configured Luftdaten sensors."""
return {
entry.data[CONF_SENSOR_ID]
for entry in hass.config_entries.async_entries(DOMAIN)
}
@callback
def duplicate_stations(hass):
"""Return a set of duplicate configured Luftdaten stations."""
stations = [
int(entry.data[CONF_SENSOR_ID])
for entry in hass.config_entries.async_entries(DOMAIN)
]
return {x for x in stations if stations.count(x) > 1}
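# Example: configured sensor ids [1312, 1312, 976] (hypothetical) yield
# the duplicate set {1312}.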
@config_entries.HANDLERS.register(DOMAIN)
class LuftDatenFlowHandler(config_entries.ConfigFlow):
"""Handle a Luftdaten config flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
@callback
def _show_form(self, errors=None):
"""Show the form to the user."""
data_schema = OrderedDict()
data_schema[vol.Required(CONF_SENSOR_ID)] = cv.positive_int
data_schema[vol.Optional(CONF_SHOW_ON_MAP, default=False)] = bool
return self.async_show_form(
step_id="user", data_schema=vol.Schema(data_schema), errors=errors or {}
)
async def async_step_import(self, import_config):
"""Import a config entry from configuration.yaml."""
return await self.async_step_user(import_config)
async def async_step_user(self, user_input=None):
"""Handle the start of the config flow."""
if not user_input:
return self._show_form()
sensor_id = user_input[CONF_SENSOR_ID]
if sensor_id in configured_sensors(self.hass):
return self._show_form({CONF_SENSOR_ID: "already_configured"})
session = aiohttp_client.async_get_clientsession(self.hass)
luftdaten = Luftdaten(user_input[CONF_SENSOR_ID], self.hass.loop, session)
try:
await luftdaten.get_data()
valid = await luftdaten.validate_sensor()
except LuftdatenConnectionError:
return self._show_form({CONF_SENSOR_ID: "cannot_connect"})
if not valid:
return self._show_form({CONF_SENSOR_ID: "invalid_sensor"})
available_sensors = [
x for x in luftdaten.values if luftdaten.values[x] is not None
]
if available_sensors:
user_input.update(
{CONF_SENSORS: {CONF_MONITORED_CONDITIONS: available_sensors}}
)
scan_interval = user_input.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL)
user_input.update({CONF_SCAN_INTERVAL: scan_interval.seconds})
return self.async_create_entry(title=str(sensor_id), data=user_input)
|
import logging
import socket
import paho
import paho.mqtt.client as mqtt
from kalliope.core.NeuronModule import NeuronModule
logging.basicConfig()
logger = logging.getLogger("kalliope")
class Mqtt_publisher(NeuronModule):
def __init__(self, **kwargs):
super(Mqtt_publisher, self).__init__(**kwargs)
logger.debug("[mqtt_publisher] neuron called with parameters: %s" % kwargs)
# get parameters
self.broker_ip = kwargs.get('broker_ip', None)
self.port = kwargs.get('port', 1883)
self.topic = kwargs.get('topic', None)
self.payload = kwargs.get('payload', None)
self.qos = kwargs.get('qos', 0)
self.retain = kwargs.get('retain', False)
self.client_id = kwargs.get('client_id', 'kalliope')
self.keepalive = kwargs.get('keepalive', 60)
self.username = kwargs.get('username', None)
self.password = kwargs.get('password', None)
self.ca_cert = kwargs.get('ca_cert', None)
self.certfile = kwargs.get('certfile', None)
self.keyfile = kwargs.get('keyfile', None)
self.protocol = kwargs.get('protocol', 'MQTTv311')
self.tls_insecure = kwargs.get('tls_insecure', False)
if not self._is_parameters_ok():
logger.debug("[mqtt_publisher] One or more invalid parameters, neuron will not be launched")
else:
# string must be converted
self.protocol = self._get_protocol(self.protocol)
            self.client = mqtt.Client(client_id=self.client_id, protocol=self.protocol)
if self.username is not None and self.password is not None:
logger.debug("[mqtt_publisher] Username and password are set")
self.client.username_pw_set(self.username, self.password)
if self.ca_cert is not None and self.certfile is not None and self.keyfile is not None:
logger.debug("[mqtt_publisher] Active TLS with client certificate authentication")
self.client.tls_set(ca_certs=self.ca_cert,
certfile=self.certfile,
keyfile=self.keyfile)
self.client.tls_insecure_set(self.tls_insecure)
elif self.ca_cert is not None:
logger.debug("[mqtt_publisher] Active TLS with server CA certificate only")
self.client.tls_set(ca_certs=self.ca_cert)
self.client.tls_insecure_set(self.tls_insecure)
try:
self.client.connect(self.broker_ip, port=self.port, keepalive=self.keepalive)
self.client.publish(topic=self.topic, payload=self.payload, qos=int(self.qos), retain=self.retain)
logger.debug("[mqtt_publisher] Message published to topic %s: %s" % (self.topic, self.payload))
self.client.disconnect()
except socket.error:
logger.debug("[mqtt_publisher] Unable to connect to broker %s" % self.broker_ip)
def _is_parameters_ok(self):
if self.broker_ip is None:
print("[mqtt_publisher] ERROR: broker_ip is not set")
return False
if self.port is not None:
if not isinstance(self.port, int):
try:
self.port = int(self.port)
except ValueError:
print("[mqtt_publisher] ERROR: port must be an integer")
return False
if self.topic is None:
print("[mqtt_publisher] ERROR: topic is not set")
return False
if self.payload is None:
print("[mqtt_publisher] ERROR: payload is not set")
return False
if self.qos:
if not isinstance(self.qos, int):
try:
self.qos = int(self.qos)
except ValueError:
print("[mqtt_publisher] ERROR: qos must be an integer")
return False
if self.qos not in [0, 1, 2]:
print("[mqtt_publisher] ERROR: qos must be 0,1 or 2")
return False
if self.keepalive:
if not isinstance(self.keepalive, int):
try:
self.keepalive = int(self.keepalive)
except ValueError:
print("[mqtt_publisher] ERROR: keepalive must be an integer")
return False
if self.username is not None and self.password is None:
print("[mqtt_publisher] ERROR: password must be set when using username")
return False
if self.username is None and self.password is not None:
print("[mqtt_publisher] ERROR: username must be set when using password")
return False
if self.protocol:
if self.protocol not in ["MQTTv31", "MQTTv311"]:
print("[mqtt_publisher] Invalid protocol value, fallback to MQTTv311")
self.protocol = "MQTTv311"
        # if the user set a certfile, the key and ca cert must be set too
if self.certfile is not None and self.keyfile is None:
print("[mqtt_publisher] ERROR: keyfile must be set when using certfile")
return False
if self.certfile is None and self.keyfile is not None:
print("[mqtt_publisher] ERROR: certfile must be set when using keyfile")
return False
if self.certfile is not None and self.keyfile is not None:
if self.ca_cert is None:
print("[mqtt_publisher] ERROR: ca_cert must be set when using keyfile and certfile")
return False
return True
def _get_protocol(self, protocol):
"""
Return the right code depending on the given string protocol name
:param protocol: string name of the protocol to use.
:return: integer
"""
if protocol == "MQTTv31":
return paho.mqtt.client.MQTTv31
return paho.mqtt.client.MQTTv311
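# Usage sketch: a hypothetical Kalliope brain entry (YAML) driving this
# neuron; parameter names match the kwargs parsed in __init__ above:
#   - name: "publish-message"
#     signals:
#       - order: "publish a message"
#     neurons:
#       - mqtt_publisher:
#           broker_ip: "127.0.0.1"
#           topic: "home/kitchen/lamp"
#           payload: "on"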
|
from copy import deepcopy
from homeassistant.components.deconz import DOMAIN as DECONZ_DOMAIN
from homeassistant.components.deconz.gateway import get_gateway_from_config_entry
from homeassistant.components.scene import DOMAIN as SCENE_DOMAIN, SERVICE_TURN_ON
from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.setup import async_setup_component
from .test_gateway import DECONZ_WEB_REQUEST, setup_deconz_integration
from tests.async_mock import patch
GROUPS = {
"1": {
"id": "Light group id",
"name": "Light group",
"type": "LightGroup",
"state": {"all_on": False, "any_on": True},
"action": {},
"scenes": [{"id": "1", "name": "Scene"}],
"lights": [],
}
}
async def test_platform_manually_configured(hass):
"""Test that we do not discover anything or try to set up a gateway."""
assert (
await async_setup_component(
hass, SCENE_DOMAIN, {"scene": {"platform": DECONZ_DOMAIN}}
)
is True
)
assert DECONZ_DOMAIN not in hass.data
async def test_no_scenes(hass):
"""Test that scenes can be loaded without scenes being available."""
await setup_deconz_integration(hass)
assert len(hass.states.async_all()) == 0
async def test_scenes(hass):
"""Test that scenes works."""
data = deepcopy(DECONZ_WEB_REQUEST)
data["groups"] = deepcopy(GROUPS)
config_entry = await setup_deconz_integration(hass, get_state_response=data)
gateway = get_gateway_from_config_entry(hass, config_entry)
assert len(hass.states.async_all()) == 1
assert hass.states.get("scene.light_group_scene")
# Verify service calls
group_scene = gateway.api.groups["1"].scenes["1"]
# Service turn on scene
with patch.object(group_scene, "_request", return_value=True) as set_callback:
await hass.services.async_call(
SCENE_DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "scene.light_group_scene"},
blocking=True,
)
await hass.async_block_till_done()
set_callback.assert_called_with("put", "/groups/1/scenes/1/recall", json={})
await hass.config_entries.async_unload(config_entry.entry_id)
assert len(hass.states.async_all()) == 0
|
import asyncio
from onvif.exceptions import ONVIFAuthError, ONVIFError, ONVIFTimeoutError
import voluptuous as vol
from homeassistant.components.ffmpeg import CONF_EXTRA_ARGUMENTS
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_USERNAME,
EVENT_HOMEASSISTANT_STOP,
HTTP_BASIC_AUTHENTICATION,
HTTP_DIGEST_AUTHENTICATION,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import config_per_platform
from .const import (
CONF_RTSP_TRANSPORT,
CONF_SNAPSHOT_AUTH,
DEFAULT_ARGUMENTS,
DEFAULT_NAME,
DEFAULT_PASSWORD,
DEFAULT_PORT,
DEFAULT_USERNAME,
DOMAIN,
RTSP_TRANS_PROTOCOLS,
)
from .device import ONVIFDevice
CONFIG_SCHEMA = vol.Schema({DOMAIN: vol.Schema({})}, extra=vol.ALLOW_EXTRA)
async def async_setup(hass: HomeAssistant, config: dict):
"""Set up the ONVIF component."""
# Import from yaml
configs = {}
for p_type, p_config in config_per_platform(config, "camera"):
if p_type != DOMAIN:
continue
config = p_config.copy()
if config[CONF_HOST] not in configs:
configs[config[CONF_HOST]] = {
CONF_HOST: config[CONF_HOST],
CONF_NAME: config.get(CONF_NAME, DEFAULT_NAME),
CONF_PASSWORD: config.get(CONF_PASSWORD, DEFAULT_PASSWORD),
CONF_PORT: config.get(CONF_PORT, DEFAULT_PORT),
CONF_USERNAME: config.get(CONF_USERNAME, DEFAULT_USERNAME),
}
for conf in configs.values():
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=conf
)
)
return True
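# Example of the YAML this import path consumes (a hedged illustration; only
# host/name/port/username/password are read here, any other camera options
# are handled by the camera platform schema):
#
#   camera:
#     - platform: onvif
#       host: 192.168.1.64
#       port: 80
#       username: admin
#       password: secret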
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Set up ONVIF from a config entry."""
if DOMAIN not in hass.data:
hass.data[DOMAIN] = {}
if not entry.options:
await async_populate_options(hass, entry)
device = ONVIFDevice(hass, entry)
if not await device.async_setup():
await device.device.close()
return False
if not device.available:
raise ConfigEntryNotReady()
if not entry.data.get(CONF_SNAPSHOT_AUTH):
await async_populate_snapshot_auth(hass, device, entry)
hass.data[DOMAIN][entry.unique_id] = device
platforms = ["camera"]
if device.capabilities.events:
platforms += ["binary_sensor", "sensor"]
for component in platforms:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, device.async_stop)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
device = hass.data[DOMAIN][entry.unique_id]
platforms = ["camera"]
if device.capabilities.events and device.events.started:
platforms += ["binary_sensor", "sensor"]
await device.events.async_stop()
return all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in platforms
]
)
)
async def _get_snapshot_auth(device):
"""Determine auth type for snapshots."""
if not device.capabilities.snapshot or not (device.username and device.password):
return HTTP_DIGEST_AUTHENTICATION
try:
snapshot = await device.device.get_snapshot(device.profiles[0].token)
if snapshot:
return HTTP_DIGEST_AUTHENTICATION
return HTTP_BASIC_AUTHENTICATION
except (ONVIFAuthError, ONVIFTimeoutError):
return HTTP_BASIC_AUTHENTICATION
except ONVIFError:
return HTTP_DIGEST_AUTHENTICATION
async def async_populate_snapshot_auth(hass, device, entry):
"""Check if digest auth for snapshots is possible."""
auth = await _get_snapshot_auth(device)
new_data = {**entry.data, CONF_SNAPSHOT_AUTH: auth}
hass.config_entries.async_update_entry(entry, data=new_data)
async def async_populate_options(hass, entry):
"""Populate default options for device."""
options = {
CONF_EXTRA_ARGUMENTS: DEFAULT_ARGUMENTS,
CONF_RTSP_TRANSPORT: RTSP_TRANS_PROTOCOLS[0],
}
hass.config_entries.async_update_entry(entry, options=options)
|
import datetime
from queue import Empty
import pymongo
from pymongo import errors
from pymongo import MongoClient, uri_parser
from pymongo.cursor import CursorType
from kombu.exceptions import VersionMismatch
from kombu.utils.compat import _detect_environment
from kombu.utils.encoding import bytes_to_str
from kombu.utils.json import loads, dumps
from kombu.utils.objects import cached_property
from . import virtual
from .base import to_rabbitmq_queue_arguments
E_SERVER_VERSION = """\
Kombu requires MongoDB version 1.3+ (server is {0})\
"""
E_NO_TTL_INDEXES = """\
Kombu requires MongoDB version 2.2+ (server is {0}) for TTL indexes support\
"""
class BroadcastCursor:
"""Cursor for broadcast queues."""
def __init__(self, cursor):
self._cursor = cursor
self.purge(rewind=False)
def get_size(self):
return self._cursor.count() - self._offset
def close(self):
self._cursor.close()
def purge(self, rewind=True):
if rewind:
self._cursor.rewind()
# Fast forward the cursor past old events
self._offset = self._cursor.count()
self._cursor = self._cursor.skip(self._offset)
def __iter__(self):
return self
def __next__(self):
while True:
try:
msg = next(self._cursor)
except pymongo.errors.OperationFailure as exc:
                # In some cases a tailable cursor can become invalid
                # and has to be reinitialized
if 'not valid at server' in str(exc):
self.purge()
continue
raise
else:
break
self._offset += 1
return msg
    next = __next__  # Python 2 compatibility alias
class Channel(virtual.Channel):
"""MongoDB Channel."""
supports_fanout = True
# Mutable container. Shared by all class instances
_fanout_queues = {}
# Options
ssl = False
ttl = False
connect_timeout = None
capped_queue_size = 100000
calc_queue_size = True
default_hostname = '127.0.0.1'
default_port = 27017
default_database = 'kombu_default'
messages_collection = 'messages'
routing_collection = 'messages.routing'
broadcast_collection = 'messages.broadcast'
queues_collection = 'messages.queues'
from_transport_options = (virtual.Channel.from_transport_options + (
'connect_timeout', 'ssl', 'ttl', 'capped_queue_size',
'default_hostname', 'default_port', 'default_database',
'messages_collection', 'routing_collection',
'broadcast_collection', 'queues_collection',
'calc_queue_size',
))
def __init__(self, *vargs, **kwargs):
super().__init__(*vargs, **kwargs)
self._broadcast_cursors = {}
# Evaluate connection
self.client
# AbstractChannel/Channel interface implementation
def _new_queue(self, queue, **kwargs):
if self.ttl:
self.queues.update(
{'_id': queue},
{'_id': queue,
'options': kwargs,
'expire_at': self._get_expire(kwargs, 'x-expires')},
upsert=True)
def _get(self, queue):
if queue in self._fanout_queues:
try:
msg = next(self._get_broadcast_cursor(queue))
except StopIteration:
msg = None
else:
msg = self.messages.find_and_modify(
query={'queue': queue},
sort=[('priority', pymongo.ASCENDING)],
remove=True,
)
if self.ttl:
self._update_queues_expire(queue)
if msg is None:
raise Empty()
return loads(bytes_to_str(msg['payload']))
def _size(self, queue):
# Do not calculate actual queue size if requested
# for performance considerations
if not self.calc_queue_size:
return super()._size(queue)
if queue in self._fanout_queues:
return self._get_broadcast_cursor(queue).get_size()
return self.messages.find({'queue': queue}).count()
def _put(self, queue, message, **kwargs):
data = {
'payload': dumps(message),
'queue': queue,
'priority': self._get_message_priority(message, reverse=True)
}
if self.ttl:
data['expire_at'] = self._get_expire(queue, 'x-message-ttl')
self.messages.insert(data)
def _put_fanout(self, exchange, message, routing_key, **kwargs):
self.broadcast.insert({'payload': dumps(message),
'queue': exchange})
def _purge(self, queue):
size = self._size(queue)
if queue in self._fanout_queues:
self._get_broadcast_cursor(queue).purge()
else:
self.messages.remove({'queue': queue})
return size
def get_table(self, exchange):
localRoutes = frozenset(self.state.exchanges[exchange]['table'])
brokerRoutes = self.routing.find(
{'exchange': exchange}
)
return localRoutes | frozenset(
(r['routing_key'], r['pattern'], r['queue'])
for r in brokerRoutes
)
def _queue_bind(self, exchange, routing_key, pattern, queue):
if self.typeof(exchange).type == 'fanout':
self._create_broadcast_cursor(
exchange, routing_key, pattern, queue)
self._fanout_queues[queue] = exchange
lookup = {
'exchange': exchange,
'queue': queue,
'routing_key': routing_key,
'pattern': pattern,
}
data = lookup.copy()
if self.ttl:
data['expire_at'] = self._get_expire(queue, 'x-expires')
self.routing.update(lookup, data, upsert=True)
def queue_delete(self, queue, **kwargs):
self.routing.remove({'queue': queue})
if self.ttl:
self.queues.remove({'_id': queue})
super().queue_delete(queue, **kwargs)
if queue in self._fanout_queues:
try:
cursor = self._broadcast_cursors.pop(queue)
except KeyError:
pass
else:
cursor.close()
self._fanout_queues.pop(queue)
# Implementation details
def _parse_uri(self, scheme='mongodb://'):
# See mongodb uri documentation:
# https://docs.mongodb.org/manual/reference/connection-string/
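        # e.g. 'mongodb://user:password@host:27017/dbname?ssl=true'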
client = self.connection.client
hostname = client.hostname
if not hostname.startswith(scheme):
hostname = scheme + hostname
if not hostname[len(scheme):]:
hostname += self.default_hostname
if client.userid and '@' not in hostname:
head, tail = hostname.split('://')
credentials = client.userid
if client.password:
credentials += ':' + client.password
hostname = head + '://' + credentials + '@' + tail
port = client.port if client.port else self.default_port
parsed = uri_parser.parse_uri(hostname, port)
dbname = parsed['database'] or client.virtual_host
if dbname in ('/', None):
dbname = self.default_database
options = {
'auto_start_request': True,
'ssl': self.ssl,
'connectTimeoutMS': (int(self.connect_timeout * 1000)
if self.connect_timeout else None),
}
options.update(parsed['options'])
options = self._prepare_client_options(options)
return hostname, dbname, options
def _prepare_client_options(self, options):
if pymongo.version_tuple >= (3,):
options.pop('auto_start_request', None)
if isinstance(options.get('readpreference'), int):
modes = pymongo.read_preferences._MONGOS_MODES
options['readpreference'] = modes[options['readpreference']]
return options
def prepare_queue_arguments(self, arguments, **kwargs):
return to_rabbitmq_queue_arguments(arguments, **kwargs)
def _open(self, scheme='mongodb://'):
hostname, dbname, conf = self._parse_uri(scheme=scheme)
conf['host'] = hostname
env = _detect_environment()
if env == 'gevent':
from gevent import monkey
monkey.patch_all()
elif env == 'eventlet':
from eventlet import monkey_patch
monkey_patch()
mongoconn = MongoClient(**conf)
database = mongoconn[dbname]
version_str = mongoconn.server_info()['version']
version_str = version_str.split('-')[0]
version = tuple(map(int, version_str.split('.')))
if version < (1, 3):
raise VersionMismatch(E_SERVER_VERSION.format(version_str))
elif self.ttl and version < (2, 2):
raise VersionMismatch(E_NO_TTL_INDEXES.format(version_str))
return database
def _create_broadcast(self, database):
"""Create capped collection for broadcast messages."""
if self.broadcast_collection in database.collection_names():
return
database.create_collection(self.broadcast_collection,
size=self.capped_queue_size,
capped=True)
def _ensure_indexes(self, database):
"""Ensure indexes on collections."""
messages = database[self.messages_collection]
messages.ensure_index(
[('queue', 1), ('priority', 1), ('_id', 1)], background=True,
)
database[self.broadcast_collection].ensure_index([('queue', 1)])
routing = database[self.routing_collection]
routing.ensure_index([('queue', 1), ('exchange', 1)])
if self.ttl:
messages.ensure_index([('expire_at', 1)], expireAfterSeconds=0)
routing.ensure_index([('expire_at', 1)], expireAfterSeconds=0)
database[self.queues_collection].ensure_index(
[('expire_at', 1)], expireAfterSeconds=0)
def _create_client(self):
"""Actually creates connection."""
database = self._open()
self._create_broadcast(database)
self._ensure_indexes(database)
return database
@cached_property
def client(self):
return self._create_client()
@cached_property
def messages(self):
return self.client[self.messages_collection]
@cached_property
def routing(self):
return self.client[self.routing_collection]
@cached_property
def broadcast(self):
return self.client[self.broadcast_collection]
@cached_property
def queues(self):
return self.client[self.queues_collection]
def _get_broadcast_cursor(self, queue):
try:
return self._broadcast_cursors[queue]
except KeyError:
# Cursor may be absent when Channel created more than once.
# _fanout_queues is a class-level mutable attribute so it's
# shared over all Channel instances.
return self._create_broadcast_cursor(
self._fanout_queues[queue], None, None, queue,
)
def _create_broadcast_cursor(self, exchange, routing_key, pattern, queue):
if pymongo.version_tuple >= (3, ):
query = {
'filter': {'queue': exchange},
'cursor_type': CursorType.TAILABLE,
}
else:
query = {
'query': {'queue': exchange},
'tailable': True,
}
cursor = self.broadcast.find(**query)
ret = self._broadcast_cursors[queue] = BroadcastCursor(cursor)
return ret
def _get_expire(self, queue, argument):
"""Get expiration header named `argument` of queue definition.
Note:
`queue` must be either queue name or options itself.
"""
if isinstance(queue, str):
doc = self.queues.find_one({'_id': queue})
if not doc:
return
data = doc['options']
else:
data = queue
try:
value = data['arguments'][argument]
except (KeyError, TypeError):
return
return self.get_now() + datetime.timedelta(milliseconds=value)
def _update_queues_expire(self, queue):
"""Update expiration field on queues documents."""
expire_at = self._get_expire(queue, 'x-expires')
if not expire_at:
return
self.routing.update(
{'queue': queue}, {'$set': {'expire_at': expire_at}}, multi=True)
self.queues.update(
{'_id': queue}, {'$set': {'expire_at': expire_at}}, multi=True)
def get_now(self):
"""Return current time in UTC."""
return datetime.datetime.utcnow()
class Transport(virtual.Transport):
"""MongoDB Transport."""
Channel = Channel
can_parse_url = True
polling_interval = 1
default_port = Channel.default_port
connection_errors = (
virtual.Transport.connection_errors + (errors.ConnectionFailure,)
)
channel_errors = (
virtual.Transport.channel_errors + (
errors.ConnectionFailure,
errors.OperationFailure)
)
driver_type = 'mongodb'
driver_name = 'pymongo'
implements = virtual.Transport.implements.extend(
exchange_type=frozenset(['direct', 'topic', 'fanout']),
)
def driver_version(self):
return pymongo.version
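if __name__ == '__main__':  # pragma: no cover
    # Minimal usage sketch, not part of the transport itself: it assumes a
    # MongoDB server reachable on localhost and maps transport_options onto
    # the Channel attributes defined above.
    from kombu import Connection
    with Connection('mongodb://localhost:27017/kombu_default',
                    transport_options={'calc_queue_size': False}) as conn:
        queue = conn.SimpleQueue('demo')
        queue.put({'hello': 'world'})
        message = queue.get(block=True, timeout=5)
        print(message.payload)
        message.ack()
        queue.close()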
|
from pkg_resources import EntryPoint
from stevedore import ExtensionManager
class RegistrableExtensionManager(ExtensionManager):
""":class:~stevedore.extensions.ExtensionManager` with support for registration.
It allows loading of internal extensions without setup and registering/unregistering additional extensions.
Loading is done in this order:
* Entry point extensions
* Internal extensions
* Registered extensions
:param str namespace: namespace argument for :class:~stevedore.extensions.ExtensionManager`.
:param list internal_extensions: internal extensions to use with entry point syntax.
:param \*\*kwargs: additional parameters for the :class:~stevedore.extensions.ExtensionManager` constructor.
"""
def __init__(self, namespace, internal_extensions, **kwargs):
#: Registered extensions with entry point syntax
self.registered_extensions = []
#: Internal extensions with entry point syntax
self.internal_extensions = internal_extensions
super(RegistrableExtensionManager, self).__init__(namespace, **kwargs)
def list_entry_points(self):
# copy of default extensions
eps = list(super(RegistrableExtensionManager, self).list_entry_points())
# internal extensions
for iep in self.internal_extensions:
ep = EntryPoint.parse(iep)
if ep.name not in [e.name for e in eps]:
eps.append(ep)
# registered extensions
for rep in self.registered_extensions:
ep = EntryPoint.parse(rep)
if ep.name not in [e.name for e in eps]:
eps.append(ep)
return eps
def register(self, entry_point):
"""Register an extension
:param str entry_point: extension to register (entry point syntax).
:raise: ValueError if already registered.
"""
if entry_point in self.registered_extensions:
raise ValueError('Extension already registered')
ep = EntryPoint.parse(entry_point)
if ep.name in self.names():
            raise ValueError('An extension with the same name already exists')
ext = self._load_one_plugin(ep, False, (), {}, False)
self.extensions.append(ext)
if self._extensions_by_name is not None:
self._extensions_by_name[ext.name] = ext
self.registered_extensions.insert(0, entry_point)
def unregister(self, entry_point):
"""Unregister a provider
:param str entry_point: provider to unregister (entry point syntax).
"""
if entry_point not in self.registered_extensions:
raise ValueError('Extension not registered')
ep = EntryPoint.parse(entry_point)
self.registered_extensions.remove(entry_point)
if self._extensions_by_name is not None:
del self._extensions_by_name[ep.name]
for i, ext in enumerate(self.extensions):
if ext.name == ep.name:
del self.extensions[i]
break
#: Provider manager
provider_manager = RegistrableExtensionManager('subliminal.providers', [
'addic7ed = subliminal.providers.addic7ed:Addic7edProvider',
'argenteam = subliminal.providers.argenteam:ArgenteamProvider',
'legendastv = subliminal.providers.legendastv:LegendasTVProvider',
'opensubtitles = subliminal.providers.opensubtitles:OpenSubtitlesProvider',
'opensubtitlesvip = subliminal.providers.opensubtitles:OpenSubtitlesVipProvider',
'podnapisi = subliminal.providers.podnapisi:PodnapisiProvider',
'shooter = subliminal.providers.shooter:ShooterProvider',
'thesubdb = subliminal.providers.thesubdb:TheSubDBProvider',
'tvsubtitles = subliminal.providers.tvsubtitles:TVsubtitlesProvider'
])
#: Disabled providers
disabled_providers = ['opensubtitlesvip']
#: Default enabled providers
default_providers = [p for p in provider_manager.names() if p not in disabled_providers]
#: Refiner manager
refiner_manager = RegistrableExtensionManager('subliminal.refiners', [
'hash = subliminal.refiners.hash:refine',
'metadata = subliminal.refiners.metadata:refine',
'omdb = subliminal.refiners.omdb:refine',
'tvdb = subliminal.refiners.tvdb:refine'
])
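if __name__ == '__main__':  # pragma: no cover
    # Hedged demo of the managers assembled above. The entry point passed to
    # register() is hypothetical and left commented out because its target
    # module must be importable.
    # provider_manager.register('mysubs = mypackage.mysubs:MySubsProvider')
    print(sorted(default_providers))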
|
import importlib.abc
import importlib.machinery
import importlib.util
import os
import platform
import shutil
import sys
import tempfile
import time
import weakref
import pytest
from jinja2 import Environment
from jinja2 import loaders
from jinja2 import PackageLoader
from jinja2.exceptions import TemplateNotFound
from jinja2.loaders import split_template_path
class TestLoaders:
def test_dict_loader(self, dict_loader):
env = Environment(loader=dict_loader)
tmpl = env.get_template("justdict.html")
assert tmpl.render().strip() == "FOO"
pytest.raises(TemplateNotFound, env.get_template, "missing.html")
def test_package_loader(self, package_loader):
env = Environment(loader=package_loader)
tmpl = env.get_template("test.html")
assert tmpl.render().strip() == "BAR"
pytest.raises(TemplateNotFound, env.get_template, "missing.html")
def test_filesystem_loader_overlapping_names(self, filesystem_loader):
res = os.path.dirname(filesystem_loader.searchpath[0])
t2_dir = os.path.join(res, "templates2")
# Make "foo" show up before "foo/test.html".
filesystem_loader.searchpath.insert(0, t2_dir)
e = Environment(loader=filesystem_loader)
e.get_template("foo")
# This would raise NotADirectoryError if "t2/foo" wasn't skipped.
e.get_template("foo/test.html")
def test_choice_loader(self, choice_loader):
env = Environment(loader=choice_loader)
tmpl = env.get_template("justdict.html")
assert tmpl.render().strip() == "FOO"
tmpl = env.get_template("test.html")
assert tmpl.render().strip() == "BAR"
pytest.raises(TemplateNotFound, env.get_template, "missing.html")
def test_function_loader(self, function_loader):
env = Environment(loader=function_loader)
tmpl = env.get_template("justfunction.html")
assert tmpl.render().strip() == "FOO"
pytest.raises(TemplateNotFound, env.get_template, "missing.html")
def test_prefix_loader(self, prefix_loader):
env = Environment(loader=prefix_loader)
tmpl = env.get_template("a/test.html")
assert tmpl.render().strip() == "BAR"
tmpl = env.get_template("b/justdict.html")
assert tmpl.render().strip() == "FOO"
pytest.raises(TemplateNotFound, env.get_template, "missing")
def test_caching(self):
changed = False
class TestLoader(loaders.BaseLoader):
def get_source(self, environment, template):
return "foo", None, lambda: not changed
env = Environment(loader=TestLoader(), cache_size=-1)
tmpl = env.get_template("template")
assert tmpl is env.get_template("template")
changed = True
assert tmpl is not env.get_template("template")
changed = False
def test_no_cache(self):
mapping = {"foo": "one"}
env = Environment(loader=loaders.DictLoader(mapping), cache_size=0)
assert env.get_template("foo") is not env.get_template("foo")
def test_limited_size_cache(self):
mapping = {"one": "foo", "two": "bar", "three": "baz"}
loader = loaders.DictLoader(mapping)
env = Environment(loader=loader, cache_size=2)
t1 = env.get_template("one")
t2 = env.get_template("two")
assert t2 is env.get_template("two")
assert t1 is env.get_template("one")
env.get_template("three")
loader_ref = weakref.ref(loader)
assert (loader_ref, "one") in env.cache
assert (loader_ref, "two") not in env.cache
assert (loader_ref, "three") in env.cache
def test_cache_loader_change(self):
loader1 = loaders.DictLoader({"foo": "one"})
loader2 = loaders.DictLoader({"foo": "two"})
env = Environment(loader=loader1, cache_size=2)
assert env.get_template("foo").render() == "one"
env.loader = loader2
assert env.get_template("foo").render() == "two"
def test_dict_loader_cache_invalidates(self):
mapping = {"foo": "one"}
env = Environment(loader=loaders.DictLoader(mapping))
assert env.get_template("foo").render() == "one"
mapping["foo"] = "two"
assert env.get_template("foo").render() == "two"
def test_split_template_path(self):
assert split_template_path("foo/bar") == ["foo", "bar"]
assert split_template_path("./foo/bar") == ["foo", "bar"]
pytest.raises(TemplateNotFound, split_template_path, "../foo")
class TestFileSystemLoader:
searchpath = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "res", "templates"
)
@staticmethod
def _test_common(env):
tmpl = env.get_template("test.html")
assert tmpl.render().strip() == "BAR"
tmpl = env.get_template("foo/test.html")
assert tmpl.render().strip() == "FOO"
pytest.raises(TemplateNotFound, env.get_template, "missing.html")
def test_searchpath_as_str(self):
filesystem_loader = loaders.FileSystemLoader(self.searchpath)
env = Environment(loader=filesystem_loader)
self._test_common(env)
def test_searchpath_as_pathlib(self):
import pathlib
searchpath = pathlib.Path(self.searchpath)
filesystem_loader = loaders.FileSystemLoader(searchpath)
env = Environment(loader=filesystem_loader)
self._test_common(env)
def test_searchpath_as_list_including_pathlib(self):
import pathlib
searchpath = pathlib.Path(self.searchpath)
filesystem_loader = loaders.FileSystemLoader(["/tmp/templates", searchpath])
env = Environment(loader=filesystem_loader)
self._test_common(env)
def test_caches_template_based_on_mtime(self):
filesystem_loader = loaders.FileSystemLoader(self.searchpath)
env = Environment(loader=filesystem_loader)
tmpl1 = env.get_template("test.html")
tmpl2 = env.get_template("test.html")
assert tmpl1 is tmpl2
os.utime(os.path.join(self.searchpath, "test.html"), (time.time(), time.time()))
tmpl3 = env.get_template("test.html")
assert tmpl1 is not tmpl3
@pytest.mark.parametrize(
("encoding", "expect"),
[
("utf-8", "文字化け"),
("iso-8859-1", "æ\x96\x87\xe5\xad\x97\xe5\x8c\x96\xe3\x81\x91"),
],
)
def test_uses_specified_encoding(self, encoding, expect):
loader = loaders.FileSystemLoader(self.searchpath, encoding=encoding)
e = Environment(loader=loader)
t = e.get_template("mojibake.txt")
assert t.render() == expect
class TestModuleLoader:
archive = None
def compile_down(self, prefix_loader, zip="deflated"):
log = []
self.reg_env = Environment(loader=prefix_loader)
if zip is not None:
fd, self.archive = tempfile.mkstemp(suffix=".zip")
os.close(fd)
else:
self.archive = tempfile.mkdtemp()
self.reg_env.compile_templates(self.archive, zip=zip, log_function=log.append)
self.mod_env = Environment(loader=loaders.ModuleLoader(self.archive))
return "".join(log)
def teardown(self):
if hasattr(self, "mod_env"):
if os.path.isfile(self.archive):
os.remove(self.archive)
else:
shutil.rmtree(self.archive)
self.archive = None
def test_log(self, prefix_loader):
log = self.compile_down(prefix_loader)
assert (
'Compiled "a/foo/test.html" as '
"tmpl_a790caf9d669e39ea4d280d597ec891c4ef0404a" in log
)
assert "Finished compiling templates" in log
assert (
'Could not compile "a/syntaxerror.html": '
"Encountered unknown tag 'endif'" in log
)
def _test_common(self):
tmpl1 = self.reg_env.get_template("a/test.html")
tmpl2 = self.mod_env.get_template("a/test.html")
assert tmpl1.render() == tmpl2.render()
tmpl1 = self.reg_env.get_template("b/justdict.html")
tmpl2 = self.mod_env.get_template("b/justdict.html")
assert tmpl1.render() == tmpl2.render()
def test_deflated_zip_compile(self, prefix_loader):
self.compile_down(prefix_loader, zip="deflated")
self._test_common()
def test_stored_zip_compile(self, prefix_loader):
self.compile_down(prefix_loader, zip="stored")
self._test_common()
def test_filesystem_compile(self, prefix_loader):
self.compile_down(prefix_loader, zip=None)
self._test_common()
def test_weak_references(self, prefix_loader):
self.compile_down(prefix_loader)
self.mod_env.get_template("a/test.html")
key = loaders.ModuleLoader.get_template_key("a/test.html")
name = self.mod_env.loader.module.__name__
assert hasattr(self.mod_env.loader.module, key)
assert name in sys.modules
# unset all, ensure the module is gone from sys.modules
self.mod_env = None
try:
import gc
gc.collect()
except BaseException:
pass
assert name not in sys.modules
def test_choice_loader(self, prefix_loader):
self.compile_down(prefix_loader)
self.mod_env.loader = loaders.ChoiceLoader(
[self.mod_env.loader, loaders.DictLoader({"DICT_SOURCE": "DICT_TEMPLATE"})]
)
tmpl1 = self.mod_env.get_template("a/test.html")
assert tmpl1.render() == "BAR"
tmpl2 = self.mod_env.get_template("DICT_SOURCE")
assert tmpl2.render() == "DICT_TEMPLATE"
def test_prefix_loader(self, prefix_loader):
self.compile_down(prefix_loader)
self.mod_env.loader = loaders.PrefixLoader(
{
"MOD": self.mod_env.loader,
"DICT": loaders.DictLoader({"test.html": "DICT_TEMPLATE"}),
}
)
tmpl1 = self.mod_env.get_template("MOD/a/test.html")
assert tmpl1.render() == "BAR"
tmpl2 = self.mod_env.get_template("DICT/test.html")
assert tmpl2.render() == "DICT_TEMPLATE"
def test_path_as_pathlib(self, prefix_loader):
self.compile_down(prefix_loader)
mod_path = self.mod_env.loader.module.__path__[0]
import pathlib
mod_loader = loaders.ModuleLoader(pathlib.Path(mod_path))
self.mod_env = Environment(loader=mod_loader)
self._test_common()
def test_supports_pathlib_in_list_of_paths(self, prefix_loader):
self.compile_down(prefix_loader)
mod_path = self.mod_env.loader.module.__path__[0]
import pathlib
mod_loader = loaders.ModuleLoader([pathlib.Path(mod_path), "/tmp/templates"])
self.mod_env = Environment(loader=mod_loader)
self._test_common()
@pytest.fixture()
def package_dir_loader(monkeypatch):
monkeypatch.syspath_prepend(os.path.dirname(__file__))
return PackageLoader("res")
@pytest.mark.parametrize(
("template", "expect"), [("foo/test.html", "FOO"), ("test.html", "BAR")]
)
def test_package_dir_source(package_dir_loader, template, expect):
source, name, up_to_date = package_dir_loader.get_source(None, template)
assert source.rstrip() == expect
assert name.endswith(os.path.join(*split_template_path(template)))
assert up_to_date()
def test_package_dir_list(package_dir_loader):
templates = package_dir_loader.list_templates()
assert "foo/test.html" in templates
assert "test.html" in templates
@pytest.fixture()
def package_zip_loader(monkeypatch):
monkeypatch.syspath_prepend(
os.path.join(os.path.dirname(__file__), "res", "package.zip")
)
return PackageLoader("t_pack")
@pytest.mark.parametrize(
("template", "expect"), [("foo/test.html", "FOO"), ("test.html", "BAR")]
)
def test_package_zip_source(package_zip_loader, template, expect):
source, name, up_to_date = package_zip_loader.get_source(None, template)
assert source.rstrip() == expect
assert name.endswith(os.path.join(*split_template_path(template)))
assert up_to_date is None
@pytest.mark.xfail(
platform.python_implementation() == "PyPy",
reason="PyPy's zipimporter doesn't have a '_files' attribute.",
raises=TypeError,
)
def test_package_zip_list(package_zip_loader):
assert package_zip_loader.list_templates() == ["foo/test.html", "test.html"]
def test_pep_451_import_hook():
class ImportHook(importlib.abc.MetaPathFinder, importlib.abc.Loader):
def find_spec(self, name, path=None, target=None):
if name != "res":
return None
spec = importlib.machinery.PathFinder.find_spec(name)
return importlib.util.spec_from_file_location(
name,
spec.origin,
loader=self,
submodule_search_locations=spec.submodule_search_locations,
)
def create_module(self, spec):
return None # default behaviour is fine
def exec_module(self, module):
return None # we need this to satisfy the interface, it's wrong
# ensure we restore `sys.meta_path` after putting in our loader
before = sys.meta_path[:]
try:
sys.meta_path.insert(0, ImportHook())
package_loader = PackageLoader("res")
assert "test.html" in package_loader.list_templates()
finally:
sys.meta_path[:] = before
|
from kombu.asynchronous.semaphore import LaxBoundedSemaphore
class test_LaxBoundedSemaphore:
def test_over_release(self):
x = LaxBoundedSemaphore(2)
calls = []
for i in range(1, 21):
x.acquire(calls.append, i)
x.release()
x.acquire(calls.append, 'x')
x.release()
x.acquire(calls.append, 'y')
        assert calls == [1, 2, 3, 4]
for i in range(30):
x.release()
        assert calls == list(range(1, 21)) + ['x', 'y']
assert x.value == x.initial_value
calls[:] = []
for i in range(1, 11):
x.acquire(calls.append, i)
for i in range(1, 11):
x.release()
        assert calls == list(range(1, 11))
calls[:] = []
assert x.value == x.initial_value
x.acquire(calls.append, 'x')
assert x.value == 1
x.acquire(calls.append, 'y')
assert x.value == 0
x.release()
assert x.value == 1
x.release()
assert x.value == 2
x.release()
assert x.value == 2
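if __name__ == '__main__':  # pragma: no cover
    # Quick illustration of the "lax" semantics exercised above: callbacks run
    # immediately while capacity remains, excess releases are ignored, and the
    # value never grows past initial_value.
    sem = LaxBoundedSemaphore(2)
    sem.acquire(print, 'first')   # runs immediately, value -> 1
    sem.acquire(print, 'second')  # runs immediately, value -> 0
    sem.acquire(print, 'third')   # queued until a release
    sem.release()                 # runs 'third'
    for _ in range(10):
        sem.release()             # value is capped at the initial value
    assert sem.value == sem.initial_value == 2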
|
from re import match as rematch
from fnmatch import fnmatch
from .utils.compat import entrypoints
from .utils.encoding import bytes_to_str
class MatcherNotInstalled(Exception):
"""Matcher not installed/found."""
class MatcherRegistry:
"""Pattern matching function registry."""
MatcherNotInstalled = MatcherNotInstalled
matcher_pattern_first = ["pcre", ]
def __init__(self):
self._matchers = {}
self._default_matcher = None
def register(self, name, matcher):
"""Add matcher by name to the registry."""
self._matchers[name] = matcher
def unregister(self, name):
"""Remove matcher by name from the registry."""
try:
self._matchers.pop(name)
except KeyError:
raise self.MatcherNotInstalled(
f'No matcher installed for {name}'
)
def _set_default_matcher(self, name):
"""Set the default matching method.
:param name: The name of the registered matching method.
For example, `glob` (default), `pcre`, or any custom
methods registered using :meth:`register`.
:raises MatcherNotInstalled: If the matching method requested
is not available.
"""
try:
self._default_matcher = self._matchers[name]
except KeyError:
raise self.MatcherNotInstalled(
f'No matcher installed for {name}'
)
def match(self, data, pattern, matcher=None, matcher_kwargs=None):
"""Call the matcher."""
if matcher and not self._matchers.get(matcher):
raise self.MatcherNotInstalled(
f'No matcher installed for {matcher}'
)
match_func = self._matchers[matcher or 'glob']
if matcher in self.matcher_pattern_first:
first_arg = bytes_to_str(pattern)
second_arg = bytes_to_str(data)
else:
first_arg = bytes_to_str(data)
second_arg = bytes_to_str(pattern)
return match_func(first_arg, second_arg, **matcher_kwargs or {})
#: Global registry of matchers.
registry = MatcherRegistry()
"""
.. function:: match(data, pattern, matcher=default_matcher,
matcher_kwargs=None):
Match `data` by `pattern` using `matcher`.
:param data: The data that should be matched. Must be string.
:param pattern: The pattern that should be applied. Must be string.
:keyword matcher: An optional string representing the matching
method (for example, `glob` or `pcre`).
If :const:`None` (default), then `glob` will be used.
:keyword matcher_kwargs: Additional keyword arguments that will be passed
to the specified `matcher`.
:returns: :const:`True` if `data` matches pattern,
:const:`False` otherwise.
:raises MatcherNotInstalled: If the matching method requested is not
available.
"""
match = registry.match
"""
.. function:: register(name, matcher):
Register a new matching method.
:param name: A convenience name for the matching method.
:param matcher: A method that will be passed data and pattern.
"""
register = registry.register
"""
.. function:: unregister(name):
Unregister registered matching method.
:param name: Registered matching method name.
"""
unregister = registry.unregister
def register_glob():
"""Register glob into default registry."""
registry.register('glob', fnmatch)
def register_pcre():
"""Register pcre into default registry."""
registry.register('pcre', rematch)
# Register the base matching methods.
register_glob()
register_pcre()
# Default matching method is 'glob'
registry._set_default_matcher('glob')
# Load entrypoints from installed extensions
for ep, args in entrypoints('kombu.matchers'):
register(ep.name, *args)
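if __name__ == '__main__':  # pragma: no cover
    # Hedged demo of the registry assembled above: 'glob' is the default
    # matcher, while 'pcre' receives the pattern as its first argument
    # (see MatcherRegistry.matcher_pattern_first).
    assert match('celery.events.click', 'celery.events.*')
    assert match('celery.events.click', r'celery\.events\..*', matcher='pcre')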
|
import inspect
import logging
import os
from six import with_metaclass
from kalliope.core.ConfigurationManager import SettingLoader
from kalliope.core.ConfigurationManager.ConfigurationChecker import ConfigurationChecker
from kalliope.core.Models import Singleton
from kalliope.core.Models.Brain import Brain
from kalliope.core.Models.Neuron import Neuron
from kalliope.core.Models.Signal import Signal
from kalliope.core.Models.Synapse import Synapse
from kalliope.core.Utils import Utils
from .YAMLLoader import YAMLLoader
logging.basicConfig()
logger = logging.getLogger("kalliope")
FILE_NAME = "brain.yml"
class BrainNotFound(Exception):
pass
class BrainLoader(with_metaclass(Singleton, object)):
"""
This Class is used to get the brain YAML and the Brain as an object
"""
def __init__(self, file_path=None):
sl = SettingLoader()
self.settings = sl.settings
self.file_path = file_path
if self.file_path is None: # we don't provide a file path, so search for the default one
self.file_path = Utils.get_real_file_path(FILE_NAME)
else:
self.file_path = Utils.get_real_file_path(file_path)
# if the returned file path is none, the file doesn't exist
if self.file_path is None:
raise BrainNotFound("brain file not found")
self.yaml_config = self.get_yaml_config()
self.brain = self.load_brain()
def get_yaml_config(self):
"""
Class Methods which loads default or the provided YAML file and return it as a String
:return: The loaded brain YAML
:rtype: String
:Example:
brain_yaml = BrainLoader.get_yaml_config(/var/tmp/brain.yml)
.. warnings:: Class Method
"""
if self.file_path is None:
brain_file_path = self._get_root_brain_path()
else:
brain_file_path = self.file_path
return YAMLLoader.get_config(brain_file_path)
def load_brain(self):
"""
Class Methods which loads default or the provided YAML file and return a Brain
:return: The loaded Brain
:rtype: Brain
:Example:
brain = BrainLoader.load_brain(file_path="/var/tmp/brain.yml")
.. seealso:: Brain
.. warnings:: Class Method
"""
# Instantiate a brain
brain = Brain()
# get the brain with dict
dict_brain = self.get_yaml_config()
brain.brain_yaml = dict_brain
# create list of Synapse
synapses = list()
for synapses_dict in dict_brain:
if "includes" not in synapses_dict: # we don't need to check includes as it's not a synapse
if ConfigurationChecker().check_synape_dict(synapses_dict):
name = synapses_dict["name"]
neurons = self.get_neurons(synapses_dict["neurons"], self.settings)
signals = self.get_signals(synapses_dict["signals"])
new_synapse = Synapse(name=name, neurons=neurons, signals=signals)
synapses.append(new_synapse)
brain.synapses = synapses
if self.file_path is None:
brain.brain_file = self._get_root_brain_path()
else:
brain.brain_file = self.file_path
# check that no synapse have the same name than another
if not ConfigurationChecker().check_synapes(synapses):
brain = None
return brain
@classmethod
def get_neurons(cls, neurons_dict, settings):
"""
Get a list of Neuron object from a neuron dict
:param neurons_dict: Neuron name or dictionary of Neuron_name/Neuron_parameters
:type neurons_dict: String or dict
:param settings: The Settings with the global variables
:return: A list of Neurons
:rtype: List
        :Example:
            neurons = cls.get_neurons(synapses_dict["neurons"], settings)
        .. seealso:: Neuron
        .. warnings:: Class method
"""
neurons = list()
for neuron_dict in neurons_dict:
if ConfigurationChecker().check_neuron_dict(neuron_dict):
if isinstance(neuron_dict, dict):
for neuron_name in neuron_dict:
new_neuron = Neuron(name=neuron_name, parameters=neuron_dict[neuron_name])
neurons.append(new_neuron)
else:
new_neuron = Neuron(name=neuron_dict)
neurons.append(new_neuron)
return neurons
@classmethod
def get_signals(cls, signals_dict):
"""
Get a list of Signal object from a signals dict
:param signals_dict: Signal name or dictionary of Signal_name/Signal_parameters
:type signals_dict: String or dict
:return: A list of Event and/or Order
:rtype: List
        :Example:
            signals = cls.get_signals(synapses_dict["signals"])
        .. seealso:: Event, Order
        .. warnings:: Class method
"""
signals = list()
for signal_dict in signals_dict:
if ConfigurationChecker().check_signal_dict(signal_dict):
for signal_name in signal_dict:
new_signal = Signal(name=signal_name, parameters=signal_dict[signal_name])
signals.append(new_signal)
return signals
@staticmethod
def _get_root_brain_path():
"""
Return the full path of the default brain file
:Example:
brain.brain_file = cls._get_root_brain_path()
.. raises:: IOError
.. warnings:: Static method and Private
"""
# get current script directory path. We are in /an/unknown/path/kalliope/core/ConfigurationManager
cur_script_directory = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
# get parent dir. Now we are in /an/unknown/path/kalliope
parent_dir = os.path.normpath(cur_script_directory + os.sep + os.pardir + os.sep + os.pardir)
brain_path = parent_dir + os.sep + "brain.yml"
logger.debug("Real brain.yml path: %s" % brain_path)
if os.path.isfile(brain_path):
return brain_path
raise IOError("Default brain.yml file not found")
|
import arrow
from flask import current_app
from lemur.common.utils import is_weekend
def convert_validity_years(data):
"""
Convert validity years to validity_start and validity_end
:param data:
:return:
"""
if data.get("validity_years"):
now = arrow.utcnow()
data["validity_start"] = now.isoformat()
end = now.shift(years=+int(data["validity_years"]))
if not current_app.config.get("LEMUR_ALLOW_WEEKEND_EXPIRATION", True):
if is_weekend(end):
end = end.shift(days=-2)
data["validity_end"] = end.isoformat()
return data
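if __name__ == '__main__':  # pragma: no cover
    # Illustration of the weekend shift above with arbitrary example dates
    # (convert_validity_years itself needs a Flask app context for current_app):
    saturday = arrow.get("2020-06-06")
    assert is_weekend(saturday)
    assert not is_weekend(saturday.shift(days=-2))  # Thursday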
|
from pprint import pformat
from typing import List
from urllib.parse import urlparse
from onvif.exceptions import ONVIFError
import voluptuous as vol
from wsdiscovery.discovery import ThreadedWSDiscovery as WSDiscovery
from wsdiscovery.scope import Scope
from wsdiscovery.service import Service
from zeep.exceptions import Fault
from homeassistant import config_entries
from homeassistant.components.ffmpeg import CONF_EXTRA_ARGUMENTS
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_USERNAME,
)
from homeassistant.core import callback
# pylint: disable=unused-import
from .const import (
CONF_DEVICE_ID,
CONF_RTSP_TRANSPORT,
DEFAULT_ARGUMENTS,
DEFAULT_PORT,
DOMAIN,
LOGGER,
RTSP_TRANS_PROTOCOLS,
)
from .device import get_device
CONF_MANUAL_INPUT = "Manually configure ONVIF device"
def wsdiscovery() -> List[Service]:
"""Get ONVIF Profile S devices from network."""
discovery = WSDiscovery(ttl=4)
discovery.start()
services = discovery.searchServices(
scopes=[Scope("onvif://www.onvif.org/Profile/Streaming")]
)
discovery.stop()
return services
async def async_discovery(hass) -> List[dict]:
    """Return the list of ONVIF Profile S devices discovered on the network."""
LOGGER.debug("Starting ONVIF discovery...")
services = await hass.async_add_executor_job(wsdiscovery)
devices = []
for service in services:
url = urlparse(service.getXAddrs()[0])
device = {
CONF_DEVICE_ID: None,
CONF_NAME: service.getEPR(),
CONF_HOST: url.hostname,
CONF_PORT: url.port or 80,
}
for scope in service.getScopes():
scope_str = scope.getValue()
if scope_str.lower().startswith("onvif://www.onvif.org/name"):
device[CONF_NAME] = scope_str.split("/")[-1]
if scope_str.lower().startswith("onvif://www.onvif.org/mac"):
device[CONF_DEVICE_ID] = scope_str.split("/")[-1]
devices.append(device)
return devices
class OnvifFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a ONVIF config flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL
@staticmethod
@callback
def async_get_options_flow(config_entry):
"""Get the options flow for this handler."""
return OnvifOptionsFlowHandler(config_entry)
def __init__(self):
"""Initialize the ONVIF config flow."""
self.device_id = None
self.devices = []
self.onvif_config = {}
async def async_step_user(self, user_input=None):
"""Handle user flow."""
if user_input is not None:
return await self.async_step_device()
return self.async_show_form(step_id="user")
async def async_step_device(self, user_input=None):
"""Handle WS-Discovery.
Let user choose between discovered devices and manual configuration.
If no device is found allow user to manually input configuration.
"""
if user_input:
if CONF_MANUAL_INPUT == user_input[CONF_HOST]:
return await self.async_step_manual_input()
for device in self.devices:
name = f"{device[CONF_NAME]} ({device[CONF_HOST]})"
if name == user_input[CONF_HOST]:
self.device_id = device[CONF_DEVICE_ID]
self.onvif_config = {
CONF_NAME: device[CONF_NAME],
CONF_HOST: device[CONF_HOST],
CONF_PORT: device[CONF_PORT],
}
return await self.async_step_auth()
discovery = await async_discovery(self.hass)
for device in discovery:
configured = any(
entry.unique_id == device[CONF_DEVICE_ID]
for entry in self._async_current_entries()
)
if not configured:
self.devices.append(device)
LOGGER.debug("Discovered ONVIF devices %s", pformat(self.devices))
if self.devices:
names = [
f"{device[CONF_NAME]} ({device[CONF_HOST]})" for device in self.devices
]
names.append(CONF_MANUAL_INPUT)
return self.async_show_form(
step_id="device",
data_schema=vol.Schema({vol.Optional(CONF_HOST): vol.In(names)}),
)
return await self.async_step_manual_input()
async def async_step_manual_input(self, user_input=None):
"""Manual configuration."""
if user_input:
self.onvif_config = user_input
return await self.async_step_auth()
return self.async_show_form(
step_id="manual_input",
data_schema=vol.Schema(
{
vol.Required(CONF_NAME): str,
vol.Required(CONF_HOST): str,
vol.Required(CONF_PORT, default=DEFAULT_PORT): int,
}
),
)
async def async_step_auth(self, user_input=None):
"""Username and Password configuration for ONVIF device."""
if user_input:
self.onvif_config[CONF_USERNAME] = user_input[CONF_USERNAME]
self.onvif_config[CONF_PASSWORD] = user_input[CONF_PASSWORD]
return await self.async_step_profiles()
# Username and Password are optional and default empty
# due to some cameras not allowing you to change ONVIF user settings.
# See https://github.com/home-assistant/core/issues/39182
# and https://github.com/home-assistant/core/issues/35904
return self.async_show_form(
step_id="auth",
data_schema=vol.Schema(
{
vol.Optional(CONF_USERNAME, default=""): str,
vol.Optional(CONF_PASSWORD, default=""): str,
}
),
)
async def async_step_profiles(self, user_input=None):
"""Fetch ONVIF device profiles."""
errors = {}
LOGGER.debug(
"Fetching profiles from ONVIF device %s", pformat(self.onvif_config)
)
device = get_device(
self.hass,
self.onvif_config[CONF_HOST],
self.onvif_config[CONF_PORT],
self.onvif_config[CONF_USERNAME],
self.onvif_config[CONF_PASSWORD],
)
try:
await device.update_xaddrs()
device_mgmt = device.create_devicemgmt_service()
# Get the MAC address to use as the unique ID for the config flow
if not self.device_id:
try:
network_interfaces = await device_mgmt.GetNetworkInterfaces()
for interface in network_interfaces:
if interface.Enabled:
self.device_id = interface.Info.HwAddress
except Fault as fault:
if "not implemented" not in fault.message:
raise fault
LOGGER.debug(
"Couldn't get network interfaces from ONVIF deivice '%s'. Error: %s",
self.onvif_config[CONF_NAME],
fault,
)
# If no network interfaces are exposed, fallback to serial number
if not self.device_id:
device_info = await device_mgmt.GetDeviceInformation()
self.device_id = device_info.SerialNumber
if not self.device_id:
return self.async_abort(reason="no_mac")
await self.async_set_unique_id(self.device_id, raise_on_progress=False)
self._abort_if_unique_id_configured(
updates={
CONF_HOST: self.onvif_config[CONF_HOST],
CONF_PORT: self.onvif_config[CONF_PORT],
CONF_NAME: self.onvif_config[CONF_NAME],
}
)
# Verify there is an H264 profile
media_service = device.create_media_service()
profiles = await media_service.GetProfiles()
h264 = any(
profile.VideoEncoderConfiguration
and profile.VideoEncoderConfiguration.Encoding == "H264"
for profile in profiles
)
if not h264:
return self.async_abort(reason="no_h264")
await device.close()
title = f"{self.onvif_config[CONF_NAME]} - {self.device_id}"
return self.async_create_entry(title=title, data=self.onvif_config)
except ONVIFError as err:
LOGGER.error(
"Couldn't setup ONVIF device '%s'. Error: %s",
self.onvif_config[CONF_NAME],
err,
)
await device.close()
return self.async_abort(reason="onvif_error")
except Fault:
errors["base"] = "cannot_connect"
await device.close()
return self.async_show_form(step_id="auth", errors=errors)
async def async_step_import(self, user_input):
"""Handle import."""
self.onvif_config = user_input
return await self.async_step_profiles()
class OnvifOptionsFlowHandler(config_entries.OptionsFlow):
"""Handle ONVIF options."""
def __init__(self, config_entry):
"""Initialize ONVIF options flow."""
self.config_entry = config_entry
self.options = dict(config_entry.options)
async def async_step_init(self, user_input=None):
"""Manage the ONVIF options."""
return await self.async_step_onvif_devices()
async def async_step_onvif_devices(self, user_input=None):
"""Manage the ONVIF devices options."""
if user_input is not None:
self.options[CONF_EXTRA_ARGUMENTS] = user_input[CONF_EXTRA_ARGUMENTS]
self.options[CONF_RTSP_TRANSPORT] = user_input[CONF_RTSP_TRANSPORT]
return self.async_create_entry(title="", data=self.options)
return self.async_show_form(
step_id="onvif_devices",
data_schema=vol.Schema(
{
vol.Optional(
CONF_EXTRA_ARGUMENTS,
default=self.config_entry.options.get(
CONF_EXTRA_ARGUMENTS, DEFAULT_ARGUMENTS
),
): str,
vol.Optional(
CONF_RTSP_TRANSPORT,
default=self.config_entry.options.get(
CONF_RTSP_TRANSPORT, RTSP_TRANS_PROTOCOLS[0]
),
): vol.In(RTSP_TRANS_PROTOCOLS),
}
),
)
|
from collections import defaultdict
from warnings import warn
import numpy as np
from pandas import DataFrame
from scipy.linalg import eig
from pgmpy.factors.discrete import State
from pgmpy.utils import sample_discrete
class MarkovChain(object):
"""
Class to represent a Markov Chain with multiple kernels for factored state space,
along with methods to simulate a run.
Examples
--------
Create an empty Markov Chain:
>>> from pgmpy.models import MarkovChain as MC
>>> model = MC()
And then add variables to it
>>> model.add_variables_from(['intel', 'diff'], [2, 3])
Or directly create a Markov Chain from a list of variables and their cardinalities
>>> model = MC(['intel', 'diff'], [2, 3])
Add transition models
>>> intel_tm = {0: {0: 0.25, 1: 0.75}, 1: {0: 0.5, 1: 0.5}}
>>> model.add_transition_model('intel', intel_tm)
>>> diff_tm = {0: {0: 0.1, 1: 0.5, 2: 0.4}, 1: {0: 0.2, 1: 0.2, 2: 0.6 }, 2: {0: 0.7, 1: 0.15, 2: 0.15}}
>>> model.add_transition_model('diff', diff_tm)
Set a start state
>>> from pgmpy.factors.discrete import State
>>> model.set_start_state([State('intel', 0), State('diff', 2)])
Sample from it
>>> model.sample(size=5)
intel diff
0 0 2
1 1 0
2 0 1
3 1 0
4 0 2
"""
def __init__(self, variables=None, card=None, start_state=None):
"""
Parameters
----------
variables: array-like iterable object
A list of variables of the model.
card: array-like iterable object
A list of cardinalities of the variables.
start_state: array-like iterable object
List of tuples representing the starting states of the variables.
"""
if variables is None:
variables = []
if card is None:
card = []
if not hasattr(variables, "__iter__") or isinstance(variables, str):
raise ValueError("variables must be a non-string iterable.")
if not hasattr(card, "__iter__") or isinstance(card, str):
raise ValueError("card must be a non-string iterable.")
self.variables = variables
self.cardinalities = {v: c for v, c in zip(variables, card)}
self.transition_models = {var: {} for var in variables}
if start_state is None or self._check_state(start_state):
self.state = start_state
def set_start_state(self, start_state):
"""
        Set the start state of the Markov Chain. If the start_state is given as an array-like iterable, its contents
are reordered in the internal representation.
Parameters
----------
start_state: dict or array-like iterable object
Dict (or list) of tuples representing the starting states of the variables.
Examples
--------
>>> from pgmpy.models import MarkovChain as MC
>>> from pgmpy.factors.discrete import State
>>> model = MC(['a', 'b'], [2, 2])
>>> model.set_start_state([State('a', 0), State('b', 1)])
"""
if start_state is not None:
if not hasattr(start_state, "__iter__") or isinstance(start_state, str):
raise ValueError("start_state must be a non-string iterable.")
# Must be an array-like iterable. Reorder according to self.variables.
state_dict = {var: st for var, st in start_state}
start_state = [State(var, state_dict[var]) for var in self.variables]
if start_state is None or self._check_state(start_state):
self.state = start_state
def _check_state(self, state):
"""
Checks if a list representing the state of the variables is valid.
"""
if not hasattr(state, "__iter__") or isinstance(state, str):
raise ValueError("Start state must be a non-string iterable object.")
state_vars = {s.var for s in state}
if not state_vars == set(self.variables):
raise ValueError(
f"Start state must represent a complete assignment to all variables."
f"Expected variables in state: {state_vars}, Got: {set(self.variables)}."
)
for var, val in state:
if val >= self.cardinalities[var]:
raise ValueError(f"Assignment {val} to {var} invalid.")
return True
def add_variable(self, variable, card=0):
"""
Add a variable to the model.
Parameters
----------
variable: any hashable python object
card: int
Representing the cardinality of the variable to be added.
Examples
--------
>>> from pgmpy.models import MarkovChain as MC
>>> model = MC()
>>> model.add_variable('x', 4)
"""
if variable not in self.variables:
self.variables.append(variable)
else:
warn(f"Variable {variable} already exists.")
self.cardinalities[variable] = card
self.transition_models[variable] = {}
def add_variables_from(self, variables, cards):
"""
Add several variables to the model at once.
Parameters
----------
variables: array-like iterable object
List of variables to be added.
cards: array-like iterable object
List of cardinalities of the variables to be added.
Examples
--------
>>> from pgmpy.models import MarkovChain as MC
>>> model = MC()
>>> model.add_variables_from(['x', 'y'], [3, 4])
"""
for var, card in zip(variables, cards):
self.add_variable(var, card)
def add_transition_model(self, variable, transition_model):
"""
Adds a transition model for a particular variable.
Parameters
----------
variable: any hashable python object
must be an existing variable of the model.
transition_model: dict or 2d array
dict representing valid transition probabilities defined for every possible state of the variable.
array represent a square matrix where every row sums to 1,
array[i,j] indicates the transition probalities from State i to State j
Examples
--------
>>> from pgmpy.models import MarkovChain as MC
>>> model = MC()
>>> model.add_variable('grade', 3)
>>> grade_tm = {0: {0: 0.1, 1: 0.5, 2: 0.4}, 1: {0: 0.2, 1: 0.2, 2: 0.6 }, 2: {0: 0.7, 1: 0.15, 2: 0.15}}
>>> grade_tm_matrix = np.array([[0.1, 0.5, 0.4], [0.2, 0.2, 0.6], [0.7, 0.15, 0.15]])
>>> model.add_transition_model('grade', grade_tm)
>>> model.add_transition_model('grade', grade_tm_matrix)
"""
if isinstance(transition_model, list):
transition_model = np.array(transition_model)
# check if the transition model is valid
if not isinstance(transition_model, dict):
if not isinstance(transition_model, np.ndarray):
raise ValueError("Transition model must be a dict or numpy array")
elif len(transition_model.shape) != 2:
raise ValueError(
f"Transition model must be 2d array.given {transition_model.shape}"
)
elif transition_model.shape[0] != transition_model.shape[1]:
raise ValueError(
f"Dimension mismatch {transition_model.shape[0]}!={transition_model.shape[1]}"
)
else:
# convert the matrix to dict
size = transition_model.shape[0]
                transition_model = {
                    i: {j: float(transition_model[i][j]) for j in range(size)}
                    for i in range(size)
                }
exp_states = set(range(self.cardinalities[variable]))
tm_states = set(transition_model.keys())
if not exp_states == tm_states:
raise ValueError(
f"Transitions must be defined for all states of variable {variable}. Expected states: {exp_states}, Got: {tm_states}."
)
for _, transition in transition_model.items():
if not isinstance(transition, dict):
raise ValueError("Each transition must be a dict.")
prob_sum = 0
for _, prob in transition.items():
if prob < 0 or prob > 1:
raise ValueError(
"Transitions must represent valid probability weights."
)
prob_sum += prob
if not np.allclose(prob_sum, 1):
raise ValueError("Transition probabilities must sum to 1.")
self.transition_models[variable] = transition_model
def sample(self, start_state=None, size=1, seed=None):
"""
Sample from the Markov Chain.
Parameters
----------
start_state: dict or array-like iterable
Representing the starting states of the variables. If None is passed, a random start_state is chosen.
size: int
Number of samples to be generated.
Returns
-------
pandas.DataFrame
Examples
--------
>>> from pgmpy.models import MarkovChain as MC
>>> from pgmpy.factors.discrete import State
>>> model = MC(['intel', 'diff'], [2, 3])
>>> model.set_start_state([State('intel', 0), State('diff', 2)])
>>> intel_tm = {0: {0: 0.25, 1: 0.75}, 1: {0: 0.5, 1: 0.5}}
>>> model.add_transition_model('intel', intel_tm)
>>> diff_tm = {0: {0: 0.1, 1: 0.5, 2: 0.4}, 1: {0: 0.2, 1: 0.2, 2: 0.6 }, 2: {0: 0.7, 1: 0.15, 2: 0.15}}
>>> model.add_transition_model('diff', diff_tm)
>>> model.sample(size=5)
intel diff
0 0 2
1 1 0
2 0 1
3 1 0
4 0 2
"""
if start_state is None:
if self.state is None:
self.state = self.random_state()
# else use previously-set state
else:
self.set_start_state(start_state)
sampled = DataFrame(index=range(size), columns=self.variables)
sampled.loc[0] = [st for var, st in self.state]
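        # Pre-draw `size` transitions for every (variable, state) pair so the
        # main loop below only indexes into `samples` instead of calling the
        # RNG at every step.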
var_states = defaultdict(dict)
var_values = defaultdict(dict)
samples = defaultdict(dict)
for var in self.transition_models.keys():
for st in self.transition_models[var]:
var_states[var][st] = list(self.transition_models[var][st].keys())
var_values[var][st] = list(self.transition_models[var][st].values())
samples[var][st] = sample_discrete(
var_states[var][st], var_values[var][st], size=size, seed=seed
)
for i in range(size - 1):
for j, (var, st) in enumerate(self.state):
next_st = samples[var][st][i]
self.state[j] = State(var, next_st)
sampled.loc[i + 1] = [st for var, st in self.state]
return sampled
def prob_from_sample(self, state, sample=None, window_size=None):
"""
Given an instantiation (partial or complete) of the variables of the model,
compute the probability of observing it over multiple windows in a given sample.
If 'sample' is not passed as an argument, generate the statistic by sampling from the
Markov Chain, starting with a random initial state.
Examples
--------
>>> from pgmpy.models.MarkovChain import MarkovChain as MC
>>> from pgmpy.factors.discrete import State
>>> model = MC(['intel', 'diff'], [3, 2])
>>> intel_tm = {0: {0: 0.2, 1: 0.4, 2:0.4}, 1: {0: 0, 1: 0.5, 2: 0.5}, 2: {2: 0.5, 1:0.5}}
>>> model.add_transition_model('intel', intel_tm)
>>> diff_tm = {0: {0: 0.5, 1: 0.5}, 1: {0: 0.25, 1:0.75}}
>>> model.add_transition_model('diff', diff_tm)
>>> model.prob_from_sample([State('diff', 0)])
array([ 0.27, 0.4 , 0.18, 0.23, ..., 0.29])
"""
if sample is None:
# generate sample of size 10000
sample = self.sample(self.random_state(), size=10000)
if window_size is None:
            window_size = len(sample) // 100  # default: split the sample into 100 windows
windows = len(sample) // window_size
probabilities = np.zeros(windows)
for i in range(windows):
for j in range(window_size):
ind = i * window_size + j
state_eq = [sample.loc[ind, v] == s for v, s in state]
if all(state_eq):
probabilities[i] += 1
return probabilities / window_size
def generate_sample(self, start_state=None, size=1, seed=None):
"""
Generator version of self.sample
Returns
-------
List of State namedtuples, representing the assignment to all variables of the model.
Examples
--------
>>> from pgmpy.models.MarkovChain import MarkovChain
>>> from pgmpy.factors.discrete import State
>>> model = MarkovChain()
>>> model.add_variables_from(['intel', 'diff'], [3, 2])
>>> intel_tm = {0: {0: 0.2, 1: 0.4, 2:0.4}, 1: {0: 0, 1: 0.5, 2: 0.5}, 2: {0: 0.3, 1: 0.3, 2: 0.4}}
>>> model.add_transition_model('intel', intel_tm)
>>> diff_tm = {0: {0: 0.5, 1: 0.5}, 1: {0: 0.25, 1:0.75}}
>>> model.add_transition_model('diff', diff_tm)
>>> gen = model.generate_sample([State('intel', 0), State('diff', 0)], 2)
>>> [sample for sample in gen]
[[State(var='intel', state=2), State(var='diff', state=1)],
[State(var='intel', state=2), State(var='diff', state=0)]]
"""
if start_state is None:
if self.state is None:
self.state = self.random_state()
# else use previously-set state
else:
self.set_start_state(start_state)
# sampled.loc[0] = [self.state[var] for var in self.variables]
for i in range(size):
for j, (var, st) in enumerate(self.state):
next_st = sample_discrete(
list(self.transition_models[var][st].keys()),
list(self.transition_models[var][st].values()),
seed=seed,
)[0]
self.state[j] = State(var, next_st)
yield self.state[:]
def is_stationarity(self, tolerance=0.2, sample=None):
"""
        Checks if the given Markov chain is stationary, i.e. whether the state
        probabilities estimated from samples are consistent with the steady
        state distribution.
        Parameters
        ----------
        tolerance: float
            Maximum allowed difference between a steady state probability and
            the probability estimated from the sample.
        sample: list of State namedtuples
            The list of states which the Markov chain has sampled.
        Returns
        -------
        Boolean:
            True, if the Markov chain converges to the steady state distribution
            within the tolerance.
            False, if the Markov chain does not converge to the steady state
            distribution within the tolerance.
Examples
--------
>>> from pgmpy.models.MarkovChain import MarkovChain
>>> from pgmpy.factors.discrete import State
>>> model = MarkovChain()
>>> model.add_variables_from(['intel', 'diff'], [3, 2])
>>> intel_tm = {0: {0: 0.2, 1: 0.4, 2:0.4}, 1: {0: 0, 1: 0.5, 2: 0.5}, 2: {0: 0.3, 1: 0.3, 2: 0.4}}
>>> model.add_transition_model('intel', intel_tm)
>>> diff_tm = {0: {0: 0.5, 1: 0.5}, 1: {0: 0.25, 1:0.75}}
>>> model.add_transition_model('diff', diff_tm)
>>> model.is_stationarity()
True
"""
keys = self.transition_models.keys()
return_val = True
for k in keys:
# convert dict to numpy matrix
transition_mat = np.array(
[
np.array(list(self.transition_models[k][i].values()))
for i in self.transition_models[k].keys()
],
                dtype=float,
)
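            # The stationary distribution is the left eigenvector of the transition matrix for eigenvalue 1, normalized to sum to 1.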
S, U = eig(transition_mat.T)
stationary = np.array(U[:, np.where(np.abs(S - 1.0) < 1e-8)[0][0]].flat)
stationary = (stationary / np.sum(stationary)).real
            probabilities = []
            window_size = 10000 if sample is None else len(sample)
            for i in range(0, transition_mat.shape[0]):
                probabilities.extend(
                    self.prob_from_sample([State(k, i)], window_size=window_size)
                )
            if any(
                np.abs(i) > tolerance for i in np.subtract(probabilities, stationary)
            ):
                return_val = False
return return_val
def random_state(self):
"""
Generates a random state of the Markov Chain.
Returns
-------
List of namedtuples, representing a random assignment to all variables of the model.
Examples
--------
>>> from pgmpy.models import MarkovChain as MC
>>> model = MC(['intel', 'diff'], [2, 3])
>>> model.random_state()
[State('diff', 2), State('intel', 1)]
"""
return [
State(var, np.random.randint(self.cardinalities[var]))
for var in self.variables
]
def copy(self):
"""
Returns a copy of Markov Chain Model.
Returns
-------
MarkovChain : Copy of MarkovChain.
Examples
--------
>>> from pgmpy.models import MarkovChain
>>> from pgmpy.factors.discrete import State
>>> model = MarkovChain()
>>> model.add_variables_from(['intel', 'diff'], [3, 2])
>>> intel_tm = {0: {0: 0.2, 1: 0.4, 2:0.4}, 1: {0: 0, 1: 0.5, 2: 0.5}, 2: {0: 0.3, 1: 0.3, 2: 0.4}}
>>> model.add_transition_model('intel', intel_tm)
>>> diff_tm = {0: {0: 0.5, 1: 0.5}, 1: {0: 0.25, 1:0.75}}
>>> model.add_transition_model('diff', diff_tm)
>>> model.set_start_state([State('intel', 0), State('diff', 2)])
>>> model_copy = model.copy()
        >>> model_copy.transition_models
        {'intel': {0: {0: 0.2, 1: 0.4, 2: 0.4}, 1: {0: 0, 1: 0.5, 2: 0.5}, 2: {0: 0.3, 1: 0.3, 2: 0.4}},
         'diff': {0: {0: 0.5, 1: 0.5}, 1: {0: 0.25, 1: 0.75}}}
"""
markovchain_copy = MarkovChain(
variables=list(self.cardinalities.keys()),
card=list(self.cardinalities.values()),
start_state=self.state,
)
if self.transition_models:
markovchain_copy.transition_models = self.transition_models.copy()
return markovchain_copy
|
import json
from pygti.exceptions import CannotConnect, InvalidAuth
from homeassistant import data_entry_flow
from homeassistant.components.hvv_departures.const import (
CONF_FILTER,
CONF_REAL_TIME,
CONF_STATION,
DOMAIN,
)
from homeassistant.config_entries import CONN_CLASS_CLOUD_POLL, SOURCE_USER
from homeassistant.const import CONF_HOST, CONF_OFFSET, CONF_PASSWORD, CONF_USERNAME
from tests.async_mock import patch
from tests.common import MockConfigEntry, load_fixture
FIXTURE_INIT = json.loads(load_fixture("hvv_departures/init.json"))
FIXTURE_CHECK_NAME = json.loads(load_fixture("hvv_departures/check_name.json"))
FIXTURE_STATION_INFORMATION = json.loads(
load_fixture("hvv_departures/station_information.json")
)
FIXTURE_CONFIG_ENTRY = json.loads(load_fixture("hvv_departures/config_entry.json"))
FIXTURE_OPTIONS = json.loads(load_fixture("hvv_departures/options.json"))
FIXTURE_DEPARTURE_LIST = json.loads(load_fixture("hvv_departures/departure_list.json"))
async def test_user_flow(hass):
"""Test that config flow works."""
with patch(
"homeassistant.components.hvv_departures.hub.GTI.init",
return_value=FIXTURE_INIT,
), patch(
"homeassistant.components.hvv_departures.hub.GTI.checkName",
return_value=FIXTURE_CHECK_NAME,
), patch(
"homeassistant.components.hvv_departures.hub.GTI.stationInformation",
return_value=FIXTURE_STATION_INFORMATION,
), patch(
"homeassistant.components.hvv_departures.async_setup", return_value=True
), patch(
"homeassistant.components.hvv_departures.async_setup_entry",
return_value=True,
):
# step: user
result_user = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={
CONF_HOST: "api-test.geofox.de",
CONF_USERNAME: "test-username",
CONF_PASSWORD: "test-password",
},
)
assert result_user["step_id"] == "station"
# step: station
result_station = await hass.config_entries.flow.async_configure(
result_user["flow_id"],
{CONF_STATION: "Wartenau"},
)
assert result_station["step_id"] == "station_select"
# step: station_select
result_station_select = await hass.config_entries.flow.async_configure(
result_user["flow_id"],
{CONF_STATION: "Wartenau"},
)
assert result_station_select["type"] == "create_entry"
assert result_station_select["title"] == "Wartenau"
assert result_station_select["data"] == {
CONF_HOST: "api-test.geofox.de",
CONF_USERNAME: "test-username",
CONF_PASSWORD: "test-password",
CONF_STATION: {
"name": "Wartenau",
"city": "Hamburg",
"combinedName": "Wartenau",
"id": "Master:10901",
"type": "STATION",
"coordinate": {"x": 10.035515, "y": 53.56478},
"serviceTypes": ["bus", "u"],
"hasStationInformation": True,
},
}
async def test_user_flow_no_results(hass):
"""Test that config flow works when there are no results."""
with patch(
"homeassistant.components.hvv_departures.hub.GTI.init",
return_value=FIXTURE_INIT,
), patch(
"homeassistant.components.hvv_departures.hub.GTI.checkName",
return_value={"returnCode": "OK", "results": []},
), patch(
"homeassistant.components.hvv_departures.async_setup", return_value=True
), patch(
"homeassistant.components.hvv_departures.async_setup_entry",
return_value=True,
):
# step: user
result_user = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={
CONF_HOST: "api-test.geofox.de",
CONF_USERNAME: "test-username",
CONF_PASSWORD: "test-password",
},
)
assert result_user["step_id"] == "station"
# step: station
result_station = await hass.config_entries.flow.async_configure(
result_user["flow_id"],
{CONF_STATION: "non_existing_station"},
)
assert result_station["step_id"] == "station"
assert result_station["errors"]["base"] == "no_results"
async def test_user_flow_invalid_auth(hass):
"""Test that config flow handles invalid auth."""
with patch(
"homeassistant.components.hvv_departures.hub.GTI.init",
side_effect=InvalidAuth(
"ERROR_TEXT",
"Bei der Verarbeitung der Anfrage ist ein technisches Problem aufgetreten.",
"Authentication failed!",
),
):
# step: user
result_user = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={
CONF_HOST: "api-test.geofox.de",
CONF_USERNAME: "test-username",
CONF_PASSWORD: "test-password",
},
)
assert result_user["type"] == "form"
assert result_user["errors"] == {"base": "invalid_auth"}
async def test_user_flow_cannot_connect(hass):
"""Test that config flow handles connection errors."""
with patch(
"homeassistant.components.hvv_departures.hub.GTI.init",
side_effect=CannotConnect(),
):
# step: user
result_user = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={
CONF_HOST: "api-test.geofox.de",
CONF_USERNAME: "test-username",
CONF_PASSWORD: "test-password",
},
)
assert result_user["type"] == "form"
assert result_user["errors"] == {"base": "cannot_connect"}
async def test_user_flow_station(hass):
"""Test that config flow handles empty data on step station."""
with patch(
"homeassistant.components.hvv_departures.hub.GTI.init",
return_value=True,
), patch(
"homeassistant.components.hvv_departures.hub.GTI.checkName",
return_value={"returnCode": "OK", "results": []},
):
# step: user
result_user = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={
CONF_HOST: "api-test.geofox.de",
CONF_USERNAME: "test-username",
CONF_PASSWORD: "test-password",
},
)
assert result_user["step_id"] == "station"
# step: station
result_station = await hass.config_entries.flow.async_configure(
result_user["flow_id"],
None,
)
assert result_station["type"] == "form"
assert result_station["step_id"] == "station"
async def test_user_flow_station_select(hass):
"""Test that config flow handles empty data on step station_select."""
with patch(
"homeassistant.components.hvv_departures.hub.GTI.init",
return_value=True,
), patch(
"homeassistant.components.hvv_departures.hub.GTI.checkName",
return_value=FIXTURE_CHECK_NAME,
):
result_user = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={
CONF_HOST: "api-test.geofox.de",
CONF_USERNAME: "test-username",
CONF_PASSWORD: "test-password",
},
)
result_station = await hass.config_entries.flow.async_configure(
result_user["flow_id"],
{CONF_STATION: "Wartenau"},
)
# step: station_select
result_station_select = await hass.config_entries.flow.async_configure(
result_station["flow_id"],
None,
)
assert result_station_select["type"] == "form"
assert result_station_select["step_id"] == "station_select"
async def test_options_flow(hass):
"""Test that options flow works."""
config_entry = MockConfigEntry(
version=1,
domain=DOMAIN,
title="Wartenau",
data=FIXTURE_CONFIG_ENTRY,
source="user",
connection_class=CONN_CLASS_CLOUD_POLL,
system_options={"disable_new_entities": False},
options=FIXTURE_OPTIONS,
unique_id="1234",
)
config_entry.add_to_hass(hass)
with patch("homeassistant.components.hvv_departures.PLATFORMS", new=[]), patch(
"homeassistant.components.hvv_departures.hub.GTI.init",
return_value=True,
), patch(
"homeassistant.components.hvv_departures.hub.GTI.departureList",
return_value=FIXTURE_DEPARTURE_LIST,
):
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
result = await hass.config_entries.options.async_init(config_entry.entry_id)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "init"
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={CONF_FILTER: ["0"], CONF_OFFSET: 15, CONF_REAL_TIME: False},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert config_entry.options == {
CONF_FILTER: [
{
"serviceID": "HHA-U:U1_HHA-U",
"stationIDs": ["Master:10902"],
"label": "Fuhlsbüttel Nord / Ochsenzoll / Norderstedt Mitte / Kellinghusenstraße / Ohlsdorf / Garstedt",
"serviceName": "U1",
}
],
CONF_OFFSET: 15,
CONF_REAL_TIME: False,
}
async def test_options_flow_invalid_auth(hass):
"""Test that options flow works."""
config_entry = MockConfigEntry(
version=1,
domain=DOMAIN,
title="Wartenau",
data=FIXTURE_CONFIG_ENTRY,
source="user",
connection_class=CONN_CLASS_CLOUD_POLL,
system_options={"disable_new_entities": False},
options=FIXTURE_OPTIONS,
unique_id="1234",
)
config_entry.add_to_hass(hass)
with patch("homeassistant.components.hvv_departures.PLATFORMS", new=[]), patch(
"homeassistant.components.hvv_departures.hub.GTI.init", return_value=True
), patch(
"homeassistant.components.hvv_departures.hub.GTI.departureList",
return_value=FIXTURE_DEPARTURE_LIST,
):
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
with patch(
"homeassistant.components.hvv_departures.hub.GTI.departureList",
side_effect=InvalidAuth(
"ERROR_TEXT",
"Bei der Verarbeitung der Anfrage ist ein technisches Problem aufgetreten.",
"Authentication failed!",
),
):
result = await hass.config_entries.options.async_init(config_entry.entry_id)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "init"
assert result["errors"] == {"base": "invalid_auth"}
async def test_options_flow_cannot_connect(hass):
"""Test that options flow works."""
config_entry = MockConfigEntry(
version=1,
domain=DOMAIN,
title="Wartenau",
data=FIXTURE_CONFIG_ENTRY,
source="user",
connection_class=CONN_CLASS_CLOUD_POLL,
system_options={"disable_new_entities": False},
options=FIXTURE_OPTIONS,
unique_id="1234",
)
config_entry.add_to_hass(hass)
with patch("homeassistant.components.hvv_departures.PLATFORMS", new=[]), patch(
"homeassistant.components.hvv_departures.hub.GTI.init", return_value=True
), patch(
"homeassistant.components.hvv_departures.hub.GTI.departureList",
return_value=FIXTURE_DEPARTURE_LIST,
):
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
with patch(
"homeassistant.components.hvv_departures.hub.GTI.departureList",
side_effect=CannotConnect(),
):
result = await hass.config_entries.options.async_init(config_entry.entry_id)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "init"
assert result["errors"] == {"base": "cannot_connect"}
|
import logging
import os
import posixpath
from absl import flags
from perfkitbenchmarker import events
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.linux_packages import collectd
flags.DEFINE_boolean('collectd', False,
'Install and run collectd on the guest.')
flags.DEFINE_string('collectd_output', None, 'Path to store collectd results.')
class _CollectdCollector(object):
"""Manages running collectd during a test, and fetching the CSV results."""
def __init__(self, target_dir):
self.target_dir = target_dir
def _FetchResults(self, vm):
"""Stops collectd on the VM, fetches CSV results."""
logging.info('Fetching collectd results')
local_dir = os.path.join(self.target_dir, vm.name + '-collectd')
# On the remote host, CSV files are in:
# self.csv_dir/<fqdn>/<category>.
# Since AWS VMs have a FQDN different from the VM name, we rename locally.
vm.PullFile(local_dir, posixpath.join(collectd.CSV_DIR, '*', ''))
def Before(self, unused_sender, benchmark_spec):
"""Install collectd.
Args:
benchmark_spec: benchmark_spec.BenchmarkSpec. The benchmark currently
running.
"""
logging.info('Installing collectd')
vms = benchmark_spec.vms
vm_util.RunThreaded(lambda vm: vm.Install('collectd'), vms)
def After(self, unused_sender, benchmark_spec):
"""Stop / delete collectd, fetch results from VMs.
Args:
benchmark_spec: benchmark_spec.BenchmarkSpec. The benchmark that stopped
running.
"""
logging.info('Stopping collectd')
vms = benchmark_spec.vms
vm_util.RunThreaded(self._FetchResults, vms)
def Register(parsed_flags):
"""Register the collector if FLAGS.collectd is set."""
if not parsed_flags.collectd:
return
logging.info('Registering collectd collector')
output_directory = parsed_flags.collectd_output or vm_util.GetTempDir()
if not os.path.isdir(output_directory):
raise IOError('collectd output directory does not exist: {0}'.format(
output_directory))
collector = _CollectdCollector(output_directory)
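  # Install collectd before the run phase and fetch results right after it, so only the run itself is measured.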
events.before_phase.connect(collector.Before, events.RUN_PHASE, weak=False)
events.after_phase.connect(collector.After, events.RUN_PHASE, weak=False)
|
import re
import os
import time
import attr
import pytest
import pytestqt.plugin
from PyQt5.QtCore import (pyqtSlot, pyqtSignal, QProcess, QObject,
QElapsedTimer, QProcessEnvironment)
from PyQt5.QtTest import QSignalSpy
from helpers import utils
from qutebrowser.utils import utils as quteutils
class InvalidLine(Exception):
"""Raised when the process prints a line which is not parsable."""
class ProcessExited(Exception):
"""Raised when the child process did exit."""
class WaitForTimeout(Exception):
"""Raised when wait_for didn't get the expected message."""
class BlacklistedMessageError(Exception):
"""Raised when ensure_not_logged found a message."""
@attr.s
class Line:
"""Container for a line of data the process emits.
Attributes:
data: The raw data passed to the constructor.
waited_for: If Process.wait_for was used on this line already.
"""
data = attr.ib()
waited_for = attr.ib(False)
def _render_log(data, *, verbose, threshold=100):
"""Shorten the given log without -v and convert to a string."""
data = [str(d) for d in data]
is_exception = any('Traceback (most recent call last):' in line or
'Uncaught exception' in line for line in data)
if (len(data) > threshold and
not verbose and
not is_exception and
not utils.ON_CI):
msg = '[{} lines suppressed, use -v to show]'.format(
len(data) - threshold)
data = [msg] + data[-threshold:]
if utils.ON_CI:
data = [utils.gha_group_begin('Log')] + data + [utils.gha_group_end()]
return '\n'.join(data)
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_makereport(item, call):
"""Add qutebrowser/server sections to captured output if a test failed."""
outcome = yield
if call.when not in ['call', 'teardown']:
return
report = outcome.get_result()
if report.passed:
return
quteproc_log = getattr(item, '_quteproc_log', None)
server_log = getattr(item, '_server_log', None)
if not hasattr(report.longrepr, 'addsection'):
# In some conditions (on macOS and Windows it seems), report.longrepr
# is actually a tuple. This is handled similarly in pytest-qt too.
return
if item.config.getoption('--capture') == 'no':
# Already printed live
return
verbose = item.config.getoption('--verbose')
if quteproc_log is not None:
report.longrepr.addsection("qutebrowser output",
_render_log(quteproc_log, verbose=verbose))
if server_log is not None:
report.longrepr.addsection("server output",
_render_log(server_log, verbose=verbose))
class Process(QObject):
"""Abstraction over a running test subprocess process.
Reads the log from its stdout and parses it.
Attributes:
_invalid: A list of lines which could not be parsed.
_data: A list of parsed lines.
_started: Whether the process was ever started.
proc: The QProcess for the underlying process.
exit_expected: Whether the process is expected to quit.
request: The request object for the current test.
Signals:
ready: Emitted when the server finished starting up.
new_data: Emitted when a new line was parsed.
"""
ready = pyqtSignal()
new_data = pyqtSignal(object)
KEYS = ['data']
def __init__(self, request, parent=None):
super().__init__(parent)
self.request = request
self.captured_log = []
self._started = False
self._invalid = []
self._data = []
self.proc = QProcess()
self.proc.setReadChannel(QProcess.StandardError)
self.exit_expected = None # Not started at all yet
def _log(self, line):
"""Add the given line to the captured log output."""
if self.request.config.getoption('--capture') == 'no':
print(line)
self.captured_log.append(line)
def log_summary(self, text):
"""Log the given line as summary/title."""
text = '\n{line} {text} {line}\n'.format(line='='*30, text=text)
self._log(text)
def _parse_line(self, line):
"""Parse the given line from the log.
Return:
A self.ParseResult member.
"""
raise NotImplementedError
def _executable_args(self):
"""Get the executable and necessary arguments as a tuple."""
raise NotImplementedError
def _default_args(self):
"""Get the default arguments to use if none were passed to start()."""
raise NotImplementedError
def _get_data(self):
"""Get the parsed data for this test.
Also waits for 0.5s to make sure any new data is received.
Subprocesses are expected to alias this to a public method with a
better name.
"""
self.proc.waitForReadyRead(500)
self.read_log()
return self._data
def _wait_signal(self, signal, timeout=5000, raising=True):
"""Wait for a signal to be emitted.
Should be used in a contextmanager.
"""
blocker = pytestqt.plugin.SignalBlocker(timeout=timeout,
raising=raising)
blocker.connect(signal)
return blocker
@pyqtSlot()
def read_log(self):
"""Read the log from the process' stdout."""
if not hasattr(self, 'proc'):
# I have no idea how this happens, but it does...
return
while self.proc.canReadLine():
line = self.proc.readLine()
line = bytes(line).decode('utf-8', errors='ignore').rstrip('\r\n')
try:
parsed = self._parse_line(line)
except InvalidLine:
self._invalid.append(line)
self._log("INVALID: {}".format(line))
continue
if parsed is None:
if self._invalid:
self._log("IGNORED: {}".format(line))
else:
self._data.append(parsed)
self.new_data.emit(parsed)
def start(self, args=None, *, env=None):
"""Start the process and wait until it started."""
self._start(args, env=env)
self._started = True
verbose = self.request.config.getoption('--verbose')
timeout = 60 if 'CI' in os.environ else 20
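        # Timeout in seconds; the loop below polls for the ready signal once per second.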
for _ in range(timeout):
with self._wait_signal(self.ready, timeout=1000,
raising=False) as blocker:
pass
if not self.is_running():
if self.exit_expected:
return
# _start ensures it actually started, but it might quit shortly
# afterwards
raise ProcessExited('\n' + _render_log(self.captured_log,
verbose=verbose))
if blocker.signal_triggered:
self._after_start()
return
raise WaitForTimeout("Timed out while waiting for process start.\n" +
_render_log(self.captured_log, verbose=verbose))
def _start(self, args, env):
"""Actually start the process."""
executable, exec_args = self._executable_args()
if args is None:
args = self._default_args()
procenv = QProcessEnvironment.systemEnvironment()
if env is not None:
for k, v in env.items():
procenv.insert(k, v)
self.proc.readyRead.connect(self.read_log)
self.proc.setProcessEnvironment(procenv)
self.proc.start(executable, exec_args + args)
ok = self.proc.waitForStarted()
assert ok
assert self.is_running()
def _after_start(self):
"""Do things which should be done immediately after starting."""
def before_test(self):
"""Restart process before a test if it exited before."""
self._invalid = []
if not self.is_running():
self.start()
def after_test(self):
"""Clean up data after each test.
Also checks self._invalid so the test counts as failed if there were
unexpected output lines earlier.
"""
__tracebackhide__ = lambda e: e.errisinstance(ProcessExited)
self.captured_log = []
if self._invalid:
# Wait for a bit so the full error has a chance to arrive
time.sleep(1)
# Exit the process to make sure we're in a defined state again
self.terminate()
self.clear_data()
raise InvalidLine('\n' + '\n'.join(self._invalid))
self.clear_data()
if not self.is_running() and not self.exit_expected and self._started:
raise ProcessExited
self.exit_expected = False
def clear_data(self):
"""Clear the collected data."""
self._data.clear()
def terminate(self):
"""Clean up and shut down the process."""
if not self.is_running():
return
if quteutils.is_windows:
self.proc.kill()
else:
self.proc.terminate()
ok = self.proc.waitForFinished()
if not ok:
self.proc.kill()
self.proc.waitForFinished()
def is_running(self):
"""Check if the process is currently running."""
return self.proc.state() == QProcess.Running
def _match_data(self, value, expected):
"""Helper for wait_for to match a given value.
The behavior of this method is slightly different depending on the
types of the filtered values:
- If expected is None, the filter always matches.
- If the value is a string or bytes object and the expected value is
too, the pattern is treated as a glob pattern (with only * active).
- If the value is a string or bytes object and the expected value is a
compiled regex, it is used for matching.
- If the value is any other type, == is used.
Return:
A bool
"""
regex_type = type(re.compile(''))
if expected is None:
return True
elif isinstance(expected, regex_type):
return expected.search(value)
elif isinstance(value, (bytes, str)):
return utils.pattern_match(pattern=expected, value=value)
else:
return value == expected
def _wait_for_existing(self, override_waited_for, after, **kwargs):
"""Check if there are any line in the history for wait_for.
Return: either the found line or None.
"""
for line in self._data:
matches = []
for key, expected in kwargs.items():
value = getattr(line, key)
matches.append(self._match_data(value, expected))
if after is None:
too_early = False
else:
too_early = ((line.timestamp, line.msecs) <
(after.timestamp, after.msecs))
if (all(matches) and
(not line.waited_for or override_waited_for) and
not too_early):
# If we waited for this line, chances are we don't mean the
# same thing the next time we use wait_for and it matches
# this line again.
line.waited_for = True
self._log("\n----> Already found {!r} in the log: {}".format(
kwargs.get('message', 'line'), line))
return line
return None
def _wait_for_new(self, timeout, do_skip, **kwargs):
"""Wait for a log message which doesn't exist yet.
Called via wait_for.
"""
__tracebackhide__ = lambda e: e.errisinstance(WaitForTimeout)
message = kwargs.get('message', None)
if message is not None:
elided = quteutils.elide(repr(message), 100)
self._log("\n----> Waiting for {} in the log".format(elided))
spy = QSignalSpy(self.new_data)
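        # The spy records every new_data emission, so lines arriving while we wait can be matched below.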
elapsed_timer = QElapsedTimer()
elapsed_timer.start()
while True:
# Skip if there are pending messages causing a skip
self._maybe_skip()
got_signal = spy.wait(timeout)
if not got_signal or elapsed_timer.hasExpired(timeout):
msg = "Timed out after {}ms waiting for {!r}.".format(
timeout, kwargs)
if do_skip:
pytest.skip(msg)
else:
raise WaitForTimeout(msg)
match = self._wait_for_match(spy, kwargs)
if match is not None:
if message is not None:
self._log("----> found it")
return match
raise quteutils.Unreachable
def _wait_for_match(self, spy, kwargs):
"""Try matching the kwargs with the given QSignalSpy."""
for args in spy:
assert len(args) == 1
line = args[0]
matches = []
for key, expected in kwargs.items():
value = getattr(line, key)
matches.append(self._match_data(value, expected))
if all(matches):
# If we waited for this line, chances are we don't mean the
# same thing the next time we use wait_for and it matches
# this line again.
line.waited_for = True
return line
return None
def _maybe_skip(self):
"""Can be overridden by subclasses to skip on certain log lines.
We can't run pytest.skip directly while parsing the log, as that would
lead to a pytest.skip.Exception error in a virtual Qt method, which
means pytest-qt fails the test.
Instead, we check for skip messages periodically in
QuteProc._maybe_skip, and call _maybe_skip after every parsed message
in wait_for (where it's most likely that new messages arrive).
"""
def wait_for(self, timeout=None, *, override_waited_for=False,
do_skip=False, divisor=1, after=None, **kwargs):
"""Wait until a given value is found in the data.
Keyword arguments to this function get interpreted as attributes of the
searched data. Every given argument is treated as a pattern which
the attribute has to match against.
Args:
timeout: How long to wait for the message.
override_waited_for: If set, gets triggered by previous messages
again.
do_skip: If set, call pytest.skip on a timeout.
divisor: A factor to decrease the timeout by.
            after: If matching an existing line, only accept lines logged
                   after the given one.
Return:
The matched line.
"""
__tracebackhide__ = lambda e: e.errisinstance(WaitForTimeout)
if timeout is None:
if do_skip:
timeout = 2000
elif 'CI' in os.environ:
timeout = 15000
else:
timeout = 5000
timeout //= divisor
if not kwargs:
raise TypeError("No keyword arguments given!")
for key in kwargs:
assert key in self.KEYS
existing = self._wait_for_existing(override_waited_for, after,
**kwargs)
if existing is not None:
return existing
else:
return self._wait_for_new(timeout=timeout, do_skip=do_skip,
**kwargs)
def ensure_not_logged(self, delay=500, **kwargs):
"""Make sure the data matching the given arguments is not logged.
If nothing is found in the log, we wait for delay ms to make sure
nothing arrives.
"""
__tracebackhide__ = lambda e: e.errisinstance(BlacklistedMessageError)
try:
line = self.wait_for(timeout=delay, override_waited_for=True,
**kwargs)
except WaitForTimeout:
return
else:
raise BlacklistedMessageError(line)
def wait_for_quit(self):
"""Wait until the process has quit."""
self.exit_expected = True
with self._wait_signal(self.proc.finished, timeout=15000):
pass
|
import os
from homeassistant.components import folder_watcher
from homeassistant.setup import async_setup_component
from tests.async_mock import Mock, patch
async def test_invalid_path_setup(hass):
"""Test that an invalid path is not set up."""
assert not await async_setup_component(
hass,
folder_watcher.DOMAIN,
{folder_watcher.DOMAIN: {folder_watcher.CONF_FOLDER: "invalid_path"}},
)
async def test_valid_path_setup(hass):
"""Test that a valid path is setup."""
cwd = os.path.join(os.path.dirname(__file__))
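    # The watched folder must be in the external-dirs allowlist for setup to succeed.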
hass.config.allowlist_external_dirs = {cwd}
with patch.object(folder_watcher, "Watcher"):
assert await async_setup_component(
hass,
folder_watcher.DOMAIN,
{folder_watcher.DOMAIN: {folder_watcher.CONF_FOLDER: cwd}},
)
def test_event():
"""Check that Home Assistant events are fired correctly on watchdog event."""
class MockPatternMatchingEventHandler:
"""Mock base class for the pattern matcher event handler."""
def __init__(self, patterns):
pass
with patch(
"homeassistant.components.folder_watcher.PatternMatchingEventHandler",
MockPatternMatchingEventHandler,
):
hass = Mock()
handler = folder_watcher.create_event_handler(["*"], hass)
handler.on_created(
Mock(is_directory=False, src_path="/hello/world.txt", event_type="created")
)
assert hass.bus.fire.called
assert hass.bus.fire.mock_calls[0][1][0] == folder_watcher.DOMAIN
assert hass.bus.fire.mock_calls[0][1][1] == {
"event_type": "created",
"path": "/hello/world.txt",
"file": "world.txt",
"folder": "/hello",
}
|
from homeassistant.const import CONF_NAME, DATA_RATE_MEGABYTES_PER_SECOND, STATE_IDLE
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from .const import (
CONF_LIMIT,
CONF_ORDER,
DOMAIN,
STATE_ATTR_TORRENT_INFO,
SUPPORTED_ORDER_MODES,
)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Transmission sensors."""
tm_client = hass.data[DOMAIN][config_entry.entry_id]
name = config_entry.data[CONF_NAME]
dev = [
TransmissionSpeedSensor(tm_client, name, "Down Speed", "download"),
TransmissionSpeedSensor(tm_client, name, "Up Speed", "upload"),
TransmissionStatusSensor(tm_client, name, "Status"),
TransmissionTorrentsSensor(tm_client, name, "Active Torrents", "active"),
TransmissionTorrentsSensor(tm_client, name, "Paused Torrents", "paused"),
TransmissionTorrentsSensor(tm_client, name, "Total Torrents", "total"),
TransmissionTorrentsSensor(tm_client, name, "Completed Torrents", "completed"),
TransmissionTorrentsSensor(tm_client, name, "Started Torrents", "started"),
]
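    # The second argument (update_before_add=True) requests an initial update for each sensor.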
async_add_entities(dev, True)
class TransmissionSensor(Entity):
"""A base class for all Transmission sensors."""
def __init__(self, tm_client, client_name, sensor_name, sub_type=None):
"""Initialize the sensor."""
self._tm_client = tm_client
self._client_name = client_name
self._name = sensor_name
self._sub_type = sub_type
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return f"{self._client_name} {self._name}"
@property
def unique_id(self):
"""Return the unique id of the entity."""
return f"{self._tm_client.api.host}-{self.name}"
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def should_poll(self):
"""Return the polling requirement for this sensor."""
return False
@property
def available(self):
"""Could the device be accessed during the last update call."""
return self._tm_client.api.available
async def async_added_to_hass(self):
"""Handle entity which will be added."""
@callback
def update():
"""Update the state."""
self.async_schedule_update_ha_state(True)
self.async_on_remove(
async_dispatcher_connect(
self.hass, self._tm_client.api.signal_update, update
)
)
class TransmissionSpeedSensor(TransmissionSensor):
"""Representation of a Transmission speed sensor."""
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return DATA_RATE_MEGABYTES_PER_SECOND
def update(self):
"""Get the latest data from Transmission and updates the state."""
data = self._tm_client.api.data
if data:
mb_spd = (
float(data.downloadSpeed)
if self._sub_type == "download"
else float(data.uploadSpeed)
)
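            # Transmission reports speeds in bytes/s; convert to megabytes/s (1024 ** 2 bytes).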
mb_spd = mb_spd / 1024 / 1024
self._state = round(mb_spd, 2 if mb_spd < 0.1 else 1)
class TransmissionStatusSensor(TransmissionSensor):
"""Representation of a Transmission status sensor."""
def update(self):
"""Get the latest data from Transmission and updates the state."""
data = self._tm_client.api.data
if data:
upload = data.uploadSpeed
download = data.downloadSpeed
if upload > 0 and download > 0:
self._state = "Up/Down"
elif upload > 0 and download == 0:
self._state = "Seeding"
elif upload == 0 and download > 0:
self._state = "Downloading"
else:
self._state = STATE_IDLE
else:
self._state = None
class TransmissionTorrentsSensor(TransmissionSensor):
"""Representation of a Transmission torrents sensor."""
    SUBTYPE_MODES = {
        "started": ("downloading",),
        "completed": ("seeding",),
        "paused": ("stopped",),
        "active": ("seeding", "downloading"),
        "total": None,
    }
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return "Torrents"
@property
def device_state_attributes(self):
"""Return the state attributes, if any."""
limit = self._tm_client.config_entry.options[CONF_LIMIT]
order = self._tm_client.config_entry.options[CONF_ORDER]
torrents = self._tm_client.api.torrents[0:limit]
info = _torrents_info(
torrents,
order=order,
statuses=self.SUBTYPE_MODES[self._sub_type],
)
return {
STATE_ATTR_TORRENT_INFO: info,
}
def update(self):
"""Get the latest data from Transmission and updates the state."""
torrents = _filter_torrents(
self._tm_client.api.torrents, statuses=self.SUBTYPE_MODES[self._sub_type]
)
self._state = len(torrents)
def _filter_torrents(torrents, statuses=None):
return [
torrent
for torrent in torrents
if statuses is None or torrent.status in statuses
]
def _torrents_info(torrents, order, statuses=None):
infos = {}
    torrents = _filter_torrents(torrents, statuses)
    torrents = SUPPORTED_ORDER_MODES[order](torrents)
    for torrent in torrents:
info = infos[torrent.name] = {
"added_date": torrent.addedDate,
"percent_done": f"{torrent.percentDone * 100:.2f}",
"status": torrent.status,
"id": torrent.id,
}
try:
info["eta"] = str(torrent.eta)
except ValueError:
pass
return infos
|
import logging
import os
from slackclient import SlackClient
from paasta_tools.utils import optionally_load_system_paasta_config
log = logging.getLogger(__name__)
class PaastaSlackClient(SlackClient):
def __init__(self, token):
super().__init__(self)
if token is None:
log.warning("No slack token available, will only log")
self.sc = None
else:
self.sc = SlackClient(token)
self.token = token
def post(self, channels, message=None, blocks=None, thread_ts=None):
responses = []
if self.token is not None:
for channel in channels:
log.info(f"Slack notification [{channel}]: {message}")
response = self.sc.api_call(
"chat.postMessage",
channel=channel,
text=message,
blocks=blocks,
thread_ts=thread_ts,
)
if response["ok"] is not True:
log.error("Posting to slack failed: {}".format(response["error"]))
responses.append(response)
else:
log.info(f"(not sent to Slack) {channels}: {message}")
return responses
def post_single(self, channel, message=None, blocks=None, thread_ts=None):
if self.token is not None:
log.info(f"Slack notification [{channel}]: {message}")
response = self.sc.api_call(
"chat.postMessage",
channel=channel,
text=message,
blocks=blocks,
thread_ts=thread_ts,
)
if response["ok"] is not True:
log.error("Posting to slack failed: {}".format(response["error"]))
return response
else:
log.info(f"(not sent to Slack) {channel}: {message}")
return {"ok"}
def get_slack_client():
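    # Prefer the SLACK_API_TOKEN environment variable, falling back to the system PaaSTA config.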
token = os.environ.get("SLACK_API_TOKEN", None)
if token is None:
token = optionally_load_system_paasta_config().get_slack_token()
return PaastaSlackClient(token=token)
|
import io
import pytest
from homeassistant.components import light, scene
from homeassistant.const import ATTR_ENTITY_ID, ENTITY_MATCH_ALL, SERVICE_TURN_ON
from homeassistant.setup import async_setup_component
from homeassistant.util.yaml import loader as yaml_loader
from tests.common import async_mock_service
@pytest.fixture(autouse=True)
def entities(hass):
"""Initialize the test light."""
platform = getattr(hass.components, "test.light")
platform.init()
yield platform.ENTITIES[0:2]
async def test_config_yaml_alias_anchor(hass, entities):
"""Test the usage of YAML aliases and anchors.
The following test scene configuration is equivalent to:
scene:
- name: test
entities:
light_1: &light_1_state
state: 'on'
brightness: 100
light_2: *light_1_state
When encountering a YAML alias/anchor, the PyYAML parser will use a
reference to the original dictionary, instead of creating a copy, so
care needs to be taken to not modify the original.
"""
light_1, light_2 = await setup_lights(hass, entities)
entity_state = {"state": "on", "brightness": 100}
assert await async_setup_component(
hass,
scene.DOMAIN,
{
"scene": [
{
"name": "test",
"entities": {
light_1.entity_id: entity_state,
light_2.entity_id: entity_state,
},
}
]
},
)
await hass.async_block_till_done()
await activate(hass, "scene.test")
assert light.is_on(hass, light_1.entity_id)
assert light.is_on(hass, light_2.entity_id)
assert 100 == light_1.last_call("turn_on")[1].get("brightness")
assert 100 == light_2.last_call("turn_on")[1].get("brightness")
async def test_config_yaml_bool(hass, entities):
"""Test parsing of booleans in yaml config."""
light_1, light_2 = await setup_lights(hass, entities)
config = (
"scene:\n"
" - name: test\n"
" entities:\n"
f" {light_1.entity_id}: on\n"
f" {light_2.entity_id}:\n"
" state: on\n"
" brightness: 100\n"
)
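    # PyYAML parses the bare "on" as boolean True; scene setup must accept it as a state.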
with io.StringIO(config) as file:
doc = yaml_loader.yaml.safe_load(file)
assert await async_setup_component(hass, scene.DOMAIN, doc)
await hass.async_block_till_done()
await activate(hass, "scene.test")
assert light.is_on(hass, light_1.entity_id)
assert light.is_on(hass, light_2.entity_id)
assert 100 == light_2.last_call("turn_on")[1].get("brightness")
async def test_activate_scene(hass, entities):
"""Test active scene."""
light_1, light_2 = await setup_lights(hass, entities)
assert await async_setup_component(
hass,
scene.DOMAIN,
{
"scene": [
{
"name": "test",
"entities": {
light_1.entity_id: "on",
light_2.entity_id: {"state": "on", "brightness": 100},
},
}
]
},
)
await hass.async_block_till_done()
await activate(hass, "scene.test")
assert light.is_on(hass, light_1.entity_id)
assert light.is_on(hass, light_2.entity_id)
assert light_2.last_call("turn_on")[1].get("brightness") == 100
calls = async_mock_service(hass, "light", "turn_on")
await hass.services.async_call(
scene.DOMAIN, "turn_on", {"transition": 42, "entity_id": "scene.test"}
)
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].domain == "light"
assert calls[0].service == "turn_on"
assert calls[0].data.get("transition") == 42
async def activate(hass, entity_id=ENTITY_MATCH_ALL):
"""Activate a scene."""
data = {}
if entity_id:
data[ATTR_ENTITY_ID] = entity_id
await hass.services.async_call(scene.DOMAIN, SERVICE_TURN_ON, data, blocking=True)
async def test_services_registered(hass):
"""Test we register services with empty config."""
assert await async_setup_component(hass, "scene", {})
assert hass.services.has_service("scene", "reload")
assert hass.services.has_service("scene", "turn_on")
assert hass.services.has_service("scene", "apply")
async def setup_lights(hass, entities):
"""Set up the light component."""
assert await async_setup_component(
hass, light.DOMAIN, {light.DOMAIN: {"platform": "test"}}
)
await hass.async_block_till_done()
light_1, light_2 = entities
await hass.services.async_call(
"light",
"turn_off",
{"entity_id": [light_1.entity_id, light_2.entity_id]},
blocking=True,
)
await hass.async_block_till_done()
assert not light.is_on(hass, light_1.entity_id)
assert not light.is_on(hass, light_2.entity_id)
return light_1, light_2
|
from django.db import migrations
def set_source_language(apps, schema_editor):
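    # Use the historical model from the migration state, so this works regardless of the current code.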
Glossary = apps.get_model("glossary", "Glossary")
db_alias = schema_editor.connection.alias
for glossary in Glossary.objects.using(db_alias).select_related("project"):
glossary.source_language = glossary.project.source_language
glossary.save(update_fields=["source_language"])
class Migration(migrations.Migration):
dependencies = [
("glossary", "0004_glossary_source_language"),
]
operations = [
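        # Reverse is a no-op; elidable=True lets this data migration be dropped when squashing.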
migrations.RunPython(
set_source_language, migrations.RunPython.noop, elidable=True
),
]
|
import io
import logging
import re
from babelfish import Language, language_converters
from guessit import guessit
try:
from lxml import etree
except ImportError:
try:
import xml.etree.cElementTree as etree
except ImportError:
import xml.etree.ElementTree as etree
from requests import Session
from zipfile import ZipFile
from . import Provider
from ..exceptions import ProviderError
from ..matches import guess_matches
from ..subtitle import Subtitle, fix_line_ending
from ..video import Episode
logger = logging.getLogger(__name__)
class PodnapisiSubtitle(Subtitle):
"""Podnapisi Subtitle."""
provider_name = 'podnapisi'
def __init__(self, language, hearing_impaired, page_link, pid, releases, title, season=None, episode=None,
year=None):
super(PodnapisiSubtitle, self).__init__(language, hearing_impaired=hearing_impaired, page_link=page_link)
self.pid = pid
self.releases = releases
self.title = title
self.season = season
self.episode = episode
self.year = year
@property
def id(self):
return self.pid
@property
def info(self):
return ' '.join(self.releases) or self.pid
def get_matches(self, video):
matches = guess_matches(video, {
'title': self.title,
'year': self.year,
'season': self.season,
'episode': self.episode
})
video_type = 'episode' if isinstance(video, Episode) else 'movie'
for release in self.releases:
matches |= guess_matches(video, guessit(release, {'type': video_type}))
return matches
class PodnapisiProvider(Provider):
"""Podnapisi Provider."""
languages = ({Language('por', 'BR'), Language('srp', script='Latn')} |
{Language.fromalpha2(l) for l in language_converters['alpha2'].codes})
server_url = 'https://www.podnapisi.net/subtitles/'
subtitle_class = PodnapisiSubtitle
def __init__(self):
self.session = None
def initialize(self):
self.session = Session()
self.session.headers['User-Agent'] = self.user_agent
def terminate(self):
self.session.close()
def query(self, language, keyword, season=None, episode=None, year=None):
# set parameters, see http://www.podnapisi.net/forum/viewtopic.php?f=62&t=26164#p212652
params = {'sXML': 1, 'sL': str(language), 'sK': keyword}
is_episode = False
if season and episode:
is_episode = True
params['sTS'] = season
params['sTE'] = episode
if year:
params['sY'] = year
# loop over paginated results
logger.info('Searching subtitles %r', params)
subtitles = []
pids = set()
while True:
# query the server
r = self.session.get(self.server_url + 'search/old', params=params, timeout=10)
r.raise_for_status()
xml = etree.fromstring(r.content)
# exit if no results
if not int(xml.find('pagination/results').text):
logger.debug('No subtitles found')
break
# loop over subtitles
for subtitle_xml in xml.findall('subtitle'):
# read xml elements
pid = subtitle_xml.find('pid').text
# ignore duplicates, see http://www.podnapisi.net/forum/viewtopic.php?f=62&t=26164&start=10#p213321
if pid in pids:
continue
language = Language.fromietf(subtitle_xml.find('language').text)
hearing_impaired = 'n' in (subtitle_xml.find('flags').text or '')
page_link = subtitle_xml.find('url').text
releases = []
if subtitle_xml.find('release').text:
for release in subtitle_xml.find('release').text.split():
release = re.sub(r'\.+$', '', release) # remove trailing dots
release = ''.join(filter(lambda x: ord(x) < 128, release)) # remove non-ascii characters
releases.append(release)
title = subtitle_xml.find('title').text
season = int(subtitle_xml.find('tvSeason').text)
episode = int(subtitle_xml.find('tvEpisode').text)
year = int(subtitle_xml.find('year').text)
if is_episode:
subtitle = self.subtitle_class(language, hearing_impaired, page_link, pid, releases, title,
season=season, episode=episode, year=year)
else:
subtitle = self.subtitle_class(language, hearing_impaired, page_link, pid, releases, title,
year=year)
logger.debug('Found subtitle %r', subtitle)
subtitles.append(subtitle)
pids.add(pid)
# stop on last page
if int(xml.find('pagination/current').text) >= int(xml.find('pagination/count').text):
break
# increment current page
params['page'] = int(xml.find('pagination/current').text) + 1
logger.debug('Getting page %d', params['page'])
return subtitles
def list_subtitles(self, video, languages):
season = episode = None
if isinstance(video, Episode):
titles = [video.series] + video.alternative_series
season = video.season
episode = video.episode
else:
titles = [video.title] + video.alternative_titles
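        # Try each candidate title in turn and return as soon as one query yields subtitles.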
for title in titles:
subtitles = [s for l in languages for s in
self.query(l, title, season=season, episode=episode, year=video.year)]
if subtitles:
return subtitles
return []
def download_subtitle(self, subtitle):
# download as a zip
logger.info('Downloading subtitle %r', subtitle)
r = self.session.get(self.server_url + subtitle.pid + '/download', params={'container': 'zip'}, timeout=10)
r.raise_for_status()
# open the zip
with ZipFile(io.BytesIO(r.content)) as zf:
if len(zf.namelist()) > 1:
raise ProviderError('More than one file to unzip')
subtitle.content = fix_line_ending(zf.read(zf.namelist()[0]))
|
import sys
import mne
def run():
"""Run command."""
parser = mne.commands.utils.get_optparser(
__file__, usage='mne compare_fiff <file_a> <file_b>')
options, args = parser.parse_args()
if len(args) != 2:
parser.print_help()
sys.exit(1)
mne.viz.compare_fiff(args[0], args[1])
mne.utils.run_command_if_main()
|
import re
from django.db import models
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
from weblate.trans.fields import RegexField
from weblate.trans.mixins import CacheKeyMixin
from weblate.utils.stats import ComponentListStats
class ComponentListQuerySet(models.QuerySet):
def order(self):
return self.order_by("name")
class ComponentList(models.Model, CacheKeyMixin):
name = models.CharField(
verbose_name=_("Component list name"),
max_length=100,
unique=True,
help_text=_("Display name"),
)
slug = models.SlugField(
verbose_name=_("URL slug"),
db_index=True,
unique=True,
max_length=100,
help_text=_("Name used in URLs and filenames."),
)
show_dashboard = models.BooleanField(
verbose_name=_("Show on dashboard"),
default=True,
db_index=True,
help_text=_(
"When enabled this component list will be shown as a tab on "
"the dashboard"
),
)
components = models.ManyToManyField("Component", blank=True)
objects = ComponentListQuerySet.as_manager()
class Meta:
verbose_name = _("Component list")
verbose_name_plural = _("Component lists")
def __str__(self):
return self.name
def get_absolute_url(self):
return reverse("component-list", kwargs={"name": self.slug})
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.stats = ComponentListStats(self)
def tab_slug(self):
return "list-" + self.slug
class AutoComponentList(models.Model):
project_match = RegexField(
verbose_name=_("Project regular expression"),
max_length=200,
default="^$",
help_text=_("Regular expression which is used to match project slug."),
)
component_match = RegexField(
verbose_name=_("Component regular expression"),
max_length=200,
default="^$",
help_text=_("Regular expression which is used to match component slug."),
)
componentlist = models.ForeignKey(
ComponentList,
verbose_name=_("Component list to assign"),
on_delete=models.deletion.CASCADE,
)
class Meta:
verbose_name = _("Automatic component list assignment")
verbose_name_plural = _("Automatic component list assignments")
def __str__(self):
return self.componentlist.name
def check_match(self, component):
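        # Assign the component to the list only when both the project and component slugs match.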
if not re.match(self.project_match, component.project.slug):
return
if not re.match(self.component_match, component.slug):
return
self.componentlist.components.add(component)
|
import logging
import blebox_uniapi
import pytest
from homeassistant.const import (
ATTR_DEVICE_CLASS,
ATTR_UNIT_OF_MEASUREMENT,
DEVICE_CLASS_TEMPERATURE,
STATE_UNKNOWN,
TEMP_CELSIUS,
)
from .conftest import async_setup_entity, mock_feature
from tests.async_mock import AsyncMock, PropertyMock
@pytest.fixture(name="tempsensor")
def tempsensor_fixture():
"""Return a default sensor mock."""
feature = mock_feature(
"sensors",
blebox_uniapi.sensor.Temperature,
unique_id="BleBox-tempSensor-1afe34db9437-0.temperature",
full_name="tempSensor-0.temperature",
device_class="temperature",
unit="celsius",
current=None,
)
product = feature.product
type(product).name = PropertyMock(return_value="My temperature sensor")
type(product).model = PropertyMock(return_value="tempSensor")
return (feature, "sensor.tempsensor_0_temperature")
async def test_init(tempsensor, hass, config):
"""Test sensor default state."""
_, entity_id = tempsensor
entry = await async_setup_entity(hass, config, entity_id)
assert entry.unique_id == "BleBox-tempSensor-1afe34db9437-0.temperature"
state = hass.states.get(entity_id)
assert state.name == "tempSensor-0.temperature"
assert state.attributes[ATTR_DEVICE_CLASS] == DEVICE_CLASS_TEMPERATURE
assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == TEMP_CELSIUS
assert state.state == STATE_UNKNOWN
device_registry = await hass.helpers.device_registry.async_get_registry()
device = device_registry.async_get(entry.device_id)
assert device.name == "My temperature sensor"
assert device.identifiers == {("blebox", "abcd0123ef5678")}
assert device.manufacturer == "BleBox"
assert device.model == "tempSensor"
assert device.sw_version == "1.23"
async def test_update(tempsensor, hass, config):
"""Test sensor update."""
feature_mock, entity_id = tempsensor
def initial_update():
feature_mock.current = 25.18
feature_mock.async_update = AsyncMock(side_effect=initial_update)
await async_setup_entity(hass, config, entity_id)
state = hass.states.get(entity_id)
assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == TEMP_CELSIUS
assert state.state == "25.18"
async def test_update_failure(tempsensor, hass, config, caplog):
"""Test that update failures are logged."""
caplog.set_level(logging.ERROR)
feature_mock, entity_id = tempsensor
feature_mock.async_update = AsyncMock(side_effect=blebox_uniapi.error.ClientError)
await async_setup_entity(hass, config, entity_id)
assert f"Updating '{feature_mock.full_name}' failed: " in caplog.text
|
from __future__ import print_function
import os
import sys
import inspect
import traceback
import optparse
import logging
import configobj
import unittest
try:
import cPickle as pickle
except ImportError:
import pickle as pickle
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
try:
from setproctitle import setproctitle
except ImportError:
setproctitle = None
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__))))
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
'src')))
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
'src', 'collectors')))
def run_only(func, predicate):
if predicate():
return func
else:
def f(arg):
pass
return f
def get_collector_config(key, value):
config = configobj.ConfigObj()
config['server'] = {}
config['server']['collectors_config_path'] = ''
config['collectors'] = {}
config['collectors']['default'] = {}
config['collectors']['default']['hostname_method'] = "uname_short"
config['collectors'][key] = value
return config
class CollectorTestCase(unittest.TestCase):
def setDocExample(self, collector, metrics, defaultpath=None):
if not len(metrics):
return False
filePath = os.path.join(os.path.dirname(os.path.realpath(__file__)),
'docs', 'collectors', collector + '.md')
if not os.path.exists(filePath):
return False
if not os.access(filePath, os.W_OK):
return False
if not os.access(filePath, os.R_OK):
return False
try:
with open(filePath, 'Ur') as fp:
content = fp.readlines()
with open(filePath, 'w') as fp:
for line in content:
if line.strip() == '__EXAMPLESHERE__':
for metric in sorted(metrics.iterkeys()):
metricPath = 'servers.hostname.'
if defaultpath:
metricPath += defaultpath + '.'
metricPath += metric
metricPath = metricPath.replace('..', '.')
fp.write('%s %s\n' % (metricPath, metrics[metric]))
else:
fp.write(line)
except IOError:
return False
return True
def getFixtureDirPath(self):
path = os.path.join(
os.path.dirname(inspect.getfile(self.__class__)),
'fixtures')
return path
def getFixturePath(self, fixture_name):
path = os.path.join(self.getFixtureDirPath(),
fixture_name)
if not os.access(path, os.R_OK):
print("Missing Fixture " + path)
return path
def getFixture(self, fixture_name):
with open(self.getFixturePath(fixture_name), 'r') as f:
return StringIO(f.read())
def getFixtures(self):
fixtures = []
        for root, dirnames, filenames in os.walk(self.getFixtureDirPath()):
            for filename in filenames:
                fixtures.append(os.path.join(root, filename))
return fixtures
def getPickledResults(self, results_name):
with open(self.getFixturePath(results_name), 'r') as f:
return pickle.load(f)
def setPickledResults(self, results_name, data):
with open(self.getFixturePath(results_name), 'w+b') as f:
pickle.dump(data, f)
def assertUnpublished(self, mock, key, value, expected_value=0):
return self.assertPublished(mock, key, value, expected_value)
def assertPublished(self, mock, key, value, expected_value=1):
if type(mock) is list:
for m in mock:
calls = (filter(lambda x: x[0][0] == key, m.call_args_list))
if len(calls) > 0:
break
else:
calls = filter(lambda x: x[0][0] == key, mock.call_args_list)
actual_value = len(calls)
message = '%s: actual number of calls %d, expected %d' % (
key, actual_value, expected_value)
self.assertEqual(actual_value, expected_value, message)
if expected_value:
actual_value = calls[0][0][1]
expected_value = value
precision = 0
if isinstance(value, tuple):
expected_value, precision = expected_value
message = '%s: actual %r, expected %r' % (key,
actual_value,
expected_value)
if precision is not None:
self.assertAlmostEqual(float(actual_value),
float(expected_value),
places=precision,
msg=message)
else:
self.assertEqual(actual_value, expected_value, message)
def assertUnpublishedMany(self, mock, dict, expected_value=0):
return self.assertPublishedMany(mock, dict, expected_value)
def assertPublishedMany(self, mock, dict, expected_value=1):
for key, value in dict.iteritems():
self.assertPublished(mock, key, value, expected_value)
if type(mock) is list:
for m in mock:
m.reset_mock()
else:
mock.reset_mock()
def assertUnpublishedMetric(self, mock, key, value, expected_value=0):
return self.assertPublishedMetric(mock, key, value, expected_value)
def assertPublishedMetric(self, mock, key, value, expected_value=1):
calls = filter(lambda x: x[0][0].path.find(key) != -1,
mock.call_args_list)
actual_value = len(calls)
message = '%s: actual number of calls %d, expected %d' % (
key, actual_value, expected_value)
self.assertEqual(actual_value, expected_value, message)
if expected_value:
actual_value = calls[0][0][0].value
expected_value = value
precision = 0
if isinstance(value, tuple):
expected_value, precision = expected_value
message = '%s: actual %r, expected %r' % (key,
actual_value,
expected_value)
if precision is not None:
self.assertAlmostEqual(float(actual_value),
float(expected_value),
places=precision,
msg=message)
else:
self.assertEqual(actual_value, expected_value, message)
def assertUnpublishedMetricMany(self, mock, dict, expected_value=0):
return self.assertPublishedMetricMany(mock, dict, expected_value)
def assertPublishedMetricMany(self, mock, dict, expected_value=1):
for key, value in dict.iteritems():
self.assertPublishedMetric(mock, key, value, expected_value)
mock.reset_mock()
collectorTests = {}
def getCollectorTests(path):
for f in os.listdir(path):
cPath = os.path.abspath(os.path.join(path, f))
        if (os.path.isfile(cPath) and
                f.startswith('test') and
                f.endswith('.py')):
sys.path.append(os.path.dirname(cPath))
sys.path.append(os.path.dirname(os.path.dirname(cPath)))
modname = f[:-3]
try:
# Import the module
collectorTests[modname] = __import__(modname,
globals(),
locals(),
['*'])
except Exception:
print("Failed to import module: %s. %s" % (
modname, traceback.format_exc()))
continue
for f in os.listdir(path):
cPath = os.path.abspath(os.path.join(path, f))
if os.path.isdir(cPath):
getCollectorTests(cPath)
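# A minimal sketch of the layout getCollectorTests() expects (paths are
# illustrative only): any file named test*.py found while recursing is
# imported and registered under its module name, e.g.
#
#   src/collectors/cpu/testcpu.py       -> collectorTests['testcpu']
#   src/collectors/memory/testmemory.py -> collectorTests['testmemory']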
###############################################################################
if __name__ == "__main__":
if setproctitle:
setproctitle('test.py')
# Disable log output for the unit tests
log = logging.getLogger("diamond")
log.addHandler(logging.StreamHandler(sys.stderr))
log.disabled = True
# Initialize Options
parser = optparse.OptionParser()
parser.add_option("-c",
"--collector",
dest="collector",
default="",
help="Run a single collector's unit tests")
parser.add_option("-v",
"--verbose",
dest="verbose",
default=1,
action="count",
help="verbose")
# Parse Command Line Args
(options, args) = parser.parse_args()
cPath = os.path.abspath(os.path.join(os.path.dirname(__file__),
'src',
'collectors',
options.collector))
dPath = os.path.abspath(os.path.join(os.path.dirname(__file__),
'src',
'diamond'))
getCollectorTests(cPath)
if not options.collector:
# Only pull in diamond tests when a specific collector
# hasn't been specified
getCollectorTests(dPath)
loader = unittest.TestLoader()
tests = []
for test in collectorTests:
for name, c in inspect.getmembers(collectorTests[test],
inspect.isclass):
if not issubclass(c, unittest.TestCase):
continue
tests.append(loader.loadTestsFromTestCase(c))
suite = unittest.TestSuite(tests)
results = unittest.TextTestRunner(verbosity=options.verbose).run(suite)
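    # Parse the counts out of the TextTestResult repr, which looks like
    # "<unittest.runner.TextTestResult run=10 errors=0 failures=1>".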
results = str(results)
results = results.replace('>', '').split()[1:]
resobj = {}
for result in results:
result = result.split('=')
resobj[result[0]] = int(result[1])
if resobj['failures'] > 0:
sys.exit(1)
if resobj['errors'] > 0:
sys.exit(2)
sys.exit(0)
|
from pynzbgetapi import NZBGetAPIException
from homeassistant.components.nzbget.const import DOMAIN
from homeassistant.config_entries import SOURCE_USER
from homeassistant.const import CONF_SCAN_INTERVAL, CONF_VERIFY_SSL
from homeassistant.data_entry_flow import (
RESULT_TYPE_ABORT,
RESULT_TYPE_CREATE_ENTRY,
RESULT_TYPE_FORM,
)
from homeassistant.setup import async_setup_component
from . import (
ENTRY_CONFIG,
USER_INPUT,
_patch_async_setup,
_patch_async_setup_entry,
_patch_history,
_patch_status,
_patch_version,
)
from tests.async_mock import patch
from tests.common import MockConfigEntry
async def test_user_form(hass):
"""Test we get the user initiated form."""
await async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] == RESULT_TYPE_FORM
assert result["errors"] == {}
with _patch_version(), _patch_status(), _patch_history(), _patch_async_setup() as mock_setup, _patch_async_setup_entry() as mock_setup_entry:
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
USER_INPUT,
)
await hass.async_block_till_done()
assert result["type"] == RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "10.10.10.30"
assert result["data"] == {**USER_INPUT, CONF_VERIFY_SSL: False}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_user_form_show_advanced_options(hass):
"""Test we get the user initiated form with advanced options shown."""
await async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER, "show_advanced_options": True}
)
assert result["type"] == RESULT_TYPE_FORM
assert result["errors"] == {}
user_input_advanced = {
**USER_INPUT,
CONF_VERIFY_SSL: True,
}
with _patch_version(), _patch_status(), _patch_history(), _patch_async_setup() as mock_setup, _patch_async_setup_entry() as mock_setup_entry:
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input_advanced,
)
await hass.async_block_till_done()
assert result["type"] == RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "10.10.10.30"
assert result["data"] == {**USER_INPUT, CONF_VERIFY_SSL: True}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_user_form_cannot_connect(hass):
"""Test we handle cannot connect error."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
with patch(
"homeassistant.components.nzbget.coordinator.NZBGetAPI.version",
side_effect=NZBGetAPIException(),
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
USER_INPUT,
)
assert result["type"] == RESULT_TYPE_FORM
assert result["errors"] == {"base": "cannot_connect"}
async def test_user_form_unexpected_exception(hass):
"""Test we handle unexpected exception."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
with patch(
"homeassistant.components.nzbget.coordinator.NZBGetAPI.version",
side_effect=Exception(),
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
USER_INPUT,
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "unknown"
async def test_user_form_single_instance_allowed(hass):
"""Test that configuring more than one instance is rejected."""
entry = MockConfigEntry(domain=DOMAIN, data=ENTRY_CONFIG)
entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data=USER_INPUT,
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "single_instance_allowed"
async def test_options_flow(hass, nzbget_api):
"""Test updating options."""
entry = MockConfigEntry(
domain=DOMAIN,
data=ENTRY_CONFIG,
options={CONF_SCAN_INTERVAL: 5},
)
entry.add_to_hass(hass)
with patch("homeassistant.components.nzbget.PLATFORMS", []):
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert entry.options[CONF_SCAN_INTERVAL] == 5
result = await hass.config_entries.options.async_init(entry.entry_id)
assert result["type"] == RESULT_TYPE_FORM
assert result["step_id"] == "init"
with _patch_async_setup(), _patch_async_setup_entry():
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={CONF_SCAN_INTERVAL: 15},
)
await hass.async_block_till_done()
assert result["type"] == RESULT_TYPE_CREATE_ENTRY
assert result["data"][CONF_SCAN_INTERVAL] == 15
|
import locale
# High performance method for English (no translation needed)
loc = locale.getlocale()[0]
if loc is None or loc.startswith('en'):
class NullTranslation(object):
        def gettext(self, message):
            return message
def ngettext(self, str1, strN, n):
if n == 1:
return str1.replace("{0}", str(n))
else:
return strN.replace("{0}", str(n))
def get_translation_for(package_name):
return NullTranslation()
else:
import os
import gettext
# If not installed with setuptools, this might not be available
try:
import pkg_resources
except ImportError:
pkg_resources = None
try:
from typing import Tuple, List, Callable
except ImportError:
pass
    local_dir = os.path.dirname(__file__)
def get_translation_for(
package_name): # type: (str) -> gettext.NullTranslations
'''Find and return gettext translation for package
(Try to find folder manually if setuptools does not exist)
'''
if '.' in package_name:
package_name = '.'.join(package_name.split('.')[:-1])
localedir = None
if pkg_resources is None:
mydir = os.path.join(local_dir, 'i18n')
else:
mydir = pkg_resources.resource_filename(package_name, 'i18n')
for localedir in mydir, None:
localefile = gettext.find(package_name, localedir)
if localefile:
break
return gettext.translation(
package_name, localedir=localedir, fallback=True)
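    # Usage sketch (package name is illustrative only): the last dotted
    # component is stripped before the lookup, so both calls below resolve
    # translations for the "mypkg" package:
    #
    #   _t = get_translation_for('mypkg')
    #   _t = get_translation_for('mypkg.cli')
    #   _t.ngettext('{0} file', '{0} files', 3)
    #
    # Under the English fast path above, NullTranslation would render the
    # last call as '3 files'.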
|
import tensorflow as tf
from keras import backend as K
from keras.engine import Layer
from matchzoo.contrib.layers.attention_layer import AttentionLayer
class MultiPerspectiveLayer(Layer):
"""
A keras implementation of multi-perspective layer of BiMPM.
For detailed information, see Bilateral Multi-Perspective
Matching for Natural Language Sentences, section 3.2.
Examples:
>>> import matchzoo as mz
>>> perspective={'full': True, 'max-pooling': True,
... 'attentive': True, 'max-attentive': True}
>>> layer = mz.contrib.layers.MultiPerspectiveLayer(
... att_dim=50, mp_dim=20, perspective=perspective)
>>> layer.compute_output_shape(
... [(32, 10, 100), (32, 50), None, (32, 50), None,
... [(32, 40, 100), (32, 50), None, (32, 50), None]])
(32, 10, 83)
"""
def __init__(self,
att_dim: int,
mp_dim: int,
perspective: dict):
"""Class initialization."""
super(MultiPerspectiveLayer, self).__init__()
self._att_dim = att_dim
self._mp_dim = mp_dim
self._perspective = perspective
@classmethod
def list_available_perspectives(cls) -> list:
"""List available strategy for multi-perspective matching."""
return ['full', 'max-pooling', 'attentive', 'max-attentive']
@property
def num_perspective(self):
"""Get the number of perspectives that is True."""
return sum(self._perspective.values())
def build(self, input_shape: list):
"""Input shape."""
# The shape of the weights is l * d.
if self._perspective.get('full'):
self.full_match = MpFullMatch(self._mp_dim)
if self._perspective.get('max-pooling'):
self.max_pooling_match = MpMaxPoolingMatch(self._mp_dim)
if self._perspective.get('attentive'):
self.attentive_match = MpAttentiveMatch(self._att_dim,
self._mp_dim)
if self._perspective.get('max-attentive'):
self.max_attentive_match = MpMaxAttentiveMatch(self._att_dim)
self.built = True
def call(self, x: list, **kwargs):
"""Call."""
seq_lt, seq_rt = x[:5], x[5:]
# unpack seq_left and seq_right
# all hidden states, last hidden state of forward pass,
# last cell state of forward pass, last hidden state of
# backward pass, last cell state of backward pass.
lstm_reps_lt, forward_h_lt, _, backward_h_lt, _ = seq_lt
lstm_reps_rt, forward_h_rt, _, backward_h_rt, _ = seq_rt
match_tensor_list = []
match_dim = 0
if self._perspective.get('full'):
            # Each forward & backward contextual embedding is compared
            # with the last time step of the other sentence.
h_lt = tf.concat([forward_h_lt, backward_h_lt], axis=-1)
full_match_tensor = self.full_match([h_lt, lstm_reps_rt])
match_tensor_list.append(full_match_tensor)
match_dim += self._mp_dim + 1
if self._perspective.get('max-pooling'):
            # Each contextual embedding is compared with every contextual
            # embedding of the other sentence; retain the maximum of each
            # dimension.
max_match_tensor = self.max_pooling_match([lstm_reps_lt,
lstm_reps_rt])
match_tensor_list.append(max_match_tensor)
match_dim += self._mp_dim
if self._perspective.get('attentive'):
            # Each contextual embedding is compared with every contextual
            # embedding of the other sentence; retain the sum of the weighted
            # mean of each dimension.
attentive_tensor = self.attentive_match([lstm_reps_lt,
lstm_reps_rt])
match_tensor_list.append(attentive_tensor)
match_dim += self._mp_dim + 1
if self._perspective.get('max-attentive'):
            # Each contextual embedding is compared with every contextual
            # embedding of the other sentence; retain the max of the weighted
            # mean of each dimension.
relevancy_matrix = _calc_relevancy_matrix(lstm_reps_lt,
lstm_reps_rt)
max_attentive_tensor = self.max_attentive_match([lstm_reps_lt,
lstm_reps_rt,
relevancy_matrix])
match_tensor_list.append(max_attentive_tensor)
match_dim += self._mp_dim + 1
mp_tensor = tf.concat(match_tensor_list, axis=-1)
return mp_tensor
def compute_output_shape(self, input_shape: list):
"""Compute output shape."""
shape_a = input_shape[0]
match_dim = 0
if self._perspective.get('full'):
match_dim += self._mp_dim + 1
if self._perspective.get('max-pooling'):
match_dim += self._mp_dim
if self._perspective.get('attentive'):
match_dim += self._mp_dim + 1
if self._perspective.get('max-attentive'):
match_dim += self._mp_dim + 1
return shape_a[0], shape_a[1], match_dim
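# Worked example of the output width (matches the doctest above): with
# mp_dim=20 and all four perspectives enabled, match_dim is
# (20 + 1) full + 20 max-pooling + (20 + 1) attentive + (20 + 1)
# max-attentive = 83, hence the (32, 10, 83) output shape.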
class MpFullMatch(Layer):
"""Mp Full Match Layer."""
def __init__(self, mp_dim):
"""Init."""
super(MpFullMatch, self).__init__()
self.mp_dim = mp_dim
def build(self, input_shapes):
"""Build."""
# input_shape = input_shapes[0]
self.built = True
def call(self, x, **kwargs):
"""Call.
"""
rep_lt, reps_rt = x
att_lt = tf.expand_dims(rep_lt, 1)
match_tensor, match_dim = _multi_perspective_match(self.mp_dim,
reps_rt,
att_lt)
# match_tensor => [b, len_rt, mp_dim+1]
return match_tensor
def compute_output_shape(self, input_shape):
"""Compute output shape."""
return input_shape[1][0], input_shape[1][1], self.mp_dim + 1
class MpMaxPoolingMatch(Layer):
"""MpMaxPoolingMatch."""
def __init__(self, mp_dim):
"""Init."""
super(MpMaxPoolingMatch, self).__init__()
self.mp_dim = mp_dim
def build(self, input_shapes):
"""Build."""
d = input_shapes[0][-1]
self.kernel = self.add_weight(name='kernel',
shape=(1, 1, 1, self.mp_dim, d),
initializer='uniform',
trainable=True)
self.built = True
def call(self, x, **kwargs):
"""Call."""
reps_lt, reps_rt = x
# kernel: [1, 1, 1, mp_dim, d]
# lstm_lt => [b, len_lt, 1, 1, d]
reps_lt = tf.expand_dims(reps_lt, axis=2)
reps_lt = tf.expand_dims(reps_lt, axis=2)
reps_lt = reps_lt * self.kernel
# lstm_rt -> [b, 1, len_rt, 1, d]
reps_rt = tf.expand_dims(reps_rt, axis=2)
reps_rt = tf.expand_dims(reps_rt, axis=1)
match_tensor = _cosine_distance(reps_lt, reps_rt, cosine_norm=False)
max_match_tensor = tf.reduce_max(match_tensor, axis=1)
# match_tensor => [b, len_rt, m]
return max_match_tensor
def compute_output_shape(self, input_shape):
"""Compute output shape."""
return input_shape[1][0], input_shape[1][1], self.mp_dim
class MpAttentiveMatch(Layer):
"""
MpAttentiveMatch Layer.
Reference:
https://github.com/zhiguowang/BiMPM/blob/master/src/match_utils.py#L188-L193
Examples:
>>> import matchzoo as mz
>>> layer = mz.contrib.layers.multi_perspective_layer.MpAttentiveMatch(
... att_dim=30, mp_dim=20)
>>> layer.compute_output_shape([(32, 10, 100), (32, 40, 100)])
(32, 40, 20)
"""
def __init__(self, att_dim, mp_dim):
"""Init."""
super(MpAttentiveMatch, self).__init__()
self.att_dim = att_dim
self.mp_dim = mp_dim
def build(self, input_shapes):
"""Build."""
# input_shape = input_shapes[0]
self.built = True
def call(self, x, **kwargs):
"""Call."""
reps_lt, reps_rt = x[0], x[1]
# attention prob matrix
attention_layer = AttentionLayer(self.att_dim)
attn_prob = attention_layer([reps_rt, reps_lt])
# attention reps
att_lt = K.batch_dot(attn_prob, reps_lt)
# mp match
attn_match_tensor, match_dim = _multi_perspective_match(self.mp_dim,
reps_rt,
att_lt)
return attn_match_tensor
def compute_output_shape(self, input_shape):
"""Compute output shape."""
return input_shape[1][0], input_shape[1][1], self.mp_dim
class MpMaxAttentiveMatch(Layer):
"""MpMaxAttentiveMatch."""
def __init__(self, mp_dim):
"""Init."""
super(MpMaxAttentiveMatch, self).__init__()
self.mp_dim = mp_dim
def build(self, input_shapes):
"""Build."""
# input_shape = input_shapes[0]
self.built = True
def call(self, x):
"""Call."""
reps_lt, reps_rt = x[0], x[1]
relevancy_matrix = x[2]
max_att_lt = cal_max_question_representation(reps_lt, relevancy_matrix)
max_attentive_tensor, match_dim = _multi_perspective_match(self.mp_dim,
reps_rt,
max_att_lt)
return max_attentive_tensor
def cal_max_question_representation(reps_lt, attn_scores):
"""
Calculate max_question_representation.
:param reps_lt: [batch_size, passage_len, hidden_size]
    :param attn_scores: [batch_size, len_rt, len_lt]
:return: [batch_size, passage_len, hidden_size].
"""
attn_positions = tf.argmax(attn_scores, axis=2)
max_reps_lt = collect_representation(reps_lt, attn_positions)
return max_reps_lt
def collect_representation(representation, positions):
"""
Collect_representation.
:param representation: [batch_size, node_num, feature_dim]
:param positions: [batch_size, neighbour_num]
    :return: [batch_size, neighbour_num, feature_dim]
"""
return collect_probs(representation, positions)
def collect_final_step_of_lstm(lstm_representation, lengths):
"""
Collect final step of lstm.
:param lstm_representation: [batch_size, len_rt, dim]
:param lengths: [batch_size]
:return: [batch_size, dim]
"""
lengths = tf.maximum(lengths, K.zeros_like(lengths))
batch_size = tf.shape(lengths)[0]
# shape (batch_size)
batch_nums = tf.range(0, limit=batch_size)
# shape (batch_size, 2)
indices = tf.stack((batch_nums, lengths), axis=1)
result = tf.gather_nd(lstm_representation, indices,
                          name='last-forward-lstm')
# [batch_size, dim]
return result
def collect_probs(probs, positions):
"""
Collect Probabilities.
Reference:
https://github.com/zhiguowang/BiMPM/blob/master/src/layer_utils.py#L128-L140
:param probs: [batch_size, chunks_size]
:param positions: [batch_size, pair_size]
:return: [batch_size, pair_size]
"""
batch_size = tf.shape(probs)[0]
pair_size = tf.shape(positions)[1]
# shape (batch_size)
batch_nums = K.arange(0, batch_size)
# [batch_size, 1]
batch_nums = tf.reshape(batch_nums, shape=[-1, 1])
# [batch_size, pair_size]
batch_nums = K.tile(batch_nums, [1, pair_size])
# shape (batch_size, pair_size, 2)
# Alert: to solve error message
positions = tf.cast(positions, tf.int32)
indices = tf.stack([batch_nums, positions], axis=2)
pair_probs = tf.gather_nd(probs, indices)
# pair_probs = tf.reshape(pair_probs, shape=[batch_size, pair_size])
return pair_probs
def _multi_perspective_match(mp_dim, reps_rt, att_lt,
with_cosine=True, with_mp_cosine=True):
"""
The core function of zhiguowang's implementation.
reference:
https://github.com/zhiguowang/BiMPM/blob/master/src/match_utils.py#L207-L223
:param mp_dim: about 20
:param reps_rt: [batch, len_rt, dim]
:param att_lt: [batch, len_rt, dim]
:param with_cosine: True
:param with_mp_cosine: True
:return: [batch, len, 1 + mp_dim]
"""
shape_rt = tf.shape(reps_rt)
batch_size = shape_rt[0]
    len_rt = shape_rt[1]
match_dim = 0
match_result_list = []
if with_cosine:
cosine_tensor = _cosine_distance(reps_rt, att_lt, False)
        cosine_tensor = tf.reshape(cosine_tensor,
                                   [batch_size, len_rt, 1])
match_result_list.append(cosine_tensor)
match_dim += 1
if with_mp_cosine:
mp_cosine_layer = MpCosineLayer(mp_dim)
mp_cosine_tensor = mp_cosine_layer([reps_rt, att_lt])
        mp_cosine_tensor = tf.reshape(mp_cosine_tensor,
                                      [batch_size, len_rt, mp_dim])
match_result_list.append(mp_cosine_tensor)
match_dim += mp_cosine_layer.mp_dim
match_result = tf.concat(match_result_list, 2)
return match_result, match_dim
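# Shape sketch for _multi_perspective_match (illustrative numbers): with
# reps_rt and att_lt of shape [32, 40, 100] and mp_dim=20, the cosine part
# contributes [32, 40, 1] and the multi-perspective part [32, 40, 20], so
# match_result is [32, 40, 21] and match_dim is 21.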
class MpCosineLayer(Layer):
"""
Implementation of Multi-Perspective Cosine Distance.
Reference:
https://github.com/zhiguowang/BiMPM/blob/master/src/match_utils.py#L121-L129
Examples:
>>> import matchzoo as mz
>>> layer = mz.contrib.layers.multi_perspective_layer.MpCosineLayer(
... mp_dim=50)
>>> layer.compute_output_shape([(32, 10, 100), (32, 10, 100)])
(32, 10, 50)
"""
def __init__(self, mp_dim, **kwargs):
"""Init."""
self.mp_dim = mp_dim
super(MpCosineLayer, self).__init__(**kwargs)
def build(self, input_shape):
"""Build."""
self.kernel = self.add_weight(name='kernel',
shape=(1, 1, self.mp_dim,
input_shape[0][-1]),
initializer='uniform',
trainable=True)
super(MpCosineLayer, self).build(input_shape)
def call(self, x, **kwargs):
"""Call."""
v1, v2 = x
v1 = tf.expand_dims(v1, 2) * self.kernel # [b, s_lt, m, d]
v2 = tf.expand_dims(v2, 2) # [b, s_lt, 1, d]
return _cosine_distance(v1, v2, False)
def compute_output_shape(self, input_shape):
"""Compute output shape."""
return input_shape[0][0], input_shape[0][1], self.mp_dim
def _calc_relevancy_matrix(reps_lt, reps_rt):
reps_lt = tf.expand_dims(reps_lt, 1) # [b, 1, len_lt, d]
reps_rt = tf.expand_dims(reps_rt, 2) # [b, len_rt, 1, d]
relevancy_matrix = _cosine_distance(reps_lt, reps_rt)
    # => [b, len_rt, len_lt]
return relevancy_matrix
def _mask_relevancy_matrix(relevancy_matrix, mask_lt, mask_rt):
"""
Mask relevancy matrix.
:param relevancy_matrix: [b, len_rt, len_lt]
:param mask_lt: [b, len_lt]
:param mask_rt: [b, len_rt]
:return: masked_matrix: [b, len_rt, len_lt]
"""
if mask_lt is not None:
relevancy_matrix = relevancy_matrix * tf.expand_dims(mask_lt, 1)
relevancy_matrix = relevancy_matrix * tf.expand_dims(mask_rt, 2)
return relevancy_matrix
def _cosine_distance(v1, v2, cosine_norm=True, eps=1e-6):
"""
    Compute cosine distance via broadcasting; the core operation is
    `tf.reduce_sum(v1 * v2, axis=-1)`.
:param v1: [batch, time_steps(v1), 1, m, d]
:param v2: [batch, 1, time_steps(v2), m, d]
:param cosine_norm: True
:param eps: 1e-6
:return: [batch, time_steps(v1), time_steps(v2), m]
"""
cosine_numerator = tf.reduce_sum(v1 * v2, axis=-1)
if not cosine_norm:
return K.tanh(cosine_numerator)
v1_norm = K.sqrt(tf.maximum(tf.reduce_sum(tf.square(v1), axis=-1), eps))
v2_norm = K.sqrt(tf.maximum(tf.reduce_sum(tf.square(v2), axis=-1), eps))
return cosine_numerator / v1_norm / v2_norm
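# Broadcasting sketch (hypothetical shapes): v1 [b, t1, 1, m, d] and
# v2 [b, 1, t2, m, d] broadcast to [b, t1, t2, m, d]; reducing the last
# axis yields [b, t1, t2, m]. With cosine_norm=False the raw dot product
# is squashed through tanh instead of being normalised.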
|
import logging
import uuid
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.components import http, websocket_api
from homeassistant.components.http.data_validator import RequestDataValidator
from homeassistant.const import HTTP_BAD_REQUEST, HTTP_NOT_FOUND
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.util.json import load_json, save_json
from .const import DOMAIN
ATTR_NAME = "name"
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = vol.Schema({DOMAIN: {}}, extra=vol.ALLOW_EXTRA)
EVENT = "shopping_list_updated"
ITEM_UPDATE_SCHEMA = vol.Schema({"complete": bool, ATTR_NAME: str})
PERSISTENCE = ".shopping_list.json"
SERVICE_ADD_ITEM = "add_item"
SERVICE_COMPLETE_ITEM = "complete_item"
SERVICE_ITEM_SCHEMA = vol.Schema({vol.Required(ATTR_NAME): vol.Any(None, cv.string)})
WS_TYPE_SHOPPING_LIST_ITEMS = "shopping_list/items"
WS_TYPE_SHOPPING_LIST_ADD_ITEM = "shopping_list/items/add"
WS_TYPE_SHOPPING_LIST_UPDATE_ITEM = "shopping_list/items/update"
WS_TYPE_SHOPPING_LIST_CLEAR_ITEMS = "shopping_list/items/clear"
SCHEMA_WEBSOCKET_ITEMS = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend(
{vol.Required("type"): WS_TYPE_SHOPPING_LIST_ITEMS}
)
SCHEMA_WEBSOCKET_ADD_ITEM = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend(
{vol.Required("type"): WS_TYPE_SHOPPING_LIST_ADD_ITEM, vol.Required("name"): str}
)
SCHEMA_WEBSOCKET_UPDATE_ITEM = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend(
{
vol.Required("type"): WS_TYPE_SHOPPING_LIST_UPDATE_ITEM,
vol.Required("item_id"): str,
vol.Optional("name"): str,
vol.Optional("complete"): bool,
}
)
SCHEMA_WEBSOCKET_CLEAR_ITEMS = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend(
{vol.Required("type"): WS_TYPE_SHOPPING_LIST_CLEAR_ITEMS}
)
async def async_setup(hass, config):
"""Initialize the shopping list."""
if DOMAIN not in config:
return True
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_IMPORT}
)
)
return True
async def async_setup_entry(hass, config_entry):
"""Set up shopping list from config flow."""
async def add_item_service(call):
"""Add an item with `name`."""
data = hass.data[DOMAIN]
name = call.data.get(ATTR_NAME)
if name is not None:
await data.async_add(name)
async def complete_item_service(call):
"""Mark the item provided via `name` as completed."""
data = hass.data[DOMAIN]
name = call.data.get(ATTR_NAME)
if name is None:
return
try:
item = [item for item in data.items if item["name"] == name][0]
except IndexError:
_LOGGER.error("Removing of item failed: %s cannot be found", name)
else:
await data.async_update(item["id"], {"name": name, "complete": True})
data = hass.data[DOMAIN] = ShoppingData(hass)
await data.async_load()
hass.services.async_register(
DOMAIN, SERVICE_ADD_ITEM, add_item_service, schema=SERVICE_ITEM_SCHEMA
)
hass.services.async_register(
DOMAIN, SERVICE_COMPLETE_ITEM, complete_item_service, schema=SERVICE_ITEM_SCHEMA
)
hass.http.register_view(ShoppingListView)
hass.http.register_view(CreateShoppingListItemView)
hass.http.register_view(UpdateShoppingListItemView)
hass.http.register_view(ClearCompletedItemsView)
hass.components.frontend.async_register_built_in_panel(
"shopping-list", "shopping_list", "mdi:cart"
)
hass.components.websocket_api.async_register_command(
WS_TYPE_SHOPPING_LIST_ITEMS, websocket_handle_items, SCHEMA_WEBSOCKET_ITEMS
)
hass.components.websocket_api.async_register_command(
WS_TYPE_SHOPPING_LIST_ADD_ITEM, websocket_handle_add, SCHEMA_WEBSOCKET_ADD_ITEM
)
hass.components.websocket_api.async_register_command(
WS_TYPE_SHOPPING_LIST_UPDATE_ITEM,
websocket_handle_update,
SCHEMA_WEBSOCKET_UPDATE_ITEM,
)
hass.components.websocket_api.async_register_command(
WS_TYPE_SHOPPING_LIST_CLEAR_ITEMS,
websocket_handle_clear,
SCHEMA_WEBSOCKET_CLEAR_ITEMS,
)
return True
class ShoppingData:
"""Class to hold shopping list data."""
def __init__(self, hass):
"""Initialize the shopping list."""
self.hass = hass
self.items = []
async def async_add(self, name):
"""Add a shopping list item."""
item = {"name": name, "id": uuid.uuid4().hex, "complete": False}
self.items.append(item)
await self.hass.async_add_executor_job(self.save)
return item
async def async_update(self, item_id, info):
"""Update a shopping list item."""
item = next((itm for itm in self.items if itm["id"] == item_id), None)
if item is None:
raise KeyError
info = ITEM_UPDATE_SCHEMA(info)
item.update(info)
await self.hass.async_add_executor_job(self.save)
return item
async def async_clear_completed(self):
"""Clear completed items."""
self.items = [itm for itm in self.items if not itm["complete"]]
await self.hass.async_add_executor_job(self.save)
async def async_load(self):
"""Load items."""
def load():
"""Load the items synchronously."""
return load_json(self.hass.config.path(PERSISTENCE), default=[])
self.items = await self.hass.async_add_executor_job(load)
def save(self):
"""Save the items."""
save_json(self.hass.config.path(PERSISTENCE), self.items)
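# The persisted .shopping_list.json file is simply a JSON list of item
# dicts, e.g. (contents illustrative):
#   [{"name": "milk", "id": "0123456789abcdef", "complete": false}]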
class ShoppingListView(http.HomeAssistantView):
"""View to retrieve shopping list content."""
url = "/api/shopping_list"
name = "api:shopping_list"
@callback
def get(self, request):
"""Retrieve shopping list items."""
return self.json(request.app["hass"].data[DOMAIN].items)
class UpdateShoppingListItemView(http.HomeAssistantView):
"""View to retrieve shopping list content."""
url = "/api/shopping_list/item/{item_id}"
name = "api:shopping_list:item:id"
async def post(self, request, item_id):
"""Update a shopping list item."""
data = await request.json()
try:
item = await request.app["hass"].data[DOMAIN].async_update(item_id, data)
request.app["hass"].bus.async_fire(EVENT)
return self.json(item)
except KeyError:
return self.json_message("Item not found", HTTP_NOT_FOUND)
except vol.Invalid:
return self.json_message("Item not found", HTTP_BAD_REQUEST)
class CreateShoppingListItemView(http.HomeAssistantView):
"""View to retrieve shopping list content."""
url = "/api/shopping_list/item"
name = "api:shopping_list:item"
@RequestDataValidator(vol.Schema({vol.Required("name"): str}))
async def post(self, request, data):
"""Create a new shopping list item."""
item = await request.app["hass"].data[DOMAIN].async_add(data["name"])
request.app["hass"].bus.async_fire(EVENT)
return self.json(item)
class ClearCompletedItemsView(http.HomeAssistantView):
"""View to retrieve shopping list content."""
url = "/api/shopping_list/clear_completed"
name = "api:shopping_list:clear_completed"
async def post(self, request):
"""Retrieve if API is running."""
hass = request.app["hass"]
await hass.data[DOMAIN].async_clear_completed()
hass.bus.async_fire(EVENT)
return self.json_message("Cleared completed items.")
@callback
def websocket_handle_items(hass, connection, msg):
"""Handle get shopping_list items."""
connection.send_message(
websocket_api.result_message(msg["id"], hass.data[DOMAIN].items)
)
@websocket_api.async_response
async def websocket_handle_add(hass, connection, msg):
"""Handle add item to shopping_list."""
item = await hass.data[DOMAIN].async_add(msg["name"])
hass.bus.async_fire(EVENT, {"action": "add", "item": item})
connection.send_message(websocket_api.result_message(msg["id"], item))
@websocket_api.async_response
async def websocket_handle_update(hass, connection, msg):
"""Handle update shopping_list item."""
msg_id = msg.pop("id")
item_id = msg.pop("item_id")
msg.pop("type")
data = msg
try:
item = await hass.data[DOMAIN].async_update(item_id, data)
hass.bus.async_fire(EVENT, {"action": "update", "item": item})
connection.send_message(websocket_api.result_message(msg_id, item))
except KeyError:
connection.send_message(
websocket_api.error_message(msg_id, "item_not_found", "Item not found")
)
@websocket_api.async_response
async def websocket_handle_clear(hass, connection, msg):
"""Handle clearing shopping_list items."""
await hass.data[DOMAIN].async_clear_completed()
hass.bus.async_fire(EVENT, {"action": "clear"})
connection.send_message(websocket_api.result_message(msg["id"]))
|
import typing
import matchzoo
from matchzoo.engine.base_metric import BaseMetric
from matchzoo.engine import base_task
def parse_metric(
metric: typing.Union[str, typing.Type[BaseMetric], BaseMetric],
task: 'base_task.BaseTask' = None
) -> typing.Union['BaseMetric', str]:
"""
Parse input metric in any form into a :class:`BaseMetric` instance.
:param metric: Input metric in any form.
:param task: Task type for determining specific metric.
:return: A :class:`BaseMetric` instance
Examples::
>>> from matchzoo import metrics
>>> from matchzoo.engine.parse_metric import parse_metric
Use `str` as keras native metrics:
>>> parse_metric('mse')
'mse'
Use `str` as MatchZoo metrics:
>>> mz_metric = parse_metric('map')
>>> type(mz_metric)
<class 'matchzoo.metrics.mean_average_precision.MeanAveragePrecision'>
Use :class:`matchzoo.engine.BaseMetric` subclasses as MatchZoo metrics:
>>> type(parse_metric(metrics.AveragePrecision))
<class 'matchzoo.metrics.average_precision.AveragePrecision'>
Use :class:`matchzoo.engine.BaseMetric` instances as MatchZoo metrics:
>>> type(parse_metric(metrics.AveragePrecision()))
<class 'matchzoo.metrics.average_precision.AveragePrecision'>
"""
if task is None:
task = matchzoo.tasks.Ranking()
if isinstance(metric, str):
metric = metric.lower() # ignore case
# matchzoo metrics in str form
for subclass in BaseMetric.__subclasses__():
if metric == subclass.ALIAS or metric in subclass.ALIAS:
return subclass()
# keras native metrics
return _remap_keras_metric(metric, task)
elif isinstance(metric, BaseMetric):
return metric
elif issubclass(metric, BaseMetric):
return metric()
else:
raise ValueError(metric)
def _remap_keras_metric(metric: str, task) -> str:
# we do not support sparse label in classification.
lookup = {
matchzoo.tasks.Ranking: {
'acc': 'binary_accuracy',
'accuracy': 'binary_accuracy',
'crossentropy': 'binary_crossentropy',
'ce': 'binary_crossentropy',
},
matchzoo.tasks.Classification: {
'acc': 'categorical_accuracy',
'accuracy': 'categorical_accuracy',
'crossentropy': 'categorical_crossentropy',
'ce': 'categorical_crossentropy',
}
}
return lookup[type(task)].get(metric, metric)
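# Remapping sketch: under a Ranking task parse_metric('acc') resolves to
# 'binary_accuracy', while under a Classification task the same string
# resolves to 'categorical_accuracy'; unknown strings pass through as-is.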
|
from enum import IntEnum
import logging
from typing import Optional, Tuple
from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN, ClimateEntity
from homeassistant.components.climate.const import (
ATTR_HVAC_MODE,
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
CURRENT_HVAC_COOL,
CURRENT_HVAC_FAN,
CURRENT_HVAC_HEAT,
CURRENT_HVAC_IDLE,
CURRENT_HVAC_OFF,
HVAC_MODE_COOL,
HVAC_MODE_DRY,
HVAC_MODE_FAN_ONLY,
HVAC_MODE_HEAT,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_OFF,
PRESET_NONE,
SUPPORT_FAN_MODE,
SUPPORT_PRESET_MODE,
SUPPORT_TARGET_TEMPERATURE,
SUPPORT_TARGET_TEMPERATURE_RANGE,
)
from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS, TEMP_FAHRENHEIT
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import DATA_UNSUBSCRIBE, DOMAIN
from .entity import ZWaveDeviceEntity
VALUE_LIST = "List"
VALUE_ID = "Value"
VALUE_LABEL = "Label"
VALUE_SELECTED_ID = "Selected_id"
VALUE_SELECTED_LABEL = "Selected"
ATTR_FAN_ACTION = "fan_action"
ATTR_VALVE_POSITION = "valve_position"
_LOGGER = logging.getLogger(__name__)
class ThermostatMode(IntEnum):
"""Enum with all (known/used) Z-Wave ThermostatModes."""
# https://github.com/OpenZWave/open-zwave/blob/master/cpp/src/command_classes/ThermostatMode.cpp
OFF = 0
HEAT = 1
COOL = 2
AUTO = 3
AUXILIARY = 4
RESUME_ON = 5
FAN = 6
    FURNACE = 7
DRY = 8
MOIST = 9
AUTO_CHANGE_OVER = 10
HEATING_ECON = 11
COOLING_ECON = 12
AWAY = 13
FULL_POWER = 15
MANUFACTURER_SPECIFIC = 31
# In Z-Wave the modes and presets are both in ThermostatMode.
# This list contains the ThermostatModes we consider a mode only (not a preset)
MODES_LIST = [
ThermostatMode.OFF,
ThermostatMode.HEAT,
ThermostatMode.COOL,
ThermostatMode.AUTO,
ThermostatMode.AUTO_CHANGE_OVER,
]
MODE_SETPOINT_MAPPINGS = {
ThermostatMode.OFF: (),
ThermostatMode.HEAT: ("setpoint_heating",),
ThermostatMode.COOL: ("setpoint_cooling",),
ThermostatMode.AUTO: ("setpoint_heating", "setpoint_cooling"),
ThermostatMode.AUXILIARY: ("setpoint_heating",),
    ThermostatMode.FURNACE: ("setpoint_furnace",),
ThermostatMode.DRY: ("setpoint_dry_air",),
ThermostatMode.MOIST: ("setpoint_moist_air",),
ThermostatMode.AUTO_CHANGE_OVER: ("setpoint_auto_changeover",),
ThermostatMode.HEATING_ECON: ("setpoint_eco_heating",),
ThermostatMode.COOLING_ECON: ("setpoint_eco_cooling",),
ThermostatMode.AWAY: ("setpoint_away_heating", "setpoint_away_cooling"),
ThermostatMode.FULL_POWER: ("setpoint_full_power",),
}
# Keys are strings: OZW and/or qt-ozw sends string values, not numeric ones
# https://github.com/OpenZWave/open-zwave/blob/master/cpp/src/command_classes/ThermostatOperatingState.cpp
HVAC_CURRENT_MAPPINGS = {
"idle": CURRENT_HVAC_IDLE,
"heat": CURRENT_HVAC_HEAT,
"pending heat": CURRENT_HVAC_IDLE,
"heating": CURRENT_HVAC_HEAT,
"cool": CURRENT_HVAC_COOL,
"pending cool": CURRENT_HVAC_IDLE,
"cooling": CURRENT_HVAC_COOL,
"fan only": CURRENT_HVAC_FAN,
"vent / economiser": CURRENT_HVAC_FAN,
"off": CURRENT_HVAC_OFF,
}
# Map Z-Wave HVAC Mode to Home Assistant value
# Note: We treat "auto" as "heat_cool" as most Z-Wave devices
# report auto_changeover as auto without schedule support.
ZW_HVAC_MODE_MAPPINGS = {
ThermostatMode.OFF: HVAC_MODE_OFF,
ThermostatMode.HEAT: HVAC_MODE_HEAT,
ThermostatMode.COOL: HVAC_MODE_COOL,
# Z-Wave auto mode is actually heat/cool in the hass world
ThermostatMode.AUTO: HVAC_MODE_HEAT_COOL,
ThermostatMode.AUXILIARY: HVAC_MODE_HEAT,
ThermostatMode.FAN: HVAC_MODE_FAN_ONLY,
    ThermostatMode.FURNACE: HVAC_MODE_HEAT,
ThermostatMode.DRY: HVAC_MODE_DRY,
ThermostatMode.AUTO_CHANGE_OVER: HVAC_MODE_HEAT_COOL,
ThermostatMode.HEATING_ECON: HVAC_MODE_HEAT,
ThermostatMode.COOLING_ECON: HVAC_MODE_COOL,
ThermostatMode.AWAY: HVAC_MODE_HEAT_COOL,
ThermostatMode.FULL_POWER: HVAC_MODE_HEAT,
}
# Map Home Assistant HVAC Mode to Z-Wave value
HVAC_MODE_ZW_MAPPINGS = {
HVAC_MODE_OFF: ThermostatMode.OFF,
HVAC_MODE_HEAT: ThermostatMode.HEAT,
HVAC_MODE_COOL: ThermostatMode.COOL,
HVAC_MODE_FAN_ONLY: ThermostatMode.FAN,
HVAC_MODE_DRY: ThermostatMode.DRY,
HVAC_MODE_HEAT_COOL: ThermostatMode.AUTO_CHANGE_OVER,
}
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Z-Wave Climate from Config Entry."""
@callback
def async_add_climate(values):
"""Add Z-Wave Climate."""
async_add_entities([ZWaveClimateEntity(values)])
hass.data[DOMAIN][config_entry.entry_id][DATA_UNSUBSCRIBE].append(
async_dispatcher_connect(
hass, f"{DOMAIN}_new_{CLIMATE_DOMAIN}", async_add_climate
)
)
class ZWaveClimateEntity(ZWaveDeviceEntity, ClimateEntity):
"""Representation of a Z-Wave Climate device."""
def __init__(self, values):
"""Initialize the entity."""
super().__init__(values)
self._hvac_modes = {}
self._hvac_presets = {}
self.on_value_update()
@callback
def on_value_update(self):
"""Call when the underlying values object changes."""
self._current_mode_setpoint_values = self._get_current_mode_setpoint_values()
if not self._hvac_modes:
self._set_modes_and_presets()
@property
def hvac_mode(self):
"""Return hvac operation ie. heat, cool mode."""
if not self.values.mode:
# Thermostat(valve) with no support for setting a mode is considered heating-only
return HVAC_MODE_HEAT
return ZW_HVAC_MODE_MAPPINGS.get(
self.values.mode.value[VALUE_SELECTED_ID], HVAC_MODE_HEAT_COOL
)
@property
def hvac_modes(self):
"""Return the list of available hvac operation modes."""
return list(self._hvac_modes)
@property
def fan_mode(self):
"""Return the fan speed set."""
return self.values.fan_mode.value[VALUE_SELECTED_LABEL]
@property
def fan_modes(self):
"""Return a list of available fan modes."""
return [entry[VALUE_LABEL] for entry in self.values.fan_mode.value[VALUE_LIST]]
@property
def temperature_unit(self):
"""Return the unit of measurement."""
if self.values.temperature is not None and self.values.temperature.units == "F":
return TEMP_FAHRENHEIT
return TEMP_CELSIUS
@property
def current_temperature(self):
"""Return the current temperature."""
if not self.values.temperature:
return None
return self.values.temperature.value
@property
def hvac_action(self):
"""Return the current running hvac operation if supported."""
if not self.values.operating_state:
return None
cur_state = self.values.operating_state.value.lower()
return HVAC_CURRENT_MAPPINGS.get(cur_state)
@property
def preset_mode(self):
"""Return preset operation ie. eco, away."""
# A Zwave mode that can not be translated to a hass mode is considered a preset
if not self.values.mode:
return None
if self.values.mode.value[VALUE_SELECTED_ID] not in MODES_LIST:
return self.values.mode.value[VALUE_SELECTED_LABEL]
return PRESET_NONE
@property
def preset_modes(self):
"""Return the list of available preset operation modes."""
return list(self._hvac_presets)
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
return self._current_mode_setpoint_values[0].value
@property
def target_temperature_low(self) -> Optional[float]:
"""Return the lowbound target temperature we try to reach."""
return self._current_mode_setpoint_values[0].value
@property
def target_temperature_high(self) -> Optional[float]:
"""Return the highbound target temperature we try to reach."""
return self._current_mode_setpoint_values[1].value
async def async_set_temperature(self, **kwargs):
"""Set new target temperature.
Must know if single or double setpoint.
"""
hvac_mode = kwargs.get(ATTR_HVAC_MODE)
if hvac_mode is not None:
await self.async_set_hvac_mode(hvac_mode)
if len(self._current_mode_setpoint_values) == 1:
setpoint = self._current_mode_setpoint_values[0]
target_temp = kwargs.get(ATTR_TEMPERATURE)
if setpoint is not None and target_temp is not None:
setpoint.send_value(target_temp)
elif len(self._current_mode_setpoint_values) == 2:
(setpoint_low, setpoint_high) = self._current_mode_setpoint_values
target_temp_low = kwargs.get(ATTR_TARGET_TEMP_LOW)
target_temp_high = kwargs.get(ATTR_TARGET_TEMP_HIGH)
if setpoint_low is not None and target_temp_low is not None:
setpoint_low.send_value(target_temp_low)
if setpoint_high is not None and target_temp_high is not None:
setpoint_high.send_value(target_temp_high)
async def async_set_fan_mode(self, fan_mode):
"""Set new target fan mode."""
# get id for this fan_mode
fan_mode_value = _get_list_id(self.values.fan_mode.value[VALUE_LIST], fan_mode)
if fan_mode_value is None:
_LOGGER.warning("Received an invalid fan mode: %s", fan_mode)
return
self.values.fan_mode.send_value(fan_mode_value)
async def async_set_hvac_mode(self, hvac_mode):
"""Set new target hvac mode."""
if not self.values.mode:
# Thermostat(valve) with no support for setting a mode
_LOGGER.warning(
"Thermostat %s does not support setting a mode", self.entity_id
)
return
hvac_mode_value = self._hvac_modes.get(hvac_mode)
if hvac_mode_value is None:
_LOGGER.warning("Received an invalid hvac mode: %s", hvac_mode)
return
self.values.mode.send_value(hvac_mode_value)
async def async_set_preset_mode(self, preset_mode):
"""Set new target preset mode."""
if preset_mode == PRESET_NONE:
# try to restore to the (translated) main hvac mode
await self.async_set_hvac_mode(self.hvac_mode)
return
preset_mode_value = self._hvac_presets.get(preset_mode)
if preset_mode_value is None:
_LOGGER.warning("Received an invalid preset mode: %s", preset_mode)
return
self.values.mode.send_value(preset_mode_value)
@property
def device_state_attributes(self):
"""Return the optional state attributes."""
data = super().device_state_attributes
if self.values.fan_action:
data[ATTR_FAN_ACTION] = self.values.fan_action.value
if self.values.valve_position:
data[
ATTR_VALVE_POSITION
] = f"{self.values.valve_position.value} {self.values.valve_position.units}"
return data
@property
def supported_features(self):
"""Return the list of supported features."""
support = 0
if len(self._current_mode_setpoint_values) == 1:
support |= SUPPORT_TARGET_TEMPERATURE
if len(self._current_mode_setpoint_values) > 1:
support |= SUPPORT_TARGET_TEMPERATURE_RANGE
if self.values.fan_mode:
support |= SUPPORT_FAN_MODE
if self.values.mode:
support |= SUPPORT_PRESET_MODE
return support
def _get_current_mode_setpoint_values(self) -> Tuple:
"""Return a tuple of current setpoint Z-Wave value(s)."""
if not self.values.mode:
setpoint_names = ("setpoint_heating",)
else:
current_mode = self.values.mode.value[VALUE_SELECTED_ID]
setpoint_names = MODE_SETPOINT_MAPPINGS.get(current_mode, ())
# we do not want None values in our tuple so check if the value exists
return tuple(
getattr(self.values, value_name)
for value_name in setpoint_names
if getattr(self.values, value_name, None)
)
def _set_modes_and_presets(self):
"""Convert Z-Wave Thermostat modes into Home Assistant modes and presets."""
all_modes = {}
all_presets = {PRESET_NONE: None}
if self.values.mode:
# Z-Wave uses one list for both modes and presets.
# Iterate over all Z-Wave ThermostatModes and extract the hvac modes and presets.
for val in self.values.mode.value[VALUE_LIST]:
if val[VALUE_ID] in MODES_LIST:
# treat value as hvac mode
hass_mode = ZW_HVAC_MODE_MAPPINGS.get(val[VALUE_ID])
all_modes[hass_mode] = val[VALUE_ID]
else:
# treat value as hvac preset
all_presets[val[VALUE_LABEL]] = val[VALUE_ID]
else:
all_modes[HVAC_MODE_HEAT] = None
self._hvac_modes = all_modes
self._hvac_presets = all_presets
def _get_list_id(value_lst, value_lbl):
"""Return the id for the value in the list."""
return next(
(val[VALUE_ID] for val in value_lst if val[VALUE_LABEL] == value_lbl), None
)
|
import pywink
from homeassistant.helpers.entity import ToggleEntity
from . import DOMAIN, WinkDevice
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Wink platform."""
for switch in pywink.get_switches():
_id = switch.object_id() + switch.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
add_entities([WinkToggleDevice(switch, hass)])
for switch in pywink.get_powerstrips():
_id = switch.object_id() + switch.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
add_entities([WinkToggleDevice(switch, hass)])
for sprinkler in pywink.get_sprinklers():
_id = sprinkler.object_id() + sprinkler.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
add_entities([WinkToggleDevice(sprinkler, hass)])
for switch in pywink.get_binary_switch_groups():
_id = switch.object_id() + switch.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
add_entities([WinkToggleDevice(switch, hass)])
class WinkToggleDevice(WinkDevice, ToggleEntity):
"""Representation of a Wink toggle device."""
async def async_added_to_hass(self):
"""Call when entity is added to hass."""
self.hass.data[DOMAIN]["entities"]["switch"].append(self)
@property
def is_on(self):
"""Return true if device is on."""
return self.wink.state()
def turn_on(self, **kwargs):
"""Turn the device on."""
self.wink.set_state(True)
def turn_off(self, **kwargs):
"""Turn the device off."""
self.wink.set_state(False)
@property
def device_state_attributes(self):
"""Return the state attributes."""
attributes = super().device_state_attributes
try:
event = self.wink.last_event()
if event is not None:
attributes["last_event"] = event
except AttributeError:
pass
return attributes
|
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
SUPPORT_BRIGHTNESS,
LightEntity,
)
from . import IHC_CONTROLLER, IHC_INFO
from .const import CONF_DIMMABLE, CONF_OFF_ID, CONF_ON_ID
from .ihcdevice import IHCDevice
from .util import async_pulse, async_set_bool, async_set_int
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the IHC lights platform."""
if discovery_info is None:
return
devices = []
for name, device in discovery_info.items():
ihc_id = device["ihc_id"]
product_cfg = device["product_cfg"]
product = device["product"]
# Find controller that corresponds with device id
ctrl_id = device["ctrl_id"]
ihc_key = f"ihc{ctrl_id}"
info = hass.data[ihc_key][IHC_INFO]
ihc_controller = hass.data[ihc_key][IHC_CONTROLLER]
ihc_off_id = product_cfg.get(CONF_OFF_ID)
ihc_on_id = product_cfg.get(CONF_ON_ID)
dimmable = product_cfg[CONF_DIMMABLE]
light = IhcLight(
ihc_controller, name, ihc_id, ihc_off_id, ihc_on_id, info, dimmable, product
)
devices.append(light)
add_entities(devices)
class IhcLight(IHCDevice, LightEntity):
"""Representation of a IHC light.
For dimmable lights, the associated IHC resource should be a light
level (integer). For non dimmable light the IHC resource should be
an on/off (boolean) resource
"""
def __init__(
self,
ihc_controller,
name,
ihc_id: int,
ihc_off_id: int,
ihc_on_id: int,
info: bool,
dimmable=False,
product=None,
) -> None:
"""Initialize the light."""
super().__init__(ihc_controller, name, ihc_id, info, product)
self._ihc_off_id = ihc_off_id
self._ihc_on_id = ihc_on_id
self._brightness = 0
self._dimmable = dimmable
self._state = None
@property
def brightness(self) -> int:
"""Return the brightness of this light between 0..255."""
return self._brightness
@property
def is_on(self) -> bool:
"""Return true if light is on."""
return self._state
@property
def supported_features(self):
"""Flag supported features."""
if self._dimmable:
return SUPPORT_BRIGHTNESS
return 0
async def async_turn_on(self, **kwargs):
"""Turn the light on."""
if ATTR_BRIGHTNESS in kwargs:
brightness = kwargs[ATTR_BRIGHTNESS]
else:
brightness = self._brightness
if brightness == 0:
brightness = 255
if self._dimmable:
await async_set_int(
self.hass, self.ihc_controller, self.ihc_id, int(brightness * 100 / 255)
)
else:
if self._ihc_on_id:
await async_pulse(self.hass, self.ihc_controller, self._ihc_on_id)
else:
await async_set_bool(self.hass, self.ihc_controller, self.ihc_id, True)
async def async_turn_off(self, **kwargs):
"""Turn the light off."""
if self._dimmable:
await async_set_int(self.hass, self.ihc_controller, self.ihc_id, 0)
else:
if self._ihc_off_id:
await async_pulse(self.hass, self.ihc_controller, self._ihc_off_id)
else:
await async_set_bool(self.hass, self.ihc_controller, self.ihc_id, False)
def on_ihc_change(self, ihc_id, value):
"""Handle IHC notifications."""
if isinstance(value, bool):
self._dimmable = False
self._state = value != 0
else:
self._dimmable = True
self._state = value > 0
if self._state:
self._brightness = int(value * 255 / 100)
self.schedule_update_ha_state()
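# Scaling sketch: IHC dimmer levels run 0..100 while Home Assistant
# brightness runs 0..255. For example, brightness 128 is sent as
# int(128 * 100 / 255) == 50, and an incoming level of 50 maps back to
# int(50 * 255 / 100) == 127.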
|
from datetime import timedelta
import logging
from operator import itemgetter
import oasatelematics
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ATTR_ATTRIBUTION, CONF_NAME, DEVICE_CLASS_TIMESTAMP
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import dt as dt_util
_LOGGER = logging.getLogger(__name__)
ATTR_STOP_ID = "stop_id"
ATTR_STOP_NAME = "stop_name"
ATTR_ROUTE_ID = "route_id"
ATTR_ROUTE_NAME = "route_name"
ATTR_NEXT_ARRIVAL = "next_arrival"
ATTR_SECOND_NEXT_ARRIVAL = "second_next_arrival"
ATTR_NEXT_DEPARTURE = "next_departure"
ATTRIBUTION = "Data retrieved from telematics.oasa.gr"
CONF_STOP_ID = "stop_id"
CONF_ROUTE_ID = "route_id"
DEFAULT_NAME = "OASA Telematics"
ICON = "mdi:bus"
SCAN_INTERVAL = timedelta(seconds=60)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_STOP_ID): cv.string,
vol.Required(CONF_ROUTE_ID): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the OASA Telematics sensor."""
name = config[CONF_NAME]
stop_id = config[CONF_STOP_ID]
route_id = config.get(CONF_ROUTE_ID)
data = OASATelematicsData(stop_id, route_id)
add_entities([OASATelematicsSensor(data, stop_id, route_id, name)], True)
class OASATelematicsSensor(Entity):
"""Implementation of the OASA Telematics sensor."""
def __init__(self, data, stop_id, route_id, name):
"""Initialize the sensor."""
self.data = data
self._name = name
self._stop_id = stop_id
self._route_id = route_id
self._name_data = self._times = self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def device_class(self):
"""Return the class of this sensor."""
return DEVICE_CLASS_TIMESTAMP
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def device_state_attributes(self):
"""Return the state attributes."""
params = {}
if self._times is not None:
next_arrival_data = self._times[0]
if ATTR_NEXT_ARRIVAL in next_arrival_data:
next_arrival = next_arrival_data[ATTR_NEXT_ARRIVAL]
params.update({ATTR_NEXT_ARRIVAL: next_arrival.isoformat()})
if len(self._times) > 1:
second_next_arrival_time = self._times[1][ATTR_NEXT_ARRIVAL]
if second_next_arrival_time is not None:
second_arrival = second_next_arrival_time
params.update(
{ATTR_SECOND_NEXT_ARRIVAL: second_arrival.isoformat()}
)
params.update(
{
ATTR_ROUTE_ID: self._times[0][ATTR_ROUTE_ID],
ATTR_STOP_ID: self._stop_id,
ATTR_ATTRIBUTION: ATTRIBUTION,
}
)
params.update(
{
ATTR_ROUTE_NAME: self._name_data[ATTR_ROUTE_NAME],
ATTR_STOP_NAME: self._name_data[ATTR_STOP_NAME],
}
)
return {k: v for k, v in params.items() if v}
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return ICON
def update(self):
"""Get the latest data from OASA API and update the states."""
self.data.update()
self._times = self.data.info
self._name_data = self.data.name_data
next_arrival_data = self._times[0]
if ATTR_NEXT_ARRIVAL in next_arrival_data:
self._state = next_arrival_data[ATTR_NEXT_ARRIVAL].isoformat()
class OASATelematicsData:
"""The class for handling data retrieval."""
def __init__(self, stop_id, route_id):
"""Initialize the data object."""
self.stop_id = stop_id
self.route_id = route_id
self.info = self.empty_result()
self.oasa_api = oasatelematics
self.name_data = {
ATTR_ROUTE_NAME: self.get_route_name(),
ATTR_STOP_NAME: self.get_stop_name(),
}
def empty_result(self):
"""Object returned when no arrivals are found."""
return [{ATTR_ROUTE_ID: self.route_id}]
def get_route_name(self):
"""Get the route name from the API."""
try:
route = self.oasa_api.getRouteName(self.route_id)
if route:
return route[0].get("route_departure_eng")
except TypeError:
_LOGGER.error("Cannot get route name from OASA API")
return None
def get_stop_name(self):
"""Get the stop name from the API."""
try:
name_data = self.oasa_api.getStopNameAndXY(self.stop_id)
if name_data:
return name_data[0].get("stop_descr_matrix_eng")
except TypeError:
_LOGGER.error("Cannot get stop name from OASA API")
return None
def update(self):
"""Get the latest arrival data from telematics.oasa.gr API."""
self.info = []
results = self.oasa_api.getStopArrivals(self.stop_id)
if not results:
self.info = self.empty_result()
return
# Parse results
        results = [r for r in results if r.get("route_code") == self.route_id]
current_time = dt_util.utcnow()
for result in results:
btime2 = result.get("btime2")
if btime2 is not None:
arrival_min = int(btime2)
timestamp = current_time + timedelta(minutes=arrival_min)
arrival_data = {
ATTR_NEXT_ARRIVAL: timestamp,
ATTR_ROUTE_ID: self.route_id,
}
self.info.append(arrival_data)
if not self.info:
_LOGGER.debug("No arrivals with given parameters")
self.info = self.empty_result()
return
# Sort the data by time
sort = sorted(self.info, key=itemgetter(ATTR_NEXT_ARRIVAL))
self.info = sort
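# Parsing sketch (payload values illustrative): a result such as
#   {"route_code": "2045", "btime2": "7"}
# becomes an arrival timestamp of utcnow() + 7 minutes stored under
# ATTR_NEXT_ARRIVAL, and the collected arrivals are sorted by that time.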
|
import voluptuous as vol
from homeassistant.components.device_automation import TRIGGER_BASE_SCHEMA
from homeassistant.components.device_automation.exceptions import (
InvalidDeviceAutomationConfig,
)
from homeassistant.components.homeassistant.triggers import (
numeric_state as numeric_state_trigger,
)
from homeassistant.const import (
ATTR_DEVICE_CLASS,
ATTR_UNIT_OF_MEASUREMENT,
CONF_ABOVE,
CONF_BELOW,
CONF_ENTITY_ID,
CONF_FOR,
CONF_TYPE,
DEVICE_CLASS_BATTERY,
DEVICE_CLASS_CURRENT,
DEVICE_CLASS_ENERGY,
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_ILLUMINANCE,
DEVICE_CLASS_POWER,
DEVICE_CLASS_POWER_FACTOR,
DEVICE_CLASS_PRESSURE,
DEVICE_CLASS_SIGNAL_STRENGTH,
DEVICE_CLASS_TEMPERATURE,
DEVICE_CLASS_TIMESTAMP,
DEVICE_CLASS_VOLTAGE,
)
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.entity_registry import async_entries_for_device
from . import DOMAIN
# mypy: allow-untyped-defs, no-check-untyped-defs
DEVICE_CLASS_NONE = "none"
CONF_BATTERY_LEVEL = "battery_level"
CONF_CURRENT = "current"
CONF_ENERGY = "energy"
CONF_HUMIDITY = "humidity"
CONF_ILLUMINANCE = "illuminance"
CONF_POWER = "power"
CONF_POWER_FACTOR = "power_factor"
CONF_PRESSURE = "pressure"
CONF_SIGNAL_STRENGTH = "signal_strength"
CONF_TEMPERATURE = "temperature"
CONF_TIMESTAMP = "timestamp"
CONF_VOLTAGE = "voltage"
CONF_VALUE = "value"
ENTITY_TRIGGERS = {
DEVICE_CLASS_BATTERY: [{CONF_TYPE: CONF_BATTERY_LEVEL}],
DEVICE_CLASS_CURRENT: [{CONF_TYPE: CONF_CURRENT}],
DEVICE_CLASS_ENERGY: [{CONF_TYPE: CONF_ENERGY}],
DEVICE_CLASS_HUMIDITY: [{CONF_TYPE: CONF_HUMIDITY}],
DEVICE_CLASS_ILLUMINANCE: [{CONF_TYPE: CONF_ILLUMINANCE}],
DEVICE_CLASS_POWER: [{CONF_TYPE: CONF_POWER}],
DEVICE_CLASS_POWER_FACTOR: [{CONF_TYPE: CONF_POWER_FACTOR}],
DEVICE_CLASS_PRESSURE: [{CONF_TYPE: CONF_PRESSURE}],
DEVICE_CLASS_SIGNAL_STRENGTH: [{CONF_TYPE: CONF_SIGNAL_STRENGTH}],
DEVICE_CLASS_TEMPERATURE: [{CONF_TYPE: CONF_TEMPERATURE}],
DEVICE_CLASS_TIMESTAMP: [{CONF_TYPE: CONF_TIMESTAMP}],
DEVICE_CLASS_VOLTAGE: [{CONF_TYPE: CONF_VOLTAGE}],
DEVICE_CLASS_NONE: [{CONF_TYPE: CONF_VALUE}],
}
TRIGGER_SCHEMA = vol.All(
TRIGGER_BASE_SCHEMA.extend(
{
vol.Required(CONF_ENTITY_ID): cv.entity_id,
vol.Required(CONF_TYPE): vol.In(
[
CONF_BATTERY_LEVEL,
CONF_CURRENT,
CONF_ENERGY,
CONF_HUMIDITY,
CONF_ILLUMINANCE,
CONF_POWER,
CONF_POWER_FACTOR,
CONF_PRESSURE,
CONF_SIGNAL_STRENGTH,
CONF_TEMPERATURE,
CONF_TIMESTAMP,
CONF_VOLTAGE,
CONF_VALUE,
]
),
            vol.Optional(CONF_BELOW): vol.Coerce(float),
            vol.Optional(CONF_ABOVE): vol.Coerce(float),
vol.Optional(CONF_FOR): cv.positive_time_period_dict,
}
),
cv.has_at_least_one_key(CONF_BELOW, CONF_ABOVE),
)
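# Example of a trigger config this schema accepts (ids and values are
# illustrative only):
#   {
#       "platform": "device",
#       "domain": DOMAIN,
#       "device_id": "abc123",
#       "entity_id": "sensor.kitchen_temperature",
#       "type": CONF_TEMPERATURE,
#       "above": 21.5,
#   }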
async def async_attach_trigger(hass, config, action, automation_info):
"""Listen for state changes based on configuration."""
numeric_state_config = {
numeric_state_trigger.CONF_PLATFORM: "numeric_state",
numeric_state_trigger.CONF_ENTITY_ID: config[CONF_ENTITY_ID],
}
if CONF_ABOVE in config:
numeric_state_config[numeric_state_trigger.CONF_ABOVE] = config[CONF_ABOVE]
if CONF_BELOW in config:
numeric_state_config[numeric_state_trigger.CONF_BELOW] = config[CONF_BELOW]
if CONF_FOR in config:
numeric_state_config[CONF_FOR] = config[CONF_FOR]
numeric_state_config = numeric_state_trigger.TRIGGER_SCHEMA(numeric_state_config)
return await numeric_state_trigger.async_attach_trigger(
hass, numeric_state_config, action, automation_info, platform_type="device"
)
async def async_get_triggers(hass, device_id):
"""List device triggers."""
triggers = []
entity_registry = await hass.helpers.entity_registry.async_get_registry()
entries = [
entry
for entry in async_entries_for_device(entity_registry, device_id)
if entry.domain == DOMAIN
]
for entry in entries:
device_class = DEVICE_CLASS_NONE
state = hass.states.get(entry.entity_id)
unit_of_measurement = (
state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) if state else None
)
if not state or not unit_of_measurement:
continue
if ATTR_DEVICE_CLASS in state.attributes:
device_class = state.attributes[ATTR_DEVICE_CLASS]
templates = ENTITY_TRIGGERS.get(
device_class, ENTITY_TRIGGERS[DEVICE_CLASS_NONE]
)
triggers.extend(
{
**automation,
"platform": "device",
"device_id": device_id,
"entity_id": entry.entity_id,
"domain": DOMAIN,
}
for automation in templates
)
return triggers
async def async_get_trigger_capabilities(hass, config):
"""List trigger capabilities."""
state = hass.states.get(config[CONF_ENTITY_ID])
unit_of_measurement = (
state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) if state else None
)
if not state or not unit_of_measurement:
raise InvalidDeviceAutomationConfig
return {
"extra_fields": vol.Schema(
{
vol.Optional(
CONF_ABOVE, description={"suffix": unit_of_measurement}
): vol.Coerce(float),
vol.Optional(
CONF_BELOW, description={"suffix": unit_of_measurement}
): vol.Coerce(float),
vol.Optional(CONF_FOR): cv.positive_time_period_dict,
}
)
}
|
import glob
import logging
from os.path import basename, normpath
from enocean.communicators import SerialCommunicator
from enocean.protocol.packet import RadioPacket
import serial
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import SIGNAL_RECEIVE_MESSAGE, SIGNAL_SEND_MESSAGE
_LOGGER = logging.getLogger(__name__)
class EnOceanDongle:
"""Representation of an EnOcean dongle.
    The dongle is responsible for receiving EnOcean frames,
creating devices if needed, and dispatching messages to platforms.
"""
def __init__(self, hass, serial_path):
"""Initialize the EnOcean dongle."""
self._communicator = SerialCommunicator(
port=serial_path, callback=self.callback
)
self.serial_path = serial_path
self.identifier = basename(normpath(serial_path))
self.hass = hass
self.dispatcher_disconnect_handle = None
async def async_setup(self):
"""Finish the setup of the bridge and supported platforms."""
self._communicator.start()
self.dispatcher_disconnect_handle = async_dispatcher_connect(
self.hass, SIGNAL_SEND_MESSAGE, self._send_message_callback
)
def unload(self):
"""Disconnect callbacks established at init time."""
if self.dispatcher_disconnect_handle:
self.dispatcher_disconnect_handle()
self.dispatcher_disconnect_handle = None
def _send_message_callback(self, command):
"""Send a command through the EnOcean dongle."""
self._communicator.send(command)
def callback(self, packet):
"""Handle EnOcean device's callback.
        This is the callback function called by python-enocean whenever there
is an incoming packet.
"""
if isinstance(packet, RadioPacket):
_LOGGER.debug("Received radio packet: %s", packet)
self.hass.helpers.dispatcher.dispatcher_send(SIGNAL_RECEIVE_MESSAGE, packet)
def detect():
"""Return a list of candidate paths for USB ENOcean dongles.
This method is currently a bit simplistic, it may need to be
improved to support more configurations and OS.
"""
globs_to_test = ["/dev/tty*FTOA2PV*", "/dev/serial/by-id/*EnOcean*"]
found_paths = []
for current_glob in globs_to_test:
found_paths.extend(glob.glob(current_glob))
return found_paths
def validate_path(path: str):
"""Return True if the provided path points to a valid serial port, False otherwise."""
try:
# Creating the serial communicator will raise an exception
# if it cannot connect
SerialCommunicator(port=path)
return True
except serial.SerialException as exception:
_LOGGER.warning("Dongle path %s is invalid: %s", path, str(exception))
return False
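if __name__ == "__main__":
    # Minimal manual check of the helpers above; a sketch only, since the
    # candidate paths found (if any) depend entirely on the host hardware.
    for candidate in detect():
        status = "valid" if validate_path(candidate) else "invalid"
        print(candidate, status)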
|
from typing import Callable, Dict
from aioguardian import Client
from aioguardian.errors import GuardianError
import voluptuous as vol
from homeassistant.components.switch import SwitchEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_FILENAME, CONF_PORT, CONF_URL
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import config_validation as cv, entity_platform
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from . import ValveControllerEntity
from .const import (
API_VALVE_STATUS,
CONF_UID,
DATA_CLIENT,
DATA_COORDINATOR,
DATA_PAIRED_SENSOR_MANAGER,
DOMAIN,
LOGGER,
)
ATTR_AVG_CURRENT = "average_current"
ATTR_INST_CURRENT = "instantaneous_current"
ATTR_INST_CURRENT_DDT = "instantaneous_current_ddt"
ATTR_TRAVEL_COUNT = "travel_count"
SERVICE_DISABLE_AP = "disable_ap"
SERVICE_ENABLE_AP = "enable_ap"
SERVICE_PAIR_SENSOR = "pair_sensor"
SERVICE_REBOOT = "reboot"
SERVICE_RESET_VALVE_DIAGNOSTICS = "reset_valve_diagnostics"
SERVICE_UNPAIR_SENSOR = "unpair_sensor"
SERVICE_UPGRADE_FIRMWARE = "upgrade_firmware"
async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: Callable
) -> None:
"""Set up Guardian switches based on a config entry."""
platform = entity_platform.current_platform.get()
for service_name, schema, method in [
(SERVICE_DISABLE_AP, {}, "async_disable_ap"),
(SERVICE_ENABLE_AP, {}, "async_enable_ap"),
(SERVICE_PAIR_SENSOR, {vol.Required(CONF_UID): cv.string}, "async_pair_sensor"),
(SERVICE_REBOOT, {}, "async_reboot"),
(SERVICE_RESET_VALVE_DIAGNOSTICS, {}, "async_reset_valve_diagnostics"),
(
SERVICE_UPGRADE_FIRMWARE,
{
vol.Optional(CONF_URL): cv.url,
vol.Optional(CONF_PORT): cv.port,
vol.Optional(CONF_FILENAME): cv.string,
},
"async_upgrade_firmware",
),
(
SERVICE_UNPAIR_SENSOR,
{vol.Required(CONF_UID): cv.string},
"async_unpair_sensor",
),
]:
platform.async_register_entity_service(service_name, schema, method)
async_add_entities(
[
ValveControllerSwitch(
entry,
hass.data[DOMAIN][DATA_CLIENT][entry.entry_id],
hass.data[DOMAIN][DATA_COORDINATOR][entry.entry_id],
)
]
)
class ValveControllerSwitch(ValveControllerEntity, SwitchEntity):
"""Define a switch to open/close the Guardian valve."""
def __init__(
self,
entry: ConfigEntry,
client: Client,
coordinators: Dict[str, DataUpdateCoordinator],
):
"""Initialize."""
super().__init__(
entry, coordinators, "valve", "Valve Controller", None, "mdi:water"
)
self._client = client
self._is_on = True
@property
def available(self) -> bool:
"""Return whether the entity is available."""
return self.coordinators[API_VALVE_STATUS].last_update_success
@property
def is_on(self) -> bool:
"""Return True if the valve is open."""
return self._is_on
async def _async_continue_entity_setup(self):
"""Register API interest (and related tasks) when the entity is added."""
self.async_add_coordinator_update_listener(API_VALVE_STATUS)
@callback
def _async_update_from_latest_data(self) -> None:
"""Update the entity."""
self._is_on = self.coordinators[API_VALVE_STATUS].data["state"] in (
"start_opening",
"opening",
"finish_opening",
"opened",
)
self._attrs.update(
{
ATTR_AVG_CURRENT: self.coordinators[API_VALVE_STATUS].data[
"average_current"
],
ATTR_INST_CURRENT: self.coordinators[API_VALVE_STATUS].data[
"instantaneous_current"
],
ATTR_INST_CURRENT_DDT: self.coordinators[API_VALVE_STATUS].data[
"instantaneous_current_ddt"
],
ATTR_TRAVEL_COUNT: self.coordinators[API_VALVE_STATUS].data[
"travel_count"
],
}
)
async def async_disable_ap(self):
"""Disable the device's onboard access point."""
try:
async with self._client:
await self._client.wifi.disable_ap()
except GuardianError as err:
LOGGER.error("Error while disabling valve controller AP: %s", err)
async def async_enable_ap(self):
"""Enable the device's onboard access point."""
try:
async with self._client:
await self._client.wifi.enable_ap()
except GuardianError as err:
LOGGER.error("Error while enabling valve controller AP: %s", err)
async def async_pair_sensor(self, *, uid):
"""Add a new paired sensor."""
try:
async with self._client:
await self._client.sensor.pair_sensor(uid)
except GuardianError as err:
LOGGER.error("Error while adding paired sensor: %s", err)
return
await self.hass.data[DOMAIN][DATA_PAIRED_SENSOR_MANAGER][
self._entry.entry_id
].async_pair_sensor(uid)
async def async_reboot(self):
"""Reboot the device."""
try:
async with self._client:
await self._client.system.reboot()
except GuardianError as err:
LOGGER.error("Error while rebooting valve controller: %s", err)
async def async_reset_valve_diagnostics(self):
"""Fully reset system motor diagnostics."""
try:
async with self._client:
await self._client.valve.reset()
except GuardianError as err:
LOGGER.error("Error while resetting valve diagnostics: %s", err)
async def async_unpair_sensor(self, *, uid):
"""Add a new paired sensor."""
try:
async with self._client:
await self._client.sensor.unpair_sensor(uid)
except GuardianError as err:
LOGGER.error("Error while removing paired sensor: %s", err)
return
await self.hass.data[DOMAIN][DATA_PAIRED_SENSOR_MANAGER][
self._entry.entry_id
].async_unpair_sensor(uid)
async def async_upgrade_firmware(self, *, url, port, filename):
"""Upgrade the device firmware."""
try:
async with self._client:
await self._client.system.upgrade_firmware(
url=url,
port=port,
filename=filename,
)
except GuardianError as err:
LOGGER.error("Error while upgrading firmware: %s", err)
async def async_turn_off(self, **kwargs) -> None:
"""Turn the valve off (closed)."""
try:
async with self._client:
await self._client.valve.close()
except GuardianError as err:
LOGGER.error("Error while closing the valve: %s", err)
return
self._is_on = False
self.async_write_ha_state()
async def async_turn_on(self, **kwargs) -> None:
"""Turn the valve on (open)."""
try:
async with self._client:
await self._client.valve.open()
except GuardianError as err:
LOGGER.error("Error while opening the valve: %s", err)
return
self._is_on = True
self.async_write_ha_state()
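# Example YAML service call for one of the services registered above (the
# entity id and firmware URL are illustrative):
#
#     service: guardian.upgrade_firmware
#     target:
#       entity_id: switch.guardian_valve_controller
#     data:
#       url: "https://example.com/guardian.bin"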
|
from datetime import timedelta
import functools as ft
import logging
from typing import Any
import voluptuous as vol
from homeassistant.const import (
SERVICE_CLOSE_COVER,
SERVICE_CLOSE_COVER_TILT,
SERVICE_OPEN_COVER,
SERVICE_OPEN_COVER_TILT,
SERVICE_SET_COVER_POSITION,
SERVICE_SET_COVER_TILT_POSITION,
SERVICE_STOP_COVER,
SERVICE_STOP_COVER_TILT,
SERVICE_TOGGLE,
SERVICE_TOGGLE_COVER_TILT,
STATE_CLOSED,
STATE_CLOSING,
STATE_OPEN,
STATE_OPENING,
)
from homeassistant.helpers.config_validation import ( # noqa: F401
PLATFORM_SCHEMA,
PLATFORM_SCHEMA_BASE,
)
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.loader import bind_hass
# mypy: allow-untyped-calls, allow-untyped-defs, no-check-untyped-defs
_LOGGER = logging.getLogger(__name__)
DOMAIN = "cover"
SCAN_INTERVAL = timedelta(seconds=15)
ENTITY_ID_FORMAT = DOMAIN + ".{}"
# Refer to the cover dev docs for device class descriptions
DEVICE_CLASS_AWNING = "awning"
DEVICE_CLASS_BLIND = "blind"
DEVICE_CLASS_CURTAIN = "curtain"
DEVICE_CLASS_DAMPER = "damper"
DEVICE_CLASS_DOOR = "door"
DEVICE_CLASS_GARAGE = "garage"
DEVICE_CLASS_GATE = "gate"
DEVICE_CLASS_SHADE = "shade"
DEVICE_CLASS_SHUTTER = "shutter"
DEVICE_CLASS_WINDOW = "window"
DEVICE_CLASSES = [
DEVICE_CLASS_AWNING,
DEVICE_CLASS_BLIND,
DEVICE_CLASS_CURTAIN,
DEVICE_CLASS_DAMPER,
DEVICE_CLASS_DOOR,
DEVICE_CLASS_GARAGE,
DEVICE_CLASS_GATE,
DEVICE_CLASS_SHADE,
DEVICE_CLASS_SHUTTER,
DEVICE_CLASS_WINDOW,
]
DEVICE_CLASSES_SCHEMA = vol.All(vol.Lower, vol.In(DEVICE_CLASSES))
SUPPORT_OPEN = 1
SUPPORT_CLOSE = 2
SUPPORT_SET_POSITION = 4
SUPPORT_STOP = 8
SUPPORT_OPEN_TILT = 16
SUPPORT_CLOSE_TILT = 32
SUPPORT_STOP_TILT = 64
SUPPORT_SET_TILT_POSITION = 128
ATTR_CURRENT_POSITION = "current_position"
ATTR_CURRENT_TILT_POSITION = "current_tilt_position"
ATTR_POSITION = "position"
ATTR_TILT_POSITION = "tilt_position"
@bind_hass
def is_closed(hass, entity_id):
"""Return if the cover is closed based on the statemachine."""
return hass.states.is_state(entity_id, STATE_CLOSED)
async def async_setup(hass, config):
"""Track states and offer events for covers."""
component = hass.data[DOMAIN] = EntityComponent(
_LOGGER, DOMAIN, hass, SCAN_INTERVAL
)
await component.async_setup(config)
component.async_register_entity_service(
SERVICE_OPEN_COVER, {}, "async_open_cover", [SUPPORT_OPEN]
)
component.async_register_entity_service(
SERVICE_CLOSE_COVER, {}, "async_close_cover", [SUPPORT_CLOSE]
)
component.async_register_entity_service(
SERVICE_SET_COVER_POSITION,
{
vol.Required(ATTR_POSITION): vol.All(
vol.Coerce(int), vol.Range(min=0, max=100)
)
},
"async_set_cover_position",
[SUPPORT_SET_POSITION],
)
component.async_register_entity_service(
SERVICE_STOP_COVER, {}, "async_stop_cover", [SUPPORT_STOP]
)
component.async_register_entity_service(
SERVICE_TOGGLE, {}, "async_toggle", [SUPPORT_OPEN | SUPPORT_CLOSE]
)
component.async_register_entity_service(
SERVICE_OPEN_COVER_TILT, {}, "async_open_cover_tilt", [SUPPORT_OPEN_TILT]
)
component.async_register_entity_service(
SERVICE_CLOSE_COVER_TILT, {}, "async_close_cover_tilt", [SUPPORT_CLOSE_TILT]
)
component.async_register_entity_service(
SERVICE_STOP_COVER_TILT, {}, "async_stop_cover_tilt", [SUPPORT_STOP_TILT]
)
component.async_register_entity_service(
SERVICE_SET_COVER_TILT_POSITION,
{
vol.Required(ATTR_TILT_POSITION): vol.All(
vol.Coerce(int), vol.Range(min=0, max=100)
)
},
"async_set_cover_tilt_position",
[SUPPORT_SET_TILT_POSITION],
)
component.async_register_entity_service(
SERVICE_TOGGLE_COVER_TILT,
{},
"async_toggle_tilt",
[SUPPORT_OPEN_TILT | SUPPORT_CLOSE_TILT],
)
return True
async def async_setup_entry(hass, entry):
"""Set up a config entry."""
return await hass.data[DOMAIN].async_setup_entry(entry)
async def async_unload_entry(hass, entry):
"""Unload a config entry."""
return await hass.data[DOMAIN].async_unload_entry(entry)
class CoverEntity(Entity):
"""Representation of a cover."""
@property
def current_cover_position(self):
"""Return current position of cover.
None is unknown, 0 is closed, 100 is fully open.
"""
@property
def current_cover_tilt_position(self):
"""Return current position of cover tilt.
None is unknown, 0 is closed, 100 is fully open.
"""
@property
def state(self):
"""Return the state of the cover."""
if self.is_opening:
return STATE_OPENING
if self.is_closing:
return STATE_CLOSING
closed = self.is_closed
if closed is None:
return None
return STATE_CLOSED if closed else STATE_OPEN
@property
def state_attributes(self):
"""Return the state attributes."""
data = {}
        current = self.current_cover_position
        if current is not None:
            data[ATTR_CURRENT_POSITION] = current
        current_tilt = self.current_cover_tilt_position
        if current_tilt is not None:
            data[ATTR_CURRENT_TILT_POSITION] = current_tilt
return data
@property
def supported_features(self):
"""Flag supported features."""
supported_features = SUPPORT_OPEN | SUPPORT_CLOSE | SUPPORT_STOP
if self.current_cover_position is not None:
supported_features |= SUPPORT_SET_POSITION
if self.current_cover_tilt_position is not None:
supported_features |= (
SUPPORT_OPEN_TILT
| SUPPORT_CLOSE_TILT
| SUPPORT_STOP_TILT
| SUPPORT_SET_TILT_POSITION
)
return supported_features
@property
def is_opening(self):
"""Return if the cover is opening or not."""
@property
def is_closing(self):
"""Return if the cover is closing or not."""
@property
def is_closed(self):
"""Return if the cover is closed or not."""
raise NotImplementedError()
def open_cover(self, **kwargs: Any) -> None:
"""Open the cover."""
raise NotImplementedError()
async def async_open_cover(self, **kwargs):
"""Open the cover."""
await self.hass.async_add_executor_job(ft.partial(self.open_cover, **kwargs))
def close_cover(self, **kwargs: Any) -> None:
"""Close cover."""
raise NotImplementedError()
async def async_close_cover(self, **kwargs):
"""Close cover."""
await self.hass.async_add_executor_job(ft.partial(self.close_cover, **kwargs))
def toggle(self, **kwargs: Any) -> None:
"""Toggle the entity."""
if self.is_closed:
self.open_cover(**kwargs)
else:
self.close_cover(**kwargs)
async def async_toggle(self, **kwargs):
"""Toggle the entity."""
if self.is_closed:
await self.async_open_cover(**kwargs)
else:
await self.async_close_cover(**kwargs)
def set_cover_position(self, **kwargs):
"""Move the cover to a specific position."""
async def async_set_cover_position(self, **kwargs):
"""Move the cover to a specific position."""
await self.hass.async_add_executor_job(
ft.partial(self.set_cover_position, **kwargs)
)
def stop_cover(self, **kwargs):
"""Stop the cover."""
async def async_stop_cover(self, **kwargs):
"""Stop the cover."""
await self.hass.async_add_executor_job(ft.partial(self.stop_cover, **kwargs))
def open_cover_tilt(self, **kwargs: Any) -> None:
"""Open the cover tilt."""
async def async_open_cover_tilt(self, **kwargs):
"""Open the cover tilt."""
await self.hass.async_add_executor_job(
ft.partial(self.open_cover_tilt, **kwargs)
)
def close_cover_tilt(self, **kwargs: Any) -> None:
"""Close the cover tilt."""
async def async_close_cover_tilt(self, **kwargs):
"""Close the cover tilt."""
await self.hass.async_add_executor_job(
ft.partial(self.close_cover_tilt, **kwargs)
)
def set_cover_tilt_position(self, **kwargs):
"""Move the cover tilt to a specific position."""
async def async_set_cover_tilt_position(self, **kwargs):
"""Move the cover tilt to a specific position."""
await self.hass.async_add_executor_job(
ft.partial(self.set_cover_tilt_position, **kwargs)
)
    def stop_cover_tilt(self, **kwargs):
        """Stop the cover tilt."""
    async def async_stop_cover_tilt(self, **kwargs):
        """Stop the cover tilt."""
await self.hass.async_add_executor_job(
ft.partial(self.stop_cover_tilt, **kwargs)
)
def toggle_tilt(self, **kwargs: Any) -> None:
"""Toggle the entity."""
if self.current_cover_tilt_position == 0:
self.open_cover_tilt(**kwargs)
else:
self.close_cover_tilt(**kwargs)
async def async_toggle_tilt(self, **kwargs):
"""Toggle the entity."""
if self.current_cover_tilt_position == 0:
await self.async_open_cover_tilt(**kwargs)
else:
await self.async_close_cover_tilt(**kwargs)
class CoverDevice(CoverEntity):
"""Representation of a cover (for backwards compatibility)."""
def __init_subclass__(cls, **kwargs):
"""Print deprecation warning."""
super().__init_subclass__(**kwargs)
_LOGGER.warning(
"CoverDevice is deprecated, modify %s to extend CoverEntity",
cls.__name__,
)
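# A minimal sketch of a concrete cover, assuming an in-memory position; a
# real integration would command a device here. The class below is
# illustrative and not part of this module's public API.
class DemoCover(CoverEntity):
    """Cover that tracks its position in memory (0 closed, 100 open)."""
    def __init__(self):
        """Start fully open."""
        self._position = 100
    @property
    def current_cover_position(self):
        """Return the current position of the cover."""
        return self._position
    @property
    def is_closed(self):
        """Return if the cover is closed or not."""
        return self._position == 0
    def open_cover(self, **kwargs: Any) -> None:
        """Open the cover."""
        self._position = 100
    def close_cover(self, **kwargs: Any) -> None:
        """Close cover."""
        self._position = 0
    def set_cover_position(self, **kwargs):
        """Move the cover to a specific position."""
        self._position = kwargs[ATTR_POSITION]
# Because current_cover_position is not None, the inherited
# supported_features automatically includes SUPPORT_SET_POSITION.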
|
from homeassistant.components.ssdp import (
ATTR_SSDP_LOCATION,
ATTR_UPNP_MANUFACTURER,
ATTR_UPNP_MODEL_NAME,
ATTR_UPNP_MODEL_NUMBER,
ATTR_UPNP_SERIAL,
)
from homeassistant.components.wilight.config_flow import (
CONF_MODEL_NAME,
CONF_SERIAL_NUMBER,
)
from homeassistant.components.wilight.const import DOMAIN
from homeassistant.const import CONF_HOST
from homeassistant.helpers.typing import HomeAssistantType
from tests.common import MockConfigEntry
HOST = "127.0.0.1"
WILIGHT_ID = "000000000099"
SSDP_LOCATION = "http://127.0.0.1/"
UPNP_MANUFACTURER = "All Automacao Ltda"
UPNP_MODEL_NAME_P_B = "WiLight 0102001800010009-10010010"
UPNP_MODEL_NAME_DIMMER = "WiLight 0100001700020009-10010010"
UPNP_MODEL_NAME_COLOR = "WiLight 0107001800020009-11010"
UPNP_MODEL_NAME_LIGHT_FAN = "WiLight 0104001800010009-10"
UPNP_MODEL_NUMBER = "123456789012345678901234567890123456"
UPNP_SERIAL = "000000000099"
UPNP_MAC_ADDRESS = "5C:CF:7F:8B:CA:56"
UPNP_MANUFACTURER_NOT_WILIGHT = "Test"
CONF_COMPONENTS = "components"
MOCK_SSDP_DISCOVERY_INFO_P_B = {
ATTR_SSDP_LOCATION: SSDP_LOCATION,
ATTR_UPNP_MANUFACTURER: UPNP_MANUFACTURER,
ATTR_UPNP_MODEL_NAME: UPNP_MODEL_NAME_P_B,
ATTR_UPNP_MODEL_NUMBER: UPNP_MODEL_NUMBER,
ATTR_UPNP_SERIAL: UPNP_SERIAL,
}
MOCK_SSDP_DISCOVERY_INFO_WRONG_MANUFACTORER = {
ATTR_SSDP_LOCATION: SSDP_LOCATION,
ATTR_UPNP_MANUFACTURER: UPNP_MANUFACTURER_NOT_WILIGHT,
ATTR_UPNP_MODEL_NAME: UPNP_MODEL_NAME_P_B,
ATTR_UPNP_MODEL_NUMBER: UPNP_MODEL_NUMBER,
    ATTR_UPNP_SERIAL: UPNP_SERIAL,
}
MOCK_SSDP_DISCOVERY_INFO_MISSING_MANUFACTORER = {
ATTR_SSDP_LOCATION: SSDP_LOCATION,
ATTR_UPNP_MODEL_NAME: UPNP_MODEL_NAME_P_B,
ATTR_UPNP_MODEL_NUMBER: UPNP_MODEL_NUMBER,
    ATTR_UPNP_SERIAL: UPNP_SERIAL,
}
MOCK_SSDP_DISCOVERY_INFO_LIGHT_FAN = {
ATTR_SSDP_LOCATION: SSDP_LOCATION,
ATTR_UPNP_MANUFACTURER: UPNP_MANUFACTURER,
ATTR_UPNP_MODEL_NAME: UPNP_MODEL_NAME_LIGHT_FAN,
ATTR_UPNP_MODEL_NUMBER: UPNP_MODEL_NUMBER,
    ATTR_UPNP_SERIAL: UPNP_SERIAL,
}
async def setup_integration(
hass: HomeAssistantType,
) -> MockConfigEntry:
"""Mock ConfigEntry in Home Assistant."""
entry = MockConfigEntry(
domain=DOMAIN,
unique_id=WILIGHT_ID,
data={
CONF_HOST: HOST,
CONF_SERIAL_NUMBER: UPNP_SERIAL,
CONF_MODEL_NAME: UPNP_MODEL_NAME_P_B,
},
)
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
return entry
|
import os.path as op
from numpy import array
from numpy.testing import assert_allclose
import pytest
from mne.datasets import testing
from mne.utils import (requires_mayavi, run_tests_if_main, traits_test,
modified_env)
from mne.channels import read_dig_fif
data_path = testing.data_path(download=False)
subjects_dir = op.join(data_path, 'subjects')
bem_path = op.join(subjects_dir, 'sample', 'bem', 'sample-1280-bem.fif')
inst_path = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc_raw.fif')
fid_path = op.join(op.dirname(__file__), '..', '..', 'data', 'fsaverage',
'fsaverage-fiducials.fif')
@testing.requires_testing_data
@requires_mayavi
@traits_test
def test_bem_source():
"""Test SurfaceSource."""
from mne.gui._file_traits import SurfaceSource
bem = SurfaceSource()
assert bem.surf.rr.shape == (0, 3)
assert bem.surf.tris.shape == (0, 3)
bem.file = bem_path
assert bem.surf.rr.shape == (642, 3)
assert bem.surf.tris.shape == (1280, 3)
@testing.requires_testing_data
@requires_mayavi
@traits_test
def test_fiducials_source():
"""Test FiducialsSource."""
from mne.gui._file_traits import FiducialsSource
fid = FiducialsSource()
fid.file = fid_path
points = array([[-0.08061612, -0.02908875, -0.04131077],
[0.00146763, 0.08506715, -0.03483611],
[0.08436285, -0.02850276, -0.04127743]])
assert_allclose(fid.points, points, 1e-6)
fid.file = ''
assert fid.points is None
@testing.requires_testing_data
@requires_mayavi
@traits_test
def test_inst_source(tmpdir):
"""Test DigSource."""
from mne.gui._file_traits import DigSource
tempdir = str(tmpdir)
inst = DigSource()
assert inst.inst_fname == '-'
inst.file = inst_path
assert inst.inst_dir == op.dirname(inst_path)
lpa = array([[-7.13766068e-02, 0.00000000e+00, 5.12227416e-09]])
nasion = array([[3.72529030e-09, 1.02605611e-01, 4.19095159e-09]])
rpa = array([[7.52676800e-02, 0.00000000e+00, 5.58793545e-09]])
assert_allclose(inst.lpa, lpa)
assert_allclose(inst.nasion, nasion)
assert_allclose(inst.rpa, rpa)
montage = read_dig_fif(inst_path) # test reading DigMontage
montage_path = op.join(tempdir, 'temp_montage.fif')
montage.save(montage_path)
inst.file = montage_path
assert_allclose(inst.lpa, lpa)
assert_allclose(inst.nasion, nasion)
assert_allclose(inst.rpa, rpa)
@testing.requires_testing_data
@requires_mayavi
@traits_test
def test_subject_source():
"""Test SubjectSelector."""
from mne.gui._file_traits import MRISubjectSource
mri = MRISubjectSource()
mri.subjects_dir = subjects_dir
assert 'sample' in mri.subjects
mri.subject = 'sample'
@testing.requires_testing_data
@requires_mayavi
@traits_test
def test_subject_source_with_fsaverage(tmpdir):
"""Test SubjectSelector."""
from mne.gui._file_traits import MRISubjectSource
tempdir = str(tmpdir)
mri = MRISubjectSource()
assert not mri.can_create_fsaverage
pytest.raises(RuntimeError, mri.create_fsaverage)
mri.subjects_dir = tempdir
assert mri.can_create_fsaverage
assert not op.isdir(op.join(tempdir, 'fsaverage'))
# fake FREESURFER_HOME
with modified_env(FREESURFER_HOME=data_path):
mri.create_fsaverage()
assert op.isdir(op.join(tempdir, 'fsaverage'))
run_tests_if_main()
|
import os
import subprocess
import sys
import threading
import time
import rospkg
if sys.hexversion > 0x03000000: # Python3
python3 = True
else:
python3 = False
def _read_stdout(cmd):
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
std_out, std_err = p.communicate()
if python3:
return std_out.decode()
else:
return std_out
def num_cpus():
"""
Detects the number of CPUs on a system. Cribbed from pp.
"""
# Linux, Unix and MacOS:
if hasattr(os, 'sysconf'):
if 'SC_NPROCESSORS_ONLN' in os.sysconf_names:
# Linux & Unix:
ncpus = os.sysconf('SC_NPROCESSORS_ONLN')
if isinstance(ncpus, int) and ncpus > 0:
return ncpus
else: # OSX:
return int(_read_stdout(['sysctl', '-n', 'hw.ncpu'])) or 1
# Windows:
if 'NUMBER_OF_PROCESSORS' in os.environ:
ncpus = int(os.environ['NUMBER_OF_PROCESSORS'])
if ncpus > 0:
return ncpus
return 1 # Default
# TODO: may no longer need this now that we've ported to rospkg
class DependencyTracker:
"""Track dependencies between packages. This is basically a
caching way to call rospkg. It also will allow you to specifiy a
range of packages over which to track dependencies. This is useful
if you are only building a subset of the tree. For example with the
--specified-only option."""
def __init__(self, valid_packages=None, rospack=None):
"""
@param valid_packages: defaults to rospack list
"""
if rospack is None:
self.rospack = rospkg.RosPack()
else:
self.rospack = rospack
if valid_packages is None:
valid_packages = self.rospack.list()
self.valid_packages = valid_packages
self.deps_1 = {}
self.deps = {}
def get_deps_1(self, package):
if package not in self.deps_1:
self.deps_1[package] = []
try:
potential_dependencies = self.rospack.get_depends(package, implicit=False)
except rospkg.ResourceNotFound:
potential_dependencies = []
for p in potential_dependencies:
if p in self.valid_packages:
self.deps_1[package].append(p)
return self.deps_1[package]
def get_deps(self, package):
if package not in self.deps:
self.deps[package] = []
try:
potential_dependencies = self.rospack.get_depends(package)
except rospkg.ResourceNotFound:
potential_dependencies = []
for p in potential_dependencies:
if p in self.valid_packages:
self.deps[package].append(p)
return self.deps[package]
def load_fake_deps(self, deps, deps1):
self.deps = deps
self.deps_1 = deps1
return
class CompileThread(threading.Thread):
"""This is the class which is used as the thread for parallel
builds. This class will query the build queue object for new
commands and block on its calls until the build queue says that
building is done."""
def __init__(self, name, build_queue, rosmakeall, argument=None):
threading.Thread.__init__(self)
self.build_queue = build_queue
self.rosmakeall = rosmakeall
self.argument = argument
self.name = name
self.logging_enabled = True
def run(self):
while not self.build_queue.is_done():
pkg = self.build_queue.get_valid_package()
if not pkg:
if self.build_queue.succeeded():
self.rosmakeall.printer.print_verbose('[ Build Completed Thread Exiting ]', thread_name=self.name)
else:
self.rosmakeall.printer.print_verbose('[ Build Terminated Thread Exiting ]', thread_name=self.name)
                break  # no more packages; we must be done
# update status after accepting build
self.rosmakeall.update_status(self.argument,
self.build_queue.get_started_threads(),
self.build_queue.progress_str())
if self.argument:
self.rosmakeall.printer.print_all('Starting >>> %s [ make %s ]' % (pkg, self.argument), thread_name=self.name)
else:
self.rosmakeall.printer.print_all('Starting >>> %s [ make ] ' % pkg, thread_name=self.name)
(result, result_string) = self.rosmakeall.build(pkg, self.argument, self.build_queue.robust_build)
self.rosmakeall.printer.print_all('Finished <<< %s %s' % (pkg, result_string), thread_name=self.name)
# print "Finished2"
self.build_queue.return_built(pkg, result)
# print "returned"
if result or self.build_queue.robust_build:
pass # print "result", result, "robust", self.build_queue.robust_build
else:
if result_string.find('[Interrupted]') != -1:
self.rosmakeall.printer.print_all('Caught Interruption', thread_name=self.name)
self.build_queue.stop() # todo move this logic into BuildQueue itself
                    break  # unnecessary; build_queue is done, so the while loop will exit
self.rosmakeall.printer.print_all('Halting due to failure in package %s. \n[ rosmake ] Waiting for other threads to complete.' % pkg)
self.build_queue.stop()
                break  # unnecessary; build_queue is done, so the while loop will exit
# update status after at end of build
# print "updating status"
self.rosmakeall.update_status(self.argument,
self.build_queue.get_started_threads(),
self.build_queue.progress_str())
# print "done built", len(self.build_queue.built), self.build_queue.built
# print "failed", len(self.build_queue.failed), self.build_queue.failed
# print "to_build", len(self.build_queue.to_build), self.build_queue.to_build
# print "in progress", len(self.build_queue._started), self.build_queue._started
# print "last update"
# update status before ending thread
self.rosmakeall.update_status(self.argument,
self.build_queue.get_started_threads(),
self.build_queue.progress_str())
# print "thread finished"
class BuildQueue:
"""This class provides a thread safe build queue. Which will do
the sequencing for many CompileThreads."""
def __init__(self, package_list, dependency_tracker, robust_build=False):
self._total_pkgs = len(package_list)
self.dependency_tracker = dependency_tracker
self.to_build = package_list[:] # do a copy not a reference
self.built = []
self.failed = []
self.condition = threading.Condition()
self._done = False
self.robust_build = robust_build
self._started = {}
self._hack_end_counter = 0
def progress_str(self):
return '[ %d Active %d/%d Complete ]' % (len(self._started), len(self.built), self._total_pkgs)
def get_started_threads(self): # TODO sort this other than hash order
return self._started.copy()
def is_completed(self):
"""Return if the build queue has been completed."""
return len(self.built) + len(self.failed) == self._total_pkgs
def is_done(self):
"""Return if the build queue has been completed."""
return self.is_completed() or self._done # finished or halted
def succeeded(self):
"""Return whether the build queue has completed all packages successfully."""
        return len(self.built) == self._total_pkgs
def stop(self):
"""Stop the build queue, including waking all blocking
threads. It will not stop in flight builds."""
self._done = True
with self.condition:
            self.condition.notify_all()  # wake any blocking threads
def return_built(self, package, successful=True): # mark that a package is built
"""The thread which completes a package marks it as done with
this method."""
with self.condition:
if successful:
self.built.append(package)
else:
self.failed.append(package)
if package in self._started.keys():
self._started.pop(package)
else:
                pass  # should not happen: package was never marked as started
if self.is_completed():
self._done = True
                self.condition.notify_all()  # wake up any waiting threads
    def get_valid_package(self):  # blocking call; returns None when done
"""This is a blocking call which will return a package which has
all dependencies met. If interrupted or done it will return
None"""
with self.condition:
while (not self.is_done() and len(self.to_build) > 0):
for p in self.to_build:
dependencies_met = True
for d in self.dependency_tracker.get_deps(p):
if d not in self.built and not (self.robust_build and d in self.failed):
dependencies_met = False
# print "Dependency %s not met for %s"%(d, p)
break
if dependencies_met: # all dependencies met
self.to_build.remove(p)
self._started[p] = time.time()
self._hack_end_counter = 0 # reset end counter if success
return p # break out and return package if found
elif len(self._started) == 0 and self._hack_end_counter > 2:
# we're hung with broken dependencies
return None
# print "TTGTTTTHTHT Waiting on condition"
                self.condition.wait(1.0)  # no buildable package found; wait for a notify before looping
self._hack_end_counter += 1 # if we're here too often we will quit
if self.is_done():
break
return None
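if __name__ == '__main__':
    # Single-threaded sketch of how BuildQueue sequences packages; the
    # package names and dependency graph are made up, and load_fake_deps
    # avoids needing a ROS environment or a real rospkg instance.
    tracker = DependencyTracker(valid_packages=['a', 'b', 'c'], rospack=object())
    tracker.load_fake_deps({'a': [], 'b': ['a'], 'c': ['a', 'b']},
                           {'a': [], 'b': ['a'], 'c': ['b']})
    queue = BuildQueue(['c', 'b', 'a'], tracker)
    while True:
        pkg = queue.get_valid_package()
        if pkg is None:
            break
        print('building %s %s' % (pkg, queue.progress_str()))
        queue.return_built(pkg, True)
    # Packages come out in dependency order: a, then b, then c.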
|
import diamond.collector
import subprocess
import re
import os
from diamond.collector import str_to_bool
class SmartCollector(diamond.collector.Collector):
def get_default_config_help(self):
config_help = super(SmartCollector, self).get_default_config_help()
config_help.update({
'devices': "device regex to collect stats on",
'bin': 'The path to the smartctl binary',
'use_sudo': 'Use sudo?',
'sudo_cmd': 'Path to sudo',
})
return config_help
def get_default_config(self):
"""
Returns default configuration options.
"""
config = super(SmartCollector, self).get_default_config()
config.update({
'path': 'smart',
'bin': 'smartctl',
'use_sudo': False,
'sudo_cmd': '/usr/bin/sudo',
'devices': '^disk[0-9]$|^sd[a-z]$|^hd[a-z]$',
})
return config
def collect(self):
"""
Collect and publish S.M.A.R.T. attributes
"""
devices = re.compile(self.config['devices'])
for device in os.listdir('/dev'):
if devices.match(device):
command = [self.config['bin'], "-A", os.path.join('/dev',
device)]
if str_to_bool(self.config['use_sudo']):
command.insert(0, self.config['sudo_cmd'])
attributes = subprocess.Popen(
command,
stdout=subprocess.PIPE
).communicate()[0].strip().splitlines()
metrics = {}
start_line = self.find_attr_start_line(attributes)
for attr in attributes[start_line:]:
attribute = attr.split()
if attribute[1] != "Unknown_Attribute":
metric = "%s.%s" % (device, attribute[1])
else:
metric = "%s.%s" % (device, attribute[0])
# 234 Thermal_Throttle (...) 0/0
if '/' in attribute[9]:
expanded = attribute[9].split('/')
for i, subattribute in enumerate(expanded):
submetric = '%s_%d' % (metric, i)
if submetric not in metrics:
metrics[submetric] = subattribute
elif metrics[submetric] == 0 and subattribute > 0:
metrics[submetric] = subattribute
else:
# New metric? Store it
if metric not in metrics:
metrics[metric] = attribute[9]
                        # Duplicate metric? Only store it if it has a larger
                        # value. This happens semi-often with the
                        # Temperature_Celsius attribute: you will have a
                        # PASS/FAIL after the real temp, so only overwrite if
                        # the earlier one was a PASS/FAIL (0/1).
elif metrics[metric] == 0 and attribute[9] > 0:
metrics[metric] = attribute[9]
else:
continue
for metric in metrics.keys():
self.publish(metric, metrics[metric])
def find_attr_start_line(self, lines, min_line=4, max_line=9):
"""
Return line number of the first real attribute and value.
The first line is 0. If the 'ATTRIBUTE_NAME' header is not
found, return the index after max_line.
"""
for idx, line in enumerate(lines[min_line:max_line]):
col = line.split()
if len(col) > 1 and col[1] == 'ATTRIBUTE_NAME':
return idx + min_line + 1
self.log.warn('ATTRIBUTE_NAME not found in second column of'
' smartctl output between lines %d and %d.'
% (min_line, max_line))
return max_line + 1
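if __name__ == '__main__':
    # Sanity check of find_attr_start_line on a hypothetical excerpt of
    # `smartctl -A` output; only the line layout matters, not the values.
    sample = [
        'smartctl 6.6 (local build)',
        'Copyright (C) 2002-17, Bruce Allen, Christian Franke',
        '',
        '=== START OF READ SMART DATA SECTION ===',
        'SMART Attributes Data Structure revision number: 16',
        'ID# ATTRIBUTE_NAME FLAG VALUE WORST THRESH TYPE RAW_VALUE',
        '  1 Raw_Read_Error_Rate 0x002f 200 200 051 Pre-fail 0',
    ]
    # __new__ skips the collector's config machinery; fine for this demo.
    collector = SmartCollector.__new__(SmartCollector)
    print(collector.find_attr_start_line(sample))  # prints 6, the data row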
|
import pytest
from homeassistant.components.zwave import const
from tests.async_mock import AsyncMock, MagicMock, patch
from tests.components.light.conftest import mock_light_profiles # noqa
from tests.mock.zwave import MockNetwork, MockNode, MockOption, MockValue
@pytest.fixture
def mock_openzwave():
"""Mock out Open Z-Wave."""
base_mock = MagicMock()
libopenzwave = base_mock.libopenzwave
libopenzwave.__file__ = "test"
base_mock.network.ZWaveNetwork = MockNetwork
base_mock.option.ZWaveOption = MockOption
with patch.dict(
"sys.modules",
{
"libopenzwave": libopenzwave,
"openzwave.option": base_mock.option,
"openzwave.network": base_mock.network,
"openzwave.group": base_mock.group,
},
):
yield base_mock
@pytest.fixture
def mock_discovery():
"""Mock discovery."""
discovery = MagicMock()
discovery.async_load_platform = AsyncMock(return_value=None)
yield discovery
@pytest.fixture
def mock_import_module():
"""Mock import module."""
platform = MagicMock()
mock_device = MagicMock()
mock_device.name = "test_device"
platform.get_device.return_value = mock_device
import_module = MagicMock()
import_module.return_value = platform
yield import_module
@pytest.fixture
def mock_values():
"""Mock values."""
node = MockNode()
mock_schema = {
const.DISC_COMPONENT: "mock_component",
const.DISC_VALUES: {
const.DISC_PRIMARY: {const.DISC_COMMAND_CLASS: ["mock_primary_class"]},
"secondary": {const.DISC_COMMAND_CLASS: ["mock_secondary_class"]},
"optional": {
const.DISC_COMMAND_CLASS: ["mock_optional_class"],
const.DISC_OPTIONAL: True,
},
},
}
value_class = MagicMock()
value_class.primary = MockValue(
command_class="mock_primary_class", node=node, value_id=1000
)
value_class.secondary = MockValue(command_class="mock_secondary_class", node=node)
value_class.duplicate_secondary = MockValue(
command_class="mock_secondary_class", node=node
)
value_class.optional = MockValue(command_class="mock_optional_class", node=node)
value_class.no_match_value = MockValue(command_class="mock_bad_class", node=node)
yield (node, value_class, mock_schema)
|
from datetime import timedelta
import json
import logging
import voluptuous as vol
from homeassistant.components import mqtt
from homeassistant.components.mqtt import CONF_STATE_TOPIC
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ATTR_ID, CONF_NAME, CONF_TIMEOUT, STATE_NOT_HOME
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import dt, slugify
_LOGGER = logging.getLogger(__name__)
ATTR_DEVICE_ID = "device_id"
ATTR_DISTANCE = "distance"
ATTR_ROOM = "room"
CONF_DEVICE_ID = "device_id"
CONF_AWAY_TIMEOUT = "away_timeout"
DEFAULT_AWAY_TIMEOUT = 0
DEFAULT_NAME = "Room Sensor"
DEFAULT_TIMEOUT = 5
DEFAULT_TOPIC = "room_presence"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_DEVICE_ID): cv.string,
vol.Required(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
vol.Optional(CONF_AWAY_TIMEOUT, default=DEFAULT_AWAY_TIMEOUT): cv.positive_int,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
).extend(mqtt.MQTT_RO_PLATFORM_SCHEMA.schema)
MQTT_PAYLOAD = vol.Schema(
vol.All(
json.loads,
vol.Schema(
{
vol.Required(ATTR_ID): cv.string,
vol.Required(ATTR_DISTANCE): vol.Coerce(float),
},
extra=vol.ALLOW_EXTRA,
),
)
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up MQTT room Sensor."""
async_add_entities(
[
MQTTRoomSensor(
config.get(CONF_NAME),
config.get(CONF_STATE_TOPIC),
config.get(CONF_DEVICE_ID),
config.get(CONF_TIMEOUT),
config.get(CONF_AWAY_TIMEOUT),
)
]
)
class MQTTRoomSensor(Entity):
"""Representation of a room sensor that is updated via MQTT."""
def __init__(self, name, state_topic, device_id, timeout, consider_home):
"""Initialize the sensor."""
self._state = STATE_NOT_HOME
self._name = name
self._state_topic = f"{state_topic}/+"
self._device_id = slugify(device_id).upper()
self._timeout = timeout
self._consider_home = (
timedelta(seconds=consider_home) if consider_home else None
)
self._distance = None
self._updated = None
async def async_added_to_hass(self):
"""Subscribe to MQTT events."""
@callback
def update_state(device_id, room, distance):
"""Update the sensor state."""
self._state = room
self._distance = distance
self._updated = dt.utcnow()
self.async_write_ha_state()
@callback
def message_received(msg):
"""Handle new MQTT messages."""
try:
data = MQTT_PAYLOAD(msg.payload)
except vol.MultipleInvalid as error:
_LOGGER.debug("Skipping update because of malformatted data: %s", error)
return
device = _parse_update_data(msg.topic, data)
if device.get(CONF_DEVICE_ID) == self._device_id:
if self._distance is None or self._updated is None:
update_state(**device)
else:
# update if:
# device is in the same room OR
# device is closer to another room OR
# last update from other room was too long ago
timediff = dt.utcnow() - self._updated
if (
device.get(ATTR_ROOM) == self._state
or device.get(ATTR_DISTANCE) < self._distance
or timediff.seconds >= self._timeout
):
update_state(**device)
return await mqtt.async_subscribe(
self.hass, self._state_topic, message_received, 1
)
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def device_state_attributes(self):
"""Return the state attributes."""
return {ATTR_DISTANCE: self._distance}
@property
def state(self):
"""Return the current room of the entity."""
return self._state
def update(self):
"""Update the state for absent devices."""
if (
self._updated
and self._consider_home
and dt.utcnow() - self._updated > self._consider_home
):
self._state = STATE_NOT_HOME
def _parse_update_data(topic, data):
"""Parse the room presence update."""
parts = topic.split("/")
room = parts[-1]
device_id = slugify(data.get(ATTR_ID)).upper()
distance = data.get("distance")
parsed_data = {ATTR_DEVICE_ID: device_id, ATTR_ROOM: room, ATTR_DISTANCE: distance}
return parsed_data
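# For example, a hypothetical payload published on "room_presence/bedroom":
#
#     _parse_update_data("room_presence/bedroom",
#                        {"id": "123testdevice", "distance": 5.678})
#
# returns {"device_id": "123TESTDEVICE", "room": "bedroom",
# "distance": 5.678}: the room is the final topic segment and the device
# id is slugified and upper-cased before comparison.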
|
import numpy as np
from ..base import BaseRaw
from ...utils import verbose, logger, _validate_type, fill_doc, _check_option
@fill_doc
class RawArray(BaseRaw):
"""Raw object from numpy array.
Parameters
----------
data : array, shape (n_channels, n_times)
The channels' time series. See notes for proper units of measure.
info : instance of Info
Info dictionary. Consider using :func:`mne.create_info` to populate
this structure. This may be modified in place by the class.
first_samp : int
First sample offset used during recording (default 0).
.. versionadded:: 0.12
copy : {'data', 'info', 'both', 'auto', None}
Determines what gets copied on instantiation. "auto" (default)
will copy info, and copy "data" only if necessary to get to
double floating point precision.
.. versionadded:: 0.18
%(verbose)s
See Also
--------
mne.EpochsArray
mne.EvokedArray
mne.create_info
Notes
-----
Proper units of measure:
* V: eeg, eog, seeg, emg, ecg, bio, ecog
* T: mag
* T/m: grad
* M: hbo, hbr
* Am: dipole
* AU: misc
"""
@verbose
def __init__(self, data, info, first_samp=0, copy='auto',
verbose=None): # noqa: D102
_validate_type(info, 'info', 'info')
_check_option('copy', copy, ('data', 'info', 'both', 'auto', None))
dtype = np.complex128 if np.any(np.iscomplex(data)) else np.float64
orig_data = data
data = np.asanyarray(orig_data, dtype=dtype)
if data.ndim != 2:
raise ValueError('Data must be a 2D array of shape (n_channels, '
'n_samples), got shape %s' % (data.shape,))
if len(data) != len(info['ch_names']):
raise ValueError('len(data) (%s) does not match '
'len(info["ch_names"]) (%s)'
% (len(data), len(info['ch_names'])))
assert len(info['ch_names']) == info['nchan']
if copy in ('auto', 'info', 'both'):
info = info.copy()
if copy in ('data', 'both'):
if data is orig_data:
data = data.copy()
elif copy != 'auto' and data is not orig_data:
raise ValueError('data copying was not requested by copy=%r but '
'it was required to get to double floating point '
'precision' % (copy,))
logger.info('Creating RawArray with %s data, n_channels=%s, n_times=%s'
% (dtype.__name__, data.shape[0], data.shape[1]))
super(RawArray, self).__init__(info, data,
first_samps=(int(first_samp),),
dtype=dtype, verbose=verbose)
logger.info(' Range : %d ... %d = %9.3f ... %9.3f secs' % (
self.first_samp, self.last_samp,
float(self.first_samp) / info['sfreq'],
float(self.last_samp) / info['sfreq']))
logger.info('Ready.')
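if __name__ == '__main__':
    # Minimal usage sketch (assumes a complete mne install): two fake EEG
    # channels at 100 Hz, scaled to volts as the Notes section requires.
    from mne import create_info
    rng = np.random.RandomState(0)
    info = create_info(ch_names=['EEG 001', 'EEG 002'], sfreq=100.,
                       ch_types='eeg')
    raw = RawArray(rng.randn(2, 500) * 1e-6, info)  # ~1 uV noise, 5 s
    print(raw)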
|
import asyncio
import logging
from time import time
import pytest
from homeassistant import config_entries, data_entry_flow
from homeassistant.components.cloud import account_link
from homeassistant.helpers import config_entry_oauth2_flow
from homeassistant.util.dt import utcnow
from tests.async_mock import AsyncMock, Mock, patch
from tests.common import async_fire_time_changed, mock_platform
TEST_DOMAIN = "oauth2_test"
@pytest.fixture
def flow_handler(hass):
"""Return a registered config flow."""
mock_platform(hass, f"{TEST_DOMAIN}.config_flow")
class TestFlowHandler(config_entry_oauth2_flow.AbstractOAuth2FlowHandler):
"""Test flow handler."""
DOMAIN = TEST_DOMAIN
@property
def logger(self) -> logging.Logger:
"""Return logger."""
return logging.getLogger(__name__)
with patch.dict(config_entries.HANDLERS, {TEST_DOMAIN: TestFlowHandler}):
yield TestFlowHandler
async def test_setup_provide_implementation(hass):
"""Test that we provide implementations."""
account_link.async_setup(hass)
with patch(
"homeassistant.components.cloud.account_link._get_services",
return_value=[
{"service": "test", "min_version": "0.1.0"},
{"service": "too_new", "min_version": "100.0.0"},
],
):
assert (
await config_entry_oauth2_flow.async_get_implementations(
hass, "non_existing"
)
== {}
)
assert (
await config_entry_oauth2_flow.async_get_implementations(hass, "too_new")
== {}
)
implementations = await config_entry_oauth2_flow.async_get_implementations(
hass, "test"
)
assert "cloud" in implementations
assert implementations["cloud"].domain == "cloud"
assert implementations["cloud"].service == "test"
assert implementations["cloud"].hass is hass
async def test_get_services_cached(hass):
"""Test that we cache services."""
hass.data["cloud"] = None
services = 1
with patch.object(account_link, "CACHE_TIMEOUT", 0), patch(
"hass_nabucasa.account_link.async_fetch_available_services",
side_effect=lambda _: services,
) as mock_fetch:
assert await account_link._get_services(hass) == 1
services = 2
assert len(mock_fetch.mock_calls) == 1
assert await account_link._get_services(hass) == 1
services = 3
hass.data.pop(account_link.DATA_SERVICES)
assert await account_link._get_services(hass) == 3
services = 4
async_fire_time_changed(hass, utcnow())
await hass.async_block_till_done()
# Check cache purged
assert await account_link._get_services(hass) == 4
async def test_get_services_error(hass):
"""Test that we cache services."""
hass.data["cloud"] = None
with patch.object(account_link, "CACHE_TIMEOUT", 0), patch(
"hass_nabucasa.account_link.async_fetch_available_services",
side_effect=asyncio.TimeoutError,
):
assert await account_link._get_services(hass) == []
assert account_link.DATA_SERVICES not in hass.data
async def test_implementation(hass, flow_handler):
"""Test Cloud OAuth2 implementation."""
hass.data["cloud"] = None
impl = account_link.CloudOAuth2Implementation(hass, "test")
assert impl.name == "Home Assistant Cloud"
assert impl.domain == "cloud"
flow_handler.async_register_implementation(hass, impl)
flow_finished = asyncio.Future()
helper = Mock(
async_get_authorize_url=AsyncMock(return_value="http://example.com/auth"),
async_get_tokens=Mock(return_value=flow_finished),
)
with patch(
"hass_nabucasa.account_link.AuthorizeAccountHelper", return_value=helper
):
result = await hass.config_entries.flow.async_init(
TEST_DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_EXTERNAL_STEP
assert result["url"] == "http://example.com/auth"
flow_finished.set_result(
{
"refresh_token": "mock-refresh",
"access_token": "mock-access",
"expires_in": 10,
"token_type": "bearer",
}
)
await hass.async_block_till_done()
# Flow finished!
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["data"]["auth_implementation"] == "cloud"
expires_at = result["data"]["token"].pop("expires_at")
assert round(expires_at - time()) == 10
assert result["data"]["token"] == {
"refresh_token": "mock-refresh",
"access_token": "mock-access",
"token_type": "bearer",
"expires_in": 10,
}
entry = hass.config_entries.async_entries(TEST_DOMAIN)[0]
assert (
await config_entry_oauth2_flow.async_get_config_entry_implementation(
hass, entry
)
is impl
)
|
import voluptuous as vol
from homeassistant.components import websocket_api
WS_TYPE_LIST = "config/auth/list"
SCHEMA_WS_LIST = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend(
{vol.Required("type"): WS_TYPE_LIST}
)
WS_TYPE_DELETE = "config/auth/delete"
SCHEMA_WS_DELETE = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend(
{vol.Required("type"): WS_TYPE_DELETE, vol.Required("user_id"): str}
)
async def async_setup(hass):
"""Enable the Home Assistant views."""
hass.components.websocket_api.async_register_command(
WS_TYPE_LIST, websocket_list, SCHEMA_WS_LIST
)
hass.components.websocket_api.async_register_command(
WS_TYPE_DELETE, websocket_delete, SCHEMA_WS_DELETE
)
hass.components.websocket_api.async_register_command(websocket_create)
hass.components.websocket_api.async_register_command(websocket_update)
return True
@websocket_api.require_admin
@websocket_api.async_response
async def websocket_list(hass, connection, msg):
"""Return a list of users."""
result = [_user_info(u) for u in await hass.auth.async_get_users()]
connection.send_message(websocket_api.result_message(msg["id"], result))
@websocket_api.require_admin
@websocket_api.async_response
async def websocket_delete(hass, connection, msg):
"""Delete a user."""
if msg["user_id"] == connection.user.id:
connection.send_message(
websocket_api.error_message(
msg["id"], "no_delete_self", "Unable to delete your own account"
)
)
return
user = await hass.auth.async_get_user(msg["user_id"])
if not user:
connection.send_message(
websocket_api.error_message(msg["id"], "not_found", "User not found")
)
return
await hass.auth.async_remove_user(user)
connection.send_message(websocket_api.result_message(msg["id"]))
@websocket_api.require_admin
@websocket_api.async_response
@websocket_api.websocket_command(
{
vol.Required("type"): "config/auth/create",
vol.Required("name"): str,
vol.Optional("group_ids"): [str],
}
)
async def websocket_create(hass, connection, msg):
"""Create a user."""
user = await hass.auth.async_create_user(msg["name"], msg.get("group_ids"))
connection.send_message(
websocket_api.result_message(msg["id"], {"user": _user_info(user)})
)
@websocket_api.require_admin
@websocket_api.async_response
@websocket_api.websocket_command(
{
vol.Required("type"): "config/auth/update",
vol.Required("user_id"): str,
vol.Optional("name"): str,
vol.Optional("group_ids"): [str],
}
)
async def websocket_update(hass, connection, msg):
"""Update a user."""
user = await hass.auth.async_get_user(msg.pop("user_id"))
if not user:
connection.send_message(
websocket_api.error_message(
msg["id"], websocket_api.const.ERR_NOT_FOUND, "User not found"
)
)
return
if user.system_generated:
connection.send_message(
websocket_api.error_message(
msg["id"],
"cannot_modify_system_generated",
"Unable to update system generated users.",
)
)
return
msg.pop("type")
msg_id = msg.pop("id")
await hass.auth.async_update_user(user, **msg)
connection.send_message(
websocket_api.result_message(msg_id, {"user": _user_info(user)})
)
def _user_info(user):
"""Format a user."""
return {
"id": user.id,
"name": user.name,
"is_owner": user.is_owner,
"is_active": user.is_active,
"system_generated": user.system_generated,
"group_ids": [group.id for group in user.groups],
"credentials": [{"type": c.auth_provider_type} for c in user.credentials],
}
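# Example round trip for the create command (payload values illustrative):
#
#     -> {"id": 12, "type": "config/auth/create", "name": "New User"}
#     <- {"id": 12, "type": "result", "success": true,
#         "result": {"user": {"id": "...", "name": "New User", ...}}}
#
# Deleting your own account is rejected with the "no_delete_self" error.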
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import logging
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker import errors
from perfkitbenchmarker import sample
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.linux_packages import redis_server
from six.moves import range
flags.DEFINE_integer('redis_numprocesses', 1, 'Number of Redis processes to '
'spawn per processor.')
flags.DEFINE_integer('redis_clients', 5, 'Number of redis loadgen clients')
flags.DEFINE_string('redis_setgetratio', '1:0', 'Ratio of reads to write '
'performed by the memtier benchmark, default is '
'\'1:0\', ie: writes only.')
FIRST_PORT = 6379
FLAGS = flags.FLAGS
LOAD_THREAD = 1
LOAD_CLIENT = 1
LOAD_PIPELINE = 100
START_KEY = 1
BENCHMARK_NAME = 'redis'
BENCHMARK_CONFIG = """
redis:
description: >
Run memtier_benchmark against Redis.
Specify the number of client VMs with --redis_clients.
vm_groups:
default:
vm_spec: *default_single_core
"""
def GetConfig(user_config):
config = configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
config['vm_groups']['default']['vm_count'] = 1 + FLAGS.redis_clients
return config
def PrepareLoadgen(load_vm):
load_vm.Install('memtier')
def GetNumRedisServers(redis_vm):
"""Get the number of redis servers to install/use for this test."""
if FLAGS.num_cpus_override:
return FLAGS.num_cpus_override * FLAGS.redis_numprocesses
return redis_vm.NumCpusForBenchmark() * FLAGS.redis_numprocesses
def Prepare(benchmark_spec):
"""Install Redis on one VM and memtier_benchmark on another.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
vms = benchmark_spec.vms
redis_vm = vms[0]
# Install latest redis on the 1st machine.
redis_vm.Install('redis_server')
redis_server.Configure(redis_vm)
redis_server.Start(redis_vm)
# Remove snapshotting
sed_cmd = (r"sed -i -e '/save 900/d' -e '/save 300/d' -e '/save 60/d' -e 's/#"
" save \"\"/save \"\"/g' %s/redis.conf")
redis_vm.RemoteCommand(sed_cmd % redis_server.GetRedisDir())
args = [((vm,), {}) for vm in vms]
vm_util.RunThreaded(PrepareLoadgen, args)
for i in range(GetNumRedisServers(redis_vm)):
port = FIRST_PORT + i
redis_vm.RemoteCommand(
'cp %s/redis.conf %s/redis-%d.conf' %
(redis_server.GetRedisDir(), redis_server.GetRedisDir(), port))
redis_vm.RemoteCommand(
r'sed -i -e "s/port 6379/port %d/g" %s/redis-%d.conf' %
(port, redis_server.GetRedisDir(), port))
redis_vm.RemoteCommand(
'nohup sudo %s/src/redis-server %s/redis-%d.conf &> /dev/null &' %
(redis_server.GetRedisDir(), redis_server.GetRedisDir(), port))
# Pre-populate the redis server(s) with data
redis_vm.RemoteCommand(
'memtier_benchmark -s localhost -p %d -d %s -t %d -c %d '
'--ratio 1:0 --key-pattern %s --pipeline %d '
'--key-minimum %d --key-maximum %d -n allkeys ' %
(port, FLAGS.memtier_data_size, LOAD_THREAD, LOAD_CLIENT,
FLAGS.memtier_key_pattern, LOAD_PIPELINE, START_KEY,
FLAGS.memtier_requests))
RedisResult = collections.namedtuple('RedisResult',
['throughput', 'average_latency'])
def RunLoad(redis_vm, load_vm, threads, port, test_id):
"""Spawn a memteir_benchmark on the load_vm against the redis_vm:port.
Args:
redis_vm: The target of the memtier_benchmark
load_vm: The vm that will run the memtier_benchmark.
threads: The number of threads to run in this memtier_benchmark process.
port: the port to target on the redis_vm.
test_id: test id to differentiate between tests.
Returns:
A throughput, latency tuple, or None if threads was 0.
Raises:
Exception: If an invalid combination of FLAGS is specified.
"""
if threads == 0:
return None
if len(FLAGS.memtier_pipeline) != 1:
raise Exception('Only one memtier pipeline is supported. '
'Passed in {0}'.format(FLAGS.memtier_pipeline))
memtier_pipeline = FLAGS.memtier_pipeline[0]
base_cmd = ('memtier_benchmark -s %s -p %d -d %s '
'--ratio %s --key-pattern %s --pipeline %d -c 1 -t %d '
'--test-time %d --random-data --key-minimum %d '
'--key-maximum %d > %s ;')
final_cmd = (base_cmd % (redis_vm.internal_ip, port, FLAGS.memtier_data_size,
FLAGS.redis_setgetratio, FLAGS.memtier_key_pattern,
memtier_pipeline, threads, 10, START_KEY,
FLAGS.memtier_requests, '/dev/null') +
base_cmd % (redis_vm.internal_ip, port, FLAGS.memtier_data_size,
FLAGS.redis_setgetratio, FLAGS.memtier_key_pattern,
memtier_pipeline, threads, 20, START_KEY,
FLAGS.memtier_requests, 'outfile-%d' % test_id) +
base_cmd % (redis_vm.internal_ip, port, FLAGS.memtier_data_size,
FLAGS.redis_setgetratio, FLAGS.memtier_key_pattern,
memtier_pipeline, threads, 10, START_KEY,
FLAGS.memtier_requests, '/dev/null'))
load_vm.RemoteCommand(final_cmd)
output, _ = load_vm.RemoteCommand('cat outfile-%d | grep Totals | '
'tr -s \' \' | cut -d \' \' -f 2' % test_id)
throughput = float(output)
output, _ = load_vm.RemoteCommand('cat outfile-%d | grep Totals | '
'tr -s \' \' | cut -d \' \' -f 5' % test_id)
latency = float(output)
output, _ = load_vm.RemoteCommand('cat outfile-%d' % test_id)
logging.info(output)
return RedisResult(throughput, latency)
def Run(benchmark_spec):
"""Run memtier_benchmark against Redis.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
Returns:
A list of sample.Sample objects.
"""
vms = benchmark_spec.vms
redis_vm = vms[0]
load_vms = vms[1:]
latency = 0.0
latency_threshold = 1000000.0
threads = 0
results = []
num_servers = GetNumRedisServers(redis_vm)
max_throughput_for_completion_latency_under_1ms = 0.0
while latency < latency_threshold:
threads += max(1, int(threads * .15))
num_loaders = len(load_vms) * num_servers
args = [((redis_vm, load_vms[i % len(load_vms)], threads // num_loaders +
(0 if (i + 1) > threads % num_loaders else 1),
FIRST_PORT + i % num_servers, i), {}) for i in range(num_loaders)]
client_results = [i for i in vm_util.RunThreaded(RunLoad, args)
if i is not None]
logging.info('Redis results by client: %s', client_results)
throughput = sum(r.throughput for r in client_results)
if not throughput:
raise errors.Benchmarks.RunError(
'Zero throughput for {} threads: {}'.format(threads, client_results))
# Average latency across clients
    latency = (sum(client_throughput * client_latency
                   for client_throughput, client_latency in client_results) /
               throughput)
if latency < 1.0:
max_throughput_for_completion_latency_under_1ms = max(
max_throughput_for_completion_latency_under_1ms,
throughput)
results.append(sample.Sample('throughput', throughput, 'req/s',
{'latency': latency, 'threads': threads}))
logging.info('Threads : %d (%f, %f) < %f', threads, throughput, latency,
latency_threshold)
if threads == 1:
latency_threshold = latency * 20
results.append(sample.Sample(
'max_throughput_for_completion_latency_under_1ms',
max_throughput_for_completion_latency_under_1ms,
'req/s'))
return results
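# Thread ramp illustration: threads += max(1, int(threads * .15)) grows the
# count 1, 2, ..., 14, then 16, 18, 20, 23, ... until the latency-weighted
# average crosses latency_threshold (set to 20x the single-thread latency).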
def Cleanup(benchmark_spec):
"""Remove Redis and YCSB.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
pass
|
import pytest
from nikola import Nikola
def test_mixedargs(site):
test_template = """
arg1: {{ _args[0] }}
arg2: {{ _args[1] }}
kwarg1: {{ kwarg1 }}
kwarg2: {{ kwarg2 }}
"""
site.shortcode_registry["test1"] = site._make_renderfunc(test_template)
site.shortcode_registry["test2"] = site._make_renderfunc(
"Something completely different"
)
res = site.apply_shortcodes("{{% test1 kwarg1=spamm arg1 kwarg2=foo,bar arg2 %}}")[
0
]
assert res.strip() == """
arg1: arg1
arg2: arg2
kwarg1: spamm
kwarg2: foo,bar""".strip()
@pytest.mark.parametrize(
"template, data, expected_result",
[
# one argument
("arg={{ _args[0] }}", "{{% test1 onearg %}}", "arg=onearg"),
("arg={{ _args[0] }}", '{{% test1 "one two" %}}', "arg=one two"),
# keyword arguments
("foo={{ foo }}", "{{% test1 foo=bar %}}", "foo=bar"),
("foo={{ foo }}", '{{% test1 foo="bar baz" %}}', "foo=bar baz"),
("foo={{ foo }}", '{{% test1 foo="bar baz" spamm=ham %}}', "foo=bar baz"),
# data
(
"data={{ data }}",
"{{% test1 %}}spamm spamm{{% /test1 %}}",
"data=spamm spamm",
),
("data={{ data }}", "{{% test1 spamm %}}", "data="),
("data={{ data }}", "{{% test1 data=dummy %}}", "data="),
],
)
def test_applying_shortcode(site, template, data, expected_result):
site.shortcode_registry["test1"] = site._make_renderfunc(template)
assert site.apply_shortcodes(data)[0] == expected_result
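
# The fixture builds a stripped-down Nikola site whose template system is
# resolved lazily (see ShortcodeFakeSite below), so only the jinja template
# plugin is loaded, and only when a shortcode actually renders.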
@pytest.fixture(scope="module")
def site():
s = ShortcodeFakeSite()
s.init_plugins()
s._template_system = None
return s
class ShortcodeFakeSite(Nikola):
def _get_template_system(self):
if self._template_system is None:
# Load template plugin
self._template_system = self.plugin_manager.getPluginByName(
"jinja", "TemplateSystem"
).plugin_object
self._template_system.set_directories(".", "cache")
self._template_system.set_site(self)
return self._template_system
template_system = property(_get_template_system)
|
import asyncio
from datetime import timedelta
import logging
from pydexcom import AccountError, Dexcom, SessionError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_PASSWORD, CONF_UNIT_OF_MEASUREMENT, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import (
CONF_SERVER,
COORDINATOR,
DOMAIN,
MG_DL,
PLATFORMS,
SERVER_OUS,
UNDO_UPDATE_LISTENER,
)
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(seconds=180)
async def async_setup(hass: HomeAssistant, config: dict):
"""Set up configured Dexcom."""
hass.data[DOMAIN] = {}
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Set up Dexcom from a config entry."""
try:
dexcom = await hass.async_add_executor_job(
Dexcom,
entry.data[CONF_USERNAME],
entry.data[CONF_PASSWORD],
entry.data[CONF_SERVER] == SERVER_OUS,
)
except AccountError:
return False
except SessionError as error:
raise ConfigEntryNotReady from error
if not entry.options:
hass.config_entries.async_update_entry(
entry, options={CONF_UNIT_OF_MEASUREMENT: MG_DL}
)
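    # A single coordinator polls the Dexcom Share API every SCAN_INTERVAL
    # and fans the latest glucose reading out to the platform entities.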
async def async_update_data():
try:
return await hass.async_add_executor_job(dexcom.get_current_glucose_reading)
except SessionError as error:
raise UpdateFailed(error) from error
hass.data[DOMAIN][entry.entry_id] = {
COORDINATOR: DataUpdateCoordinator(
hass,
_LOGGER,
name=DOMAIN,
update_method=async_update_data,
update_interval=SCAN_INTERVAL,
),
UNDO_UPDATE_LISTENER: entry.add_update_listener(update_listener),
}
await hass.data[DOMAIN][entry.entry_id][COORDINATOR].async_refresh()
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
hass.data[DOMAIN][entry.entry_id][UNDO_UPDATE_LISTENER]()
if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
async def update_listener(hass, entry):
"""Handle options update."""
await hass.config_entries.async_reload(entry.entry_id)
|
from unittest import TestCase
from httpobs.scanner.grader import get_score_description, get_score_modifier
class TestGrader(TestCase):
def test_get_score_description(self):
        self.assertEqual('Preloaded via the HTTP Public Key Pinning (HPKP) preloading process',
                         get_score_description('hpkp-preloaded'))

    def test_get_score_modifier(self):
        self.assertEqual(0, get_score_modifier('hpkp-preloaded'))
|
import logging
from homeassistant.components.switch import SwitchEntity
from . import XiaomiDevice
from .const import DOMAIN, GATEWAYS_KEY
_LOGGER = logging.getLogger(__name__)
# Load power in watts (W)
ATTR_LOAD_POWER = "load_power"
# Total (lifetime) energy consumed by the attached load
ATTR_POWER_CONSUMED = "power_consumed"
ATTR_IN_USE = "in_use"
LOAD_POWER = "load_power"
POWER_CONSUMED = "power_consumed"
ENERGY_CONSUMED = "energy_consumed"
IN_USE = "inuse"
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Perform the setup for Xiaomi devices."""
entities = []
gateway = hass.data[DOMAIN][GATEWAYS_KEY][config_entry.entry_id]
for device in gateway.devices["switch"]:
model = device["model"]
if model == "plug":
if "proto" not in device or int(device["proto"][0:1]) == 1:
data_key = "status"
else:
data_key = "channel_0"
entities.append(
XiaomiGenericSwitch(
device, "Plug", data_key, True, gateway, config_entry
)
)
elif model in [
"ctrl_neutral1",
"ctrl_neutral1.aq1",
"switch_b1lacn02",
"switch.b1lacn02",
]:
entities.append(
XiaomiGenericSwitch(
device, "Wall Switch", "channel_0", False, gateway, config_entry
)
)
elif model in [
"ctrl_ln1",
"ctrl_ln1.aq1",
"switch_b1nacn02",
"switch.b1nacn02",
]:
entities.append(
XiaomiGenericSwitch(
device, "Wall Switch LN", "channel_0", False, gateway, config_entry
)
)
elif model in [
"ctrl_neutral2",
"ctrl_neutral2.aq1",
"switch_b2lacn02",
"switch.b2lacn02",
]:
entities.append(
XiaomiGenericSwitch(
device,
"Wall Switch Left",
"channel_0",
False,
gateway,
config_entry,
)
)
entities.append(
XiaomiGenericSwitch(
device,
"Wall Switch Right",
"channel_1",
False,
gateway,
config_entry,
)
)
elif model in [
"ctrl_ln2",
"ctrl_ln2.aq1",
"switch_b2nacn02",
"switch.b2nacn02",
]:
entities.append(
XiaomiGenericSwitch(
device,
"Wall Switch LN Left",
"channel_0",
False,
gateway,
config_entry,
)
)
entities.append(
XiaomiGenericSwitch(
device,
"Wall Switch LN Right",
"channel_1",
False,
gateway,
config_entry,
)
)
elif model in ["86plug", "ctrl_86plug", "ctrl_86plug.aq1"]:
if "proto" not in device or int(device["proto"][0:1]) == 1:
data_key = "status"
else:
data_key = "channel_0"
entities.append(
XiaomiGenericSwitch(
device, "Wall Plug", data_key, True, gateway, config_entry
)
)
async_add_entities(entities)
class XiaomiGenericSwitch(XiaomiDevice, SwitchEntity):
"""Representation of a XiaomiPlug."""
def __init__(
self,
device,
name,
data_key,
supports_power_consumption,
xiaomi_hub,
config_entry,
):
"""Initialize the XiaomiPlug."""
self._data_key = data_key
self._in_use = None
self._load_power = None
self._power_consumed = None
self._supports_power_consumption = supports_power_consumption
super().__init__(device, name, xiaomi_hub, config_entry)
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
if self._data_key == "status":
return "mdi:power-plug"
return "mdi:power-socket"
@property
def is_on(self):
"""Return true if it is on."""
return self._state
@property
def device_state_attributes(self):
"""Return the state attributes."""
if self._supports_power_consumption:
attrs = {
ATTR_IN_USE: self._in_use,
ATTR_LOAD_POWER: self._load_power,
ATTR_POWER_CONSUMED: self._power_consumed,
}
else:
attrs = {}
attrs.update(super().device_state_attributes)
return attrs
@property
def should_poll(self):
"""Return the polling state. Polling needed for Zigbee plug only."""
return self._supports_power_consumption
def turn_on(self, **kwargs):
"""Turn the switch on."""
if self._write_to_hub(self._sid, **{self._data_key: "on"}):
self._state = True
self.schedule_update_ha_state()
def turn_off(self, **kwargs):
"""Turn the switch off."""
if self._write_to_hub(self._sid, **{self._data_key: "off"}):
self._state = False
self.schedule_update_ha_state()
def parse_data(self, data, raw_data):
"""Parse data sent by gateway."""
if IN_USE in data:
self._in_use = int(data[IN_USE])
if not self._in_use:
self._load_power = 0
for key in [POWER_CONSUMED, ENERGY_CONSUMED]:
if key in data:
self._power_consumed = round(float(data[key]), 2)
break
if LOAD_POWER in data:
self._load_power = round(float(data[LOAD_POWER]), 2)
value = data.get(self._data_key)
if value not in ["on", "off"]:
return False
state = value == "on"
if self._state == state:
return False
self._state = state
return True
def update(self):
"""Get data from hub."""
_LOGGER.debug("Update data from hub: %s", self._name)
self._get_from_hub(self._sid)
|