import os
import tempfile
import unittest2
import time
from cStringIO import StringIO
from psdash.log import Logs, LogReader, LogError, ReverseFileSearcher
class TestLogs(unittest2.TestCase):
NEEDLE = 'foobar\n'
POSITIONS = [10000, 8000, 6000, 4000, 2000, 500]
def setUp(self):
fd, filename = tempfile.mkstemp()
self.filename = filename
self.fp = os.fdopen(fd, 'w+')
for pos in self.POSITIONS:
self.fp.seek(pos)
self.fp.write(self.NEEDLE)
self.fp.close()
self.logs = Logs()
self.logs.add_available(filename)
def tearDown(self):
os.remove(self.filename)
self.logs.clear_available()
def test_searching(self):
log = self.logs.get(self.filename)
positions = [log.search(self.NEEDLE)[0] for _ in xrange(len(self.POSITIONS))]
self.assertEqual(self.POSITIONS, positions)
def test_searching_other_buffer_size(self):
log = LogReader(self.filename, LogReader.BUFFER_SIZE / 2)
positions = [log.search(self.NEEDLE)[0] for _ in xrange(len(self.POSITIONS))]
self.assertEqual(self.POSITIONS, positions)
def test_searching_no_result(self):
log = self.logs.get(self.filename)
pos = log.search('wontexist')[0]
self.assertEqual(pos, -1)
def test_read_tail(self):
log = self.logs.get(self.filename)
log.set_tail_position()
buf = log.read()
self.assertEqual(len(buf), LogReader.BUFFER_SIZE)
def test_add_non_existing(self):
self.assertRaises(LogError, self.logs.add_available, '/var/log/w0ntre4lly3xist.log')
def test_repr_works(self):
log = self.logs.get(self.filename)
self.assertIn('<LogReader', repr(log))
def test_add_pattern(self):
ts = time.time()
suffix = '%d.log' % ts
tempfile.mkstemp(suffix=suffix)
tempfile.mkstemp(suffix=suffix)
num_added = self.logs.add_patterns(['/tmp/*%s' % suffix])
self.assertEqual(num_added, 2)
@unittest2.skipIf(os.environ.get('USER') == 'root', "We'll have access to this if we're root")
def test_add_pattern_no_access(self):
num_added = self.logs.add_patterns(['/proc/vmallocinfo'])
self.assertEqual(num_added, 0)
def test_add_dir(self):
num_added = self.logs.add_patterns(['/tmp'])
self.assertEqual(num_added, 0)
class TestFileSearcher(unittest2.TestCase):
def _create_temp_file(self, buf):
_, filename = tempfile.mkstemp("log")
with open(filename, "w") as f:
f.write(buf)
f.flush()
return filename
def setUp(self):
self.needle = "THENEEDLE"
buf = StringIO()
buf.write("TESTING SOME SEARCHING!\n" * 10000)
self.positions = [1000, 2000, 2500, 3700, 7034, 8343]
for pos in self.positions:
buf.seek(pos)
buf.write(self.needle)
self.filename = self._create_temp_file(buf.getvalue())
self.searcher = ReverseFileSearcher(self.filename, self.needle)
def test_construction(self):
self.assertEqual(self.searcher._filename, self.filename)
self.assertEqual(self.searcher._needle, self.needle)
def test_find_all(self):
positions = self.searcher.find_all()
self.assertEqual(positions, tuple(reversed(self.positions)))
def test_find_one(self):
pos = self.searcher.find()
self.assertEqual(pos, self.positions[-1])
def test_unicode(self):
encoding = "utf-8"
needle = u"Åter till testerna, så låt oss nu testa lite".encode(encoding)
buf = StringIO()
data = (u"Det är nog bra att ha några konstiga bokstäver här med.\n" * 10000).encode(encoding)
buf.write(data)
positions = [1000, 2000, 2500, 3700, 7034, 8343]
for pos in positions:
buf.seek(pos)
buf.write(needle)
filename = self._create_temp_file(buf.getvalue())
searcher = ReverseFileSearcher(filename, needle)
self.assertEqual(searcher.find_all(), tuple(reversed(positions)))
def test_needle_split_by_chunk(self):
buf = StringIO()
buf.write("TESTING SOME SEARCHING!\n" * 10000)
buf.seek(-(ReverseFileSearcher.DEFAULT_CHUNK_SIZE + 5), os.SEEK_END)
buf.write(self.needle)
filename = self._create_temp_file(buf.getvalue())
searcher = ReverseFileSearcher(filename, self.needle)
found_positions = searcher.find_all()
self.assertEqual(len(found_positions), 1)
def test_needle_on_chunk_border(self):
buf = StringIO()
buf.write("TESTING SOME SEARCHING!\n" * 10000)
buf.seek(-ReverseFileSearcher.DEFAULT_CHUNK_SIZE, os.SEEK_END)
buf.write(self.needle)
filename = self._create_temp_file(buf.getvalue())
searcher = ReverseFileSearcher(filename, self.needle)
found_positions = searcher.find_all()
self.assertEqual(len(found_positions), 1)
def test_needle_on_chunk_border_does_not_hide_occurrence(self):
buf = StringIO()
buf.write("TESTING SOME SEARCHING!\n" * 10000)
buf.seek(-(ReverseFileSearcher.DEFAULT_CHUNK_SIZE + 100), os.SEEK_END)
buf.write(self.needle)
buf.seek(-ReverseFileSearcher.DEFAULT_CHUNK_SIZE, os.SEEK_END)
buf.write(self.needle)
filename = self._create_temp_file(buf.getvalue())
searcher = ReverseFileSearcher(filename, self.needle)
found_positions = searcher.find_all()
self.assertEqual(len(found_positions), 2)
def test_lots_of_needles_in_same_chunk(self):
buf = StringIO()
buf.write("TESTING SOME SEARCHING!\n" * 10000)
buf.seek(-(ReverseFileSearcher.DEFAULT_CHUNK_SIZE + 100), os.SEEK_END)
for _ in xrange(20):
buf.write(self.needle)
filename = self._create_temp_file(buf.getvalue())
searcher = ReverseFileSearcher(filename, self.needle)
found_positions = searcher.find_all()
self.assertEqual(len(found_positions), 20)
def test_single_chunk(self):
buf = StringIO()
buf.write("TESTING SOME SEARCHING!\n" * 100)
for _ in xrange(20):
buf.write(self.needle)
filename = self._create_temp_file(buf.getvalue())
searcher = ReverseFileSearcher(filename, self.needle)
found_positions = searcher.find_all()
self.assertEqual(len(found_positions), 20)
def test_single_char(self):
buf = StringIO()
buf.write("TESTING SOME SEARCHING!\n" * 10000)
filename = self._create_temp_file(buf.getvalue())
searcher = ReverseFileSearcher(filename, "C")
found_positions = searcher.find_all()
self.assertEqual(len(found_positions), 10000)
def test_empty_needle(self):
buf = StringIO()
buf.write("TESTING SOME SEARCHING!\n" * 10000)
filename = self._create_temp_file(buf.getvalue())
self.assertRaises(ValueError, ReverseFileSearcher, filename, "")
def test_needle_larger_than_chunk_size(self):
buf = StringIO()
buf.write("TESTING SOME SEARCHING!\n" * 10000)
needle = "NEEDLE" * ReverseFileSearcher.DEFAULT_CHUNK_SIZE
filename = self._create_temp_file(buf.getvalue())
self.assertRaises(ValueError, ReverseFileSearcher, filename, needle)
if __name__ == '__main__':
unittest2.main()
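# --- Illustrative sketch (not part of psdash) ---------------------------------
# The chunk-border tests above rely on the reverse searcher keeping an overlap
# of len(needle) - 1 bytes between consecutive chunks, so a needle straddling a
# chunk boundary is found exactly once. A minimal sketch of that idea, assuming
# an in-memory haystack rather than psdash's file-based reader:
def _reverse_chunked_find_all(data, needle, chunk_size=8192):
    if not needle:
        raise ValueError("needle must not be empty")
    overlap = len(needle) - 1
    positions = []
    end = len(data)
    while end > 0:
        start = max(0, end - chunk_size)
        # Extend the chunk past its upper bound by the overlap so occurrences
        # straddling the boundary at `end` are not missed.
        chunk = data[start:min(len(data), end + overlap)]
        pos = chunk.rfind(needle)
        while pos != -1:
            if start + pos < end:  # skip hits already counted by a later chunk
                positions.append(start + pos)
            pos = chunk.rfind(needle, 0, pos + len(needle) - 1)
        end = start
    return tuple(positions)  # descending offsets, like ReverseFileSearcher.find_all()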
|
import logging
from api.soma_api import SomaApi
from requests import RequestException
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, CONF_PORT
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.typing import HomeAssistantType
from .const import API, DOMAIN, HOST, PORT
DEVICES = "devices"
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{vol.Required(CONF_HOST): cv.string, vol.Required(CONF_PORT): cv.string}
)
},
extra=vol.ALLOW_EXTRA,
)
SOMA_COMPONENTS = ["cover"]
async def async_setup(hass, config):
"""Set up the Soma component."""
if DOMAIN not in config:
return True
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
data=config[DOMAIN],
context={"source": config_entries.SOURCE_IMPORT},
)
)
return True
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry):
"""Set up Soma from a config entry."""
hass.data[DOMAIN] = {}
hass.data[DOMAIN][API] = SomaApi(entry.data[HOST], entry.data[PORT])
devices = await hass.async_add_executor_job(hass.data[DOMAIN][API].list_devices)
hass.data[DOMAIN][DEVICES] = devices["shades"]
for component in SOMA_COMPONENTS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
return True
async def async_unload_entry(hass: HomeAssistantType, entry: ConfigEntry):
"""Unload a config entry."""
return True
class SomaEntity(Entity):
"""Representation of a generic Soma device."""
def __init__(self, device, api):
"""Initialize the Soma device."""
self.device = device
self.api = api
self.current_position = 50
self.is_available = True
@property
def available(self):
"""Return true if the last API commands returned successfully."""
return self.is_available
@property
def unique_id(self):
"""Return the unique id base on the id returned by pysoma API."""
return self.device["mac"]
@property
def name(self):
"""Return the name of the device."""
return self.device["name"]
@property
def device_info(self):
"""Return device specific attributes.
Implemented by platform classes.
"""
return {
"identifiers": {(DOMAIN, self.unique_id)},
"name": self.name,
"manufacturer": "Wazombi Labs",
}
async def async_update(self):
"""Update the device with the latest data."""
try:
response = await self.hass.async_add_executor_job(
self.api.get_shade_state, self.device["mac"]
)
except RequestException:
_LOGGER.error("Connection to SOMA Connect failed")
self.is_available = False
return
if response["result"] != "success":
_LOGGER.error(
"Unable to reach device %s (%s)", self.device["name"], response["msg"]
)
self.is_available = False
return
self.current_position = 100 - response["position"]
self.is_available = True
|
from gi.repository import Gdk, GObject, Gtk
@Gtk.Template(resource_path='/org/gnome/meld/ui/notebook-label.ui')
class NotebookLabel(Gtk.EventBox):
__gtype_name__ = 'NotebookLabel'
label = Gtk.Template.Child()
label_text = GObject.Property(
type=str,
nick='Text of this notebook label',
default='',
)
page = GObject.Property(
type=object,
nick='Notebook page for which this is the label',
default=None,
)
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.bind_property(
'label-text', self.label, 'label',
GObject.BindingFlags.DEFAULT | GObject.BindingFlags.SYNC_CREATE,
)
self.bind_property(
'label-text', self, 'tooltip-text',
GObject.BindingFlags.DEFAULT | GObject.BindingFlags.SYNC_CREATE,
)
@Gtk.Template.Callback()
def on_label_button_press_event(self, widget, event):
# Middle-click on the tab closes the tab.
if event.type == Gdk.EventType.BUTTON_PRESS and event.button == 2:
self.page.on_delete_event()
@Gtk.Template.Callback()
def on_close_button_clicked(self, widget):
self.page.on_delete_event()
|
from django.utils.deprecation import MiddlewareMixin
from django.utils.functional import SimpleLazyObject
from django.utils import timezone
from shop.models.customer import CustomerModel
def get_customer(request, force=False):
if force or not hasattr(request, '_cached_customer'):
request._cached_customer = CustomerModel.objects.get_from_request(request)
return request._cached_customer
class CustomerMiddleware(MiddlewareMixin):
"""
Similar to Django's AuthenticationMiddleware, which adds the user object to the request,
this middleware adds the customer object to the request.
"""
def process_request(self, request):
assert hasattr(request, 'session'), (
"The django-SHOP middleware requires session middleware to be installed. "
"Edit your MIDDLEWARE_CLASSES setting to insert 'django.contrib.sessions.middleware.SessionMiddleware'.")
assert hasattr(request, 'user'), (
"The django-SHOP middleware requires an authentication middleware to be installed. "
"Edit your MIDDLEWARE_CLASSES setting to insert 'django.contrib.auth.middleware.AuthenticationMiddleware'.")
request.customer = SimpleLazyObject(lambda: get_customer(request))
def process_response(self, request, response):
content_type = response.get('content-type')
try:
if content_type.startswith('text/html'):
# only update last_access when rendering an HTML page (not Ajax/JSON responses)
request.customer.last_access = timezone.now()
request.customer.save(update_fields=['last_access'])
except (AttributeError, ValueError):
pass
return response
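# Note on the lazy lookup above (illustrative summary, not part of django-SHOP's
# code): SimpleLazyObject defers calling get_customer() until request.customer
# is first accessed, so requests that never touch the customer avoid the lookup
# entirely; process_response then only persists last_access for HTML responses.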
|
from collections import deque
import os
import pkg_resources
from perfkitbenchmarker import errors
# Path of the root of the current git branch.
_BRANCH_ROOT_DIR = os.path.dirname(os.path.dirname(__file__))
def _CheckRequirements(requirements_file_path):
"""Checks that all package requirements specified in a file are met.
Args:
requirements_file_path: string. Path to a pip requirements file.
"""
with open(requirements_file_path, 'r') as fp:
requirements_to_check = [(requirements_file_path, deque(fp.readlines()))]
try:
while requirements_to_check:
file_path, lines = requirements_to_check.pop()
while lines:
line = lines.popleft().strip()
if line.startswith('-r'):
requirements_to_check.append((file_path, lines))
file_path = os.path.join(os.path.dirname(file_path), line[2:])
with open(file_path, 'r') as fp:
lines = deque(fp.readlines())
elif line:
pkg_resources.require(line)
except (pkg_resources.DistributionNotFound,
pkg_resources.VersionConflict) as e:
# In newer versions of setuptools, these exception classes have a report
# method that provides a readable description of the error.
report = getattr(e, 'report', None)
err_msg = report() if report else str(e)
raise errors.Setup.PythonPackageRequirementUnfulfilled(
'A Python package requirement was not met while checking "{path}": '
'{msg}{linesep}To install required packages, execute the following '
'command:{linesep}pip install -r "{path}"{linesep}To bypass package '
'requirement checks, run PerfKit Benchmarker with the '
'--ignore_package_requirements flag.'.format(
linesep=os.linesep, msg=err_msg, path=requirements_file_path))
def CheckBasicRequirements():
"""Checks that all basic package requirements are met.
The basic requirements include packages used by modules that are imported
regardless of the specified cloud providers. The list of required packages
and versions is found in the requirements.txt file in the git branch's root
directory. If such a file does not exist, then the requirements check is
skipped.
"""
requirements_file_path = os.path.join(_BRANCH_ROOT_DIR,
'requirements.txt')
if os.path.isfile(requirements_file_path):
_CheckRequirements(requirements_file_path)
def CheckProviderRequirements(provider):
"""Checks that all provider-specific requirements are met.
The provider-specific requirements include packages used by modules that are
imported when using a particular cloud provider. The list of required packages
is found in the requirements.txt file under the provider's directory
(perfkitbenchmarker/providers/<provider>/). If such a file does not exist, then
no additional requirements are necessary.
Args:
provider: string. Lowercase name of the cloud provider (e.g. 'gcp').
"""
requirements_file_path = os.path.join(
_BRANCH_ROOT_DIR, 'perfkitbenchmarker', 'providers', provider,
'requirements.txt')
if os.path.isfile(requirements_file_path):
_CheckRequirements(requirements_file_path)
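# Illustrative example (hypothetical file names and packages): _CheckRequirements
# above follows "-r<file>" include lines (line[2:] assumes no space after "-r")
# and hands every other non-empty line to pkg_resources.require(), so a pair of
# files like the following would be walked depth-first:
#
#   # requirements.txt
#   -rrequirements-testing.txt
#   absl-py>=0.7
#
#   # requirements-testing.txt
#   mock>=3.0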
|
import contextlib
from radicale import pathutils, storage
from radicale.log import logger
class StorageVerifyMixin:
def verify(self):
item_errors = collection_errors = 0
@contextlib.contextmanager
def exception_cm(sane_path, href=None):
nonlocal item_errors, collection_errors
try:
yield
except Exception as e:
if href:
item_errors += 1
name = "item %r in %r" % (href, sane_path)
else:
collection_errors += 1
name = "collection %r" % sane_path
logger.error("Invalid %s: %s", name, e, exc_info=True)
remaining_sane_paths = [""]
while remaining_sane_paths:
sane_path = remaining_sane_paths.pop(0)
path = pathutils.unstrip_path(sane_path, True)
logger.debug("Verifying collection %r", sane_path)
with exception_cm(sane_path):
saved_item_errors = item_errors
collection = None
uids = set()
has_child_collections = False
for item in self.discover(path, "1", exception_cm):
if not collection:
collection = item
collection.get_meta()
continue
if isinstance(item, storage.BaseCollection):
has_child_collections = True
remaining_sane_paths.append(item.path)
elif item.uid in uids:
logger.error("Invalid item %r in %r: UID conflict %r",
item.href, sane_path, item.uid)
else:
uids.add(item.uid)
logger.debug("Verified item %r in %r",
item.href, sane_path)
if item_errors == saved_item_errors:
collection.sync()
if has_child_collections and collection.get_meta("tag"):
logger.error("Invalid collection %r: %r must not have "
"child collections", sane_path,
collection.get_meta("tag"))
return item_errors == 0 and collection_errors == 0
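# Note on the traversal above (illustrative summary): verify() walks the
# collection tree breadth-first from the root path, counts item and collection
# errors via exception_cm, flags duplicate UIDs within a collection, and only
# calls collection.sync() when no new item errors were recorded for that
# collection; a collection that carries a "tag" meta (a calendar or address
# book) must not contain child collections.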
|
from __future__ import division
from math import cos, sin
from pygal import Line
from pygal.test.utils import texts
def test_simple_line():
"""Simple line test"""
line = Line()
rng = range(-30, 31, 5)
line.add('test1', [cos(x / 10) for x in rng])
line.add('test2', [sin(x / 10) for x in rng])
line.add('test3', [cos(x / 10) - sin(x / 10) for x in rng])
line.x_labels = map(str, rng)
line.title = "cos sin and cos - sin"
q = line.render_pyquery()
assert len(q(".axis.x")) == 1
assert len(q(".axis.y")) == 1
assert len(q(".plot .series path")) == 3
assert len(q(".legend")) == 3
assert len(q(".x.axis .guides")) == 13
assert len(q(".y.axis .guides")) == 13
assert len(q(".dots")) == 3 * 13
assert q(".axis.x text").map(texts) == [
'-30', '-25', '-20', '-15', '-10', '-5', '0', '5', '10', '15', '20',
'25', '30'
]
assert q(".axis.y text").map(texts) == [
'-1.2', '-1', '-0.8', '-0.6', '-0.4', '-0.2', '0', '0.2', '0.4', '0.6',
'0.8', '1', '1.2'
]
assert q(".title").text() == 'cos sin and cos - sin'
assert q(".legend text").map(texts) == ['test1', 'test2', 'test3']
def test_line():
"""Another simple line test"""
line = Line()
rng = [8, 12, 23, 73, 39, 57]
line.add('Single serie', rng)
line.title = "One serie"
q = line.render_pyquery()
assert len(q(".axis.x")) == 0
assert len(q(".axis.y")) == 1
assert len(q(".plot .series path")) == 1
assert len(q(".x.axis .guides")) == 0
assert len(q(".y.axis .guides")) == 7
def test_one_dot():
"""Line test with an unique value"""
line = Line()
line.add('one dot', [12])
line.x_labels = ['one']
q = line.render_pyquery()
assert len(q(".axis.x")) == 1
assert len(q(".axis.y")) == 1
assert len(q(".y.axis .guides")) == 1
def test_no_dot():
"""Line test with an empty serie"""
line = Line()
line.add('no dot', [])
q = line.render_pyquery()
assert q(".text-overlay text").text() == 'No data'
def test_no_dot_at_all():
"""Line test with no value"""
q = Line().render_pyquery()
assert q(".text-overlay text").text() == 'No data'
def test_not_equal_x_labels():
"""Test x_labels"""
line = Line()
line.add('test1', range(100))
line.truncate_label = -1
line.x_labels = map(str, range(11))
q = line.render_pyquery()
assert len(q(".dots")) == 100
assert len(q(".axis.x")) == 1
assert q(".axis.x text").map(texts) == [
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '10'
]
def test_int_x_labels():
"""Test x_labels"""
line = Line()
line.add('test1', range(100))
line.truncate_label = -1
line.x_labels = list(range(11))
q = line.render_pyquery()
assert len(q(".dots")) == 100
assert len(q(".axis.x")) == 1
assert q(".axis.x text").map(texts) == [
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '10'
]
def test_only_major_dots_every():
"""Test major dots"""
line = Line(show_only_major_dots=True, x_labels_major_every=3)
line.add('test', range(12))
line.x_labels = map(str, range(12))
q = line.render_pyquery()
assert len(q(".dots")) == 4
def test_only_major_dots_no_labels():
"""Test major dots with no labels"""
line = Line(show_only_major_dots=True)
line.add('test', range(12))
q = line.render_pyquery()
assert len(q(".dots")) == 12
def test_only_major_dots_count():
"""Test major dots with a major label count"""
line = Line(show_only_major_dots=True)
line.add('test', range(12))
line.x_labels = map(str, range(12))
line.x_labels_major_count = 2
q = line.render_pyquery()
assert len(q(".dots")) == 2
def test_only_major_dots():
"""Test major dots with specified major labels"""
line = Line(show_only_major_dots=True, )
line.add('test', range(12))
line.x_labels = map(str, range(12))
line.x_labels_major = ['1', '5', '11']
q = line.render_pyquery()
assert len(q(".dots")) == 3
def test_line_secondary():
"""Test line with a secondary serie"""
line = Line()
rng = [8, 12, 23, 73, 39, 57]
line.add('First serie', rng)
line.add('Secondary serie', map(lambda x: x * 2, rng), secondary=True)
line.title = "One serie"
q = line.render_pyquery()
assert len(q(".axis.x")) == 0
assert len(q(".axis.y")) == 1
assert len(q(".plot .series path")) == 2
assert len(q(".x.axis .guides")) == 0
assert len(q(".y.axis .guides")) == 7
|
import argparse
import datetime
import logging
import dateutil.parser
from dateutil import tz
from pytimeparse import timeparse
from paasta_tools import marathon_tools
log = logging.getLogger(__name__)
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument(
"-a",
"--age",
dest="age",
type=timedelta_type,
default="1h",
help="Max age of a Marathon deployment before it is stopped."
"Any pytimeparse unit is supported",
)
parser.add_argument(
"-n",
"--dry-run",
action="store_true",
help="Don't actually stop any Marathon deployments",
)
parser.add_argument("-v", "--verbose", action="store_true")
options = parser.parse_args()
return options
def timedelta_type(value):
"""Return the :class:`datetime.datetime.DateTime` for a time in the past.
:param value: a string containing a time format supported by :mod:`pytimeparse`
"""
if value is None:
return None
return datetime_seconds_ago(timeparse.timeparse(value))
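# Illustrative note: pytimeparse turns a human-readable duration into seconds,
# e.g. timeparse.timeparse("1h") == 3600, so timedelta_type("1h") yields the
# UTC datetime one hour before now().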
def datetime_seconds_ago(seconds):
return now() - datetime.timedelta(seconds=seconds)
def now():
return datetime.datetime.now(tz.tzutc())
def delete_deployment_if_too_old(client, deployment, max_date, dry_run):
started_at = dateutil.parser.parse(deployment.version)
age = now() - started_at
if started_at < max_date:
if dry_run is True:
log.warning(
f"Would delete {deployment.id} for {deployment.affected_apps[0]} as it is {age} old"
)
else:
log.warning(
f"Deleting {deployment.id} for {deployment.affected_apps[0]} as it is {age} old"
)
client.delete_deployment(deployment_id=deployment.id, force=False)
else:
if dry_run is True:
log.warning(
f"NOT deleting {deployment.id} for {deployment.affected_apps[0]} as it is {age} old"
)
def main():
args = parse_args()
if args.verbose:
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.WARNING)
clients = marathon_tools.get_list_of_marathon_clients()
for client in clients:
for deployment in client.list_deployments():
delete_deployment_if_too_old(
client=client,
deployment=deployment,
max_date=args.age,
dry_run=args.dry_run,
)
if __name__ == "__main__":
main()
|
import asyncio
import contextlib
import json
import logging
from copy import copy
from pathlib import Path
from typing import TYPE_CHECKING, Mapping, Optional, Union
import aiohttp
from lavalink.rest_api import LoadResult
from redbot.core import Config
from redbot.core.bot import Red
from redbot.core.commands import Cog
from redbot.core.i18n import Translator
from ..audio_dataclasses import Query
from ..audio_logging import IS_DEBUG, debug_exc_log
if TYPE_CHECKING:
from .. import Audio
_API_URL = "https://api.redbot.app/"
_ = Translator("Audio", Path(__file__))
log = logging.getLogger("red.cogs.Audio.api.GlobalDB")
class GlobalCacheWrapper:
def __init__(
self, bot: Red, config: Config, session: aiohttp.ClientSession, cog: Union["Audio", Cog]
):
# Placeholder for the Global Cache PR
self.bot = bot
self.config = config
self.session = session
self.api_key = None
self._handshake_token = ""
self.has_api_key = None
self._token: Mapping[str, str] = {}
self.cog = cog
async def update_token(self, new_token: Mapping[str, str]):
self._token = new_token
await self.get_perms()
async def _get_api_key(
self,
) -> Optional[str]:
if not self._token:
self._token = await self.bot.get_shared_api_tokens("audiodb")
self.api_key = self._token.get("api_key", None)
self.has_api_key = self.cog.global_api_user.get("can_post")
id_list = list(self.bot.owner_ids)
self._handshake_token = "||".join(map(str, id_list))
return self.api_key
async def get_call(self, query: Optional[Query] = None) -> dict:
api_url = f"{_API_URL}api/v2/queries"
if not self.cog.global_api_user.get("can_read"):
return {}
try:
query = Query.process_input(query, self.cog.local_folder_current_path)
if any([not query or not query.valid or query.is_spotify or query.is_local]):
return {}
await self._get_api_key()
if self.api_key is None:
return {}
search_response = "error"
query = query.lavalink_query
with contextlib.suppress(aiohttp.ContentTypeError, asyncio.TimeoutError):
async with self.session.get(
api_url,
timeout=aiohttp.ClientTimeout(total=await self.config.global_db_get_timeout()),
headers={"Authorization": self.api_key, "X-Token": self._handshake_token},
params={"query": query},
) as r:
search_response = await r.json(loads=json.loads)
if IS_DEBUG and "x-process-time" in r.headers:
log.debug(
f"GET || Ping {r.headers.get('x-process-time')} || "
f"Status code {r.status} || {query}"
)
if "tracks" not in search_response:
return {}
return search_response
except Exception as err:
debug_exc_log(log, err, f"Failed to Get query: {api_url}/{query}")
return {}
async def get_spotify(self, title: str, author: Optional[str]) -> dict:
if not self.cog.global_api_user.get("can_read"):
return {}
api_url = f"{_API_URL}api/v2/queries/spotify"
try:
search_response = "error"
params = {"title": title, "author": author}
await self._get_api_key()
if self.api_key is None:
return {}
with contextlib.suppress(aiohttp.ContentTypeError, asyncio.TimeoutError):
async with self.session.get(
api_url,
timeout=aiohttp.ClientTimeout(total=await self.config.global_db_get_timeout()),
headers={"Authorization": self.api_key, "X-Token": self._handshake_token},
params=params,
) as r:
search_response = await r.json(loads=json.loads)
if IS_DEBUG and "x-process-time" in r.headers:
log.debug(
f"GET/spotify || Ping {r.headers.get('x-process-time')} || "
f"Status code {r.status} || {title} - {author}"
)
if "tracks" not in search_response:
return {}
return search_response
except Exception as err:
debug_exc_log(log, err, f"Failed to Get query: {api_url}")
return {}
async def post_call(self, llresponse: LoadResult, query: Optional[Query]) -> None:
try:
if not self.cog.global_api_user.get("can_post"):
return
query = Query.process_input(query, self.cog.local_folder_current_path)
if llresponse.has_error or llresponse.load_type.value in ["NO_MATCHES", "LOAD_FAILED"]:
return
if query and query.valid and query.is_youtube:
query = query.lavalink_query
else:
return None
await self._get_api_key()
if self.api_key is None:
return None
api_url = f"{_API_URL}api/v2/queries"
async with self.session.post(
api_url,
json=llresponse._raw,
headers={"Authorization": self.api_key, "X-Token": self._handshake_token},
params={"query": query},
) as r:
await r.read()
if IS_DEBUG and "x-process-time" in r.headers:
log.debug(
f"POST || Ping {r.headers.get('x-process-time')} ||"
f" Status code {r.status} || {query}"
)
except Exception as err:
debug_exc_log(log, err, f"Failed to post query: {query}")
await asyncio.sleep(0)
async def update_global(self, llresponse: LoadResult, query: Optional[Query] = None):
await self.post_call(llresponse=llresponse, query=query)
async def report_invalid(self, id: str) -> None:
if not self.cog.global_api_user.get("can_delete"):
return
api_url = f"{_API_URL}api/v2/queries/es/id"
with contextlib.suppress(Exception):
async with self.session.delete(
api_url,
headers={"Authorization": self.api_key, "X-Token": self._handshake_token},
params={"id": id},
) as r:
await r.read()
async def get_perms(self):
global_api_user = copy(self.cog.global_api_user)
await self._get_api_key()
is_enabled = await self.config.global_db_enabled()
if (not is_enabled) or self.api_key is None:
return global_api_user
with contextlib.suppress(Exception):
async with aiohttp.ClientSession(json_serialize=json.dumps) as session:
async with session.get(
f"{_API_URL}api/v2/users/me",
headers={"Authorization": self.api_key, "X-Token": self._handshake_token},
) as resp:
if resp.status == 200:
search_response = await resp.json(loads=json.loads)
global_api_user["fetched"] = True
global_api_user["can_read"] = search_response.get("can_read", False)
global_api_user["can_post"] = search_response.get("can_post", False)
global_api_user["can_delete"] = search_response.get("can_delete", False)
return global_api_user
|
import functools
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker import disk
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.linux_packages import aerospike_server
from perfkitbenchmarker.linux_packages import ycsb
FLAGS = flags.FLAGS
# TODO(user): unify overrides into --client_machine_type/server_machine_type
flags.DEFINE_string('aerospike_client_machine_type', None,
'Machine type to use for the aerospike client if different '
'from aerospike server machine type.')
flags.DEFINE_string('aerospike_server_machine_type', None,
'Machine type to use for the aerospike server if different '
'from aerospike client machine type.')
BENCHMARK_NAME = 'aerospike_ycsb'
BENCHMARK_CONFIG = """
aerospike_ycsb:
description: >
Run YCSB against an Aerospike
installation. Specify the number of YCSB VMs with
--ycsb_client_vms.
vm_groups:
workers:
vm_spec: *default_single_core
disk_spec: *default_500_gb
vm_count: null
disk_count: 0
clients:
vm_spec: *default_dual_core
"""
def GetConfig(user_config):
config = configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
if FLAGS.aerospike_storage_type == aerospike_server.DISK:
if FLAGS.data_disk_type == disk.LOCAL:
# The maximum number of local disks is not known yet; decide later.
config['vm_groups']['workers']['disk_count'] = (
config['vm_groups']['workers']['disk_count'] or None)
else:
config['vm_groups']['workers']['disk_count'] = (
config['vm_groups']['workers']['disk_count'] or 1)
if FLAGS.aerospike_server_machine_type:
vm_spec = config['vm_groups']['workers']['vm_spec']
for cloud in vm_spec:
vm_spec[cloud]['machine_type'] = FLAGS.aerospike_server_machine_type
if FLAGS.aerospike_client_machine_type:
vm_spec = config['vm_groups']['clients']['vm_spec']
for cloud in vm_spec:
vm_spec[cloud]['machine_type'] = FLAGS.aerospike_client_machine_type
if FLAGS['aerospike_vms'].present:
config['vm_groups']['workers']['vm_count'] = FLAGS.aerospike_vms
if FLAGS['ycsb_client_vms'].present:
config['vm_groups']['clients']['vm_count'] = FLAGS.ycsb_client_vms
return config
def CheckPrerequisites(benchmark_config):
"""Verifies that the required resources are present.
Raises:
perfkitbenchmarker.data.ResourceNotFound: On missing resource.
"""
ycsb.CheckPrerequisites()
def Prepare(benchmark_spec):
"""Prepare the virtual machines to run YCSB against Aerospike.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
loaders = benchmark_spec.vm_groups['clients']
assert loaders, benchmark_spec.vm_groups
# Aerospike cluster
aerospike_vms = benchmark_spec.vm_groups['workers']
assert aerospike_vms, 'No aerospike VMs: {0}'.format(
benchmark_spec.vm_groups)
seed_ips = [vm.internal_ip for vm in aerospike_vms]
aerospike_install_fns = [functools.partial(aerospike_server.ConfigureAndStart,
vm, seed_node_ips=seed_ips)
for vm in aerospike_vms]
ycsb_install_fns = [functools.partial(vm.Install, 'ycsb')
for vm in loaders]
vm_util.RunThreaded(lambda f: f(), aerospike_install_fns + ycsb_install_fns)
benchmark_spec.executor = ycsb.YCSBExecutor(
'aerospike',
**{'as.host': aerospike_vms[0].internal_ip,
'as.namespace': 'test'})
def Run(benchmark_spec):
"""Spawn YCSB and gather the results.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
Returns:
A list of sample.Sample instances.
"""
loaders = benchmark_spec.vm_groups['clients']
aerospike_vms = benchmark_spec.vm_groups['workers']
metadata = {
'ycsb_client_vms':
FLAGS.ycsb_client_vms,
'num_vms':
len(aerospike_vms),
'Storage Type':
FLAGS.aerospike_storage_type,
'memory_size':
int(aerospike_vms[0].total_memory_kb * 0.8),
'transaction_threads_per_queue':
FLAGS.aerospike_transaction_threads_per_queue,
'replication_factor':
FLAGS.aerospike_replication_factor,
}
samples = list(benchmark_spec.executor.LoadAndRun(loaders))
for sample in samples:
sample.metadata.update(metadata)
return samples
def Cleanup(benchmark_spec):
"""Cleanup.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
def StopAerospike(server):
server.RemoteCommand('cd %s && nohup sudo make stop' %
aerospike_server.AEROSPIKE_DIR)
server.RemoteCommand('sudo rm -rf aerospike*')
aerospike_vms = benchmark_spec.vm_groups['workers']
vm_util.RunThreaded(StopAerospike, aerospike_vms)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from compare_gan.architectures import abstract_arch
from compare_gan.architectures.arch_ops import conv2d
from compare_gan.architectures.arch_ops import deconv2d
from compare_gan.architectures.arch_ops import linear
from compare_gan.architectures.arch_ops import lrelu
import numpy as np
import tensorflow as tf
def conv_out_size_same(size, stride):
return int(np.ceil(float(size) / float(stride)))
class Generator(abstract_arch.AbstractGenerator):
"""SNDCGAN generator.
Details are available at https://openreview.net/pdf?id=B1QRgziT-.
"""
def apply(self, z, y, is_training):
"""Build the generator network for the given inputs.
Args:
z: `Tensor` of shape [batch_size, z_dim] with latent code.
y: `Tensor` of shape [batch_size, num_classes] of one hot encoded labels.
is_training: boolean, are we in train or eval mode.
Returns:
A tensor of size [batch_size] + self._image_shape with values in [0, 1].
"""
batch_size = z.shape[0].value
s_h, s_w, colors = self._image_shape
s_h2, s_w2 = conv_out_size_same(s_h, 2), conv_out_size_same(s_w, 2)
s_h4, s_w4 = conv_out_size_same(s_h2, 2), conv_out_size_same(s_w2, 2)
s_h8, s_w8 = conv_out_size_same(s_h4, 2), conv_out_size_same(s_w4, 2)
net = linear(z, s_h8 * s_w8 * 512, scope="g_fc1")
net = self.batch_norm(net, z=z, y=y, is_training=is_training, name="g_bn1")
net = tf.nn.relu(net)
net = tf.reshape(net, [batch_size, s_h8, s_w8, 512])
net = deconv2d(net, [batch_size, s_h4, s_w4, 256], 4, 4, 2, 2, name="g_dc2")
net = self.batch_norm(net, z=z, y=y, is_training=is_training, name="g_bn2")
net = tf.nn.relu(net)
net = deconv2d(net, [batch_size, s_h2, s_w2, 128], 4, 4, 2, 2, name="g_dc3")
net = self.batch_norm(net, z=z, y=y, is_training=is_training, name="g_bn3")
net = tf.nn.relu(net)
net = deconv2d(net, [batch_size, s_h, s_w, 64], 4, 4, 2, 2, name="g_dc4")
net = self.batch_norm(net, z=z, y=y, is_training=is_training, name="g_bn4")
net = tf.nn.relu(net)
net = deconv2d(
net, [batch_size, s_h, s_w, colors], 3, 3, 1, 1, name="g_dc5")
out = tf.tanh(net)
# This normalization from [-1, 1] to [0, 1] is introduced for consistency
# with other models.
out = tf.div(out + 1.0, 2.0)
return out
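# Worked example of the spatial arithmetic above (assuming a 32x32 output
# image): conv_out_size_same halves the size three times (32 -> 16 -> 8 -> 4),
# so g_fc1 produces a 4 * 4 * 512 activation and the three stride-2
# deconvolutions upsample it back to 32x32 before the final stride-1 layer.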
class Discriminator(abstract_arch.AbstractDiscriminator):
"""SNDCGAN discriminator.
Details are available at https://openreview.net/pdf?id=B1QRgziT-.
"""
def apply(self, x, y, is_training):
"""Apply the discriminator on a input.
Args:
x: `Tensor` of shape [batch_size, ?, ?, ?] with real or fake images.
y: `Tensor` of shape [batch_size, num_classes] with one hot encoded
labels.
is_training: Boolean, whether the architecture should be constructed for
training or inference.
Returns:
Tuple of 3 Tensors: the final prediction of the discriminator, the logits
before the final output activation function, and the logits from the
second-to-last layer.
"""
del is_training, y
use_sn = self._spectral_norm
# In the compare_gan framework, image preprocessing normalizes pixel values to
# the range [0, 1], while the authors used [-1, 1]. Apply this rescaling to the
# input image instead of changing our preprocessing function.
x = x * 2.0 - 1.0
net = conv2d(x, 64, 3, 3, 1, 1, name="d_conv1", use_sn=use_sn)
net = lrelu(net, leak=0.1)
net = conv2d(net, 128, 4, 4, 2, 2, name="d_conv2", use_sn=use_sn)
net = lrelu(net, leak=0.1)
net = conv2d(net, 128, 3, 3, 1, 1, name="d_conv3", use_sn=use_sn)
net = lrelu(net, leak=0.1)
net = conv2d(net, 256, 4, 4, 2, 2, name="d_conv4", use_sn=use_sn)
net = lrelu(net, leak=0.1)
net = conv2d(net, 256, 3, 3, 1, 1, name="d_conv5", use_sn=use_sn)
net = lrelu(net, leak=0.1)
net = conv2d(net, 512, 4, 4, 2, 2, name="d_conv6", use_sn=use_sn)
net = lrelu(net, leak=0.1)
net = conv2d(net, 512, 3, 3, 1, 1, name="d_conv7", use_sn=use_sn)
net = lrelu(net, leak=0.1)
batch_size = x.shape.as_list()[0]
net = tf.reshape(net, [batch_size, -1])
out_logit = linear(net, 1, scope="d_fc1", use_sn=use_sn)
out = tf.nn.sigmoid(out_logit)
return out, out_logit, net
|
from pysmartthings import Attribute, Capability
from pysmartthings.device import Status
from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN
from homeassistant.components.smartthings.const import DOMAIN, SIGNAL_SMARTTHINGS_UPDATE
from homeassistant.helpers.dispatcher import async_dispatcher_send
from .conftest import setup_platform
async def test_entity_and_device_attributes(hass, device_factory):
"""Test the attributes of the entity are correct."""
# Arrange
device = device_factory("Lock_1", [Capability.lock], {Attribute.lock: "unlocked"})
entity_registry = await hass.helpers.entity_registry.async_get_registry()
device_registry = await hass.helpers.device_registry.async_get_registry()
# Act
await setup_platform(hass, LOCK_DOMAIN, devices=[device])
# Assert
entry = entity_registry.async_get("lock.lock_1")
assert entry
assert entry.unique_id == device.device_id
entry = device_registry.async_get_device({(DOMAIN, device.device_id)}, [])
assert entry
assert entry.name == device.label
assert entry.model == device.device_type_name
assert entry.manufacturer == "Unavailable"
async def test_lock(hass, device_factory):
"""Test the lock locks successfully."""
# Arrange
device = device_factory("Lock_1", [Capability.lock])
device.status.attributes[Attribute.lock] = Status(
"unlocked",
None,
{
"method": "Manual",
"codeId": None,
"codeName": "Code 1",
"lockName": "Front Door",
"usedCode": "Code 2",
},
)
await setup_platform(hass, LOCK_DOMAIN, devices=[device])
# Act
await hass.services.async_call(
LOCK_DOMAIN, "lock", {"entity_id": "lock.lock_1"}, blocking=True
)
# Assert
state = hass.states.get("lock.lock_1")
assert state is not None
assert state.state == "locked"
assert state.attributes["method"] == "Manual"
assert state.attributes["lock_state"] == "locked"
assert state.attributes["code_name"] == "Code 1"
assert state.attributes["used_code"] == "Code 2"
assert state.attributes["lock_name"] == "Front Door"
assert "code_id" not in state.attributes
async def test_unlock(hass, device_factory):
"""Test the lock unlocks successfully."""
# Arrange
device = device_factory("Lock_1", [Capability.lock], {Attribute.lock: "locked"})
await setup_platform(hass, LOCK_DOMAIN, devices=[device])
# Act
await hass.services.async_call(
LOCK_DOMAIN, "unlock", {"entity_id": "lock.lock_1"}, blocking=True
)
# Assert
state = hass.states.get("lock.lock_1")
assert state is not None
assert state.state == "unlocked"
async def test_update_from_signal(hass, device_factory):
"""Test the lock updates when receiving a signal."""
# Arrange
device = device_factory("Lock_1", [Capability.lock], {Attribute.lock: "unlocked"})
await setup_platform(hass, LOCK_DOMAIN, devices=[device])
await device.lock(True)
# Act
async_dispatcher_send(hass, SIGNAL_SMARTTHINGS_UPDATE, [device.device_id])
# Assert
await hass.async_block_till_done()
state = hass.states.get("lock.lock_1")
assert state is not None
assert state.state == "locked"
async def test_unload_config_entry(hass, device_factory):
"""Test the lock is removed when the config entry is unloaded."""
# Arrange
device = device_factory("Lock_1", [Capability.lock], {Attribute.lock: "locked"})
config_entry = await setup_platform(hass, LOCK_DOMAIN, devices=[device])
# Act
await hass.config_entries.async_forward_entry_unload(config_entry, "lock")
# Assert
assert not hass.states.get("lock.lock_1")
|
from enum import Enum
import homeassistant.const as const
CONF_PROFILES = "profiles"
CONF_USE_WEBHOOK = "use_webhook"
DATA_MANAGER = "data_manager"
CONFIG = "config"
DOMAIN = "withings"
LOG_NAMESPACE = "homeassistant.components.withings"
PROFILE = "profile"
PUSH_HANDLER = "push_handler"
CONF_WEBHOOK_URL = "webhook_url"
class Measurement(Enum):
"""Measurement supported by the withings integration."""
BODY_TEMP_C = "body_temperature_c"
BONE_MASS_KG = "bone_mass_kg"
DIASTOLIC_MMHG = "diastolic_blood_pressure_mmhg"
FAT_FREE_MASS_KG = "fat_free_mass_kg"
FAT_MASS_KG = "fat_mass_kg"
FAT_RATIO_PCT = "fat_ratio_pct"
HEART_PULSE_BPM = "heart_pulse_bpm"
HEIGHT_M = "height_m"
HYDRATION = "hydration"
IN_BED = "in_bed"
MUSCLE_MASS_KG = "muscle_mass_kg"
PWV = "pulse_wave_velocity"
SKIN_TEMP_C = "skin_temperature_c"
SLEEP_BREATHING_DISTURBANCES_INTENSITY = "sleep_breathing_disturbances_intensity"
SLEEP_DEEP_DURATION_SECONDS = "sleep_deep_duration_seconds"
SLEEP_HEART_RATE_AVERAGE = "sleep_heart_rate_average_bpm"
SLEEP_HEART_RATE_MAX = "sleep_heart_rate_max_bpm"
SLEEP_HEART_RATE_MIN = "sleep_heart_rate_min_bpm"
SLEEP_LIGHT_DURATION_SECONDS = "sleep_light_duration_seconds"
SLEEP_REM_DURATION_SECONDS = "sleep_rem_duration_seconds"
SLEEP_RESPIRATORY_RATE_AVERAGE = "sleep_respiratory_average_bpm"
SLEEP_RESPIRATORY_RATE_MAX = "sleep_respiratory_max_bpm"
SLEEP_RESPIRATORY_RATE_MIN = "sleep_respiratory_min_bpm"
SLEEP_SCORE = "sleep_score"
SLEEP_SNORING = "sleep_snoring"
SLEEP_SNORING_EPISODE_COUNT = "sleep_snoring_eposode_count"
SLEEP_TOSLEEP_DURATION_SECONDS = "sleep_tosleep_duration_seconds"
SLEEP_TOWAKEUP_DURATION_SECONDS = "sleep_towakeup_duration_seconds"
SLEEP_WAKEUP_COUNT = "sleep_wakeup_count"
SLEEP_WAKEUP_DURATION_SECONDS = "sleep_wakeup_duration_seconds"
SPO2_PCT = "spo2_pct"
SYSTOLIC_MMGH = "systolic_blood_pressure_mmhg"
TEMP_C = "temperature_c"
WEIGHT_KG = "weight_kg"
UOM_BEATS_PER_MINUTE = "bpm"
UOM_BREATHS_PER_MINUTE = f"br/{const.TIME_MINUTES}"
UOM_FREQUENCY = "times"
UOM_MMHG = "mmhg"
UOM_LENGTH_M = const.LENGTH_METERS
UOM_TEMP_C = const.TEMP_CELSIUS
|
from __future__ import division
import unittest
import math
import numpy as np
from chainer import testing
from chainercv.transforms import random_sized_crop
@testing.parameterize(
{'H': 256, 'W': 256},
{'H': 129, 'W': 352},
{'H': 352, 'W': 129},
{'H': 35, 'W': 500},
)
class TestRandomSizedCrop(unittest.TestCase):
def test_random_sized_crop(self):
img = np.random.uniform(size=(3, self.H, self.W))
scale_ratio_interval = (0.08, 1)
aspect_ratio_interval = (3 / 4, 4 / 3)
out, params = random_sized_crop(img, scale_ratio_interval,
aspect_ratio_interval,
return_param=True)
expected = img[:, params['y_slice'], params['x_slice']]
np.testing.assert_equal(out, expected)
_, H_crop, W_crop = out.shape
scale_ratio = params['scale_ratio']
aspect_ratio = params['aspect_ratio']
area = scale_ratio * self.H * self.W
expected_H_crop = int(math.floor(
np.sqrt(area * aspect_ratio)))
expected_W_crop = int(math.floor(
np.sqrt(area / aspect_ratio)))
self.assertEqual(H_crop, expected_H_crop)
self.assertEqual(W_crop, expected_W_crop)
self.assertTrue(
(aspect_ratio_interval[0] <= aspect_ratio) and
(aspect_ratio <= aspect_ratio_interval[1]))
self.assertTrue(
scale_ratio <= scale_ratio_interval[1])
scale_ratio_max = min((scale_ratio_interval[1],
self.H / (self.W * aspect_ratio),
(aspect_ratio * self.W) / self.H))
self.assertTrue(
min((scale_ratio_max, scale_ratio_interval[0])) <= scale_ratio)
testing.run_module(__name__, __file__)
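# Worked example of the size check above: for H = W = 256 with
# scale_ratio = 0.5 and aspect_ratio = 4 / 3, area = 0.5 * 256 * 256 = 32768,
# so the expected crop is floor(sqrt(32768 * 4 / 3)) = 209 rows by
# floor(sqrt(32768 * 3 / 4)) = 156 columns.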
|
import logging
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker import errors
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.linux_packages import ycsb
from perfkitbenchmarker.providers.gcp import gcp_spanner
from perfkitbenchmarker.providers.gcp import util
BENCHMARK_NAME = 'cloud_spanner_ycsb'
BENCHMARK_DESCRIPTION = 'YCSB'
BENCHMARK_TABLE = 'usertable'
BENCHMARK_ZERO_PADDING = 12
REQUIRED_SCOPES = (
'https://www.googleapis.com/auth/spanner.admin',
'https://www.googleapis.com/auth/spanner.data')
BENCHMARK_CONFIG = f"""
cloud_spanner_ycsb:
description: >
Run YCSB against Google Cloud Spanner.
Configure the number of VMs via --ycsb_client_vms.
vm_groups:
default:
vm_spec: *default_single_core
vm_count: 1
spanner:
service_type: {gcp_spanner.DEFAULT_SPANNER_TYPE}
nodes: 1
description: {BENCHMARK_DESCRIPTION}
flags:
gcloud_scopes: >
{' '.join(REQUIRED_SCOPES)}"""
CLIENT_TAR_URL = {
'go': 'https://storage.googleapis.com/cloud-spanner-client-packages/'
'ycsb-go-20180531_f0afaf5fad3c46ae392ebab6b7553d37d65d07ac.tar.gz',
}
FLAGS = flags.FLAGS
flags.DEFINE_enum('cloud_spanner_ycsb_client_type', 'java', ['java', 'go'],
'The type of the client.')
flags.DEFINE_integer('cloud_spanner_ycsb_batchinserts',
1,
'The Cloud Spanner batch inserts used in the YCSB '
'benchmark.')
flags.DEFINE_integer('cloud_spanner_ycsb_boundedstaleness',
0,
'The Cloud Spanner bounded staleness used in the YCSB '
'benchmark.')
flags.DEFINE_enum('cloud_spanner_ycsb_readmode',
'query', ['query', 'read'],
'The Cloud Spanner read mode used in the YCSB benchmark.')
flags.DEFINE_list('cloud_spanner_ycsb_custom_vm_install_commands', [],
'A list of strings. If specified, execute them on every '
'VM during the installation phase.')
def GetConfig(user_config):
config = configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
if FLAGS['ycsb_client_vms'].present:
config['vm_groups']['default']['vm_count'] = FLAGS.ycsb_client_vms
config['spanner']['ddl'] = _BuildSchema()
return config
def CheckPrerequisites(benchmark_config):
for scope in REQUIRED_SCOPES:
if scope not in FLAGS.gcloud_scopes:
raise ValueError('Scope {0} required.'.format(scope))
def Prepare(benchmark_spec):
"""Prepare the virtual machines to run cloud spanner benchmarks.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
benchmark_spec.always_call_cleanup = True
if FLAGS.cloud_spanner_ycsb_client_type != 'java':
ycsb.SetYcsbTarUrl(CLIENT_TAR_URL[FLAGS.cloud_spanner_ycsb_client_type])
vms = benchmark_spec.vms
# Install required packages and copy credential files
vm_util.RunThreaded(_Install, vms)
benchmark_spec.executor = ycsb.YCSBExecutor('cloudspanner')
def Run(benchmark_spec):
"""Spawn YCSB and gather the results.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
Returns:
A list of sample.Sample instances.
"""
vms = benchmark_spec.vms
run_kwargs = {
'table': BENCHMARK_TABLE,
'zeropadding': BENCHMARK_ZERO_PADDING,
'cloudspanner.instance': benchmark_spec.spanner.name,
'cloudspanner.database': benchmark_spec.spanner.database,
'cloudspanner.readmode': FLAGS.cloud_spanner_ycsb_readmode,
'cloudspanner.boundedstaleness':
FLAGS.cloud_spanner_ycsb_boundedstaleness,
'cloudspanner.batchinserts': FLAGS.cloud_spanner_ycsb_batchinserts,
}
if FLAGS.cloud_spanner_ycsb_client_type == 'go':
run_kwargs['cloudspanner.project'] = util.GetDefaultProject()
load_kwargs = run_kwargs.copy()
samples = list(benchmark_spec.executor.LoadAndRun(
vms, load_kwargs=load_kwargs, run_kwargs=run_kwargs))
metadata = {'ycsb_client_type': FLAGS.cloud_spanner_ycsb_client_type}
for sample in samples:
# YCSB reports errors as samples; there is no direct output to parse.
if 'Return=ERROR' in sample.metric:
raise errors.Benchmarks.RunError(
'Error running YCSB, please check the output log.')
sample.metadata.update(metadata)
return samples
def Cleanup(benchmark_spec):
"""Cleanup.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
del benchmark_spec
def _BuildSchema():
"""BuildSchema.
Returns:
A string of DDL for creating a Spanner table.
"""
fields = ',\n'.join(
[f'field{i} STRING(MAX)' for i in range(FLAGS.ycsb_field_count)]
)
return f"""
CREATE TABLE {BENCHMARK_TABLE} (
id STRING(MAX),
{fields}
) PRIMARY KEY(id)
"""
def _Install(vm):
if FLAGS.cloud_spanner_ycsb_client_type == 'go':
logging.info('Installing go packages.')
vm.Install('go_lang')
vm.Install('google_cloud_go')
vm.Install('ycsb')
# Run custom VM installation commands.
for command in FLAGS.cloud_spanner_ycsb_custom_vm_install_commands:
_, _ = vm.RemoteCommand(command)
|
import argparse
import logging
import sys
from paasta_tools.autoscaling import load_boost
from paasta_tools.utils import load_system_paasta_config
log = logging.getLogger(__name__)
def parse_args():
"""Parses the command line arguments passed to this script"""
parser = argparse.ArgumentParser()
parser.add_argument(
"-p",
"--pool",
type=str,
default="default",
help="Name of the pool you want to increase the capacity. Default is 'default' pool.",
)
parser.add_argument(
"-b",
"--boost",
type=float,
default=load_boost.DEFAULT_BOOST_FACTOR,
help="Boost factor to apply. Default is 1.5. A big failover should be 2, 3 is the max.",
)
parser.add_argument(
"-d",
"--duration",
type=int,
default=load_boost.DEFAULT_BOOST_DURATION,
help="Duration of the capacity boost in minutes. Default is 40min.",
)
parser.add_argument(
"-f",
"--force",
action="store_true",
dest="override",
help="Replace an existing boost. Default is false",
)
parser.add_argument(
"action",
choices=["set", "status", "clear"],
help="You can view the status, set or clear a boost.",
)
parser.add_argument(
"-v",
"--verbose",
action="count",
dest="verbose",
default=0,
help="Print out more output.",
)
return parser.parse_args()
def paasta_cluster_boost(
action: str, pool: str, boost: float, duration: int, override: bool
) -> bool:
""" Set, Get or clear a boost on a paasta cluster for a given pool in a given region
:returns: None
"""
system_config = load_system_paasta_config()
if not system_config.get_cluster_boost_enabled():
print("ERROR: cluster_boost feature is not enabled.")
return False
regions = system_config.get_boost_regions()
if len(regions) == 0:
print(f"ERROR: no boost_regions configured in {system_config.directory}")
return False
for region in regions:
zk_boost_path = load_boost.get_zk_cluster_boost_path(region=region, pool=pool)
if action == "set":
if not load_boost.set_boost_factor(
zk_boost_path=zk_boost_path,
region=region,
pool=pool,
factor=boost,
duration_minutes=duration,
override=override,
):
print(
f"ERROR: Failed to set the boost for pool {pool}, region {region}."
)
return False
elif action == "status":
pass
elif action == "clear":
if not load_boost.clear_boost(zk_boost_path, region=region, pool=pool):
print("ERROR: Failed to clear the boost for pool {}, region {}.")
return False
else:
raise NotImplementedError("Action: '%s' is not implemented." % action)
return False
print(
"Current boost value for path: {}: {}".format(
zk_boost_path, load_boost.get_boost_factor(zk_boost_path=zk_boost_path)
)
)
return True
def main() -> bool:
args = parse_args()
if args.verbose >= 2:
logging.basicConfig(level=logging.DEBUG)
elif args.verbose == 1:
logging.basicConfig(level=logging.INFO)
else:
logging.basicConfig(level=logging.WARNING)
if paasta_cluster_boost(
action=args.action,
pool=args.pool,
boost=args.boost,
duration=args.duration,
override=args.override,
):
sys.exit(0)
sys.exit(1)
if __name__ == "__main__":
main()
|
import copy
import json
from homeassistant.components import websocket_api
from homeassistant.components.tasmota.const import DEFAULT_PREFIX
from .test_common import DEFAULT_CONFIG
from tests.async_mock import call
from tests.common import MockConfigEntry, async_fire_mqtt_message
async def test_device_remove(
hass, mqtt_mock, caplog, device_reg, entity_reg, setup_tasmota
):
"""Test removing a discovered device through device registry."""
config = copy.deepcopy(DEFAULT_CONFIG)
mac = config["mac"]
async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config))
await hass.async_block_till_done()
# Verify device entry is created
device_entry = device_reg.async_get_device(set(), {("mac", mac)})
assert device_entry is not None
device_reg.async_remove_device(device_entry.id)
await hass.async_block_till_done()
# Verify device entry is removed
device_entry = device_reg.async_get_device(set(), {("mac", mac)})
assert device_entry is None
# Verify retained discovery topic has been cleared
mqtt_mock.async_publish.assert_has_calls(
[
call(f"tasmota/discovery/{mac}/config", "", 0, True),
call(f"tasmota/discovery/{mac}/sensors", "", 0, True),
],
any_order=True,
)
async def test_device_remove_non_tasmota_device(
hass, device_reg, hass_ws_client, mqtt_mock, setup_tasmota
):
"""Test removing a non Tasmota device through device registry."""
config_entry = MockConfigEntry(domain="test")
config_entry.add_to_hass(hass)
mac = "12:34:56:AB:CD:EF"
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={("mac", mac)},
)
assert device_entry is not None
device_reg.async_remove_device(device_entry.id)
await hass.async_block_till_done()
# Verify device entry is removed
device_entry = device_reg.async_get_device(set(), {("mac", mac)})
assert device_entry is None
# Verify no Tasmota discovery message was sent
mqtt_mock.async_publish.assert_not_called()
async def test_device_remove_stale_tasmota_device(
hass, device_reg, hass_ws_client, mqtt_mock, setup_tasmota
):
"""Test removing a stale (undiscovered) Tasmota device through device registry."""
config_entry = hass.config_entries.async_entries("tasmota")[0]
mac = "12:34:56:AB:CD:EF"
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={("mac", mac)},
)
assert device_entry is not None
device_reg.async_remove_device(device_entry.id)
await hass.async_block_till_done()
# Verify device entry is removed
device_entry = device_reg.async_get_device(set(), {("mac", mac)})
assert device_entry is None
# Verify retained discovery topic has been cleared
mac = mac.replace(":", "")
mqtt_mock.async_publish.assert_has_calls(
[
call(f"tasmota/discovery/{mac}/config", "", 0, True),
call(f"tasmota/discovery/{mac}/sensors", "", 0, True),
],
any_order=True,
)
async def test_tasmota_ws_remove_discovered_device(
hass, device_reg, entity_reg, hass_ws_client, mqtt_mock, setup_tasmota
):
"""Test Tasmota websocket device removal."""
config = copy.deepcopy(DEFAULT_CONFIG)
mac = config["mac"]
async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config))
await hass.async_block_till_done()
# Verify device entry is created
device_entry = device_reg.async_get_device(set(), {("mac", mac)})
assert device_entry is not None
client = await hass_ws_client(hass)
await client.send_json(
{"id": 5, "type": "tasmota/device/remove", "device_id": device_entry.id}
)
response = await client.receive_json()
assert response["success"]
# Verify device entry is cleared
device_entry = device_reg.async_get_device(set(), {("mac", mac)})
assert device_entry is None
async def test_tasmota_ws_remove_discovered_device_twice(
hass, device_reg, hass_ws_client, mqtt_mock, setup_tasmota
):
"""Test Tasmota websocket device removal."""
config = copy.deepcopy(DEFAULT_CONFIG)
mac = config["mac"]
async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config))
await hass.async_block_till_done()
# Verify device entry is created
device_entry = device_reg.async_get_device(set(), {("mac", mac)})
assert device_entry is not None
client = await hass_ws_client(hass)
await client.send_json(
{"id": 5, "type": "tasmota/device/remove", "device_id": device_entry.id}
)
response = await client.receive_json()
assert response["success"]
await client.send_json(
{"id": 6, "type": "tasmota/device/remove", "device_id": device_entry.id}
)
response = await client.receive_json()
assert not response["success"]
assert response["error"]["code"] == websocket_api.const.ERR_NOT_FOUND
assert response["error"]["message"] == "Device not found"
async def test_tasmota_ws_remove_non_tasmota_device(
hass, device_reg, hass_ws_client, mqtt_mock, setup_tasmota
):
"""Test Tasmota websocket device removal of device belonging to other domain."""
config_entry = MockConfigEntry(domain="test")
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={("mac", "12:34:56:AB:CD:EF")},
)
assert device_entry is not None
client = await hass_ws_client(hass)
await client.send_json(
{"id": 5, "type": "tasmota/device/remove", "device_id": device_entry.id}
)
response = await client.receive_json()
assert not response["success"]
assert response["error"]["code"] == websocket_api.const.ERR_NOT_FOUND
|
from cerberus import errors
from cerberus.tests import assert_fail, assert_success
def test_nested_readonly_with_defaults():
schema = {
'some_field': {
'type': 'dict',
'schema': {
'created': {'type': 'string', 'readonly': True, 'default': 'today'},
'modified': {
'type': 'string',
'readonly': True,
'default_setter': lambda d: d['created'],
},
},
}
}
assert_success(document={'some_field': {}}, schema=schema)
expected_errors = [
(
('some_field', 'created'),
('some_field', 'schema', 'created', 'readonly'),
errors.READONLY_FIELD,
schema['some_field']['schema']['created']['readonly'],
),
(
('some_field', 'modified'),
('some_field', 'schema', 'modified', 'readonly'),
errors.READONLY_FIELD,
schema['some_field']['schema']['modified']['readonly'],
),
]
assert_fail(
document={'some_field': {'created': 'tomorrow', 'modified': 'now'}},
schema=schema,
errors=expected_errors,
)
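    # Even values identical to the defaults are rejected: the readonly rule
    # blocks any value supplied for the field, not only changed ones.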
assert_fail(
document={'some_field': {'created': 'today', 'modified': 'today'}},
schema=schema,
errors=expected_errors,
)
def test_readonly():
field = 'a_readonly_string'
assert_fail(
{field: 'update me if you can'},
error=(field, (field, 'readonly'), errors.READONLY_FIELD, True),
)
def test_readonly_skips_further_validation(validator):
# test that readonly rule is checked before any other rule, and blocks.
# https://github.com/pyeve/cerberus/issues/63
field = "a_readonly_number"
assert_fail(
schema={field: {'type': 'integer', 'readonly': True, 'max': 1}},
document={field: 2},
errors=[(field, (field, "readonly"), errors.READONLY_FIELD, True)],
)
def test_readonly_with_defaults():
schema = {
'created': {'type': 'string', 'readonly': True, 'default': 'today'},
'modified': {
'type': 'string',
'readonly': True,
'default_setter': lambda d: d['created'],
},
}
assert_success(document={}, schema=schema)
expected_errors = [
(
'created',
('created', 'readonly'),
errors.READONLY_FIELD,
schema['created']['readonly'],
),
(
'modified',
('modified', 'readonly'),
errors.READONLY_FIELD,
schema['modified']['readonly'],
),
]
assert_fail(
document={'created': 'tomorrow', 'modified': 'today'},
schema=schema,
errors=expected_errors,
)
assert_fail(
document={'created': 'today', 'modified': 'today'},
schema=schema,
errors=expected_errors,
)
def test_repeated_readonly(validator):
# https://github.com/pyeve/cerberus/issues/311
validator.schema = {'id': {'readonly': True}}
assert_fail({'id': 0}, validator=validator)
assert_fail({'id': 0}, validator=validator)
|
from filterpy.common import Saver
from filterpy.kalman import CubatureKalmanFilter as CKF
from filterpy.kalman import UnscentedKalmanFilter as UKF
from filterpy.kalman import MerweScaledSigmaPoints
import numpy as np
from numpy.random import randn
from pytest import approx
from scipy.spatial.distance import mahalanobis as scipy_mahalanobis
def test_1d():
def fx(x, dt):
F = np.array([[1., dt],
[0, 1]])
return np.dot(F, x)
def hx(x):
return x[0:1]
ckf = CKF(dim_x=2, dim_z=1, dt=0.1, hx=hx, fx=fx)
ckf.x = np.array([[1.], [2.]])
ckf.P = np.array([[1, 1.1],
[1.1, 3]])
ckf.R = np.eye(1) * .05
ckf.Q = np.array([[0., 0], [0., .001]])
dt = 0.1
points = MerweScaledSigmaPoints(2, .1, 2., -1)
kf = UKF(dim_x=2, dim_z=1, dt=dt, fx=fx, hx=hx, points=points)
kf.x = np.array([1, 2])
kf.P = np.array([[1, 1.1],
[1.1, 3]])
kf.R *= 0.05
kf.Q = np.array([[0., 0], [0., .001]])
s = Saver(kf)
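    # For this linear model both filters reduce to the exact Kalman filter,
    # so the cubature and unscented estimates should agree to numerical precision.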
for i in range(50):
z = np.array([[i+randn()*0.1]])
ckf.predict()
ckf.update(z)
kf.predict()
kf.update(z[0])
assert abs(ckf.x[0] - kf.x[0]) < 1e-10
assert abs(ckf.x[1] - kf.x[1]) < 1e-10
s.save()
# test mahalanobis
a = np.zeros(kf.y.shape)
maha = scipy_mahalanobis(a, kf.y, kf.SI)
assert kf.mahalanobis == approx(maha)
s.to_array()
if __name__ == "__main__":
test_1d()
|
import asyncio
import re
from typing import Any, Iterable
from xbox.webapi.api.client import XboxLiveClient
from xbox.webapi.api.provider.smartglass.models import (
InputKeyType,
PowerState,
SmartglassConsole,
SmartglassConsoleList,
)
from homeassistant.components.remote import (
ATTR_DELAY_SECS,
ATTR_NUM_REPEATS,
DEFAULT_DELAY_SECS,
RemoteEntity,
)
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from . import ConsoleData, XboxUpdateCoordinator
from .const import DOMAIN
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up Xbox media_player from a config entry."""
client: XboxLiveClient = hass.data[DOMAIN][entry.entry_id]["client"]
consoles: SmartglassConsoleList = hass.data[DOMAIN][entry.entry_id]["consoles"]
coordinator: XboxUpdateCoordinator = hass.data[DOMAIN][entry.entry_id][
"coordinator"
]
async_add_entities(
[XboxRemote(client, console, coordinator) for console in consoles.result]
)
class XboxRemote(CoordinatorEntity, RemoteEntity):
"""Representation of an Xbox remote."""
def __init__(
self,
client: XboxLiveClient,
console: SmartglassConsole,
coordinator: XboxUpdateCoordinator,
) -> None:
"""Initialize the Xbox Media Player."""
super().__init__(coordinator)
self.client: XboxLiveClient = client
self._console: SmartglassConsole = console
@property
def name(self):
"""Return the device name."""
return f"{self._console.name} Remote"
@property
def unique_id(self):
"""Console device ID."""
return self._console.id
@property
def data(self) -> ConsoleData:
"""Return coordinator data for this console."""
return self.coordinator.data.consoles[self._console.id]
@property
def is_on(self):
"""Return True if device is on."""
return self.data.status.power_state == PowerState.On
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the Xbox on."""
await self.client.smartglass.wake_up(self._console.id)
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn the Xbox off."""
await self.client.smartglass.turn_off(self._console.id)
async def async_send_command(self, command: Iterable[str], **kwargs: Any) -> None:
"""Send controller or text input to the Xbox."""
num_repeats = kwargs[ATTR_NUM_REPEATS]
delay = kwargs.get(ATTR_DELAY_SECS, DEFAULT_DELAY_SECS)
for _ in range(num_repeats):
for single_command in command:
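                # Commands that match a controller button are sent as button
                # presses; anything else is sent to the console as text input.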
try:
button = InputKeyType(single_command)
await self.client.smartglass.press_button(self._console.id, button)
except ValueError:
await self.client.smartglass.insert_text(
self._console.id, single_command
)
await asyncio.sleep(delay)
@property
def device_info(self):
"""Return a device description for device registry."""
# Turns "XboxOneX" into "Xbox One X" for display
matches = re.finditer(
".+?(?:(?<=[a-z])(?=[A-Z])|(?<=[A-Z])(?=[A-Z][a-z])|$)",
self._console.console_type,
)
model = " ".join([m.group(0) for m in matches])
return {
"identifiers": {(DOMAIN, self._console.id)},
"name": self._console.name,
"manufacturer": "Microsoft",
"model": model,
}
|
from chainercv.chainer_experimental.datasets.sliceable import SliceableDataset
class ConcatenatedDataset(SliceableDataset):
"""A sliceable version of :class:`chainer.datasets.ConcatenatedDataset`.
Here is an example.
>>> dataset_a = TupleDataset([0, 1, 2], [0, 1, 4])
>>> dataset_b = TupleDataset([3, 4, 5], [9, 16, 25])
>>>
>>> dataset = ConcatenatedDataset(dataset_a, dataset_b)
>>> dataset.slice[:, 0][:] # [0, 1, 2, 3, 4, 5]
Args:
datasets: The underlying datasets.
Each dataset should inherit
            :class:`~chainercv.chainer_experimental.datasets.sliceable.SliceableDataset`
and should have the same keys.
"""
def __init__(self, *datasets):
if len(datasets) == 0:
raise ValueError('At least one dataset is required')
self._datasets = datasets
self._keys = datasets[0].keys
for dataset in datasets[1:]:
if not dataset.keys == self._keys:
raise ValueError('All datasets should have the same keys')
def __len__(self):
return sum(len(dataset) for dataset in self._datasets)
@property
def keys(self):
return self._keys
def get_example_by_keys(self, index, key_indices):
if index < 0:
raise IndexError
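        # Walk the datasets in order, subtracting each length until the
        # index falls within one of them.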
for dataset in self._datasets:
if index < len(dataset):
return dataset.get_example_by_keys(index, key_indices)
index -= len(dataset)
raise IndexError
|
import datetime
import pytest
import voluptuous as vol
from homeassistant.components.input_datetime import (
ATTR_DATE,
ATTR_DATETIME,
ATTR_EDITABLE,
ATTR_TIME,
ATTR_TIMESTAMP,
CONF_HAS_DATE,
CONF_HAS_TIME,
CONF_ID,
CONF_INITIAL,
CONF_NAME,
DEFAULT_TIME,
DOMAIN,
SERVICE_RELOAD,
SERVICE_SET_DATETIME,
)
from homeassistant.const import ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME, ATTR_NAME
from homeassistant.core import Context, CoreState, State
from homeassistant.exceptions import Unauthorized
from homeassistant.helpers import entity_registry
from homeassistant.setup import async_setup_component
from homeassistant.util import dt as dt_util
from tests.async_mock import patch
from tests.common import mock_restore_cache
INITIAL_DATE = "2020-01-10"
INITIAL_TIME = "23:45:56"
INITIAL_DATETIME = f"{INITIAL_DATE} {INITIAL_TIME}"
@pytest.fixture
def storage_setup(hass, hass_storage):
"""Storage setup."""
async def _storage(items=None, config=None):
if items is None:
hass_storage[DOMAIN] = {
"key": DOMAIN,
"version": 1,
"data": {
"items": [
{
CONF_ID: "from_storage",
CONF_NAME: "datetime from storage",
CONF_INITIAL: INITIAL_DATETIME,
CONF_HAS_DATE: True,
CONF_HAS_TIME: True,
}
]
},
}
else:
hass_storage[DOMAIN] = {
"key": DOMAIN,
"version": 1,
"data": {"items": items},
}
if config is None:
config = {DOMAIN: {}}
return await async_setup_component(hass, DOMAIN, config)
return _storage
async def async_set_date_and_time(hass, entity_id, dt_value):
"""Set date and / or time of input_datetime."""
await hass.services.async_call(
DOMAIN,
SERVICE_SET_DATETIME,
{
ATTR_ENTITY_ID: entity_id,
ATTR_DATE: dt_value.date(),
ATTR_TIME: dt_value.time(),
},
blocking=True,
)
async def async_set_datetime(hass, entity_id, dt_value):
"""Set date and / or time of input_datetime."""
await hass.services.async_call(
DOMAIN,
SERVICE_SET_DATETIME,
{ATTR_ENTITY_ID: entity_id, ATTR_DATETIME: dt_value},
blocking=True,
)
async def async_set_timestamp(hass, entity_id, timestamp):
"""Set date and / or time of input_datetime."""
await hass.services.async_call(
DOMAIN,
SERVICE_SET_DATETIME,
{ATTR_ENTITY_ID: entity_id, ATTR_TIMESTAMP: timestamp},
blocking=True,
)
async def test_invalid_configs(hass):
"""Test config."""
invalid_configs = [
None,
{},
{"name with space": None},
{"test_no_value": {"has_time": False, "has_date": False}},
]
for cfg in invalid_configs:
assert not await async_setup_component(hass, DOMAIN, {DOMAIN: cfg})
async def test_set_datetime(hass):
"""Test set_datetime method using date & time."""
await async_setup_component(
hass, DOMAIN, {DOMAIN: {"test_datetime": {"has_time": True, "has_date": True}}}
)
entity_id = "input_datetime.test_datetime"
dt_obj = datetime.datetime(2017, 9, 7, 19, 46, 30)
await async_set_date_and_time(hass, entity_id, dt_obj)
state = hass.states.get(entity_id)
assert state.state == str(dt_obj)
assert state.attributes["has_time"]
assert state.attributes["has_date"]
assert state.attributes["year"] == 2017
assert state.attributes["month"] == 9
assert state.attributes["day"] == 7
assert state.attributes["hour"] == 19
assert state.attributes["minute"] == 46
assert state.attributes["second"] == 30
assert state.attributes["timestamp"] == dt_obj.timestamp()
async def test_set_datetime_2(hass):
"""Test set_datetime method using datetime."""
await async_setup_component(
hass, DOMAIN, {DOMAIN: {"test_datetime": {"has_time": True, "has_date": True}}}
)
entity_id = "input_datetime.test_datetime"
dt_obj = datetime.datetime(2017, 9, 7, 19, 46, 30)
await async_set_datetime(hass, entity_id, dt_obj)
state = hass.states.get(entity_id)
assert state.state == str(dt_obj)
assert state.attributes["has_time"]
assert state.attributes["has_date"]
assert state.attributes["year"] == 2017
assert state.attributes["month"] == 9
assert state.attributes["day"] == 7
assert state.attributes["hour"] == 19
assert state.attributes["minute"] == 46
assert state.attributes["second"] == 30
assert state.attributes["timestamp"] == dt_obj.timestamp()
async def test_set_datetime_3(hass):
"""Test set_datetime method using timestamp."""
await async_setup_component(
hass, DOMAIN, {DOMAIN: {"test_datetime": {"has_time": True, "has_date": True}}}
)
entity_id = "input_datetime.test_datetime"
dt_obj = datetime.datetime(2017, 9, 7, 19, 46, 30)
await async_set_timestamp(hass, entity_id, dt_util.as_utc(dt_obj).timestamp())
state = hass.states.get(entity_id)
assert state.state == str(dt_obj)
assert state.attributes["has_time"]
assert state.attributes["has_date"]
assert state.attributes["year"] == 2017
assert state.attributes["month"] == 9
assert state.attributes["day"] == 7
assert state.attributes["hour"] == 19
assert state.attributes["minute"] == 46
assert state.attributes["second"] == 30
assert state.attributes["timestamp"] == dt_obj.timestamp()
async def test_set_datetime_time(hass):
"""Test set_datetime method with only time."""
await async_setup_component(
hass, DOMAIN, {DOMAIN: {"test_time": {"has_time": True, "has_date": False}}}
)
entity_id = "input_datetime.test_time"
dt_obj = datetime.datetime(2017, 9, 7, 19, 46, 30)
time_portion = dt_obj.time()
await async_set_date_and_time(hass, entity_id, dt_obj)
state = hass.states.get(entity_id)
assert state.state == str(time_portion)
assert state.attributes["has_time"]
assert not state.attributes["has_date"]
assert state.attributes["timestamp"] == (19 * 3600) + (46 * 60) + 30
async def test_set_invalid(hass):
"""Test set_datetime method with only time."""
initial = "2017-01-01"
await async_setup_component(
hass,
DOMAIN,
{
DOMAIN: {
"test_date": {"has_time": False, "has_date": True, "initial": initial}
}
},
)
entity_id = "input_datetime.test_date"
dt_obj = datetime.datetime(2017, 9, 7, 19, 46)
time_portion = dt_obj.time()
with pytest.raises(vol.Invalid):
await hass.services.async_call(
"input_datetime",
"set_datetime",
{"entity_id": entity_id, "time": time_portion},
blocking=True,
)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state.state == initial
async def test_set_invalid_2(hass):
"""Test set_datetime method with date and datetime."""
initial = "2017-01-01"
await async_setup_component(
hass,
DOMAIN,
{
DOMAIN: {
"test_date": {"has_time": False, "has_date": True, "initial": initial}
}
},
)
entity_id = "input_datetime.test_date"
dt_obj = datetime.datetime(2017, 9, 7, 19, 46)
time_portion = dt_obj.time()
with pytest.raises(vol.Invalid):
await hass.services.async_call(
"input_datetime",
"set_datetime",
{"entity_id": entity_id, "time": time_portion, "datetime": dt_obj},
blocking=True,
)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state.state == initial
async def test_set_datetime_date(hass):
"""Test set_datetime method with only date."""
await async_setup_component(
hass, DOMAIN, {DOMAIN: {"test_date": {"has_time": False, "has_date": True}}}
)
entity_id = "input_datetime.test_date"
dt_obj = datetime.datetime(2017, 9, 7, 19, 46)
date_portion = dt_obj.date()
await async_set_date_and_time(hass, entity_id, dt_obj)
state = hass.states.get(entity_id)
assert state.state == str(date_portion)
assert not state.attributes["has_time"]
assert state.attributes["has_date"]
date_dt_obj = datetime.datetime(2017, 9, 7)
assert state.attributes["timestamp"] == date_dt_obj.timestamp()
async def test_restore_state(hass):
"""Ensure states are restored on startup."""
mock_restore_cache(
hass,
(
State("input_datetime.test_time", "19:46:00"),
State("input_datetime.test_date", "2017-09-07"),
State("input_datetime.test_datetime", "2017-09-07 19:46:00"),
State("input_datetime.test_bogus_data", "this is not a date"),
State("input_datetime.test_was_time", "19:46:00"),
State("input_datetime.test_was_date", "2017-09-07"),
),
)
hass.state = CoreState.starting
initial = datetime.datetime(2017, 1, 1, 23, 42)
default = datetime.datetime(1970, 1, 1, 0, 0)
await async_setup_component(
hass,
DOMAIN,
{
DOMAIN: {
"test_time": {"has_time": True, "has_date": False},
"test_date": {"has_time": False, "has_date": True},
"test_datetime": {"has_time": True, "has_date": True},
"test_bogus_data": {
"has_time": True,
"has_date": True,
"initial": str(initial),
},
"test_was_time": {"has_time": False, "has_date": True},
"test_was_date": {"has_time": True, "has_date": False},
}
},
)
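    # Entities whose restored state no longer matches their configuration
    # (test_was_time / test_was_date) fall back to the default value.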
dt_obj = datetime.datetime(2017, 9, 7, 19, 46)
state_time = hass.states.get("input_datetime.test_time")
assert state_time.state == str(dt_obj.time())
state_date = hass.states.get("input_datetime.test_date")
assert state_date.state == str(dt_obj.date())
state_datetime = hass.states.get("input_datetime.test_datetime")
assert state_datetime.state == str(dt_obj)
state_bogus = hass.states.get("input_datetime.test_bogus_data")
assert state_bogus.state == str(initial)
state_was_time = hass.states.get("input_datetime.test_was_time")
assert state_was_time.state == str(default.date())
state_was_date = hass.states.get("input_datetime.test_was_date")
assert state_was_date.state == str(default.time())
async def test_default_value(hass):
"""Test default value if none has been set via initial or restore state."""
await async_setup_component(
hass,
DOMAIN,
{
DOMAIN: {
"test_time": {"has_time": True, "has_date": False},
"test_date": {"has_time": False, "has_date": True},
"test_datetime": {"has_time": True, "has_date": True},
}
},
)
dt_obj = datetime.datetime(1970, 1, 1, 0, 0)
state_time = hass.states.get("input_datetime.test_time")
assert state_time.state == str(dt_obj.time())
assert state_time.attributes.get("timestamp") is not None
state_date = hass.states.get("input_datetime.test_date")
assert state_date.state == str(dt_obj.date())
assert state_date.attributes.get("timestamp") is not None
state_datetime = hass.states.get("input_datetime.test_datetime")
assert state_datetime.state == str(dt_obj)
assert state_datetime.attributes.get("timestamp") is not None
async def test_input_datetime_context(hass, hass_admin_user):
"""Test that input_datetime context works."""
assert await async_setup_component(
hass, "input_datetime", {"input_datetime": {"only_date": {"has_date": True}}}
)
state = hass.states.get("input_datetime.only_date")
assert state is not None
await hass.services.async_call(
"input_datetime",
"set_datetime",
{"entity_id": state.entity_id, "date": "2018-01-02"},
blocking=True,
context=Context(user_id=hass_admin_user.id),
)
state2 = hass.states.get("input_datetime.only_date")
assert state2 is not None
assert state.state != state2.state
assert state2.context.user_id == hass_admin_user.id
async def test_reload(hass, hass_admin_user, hass_read_only_user):
"""Test reload service."""
count_start = len(hass.states.async_entity_ids())
ent_reg = await entity_registry.async_get_registry(hass)
assert await async_setup_component(
hass,
DOMAIN,
{
DOMAIN: {
"dt1": {"has_time": False, "has_date": True, "initial": "2019-1-1"},
"dt3": {CONF_HAS_TIME: True, CONF_HAS_DATE: True},
}
},
)
assert count_start + 2 == len(hass.states.async_entity_ids())
state_1 = hass.states.get("input_datetime.dt1")
state_2 = hass.states.get("input_datetime.dt2")
state_3 = hass.states.get("input_datetime.dt3")
dt_obj = datetime.datetime(2019, 1, 1, 0, 0)
assert state_1 is not None
assert state_2 is None
assert state_3 is not None
assert str(dt_obj.date()) == state_1.state
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, "dt1") == f"{DOMAIN}.dt1"
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, "dt2") is None
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, "dt3") == f"{DOMAIN}.dt3"
with patch(
"homeassistant.config.load_yaml_config_file",
autospec=True,
return_value={
DOMAIN: {
"dt1": {"has_time": True, "has_date": False, "initial": "23:32"},
"dt2": {"has_time": True, "has_date": True},
}
},
):
with pytest.raises(Unauthorized):
await hass.services.async_call(
DOMAIN,
SERVICE_RELOAD,
blocking=True,
context=Context(user_id=hass_read_only_user.id),
)
await hass.services.async_call(
DOMAIN,
SERVICE_RELOAD,
blocking=True,
context=Context(user_id=hass_admin_user.id),
)
await hass.async_block_till_done()
assert count_start + 2 == len(hass.states.async_entity_ids())
state_1 = hass.states.get("input_datetime.dt1")
state_2 = hass.states.get("input_datetime.dt2")
state_3 = hass.states.get("input_datetime.dt3")
assert state_1 is not None
assert state_2 is not None
assert state_3 is None
assert str(DEFAULT_TIME) == state_1.state
assert str(datetime.datetime(1970, 1, 1, 0, 0)) == state_2.state
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, "dt1") == f"{DOMAIN}.dt1"
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, "dt2") == f"{DOMAIN}.dt2"
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, "dt3") is None
async def test_load_from_storage(hass, storage_setup):
"""Test set up from storage."""
assert await storage_setup()
state = hass.states.get(f"{DOMAIN}.datetime_from_storage")
assert state.state == INITIAL_DATETIME
assert state.attributes.get(ATTR_EDITABLE)
async def test_editable_state_attribute(hass, storage_setup):
"""Test editable attribute."""
assert await storage_setup(
config={
DOMAIN: {
"from_yaml": {
CONF_HAS_DATE: True,
CONF_HAS_TIME: True,
CONF_NAME: "yaml datetime",
CONF_INITIAL: "2001-01-02 12:34:56",
}
}
}
)
state = hass.states.get(f"{DOMAIN}.datetime_from_storage")
assert state.state == INITIAL_DATETIME
assert state.attributes.get(ATTR_EDITABLE)
state = hass.states.get(f"{DOMAIN}.from_yaml")
assert state.state == "2001-01-02 12:34:56"
assert not state.attributes[ATTR_EDITABLE]
async def test_ws_list(hass, hass_ws_client, storage_setup):
"""Test listing via WS."""
assert await storage_setup(config={DOMAIN: {"from_yaml": {CONF_HAS_DATE: True}}})
client = await hass_ws_client(hass)
await client.send_json({"id": 6, "type": f"{DOMAIN}/list"})
resp = await client.receive_json()
assert resp["success"]
storage_ent = "from_storage"
yaml_ent = "from_yaml"
result = {item["id"]: item for item in resp["result"]}
assert len(result) == 1
assert storage_ent in result
assert yaml_ent not in result
assert result[storage_ent][ATTR_NAME] == "datetime from storage"
async def test_ws_delete(hass, hass_ws_client, storage_setup):
"""Test WS delete cleans up entity registry."""
assert await storage_setup()
input_id = "from_storage"
input_entity_id = f"{DOMAIN}.datetime_from_storage"
ent_reg = await entity_registry.async_get_registry(hass)
state = hass.states.get(input_entity_id)
assert state is not None
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, input_id) == input_entity_id
client = await hass_ws_client(hass)
await client.send_json(
{"id": 6, "type": f"{DOMAIN}/delete", f"{DOMAIN}_id": f"{input_id}"}
)
resp = await client.receive_json()
assert resp["success"]
state = hass.states.get(input_entity_id)
assert state is None
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, input_id) is None
async def test_update(hass, hass_ws_client, storage_setup):
"""Test updating min/max updates the state."""
assert await storage_setup()
input_id = "from_storage"
input_entity_id = f"{DOMAIN}.datetime_from_storage"
ent_reg = await entity_registry.async_get_registry(hass)
state = hass.states.get(input_entity_id)
assert state.attributes[ATTR_FRIENDLY_NAME] == "datetime from storage"
assert state.state == INITIAL_DATETIME
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, input_id) == input_entity_id
client = await hass_ws_client(hass)
await client.send_json(
{
"id": 6,
"type": f"{DOMAIN}/update",
f"{DOMAIN}_id": f"{input_id}",
ATTR_NAME: "even newer name",
CONF_HAS_DATE: False,
}
)
resp = await client.receive_json()
assert resp["success"]
state = hass.states.get(input_entity_id)
assert state.state == INITIAL_TIME
assert state.attributes[ATTR_FRIENDLY_NAME] == "even newer name"
async def test_ws_create(hass, hass_ws_client, storage_setup):
"""Test create WS."""
assert await storage_setup(items=[])
input_id = "new_datetime"
input_entity_id = f"{DOMAIN}.{input_id}"
ent_reg = await entity_registry.async_get_registry(hass)
state = hass.states.get(input_entity_id)
assert state is None
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, input_id) is None
client = await hass_ws_client(hass)
await client.send_json(
{
"id": 6,
"type": f"{DOMAIN}/create",
CONF_NAME: "New DateTime",
CONF_INITIAL: "1991-01-02 01:02:03",
CONF_HAS_DATE: True,
CONF_HAS_TIME: True,
}
)
resp = await client.receive_json()
assert resp["success"]
state = hass.states.get(input_entity_id)
assert state.state == "1991-01-02 01:02:03"
assert state.attributes[ATTR_FRIENDLY_NAME] == "New DateTime"
assert state.attributes[ATTR_EDITABLE]
async def test_setup_no_config(hass, hass_admin_user):
"""Test component setup with no config."""
count_start = len(hass.states.async_entity_ids())
assert await async_setup_component(hass, DOMAIN, {})
with patch(
"homeassistant.config.load_yaml_config_file", autospec=True, return_value={}
):
await hass.services.async_call(
DOMAIN,
SERVICE_RELOAD,
blocking=True,
context=Context(user_id=hass_admin_user.id),
)
await hass.async_block_till_done()
assert count_start == len(hass.states.async_entity_ids())
|
from pysmartthings import ATTRIBUTES, CAPABILITIES, Attribute, Capability
from homeassistant.components.sensor import DEVICE_CLASSES, DOMAIN as SENSOR_DOMAIN
from homeassistant.components.smartthings import sensor
from homeassistant.components.smartthings.const import DOMAIN, SIGNAL_SMARTTHINGS_UPDATE
from homeassistant.const import (
ATTR_FRIENDLY_NAME,
ATTR_UNIT_OF_MEASUREMENT,
PERCENTAGE,
STATE_UNKNOWN,
)
from homeassistant.helpers.dispatcher import async_dispatcher_send
from .conftest import setup_platform
async def test_mapping_integrity():
"""Test ensures the map dicts have proper integrity."""
for capability, maps in sensor.CAPABILITY_TO_SENSORS.items():
assert capability in CAPABILITIES, capability
for sensor_map in maps:
assert sensor_map.attribute in ATTRIBUTES, sensor_map.attribute
if sensor_map.device_class:
assert (
sensor_map.device_class in DEVICE_CLASSES
), sensor_map.device_class
async def test_entity_state(hass, device_factory):
"""Tests the state attributes properly match the sensor types."""
device = device_factory("Sensor 1", [Capability.battery], {Attribute.battery: 100})
await setup_platform(hass, SENSOR_DOMAIN, devices=[device])
state = hass.states.get("sensor.sensor_1_battery")
assert state.state == "100"
assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE
assert state.attributes[ATTR_FRIENDLY_NAME] == f"{device.label} Battery"
async def test_entity_three_axis_state(hass, device_factory):
"""Tests the state attributes properly match the three axis types."""
device = device_factory(
"Three Axis", [Capability.three_axis], {Attribute.three_axis: [100, 75, 25]}
)
await setup_platform(hass, SENSOR_DOMAIN, devices=[device])
state = hass.states.get("sensor.three_axis_x_coordinate")
assert state.state == "100"
assert state.attributes[ATTR_FRIENDLY_NAME] == f"{device.label} X Coordinate"
state = hass.states.get("sensor.three_axis_y_coordinate")
assert state.state == "75"
assert state.attributes[ATTR_FRIENDLY_NAME] == f"{device.label} Y Coordinate"
state = hass.states.get("sensor.three_axis_z_coordinate")
assert state.state == "25"
assert state.attributes[ATTR_FRIENDLY_NAME] == f"{device.label} Z Coordinate"
async def test_entity_three_axis_invalid_state(hass, device_factory):
"""Tests the state attributes properly match the three axis types."""
device = device_factory(
"Three Axis", [Capability.three_axis], {Attribute.three_axis: []}
)
await setup_platform(hass, SENSOR_DOMAIN, devices=[device])
state = hass.states.get("sensor.three_axis_x_coordinate")
assert state.state == STATE_UNKNOWN
state = hass.states.get("sensor.three_axis_y_coordinate")
assert state.state == STATE_UNKNOWN
state = hass.states.get("sensor.three_axis_z_coordinate")
assert state.state == STATE_UNKNOWN
async def test_entity_and_device_attributes(hass, device_factory):
"""Test the attributes of the entity are correct."""
# Arrange
device = device_factory("Sensor 1", [Capability.battery], {Attribute.battery: 100})
entity_registry = await hass.helpers.entity_registry.async_get_registry()
device_registry = await hass.helpers.device_registry.async_get_registry()
# Act
await setup_platform(hass, SENSOR_DOMAIN, devices=[device])
# Assert
entry = entity_registry.async_get("sensor.sensor_1_battery")
assert entry
assert entry.unique_id == f"{device.device_id}.{Attribute.battery}"
entry = device_registry.async_get_device({(DOMAIN, device.device_id)}, [])
assert entry
assert entry.name == device.label
assert entry.model == device.device_type_name
assert entry.manufacturer == "Unavailable"
async def test_update_from_signal(hass, device_factory):
"""Test the binary_sensor updates when receiving a signal."""
# Arrange
device = device_factory("Sensor 1", [Capability.battery], {Attribute.battery: 100})
await setup_platform(hass, SENSOR_DOMAIN, devices=[device])
device.status.apply_attribute_update(
"main", Capability.battery, Attribute.battery, 75
)
# Act
async_dispatcher_send(hass, SIGNAL_SMARTTHINGS_UPDATE, [device.device_id])
# Assert
await hass.async_block_till_done()
state = hass.states.get("sensor.sensor_1_battery")
assert state is not None
assert state.state == "75"
async def test_unload_config_entry(hass, device_factory):
"""Test the binary_sensor is removed when the config entry is unloaded."""
# Arrange
device = device_factory("Sensor 1", [Capability.battery], {Attribute.battery: 100})
config_entry = await setup_platform(hass, SENSOR_DOMAIN, devices=[device])
# Act
await hass.config_entries.async_forward_entry_unload(config_entry, "sensor")
# Assert
assert not hass.states.get("sensor.sensor_1_battery")
|
from homeassistant.components import remote
from homeassistant.const import CONF_HOST, CONF_NAME
from . import ATTR_ATV, ATTR_POWER, DATA_APPLE_TV
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Apple TV remote platform."""
if not discovery_info:
return
name = discovery_info[CONF_NAME]
host = discovery_info[CONF_HOST]
atv = hass.data[DATA_APPLE_TV][host][ATTR_ATV]
power = hass.data[DATA_APPLE_TV][host][ATTR_POWER]
async_add_entities([AppleTVRemote(atv, power, name)])
class AppleTVRemote(remote.RemoteEntity):
"""Device that sends commands to an Apple TV."""
def __init__(self, atv, power, name):
"""Initialize device."""
self._atv = atv
self._name = name
self._power = power
self._power.listeners.append(self)
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def unique_id(self):
"""Return a unique ID."""
return self._atv.metadata.device_id
@property
def is_on(self):
"""Return true if device is on."""
return self._power.turned_on
@property
def should_poll(self):
"""No polling needed for Apple TV."""
return False
async def async_turn_on(self, **kwargs):
"""Turn the device on.
This method is a coroutine.
"""
self._power.set_power_on(True)
async def async_turn_off(self, **kwargs):
"""Turn the device off.
This method is a coroutine.
"""
self._power.set_power_on(False)
async def async_send_command(self, command, **kwargs):
"""Send a command to one device."""
for single_command in command:
if not hasattr(self._atv.remote_control, single_command):
continue
await getattr(self._atv.remote_control, single_command)()
|
import urllib2
import base64
from xml.dom.minidom import parseString
import diamond.collector
from diamond.collector import str_to_bool
class MonitCollector(diamond.collector.Collector):
def get_default_config_help(self):
config_help = super(MonitCollector, self).get_default_config_help()
config_help.update({
'send_totals': 'Send cpu and memory totals',
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(MonitCollector, self).get_default_config()
config.update({
'host': '127.0.0.1',
'port': 2812,
'user': 'monit',
'passwd': 'monit',
'path': 'monit',
'byte_unit': ['byte'],
'send_totals': False,
})
return config
def collect(self):
url = 'http://%s:%i/_status?format=xml' % (self.config['host'],
int(self.config['port']))
try:
request = urllib2.Request(url)
#
# shouldn't need to check this
base64string = base64.encodestring('%s:%s' % (
self.config['user'], self.config['passwd'])).replace('\n', '')
request.add_header("Authorization", "Basic %s" % base64string)
response = urllib2.urlopen(request)
except urllib2.HTTPError as err:
self.log.error("%s: %s", err, url)
return
metrics = {}
try:
dom = parseString("".join(response.readlines()))
        except Exception:
self.log.error("Got an empty response from the monit server")
return
for svc in dom.getElementsByTagName('service'):
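            # Service type 3 in monit's status XML is a process entry.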
if int(svc.getAttribute('type')) == 3:
name = svc.getElementsByTagName('name')[0].firstChild.data
status = svc.getElementsByTagName('status')[0].firstChild.data
monitor = svc.getElementsByTagName(
'monitor')[0].firstChild.data
if status == '0' and monitor == '1':
try:
uptime = svc.getElementsByTagName(
'uptime')[0].firstChild.data
metrics["%s.uptime" % name] = uptime
cpu = svc.getElementsByTagName(
'cpu')[0].getElementsByTagName(
'percent')[0].firstChild.data
metrics["%s.cpu.percent" % name] = cpu
if str_to_bool(self.config['send_totals']):
cpu_total = svc.getElementsByTagName(
'cpu')[0].getElementsByTagName(
'percenttotal')[0].firstChild.data
metrics["%s.cpu.percent_total" % name] = cpu_total
mem = int(svc.getElementsByTagName(
'memory')[0].getElementsByTagName(
'kilobyte')[0].firstChild.data)
for unit in self.config['byte_unit']:
metrics["%s.memory.%s_usage" % (name, unit)] = (
diamond.convertor.binary.convert(
value=mem,
oldUnit='kilobyte',
newUnit=unit))
metrics["%s.uptime" % name] = uptime
if str_to_bool(self.config['send_totals']):
mem_total = int(svc.getElementsByTagName(
'memory')[0].getElementsByTagName(
'kilobytetotal')[0].firstChild.data)
for unit in self.config['byte_unit']:
metrics["%s.memory_total.%s_usage" % (
name, unit)] = (
diamond.convertor.binary.convert(
value=mem_total,
oldUnit='kilobyte',
newUnit=unit))
                    except Exception:
pass
for key in metrics:
self.publish(key, metrics[key])
|
from django.contrib.auth.decorators import login_required
from django.core.exceptions import ObjectDoesNotExist, PermissionDenied, ValidationError
from django.shortcuts import redirect
from django.utils.decorators import method_decorator
from django.utils.translation import gettext as _
from django.views.generic import DetailView, ListView
from weblate.fonts.forms import FontForm, FontGroupForm, FontOverrideForm
from weblate.fonts.models import Font, FontGroup
from weblate.utils import messages
from weblate.utils.views import ProjectViewMixin
@method_decorator(login_required, name="dispatch")
class FontListView(ProjectViewMixin, ListView):
model = Font
_font_form = None
_group_form = None
def get_queryset(self):
return self.project.font_set.order_by("family", "style")
def get_context_data(self, **kwargs):
result = super().get_context_data(**kwargs)
result["object"] = self.project
result["font_list"] = result["object_list"]
result["group_list"] = self.project.fontgroup_set.order()
result["font_form"] = self._font_form or FontForm()
result["group_form"] = self._group_form or FontGroupForm(
auto_id="id_group_%s", project=self.project
)
result["can_edit"] = self.request.user.has_perm("project.edit", self.project)
return result
def post(self, request, **kwargs):
if not request.user.has_perm("project.edit", self.project):
raise PermissionDenied()
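        # A file upload means a new font is being added; otherwise the
        # submission creates a new font group.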
if request.FILES:
form = self._font_form = FontForm(request.POST, request.FILES)
else:
form = self._group_form = FontGroupForm(
request.POST, auto_id="id_group_%s", project=self.project
)
if form.is_valid():
instance = form.save(commit=False)
instance.project = self.project
instance.user = self.request.user
try:
instance.validate_unique()
instance.save()
return redirect(instance)
except ValidationError:
messages.error(request, _("Entry by the same name already exists."))
else:
messages.error(request, _("Creation failed, please fix the errors below."))
return self.get(request, **kwargs)
@method_decorator(login_required, name="dispatch")
class FontDetailView(ProjectViewMixin, DetailView):
model = Font
def get_queryset(self):
return self.project.font_set.all()
def get_context_data(self, **kwargs):
result = super().get_context_data(**kwargs)
result["can_edit"] = self.request.user.has_perm("project.edit", self.project)
return result
def post(self, request, **kwargs):
self.object = self.get_object()
if not request.user.has_perm("project.edit", self.project):
raise PermissionDenied()
self.object.delete()
messages.error(request, _("Font deleted."))
return redirect("fonts", project=self.project.slug)
@method_decorator(login_required, name="dispatch")
class FontGroupDetailView(ProjectViewMixin, DetailView):
model = FontGroup
_form = None
_override_form = None
def get_queryset(self):
return self.project.fontgroup_set.all()
def get_context_data(self, **kwargs):
result = super().get_context_data(**kwargs)
result["form"] = self._form or FontGroupForm(
instance=self.object, project=self.project
)
result["override_form"] = self._override_form or FontOverrideForm()
result["can_edit"] = self.request.user.has_perm("project.edit", self.project)
return result
def post(self, request, **kwargs):
self.object = self.get_object()
if not request.user.has_perm("project.edit", self.project):
raise PermissionDenied()
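        # One endpoint handles several forms; dispatch on the POST keys:
        # "name" edits the group, "language" adds an override, "override"
        # removes one, and anything else deletes the whole group.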
if "name" in request.POST:
form = self._form = FontGroupForm(
request.POST, instance=self.object, project=self.project
)
if form.is_valid():
instance = form.save(commit=False)
try:
instance.validate_unique()
instance.save()
return redirect(self.object)
except ValidationError:
messages.error(request, _("Entry by the same name already exists."))
return self.get(request, **kwargs)
if "language" in request.POST:
            form = self._override_form = FontOverrideForm(request.POST)
if form.is_valid():
instance = form.save(commit=False)
instance.group = self.object
try:
instance.validate_unique()
instance.save()
return redirect(self.object)
except ValidationError:
messages.error(request, _("Entry by the same name already exists."))
return self.get(request, **kwargs)
if "override" in request.POST:
try:
self.object.fontoverride_set.filter(
pk=int(request.POST["override"])
).delete()
return redirect(self.object)
except (ValueError, ObjectDoesNotExist):
messages.error(request, _("No override found."))
self.object.delete()
messages.error(request, _("Font group deleted."))
return redirect("fonts", project=self.project.slug)
|
import asyncio
import logging
import socket
from aiohttp import web
from homeassistant import core
from homeassistant.components.http import HomeAssistantView
from .const import HUE_SERIAL_NUMBER, HUE_UUID
_LOGGER = logging.getLogger(__name__)
BROADCAST_PORT = 1900
BROADCAST_ADDR = "239.255.255.250"
class DescriptionXmlView(HomeAssistantView):
"""Handles requests for the description.xml file."""
url = "/description.xml"
name = "description:xml"
requires_auth = False
def __init__(self, config):
"""Initialize the instance of the view."""
self.config = config
@core.callback
def get(self, request):
"""Handle a GET request."""
resp_text = f"""<?xml version="1.0" encoding="UTF-8" ?>
<root xmlns="urn:schemas-upnp-org:device-1-0">
<specVersion>
<major>1</major>
<minor>0</minor>
</specVersion>
<URLBase>http://{self.config.advertise_ip}:{self.config.advertise_port}/</URLBase>
<device>
<deviceType>urn:schemas-upnp-org:device:Basic:1</deviceType>
<friendlyName>Home Assistant Bridge ({self.config.advertise_ip})</friendlyName>
<manufacturer>Royal Philips Electronics</manufacturer>
<manufacturerURL>http://www.philips.com</manufacturerURL>
<modelDescription>Philips hue Personal Wireless Lighting</modelDescription>
<modelName>Philips hue bridge 2015</modelName>
<modelNumber>BSB002</modelNumber>
<modelURL>http://www.meethue.com</modelURL>
<serialNumber>{HUE_SERIAL_NUMBER}</serialNumber>
<UDN>uuid:{HUE_UUID}</UDN>
</device>
</root>
"""
return web.Response(text=resp_text, content_type="text/xml")
@core.callback
def create_upnp_datagram_endpoint(
host_ip_addr,
upnp_bind_multicast,
advertise_ip,
advertise_port,
):
"""Create the UPNP socket and protocol."""
# Listen for UDP port 1900 packets sent to SSDP multicast address
ssdp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
ssdp_socket.setblocking(False)
# Required for receiving multicast
ssdp_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
ssdp_socket.setsockopt(
socket.SOL_IP, socket.IP_MULTICAST_IF, socket.inet_aton(host_ip_addr)
)
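    # Join the SSDP multicast group; the membership request is the packed
    # group address followed by the local interface address (struct ip_mreq).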
ssdp_socket.setsockopt(
socket.SOL_IP,
socket.IP_ADD_MEMBERSHIP,
socket.inet_aton(BROADCAST_ADDR) + socket.inet_aton(host_ip_addr),
)
ssdp_socket.bind(("" if upnp_bind_multicast else host_ip_addr, BROADCAST_PORT))
loop = asyncio.get_event_loop()
return loop.create_datagram_endpoint(
lambda: UPNPResponderProtocol(loop, ssdp_socket, advertise_ip, advertise_port),
sock=ssdp_socket,
)
class UPNPResponderProtocol:
"""Handle responding to UPNP/SSDP discovery requests."""
def __init__(self, loop, ssdp_socket, advertise_ip, advertise_port):
"""Initialize the class."""
self.transport = None
self._loop = loop
self._sock = ssdp_socket
self.advertise_ip = advertise_ip
self.advertise_port = advertise_port
self._upnp_root_response = self._prepare_response(
"upnp:rootdevice", f"uuid:{HUE_UUID}::upnp:rootdevice"
)
self._upnp_device_response = self._prepare_response(
"urn:schemas-upnp-org:device:basic:1", f"uuid:{HUE_UUID}"
)
def connection_made(self, transport):
"""Set the transport."""
self.transport = transport
def connection_lost(self, exc):
"""Handle connection lost."""
def datagram_received(self, data, addr):
"""Respond to msearch packets."""
decoded_data = data.decode("utf-8", errors="ignore")
if "M-SEARCH" not in decoded_data:
return
_LOGGER.debug("UPNP Responder M-SEARCH method received: %s", data)
# SSDP M-SEARCH method received, respond to it with our info
response = self._handle_request(decoded_data)
_LOGGER.debug("UPNP Responder responding with: %s", response)
self.transport.sendto(response, addr)
def error_received(self, exc): # pylint: disable=no-self-use
"""Log UPNP errors."""
_LOGGER.error("UPNP Error received: %s", exc)
def close(self):
"""Stop the server."""
_LOGGER.info("UPNP responder shutting down")
if self.transport:
self.transport.close()
self._loop.remove_writer(self._sock.fileno())
self._loop.remove_reader(self._sock.fileno())
self._sock.close()
def _handle_request(self, decoded_data):
if "upnp:rootdevice" in decoded_data:
return self._upnp_root_response
return self._upnp_device_response
def _prepare_response(self, search_target, unique_service_name):
# Note that the double newline at the end of
# this string is required per the SSDP spec
response = f"""HTTP/1.1 200 OK
CACHE-CONTROL: max-age=60
EXT:
LOCATION: http://{self.advertise_ip}:{self.advertise_port}/description.xml
SERVER: FreeRTOS/6.0.5, UPnP/1.0, IpBridge/1.16.0
hue-bridgeid: {HUE_SERIAL_NUMBER}
ST: {search_target}
USN: {unique_service_name}
"""
return response.replace("\n", "\r\n").encode("utf-8")
|
import logging
from docker_registry.core import compat
json = compat.json
class NullHandler(logging.Handler):
"""A logging handler that discards all logging records."""
def emit(self, record):
pass
# Clients can add handlers if they are interested.
log = logging.getLogger('qr')
log.addHandler(NullHandler())
class worker(object):
def __init__(self, q, *args, **kwargs):
self.q = q
self.err = kwargs.get('err', None)
self.args = args
self.kwargs = kwargs
def __call__(self, f):
def wrapped():
while True:
# Blocking pop
next = self.q.pop(block=True)
if not next:
continue
try:
# Try to execute the user's callback.
f(next, *self.args, **self.kwargs)
except Exception as e:
try:
# Failing that, let's call the user's
# err-back, which we should keep from
# ever throwing an exception
self.err(e, *self.args, **self.kwargs)
except Exception:
pass
return wrapped
class BaseQueue(object):
"""Base functionality common to queues."""
def __init__(self, r_conn, key, **kwargs):
self.serializer = json
self.redis = r_conn
self.key = key
def __len__(self):
"""Return the length of the queue."""
return self.redis.llen(self.key)
def __getitem__(self, val):
"""Get a slice or a particular index."""
try:
slice = self.redis.lrange(self.key, val.start, val.stop - 1)
return [self._unpack(i) for i in slice]
except AttributeError:
return self._unpack(self.redis.lindex(self.key, val))
except Exception as e:
log.error('Get item failed ** %s' % repr(e))
return None
def _pack(self, val):
"""Prepares a message to go into Redis."""
return self.serializer.dumps(val, 1)
def _unpack(self, val):
"""Unpacks a message stored in Redis."""
try:
return self.serializer.loads(val)
except TypeError:
return None
def dump(self, fobj):
"""Destructively dump the contents of the queue into fp."""
next = self.redis.rpop(self.key)
while next:
fobj.write(next)
next = self.redis.rpop(self.key)
def load(self, fobj):
"""Load the contents of the provided fobj into the queue."""
try:
while True:
val = self._pack(self.serializer.load(fobj))
self.redis.lpush(self.key, val)
except Exception:
return
def dumpfname(self, fname, truncate=False):
"""Destructively dump the contents of the queue into fname."""
if truncate:
with file(fname, 'w+') as f:
self.dump(f)
else:
with file(fname, 'a+') as f:
self.dump(f)
def loadfname(self, fname):
"""Load the contents of the contents of fname into the queue."""
with file(fname) as f:
self.load(f)
def extend(self, vals):
"""Extends the elements in the queue."""
with self.redis.pipeline(transaction=False) as pipe:
for val in vals:
pipe.lpush(self.key, self._pack(val))
pipe.execute()
def peek(self):
"""Look at the next item in the queue."""
return self[-1]
def elements(self):
"""Return all elements as a Python list."""
return [self._unpack(o) for o in self.redis.lrange(self.key, 0, -1)]
def elements_as_json(self):
"""Return all elements as JSON object."""
        return json.dumps(self.elements())
def clear(self):
"""Removes all the elements in the queue."""
self.redis.delete(self.key)
class CappedCollection(BaseQueue):
"""a bounded queue
Implements a capped collection (the collection never
gets larger than the specified size).
"""
def __init__(self, r_conn, key, size, **kwargs):
BaseQueue.__init__(self, r_conn, key, **kwargs)
self.size = size
def push(self, element):
size = self.size
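        # Push and trim in one pipeline so the list never grows beyond
        # `size` entries; ltrim keeps indices 0 through size - 1.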
with self.redis.pipeline() as pipe:
# ltrim is zero-indexed
val = self._pack(element)
pipe = pipe.lpush(self.key, val).ltrim(self.key, 0, size - 1)
pipe.execute()
def extend(self, vals):
"""Extends the elements in the queue."""
with self.redis.pipeline() as pipe:
for val in vals:
pipe.lpush(self.key, self._pack(val))
pipe.ltrim(self.key, 0, self.size - 1)
pipe.execute()
def pop(self, block=False):
if not block:
popped = self.redis.rpop(self.key)
else:
queue, popped = self.redis.brpop(self.key)
log.debug('Popped ** %s ** from key ** %s **' % (popped, self.key))
return self._unpack(popped)
|
from asyncio import run_coroutine_threadsafe
import datetime as dt
from datetime import timedelta
import logging
from typing import Any, Callable, Dict, List, Optional
from aiohttp import ClientError
from spotipy import Spotify, SpotifyException
from yarl import URL
from homeassistant.components.media_player import BrowseMedia, MediaPlayerEntity
from homeassistant.components.media_player.const import (
MEDIA_CLASS_ALBUM,
MEDIA_CLASS_ARTIST,
MEDIA_CLASS_DIRECTORY,
MEDIA_CLASS_EPISODE,
MEDIA_CLASS_GENRE,
MEDIA_CLASS_PLAYLIST,
MEDIA_CLASS_PODCAST,
MEDIA_CLASS_TRACK,
MEDIA_TYPE_ALBUM,
MEDIA_TYPE_ARTIST,
MEDIA_TYPE_EPISODE,
MEDIA_TYPE_MUSIC,
MEDIA_TYPE_PLAYLIST,
MEDIA_TYPE_TRACK,
SUPPORT_BROWSE_MEDIA,
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PLAY_MEDIA,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SEEK,
SUPPORT_SELECT_SOURCE,
SUPPORT_SHUFFLE_SET,
SUPPORT_VOLUME_SET,
)
from homeassistant.components.media_player.errors import BrowseError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_ID,
CONF_NAME,
STATE_IDLE,
STATE_PAUSED,
STATE_PLAYING,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.config_entry_oauth2_flow import OAuth2Session
from homeassistant.helpers.entity import Entity
from homeassistant.util.dt import utc_from_timestamp
from .const import (
DATA_SPOTIFY_CLIENT,
DATA_SPOTIFY_ME,
DATA_SPOTIFY_SESSION,
DOMAIN,
SPOTIFY_SCOPES,
)
_LOGGER = logging.getLogger(__name__)
ICON = "mdi:spotify"
SCAN_INTERVAL = timedelta(seconds=30)
SUPPORT_SPOTIFY = (
SUPPORT_BROWSE_MEDIA
| SUPPORT_NEXT_TRACK
| SUPPORT_PAUSE
| SUPPORT_PLAY
| SUPPORT_PLAY_MEDIA
| SUPPORT_PREVIOUS_TRACK
| SUPPORT_SEEK
| SUPPORT_SELECT_SOURCE
| SUPPORT_SHUFFLE_SET
| SUPPORT_VOLUME_SET
)
BROWSE_LIMIT = 48
MEDIA_TYPE_SHOW = "show"
PLAYABLE_MEDIA_TYPES = [
MEDIA_TYPE_PLAYLIST,
MEDIA_TYPE_ALBUM,
MEDIA_TYPE_ARTIST,
MEDIA_TYPE_EPISODE,
MEDIA_TYPE_SHOW,
MEDIA_TYPE_TRACK,
]
LIBRARY_MAP = {
"current_user_playlists": "Playlists",
"current_user_followed_artists": "Artists",
"current_user_saved_albums": "Albums",
"current_user_saved_tracks": "Tracks",
"current_user_saved_shows": "Podcasts",
"current_user_recently_played": "Recently played",
"current_user_top_artists": "Top Artists",
"current_user_top_tracks": "Top Tracks",
"categories": "Categories",
"featured_playlists": "Featured Playlists",
"new_releases": "New Releases",
}
CONTENT_TYPE_MEDIA_CLASS = {
"current_user_playlists": {
"parent": MEDIA_CLASS_DIRECTORY,
"children": MEDIA_CLASS_PLAYLIST,
},
"current_user_followed_artists": {
"parent": MEDIA_CLASS_DIRECTORY,
"children": MEDIA_CLASS_ARTIST,
},
"current_user_saved_albums": {
"parent": MEDIA_CLASS_DIRECTORY,
"children": MEDIA_CLASS_ALBUM,
},
"current_user_saved_tracks": {
"parent": MEDIA_CLASS_DIRECTORY,
"children": MEDIA_CLASS_TRACK,
},
"current_user_saved_shows": {
"parent": MEDIA_CLASS_DIRECTORY,
"children": MEDIA_CLASS_PODCAST,
},
"current_user_recently_played": {
"parent": MEDIA_CLASS_DIRECTORY,
"children": MEDIA_CLASS_TRACK,
},
"current_user_top_artists": {
"parent": MEDIA_CLASS_DIRECTORY,
"children": MEDIA_CLASS_ARTIST,
},
"current_user_top_tracks": {
"parent": MEDIA_CLASS_DIRECTORY,
"children": MEDIA_CLASS_TRACK,
},
"featured_playlists": {
"parent": MEDIA_CLASS_DIRECTORY,
"children": MEDIA_CLASS_PLAYLIST,
},
"categories": {"parent": MEDIA_CLASS_DIRECTORY, "children": MEDIA_CLASS_GENRE},
"category_playlists": {
"parent": MEDIA_CLASS_DIRECTORY,
"children": MEDIA_CLASS_PLAYLIST,
},
"new_releases": {"parent": MEDIA_CLASS_DIRECTORY, "children": MEDIA_CLASS_ALBUM},
MEDIA_TYPE_PLAYLIST: {
"parent": MEDIA_CLASS_PLAYLIST,
"children": MEDIA_CLASS_TRACK,
},
MEDIA_TYPE_ALBUM: {"parent": MEDIA_CLASS_ALBUM, "children": MEDIA_CLASS_TRACK},
MEDIA_TYPE_ARTIST: {"parent": MEDIA_CLASS_ARTIST, "children": MEDIA_CLASS_ALBUM},
MEDIA_TYPE_EPISODE: {"parent": MEDIA_CLASS_EPISODE, "children": None},
MEDIA_TYPE_SHOW: {"parent": MEDIA_CLASS_PODCAST, "children": MEDIA_CLASS_EPISODE},
MEDIA_TYPE_TRACK: {"parent": MEDIA_CLASS_TRACK, "children": None},
}
class MissingMediaInformation(BrowseError):
"""Missing media required information."""
class UnknownMediaType(BrowseError):
"""Unknown media type."""
async def async_setup_entry(
hass: HomeAssistant,
entry: ConfigEntry,
async_add_entities: Callable[[List[Entity], bool], None],
) -> None:
"""Set up Spotify based on a config entry."""
spotify = SpotifyMediaPlayer(
hass.data[DOMAIN][entry.entry_id][DATA_SPOTIFY_SESSION],
hass.data[DOMAIN][entry.entry_id][DATA_SPOTIFY_CLIENT],
hass.data[DOMAIN][entry.entry_id][DATA_SPOTIFY_ME],
entry.data[CONF_ID],
entry.data[CONF_NAME],
)
async_add_entities([spotify], True)
def spotify_exception_handler(func):
"""Decorate Spotify calls to handle Spotify exception.
A decorator that wraps the passed in function, catches Spotify errors,
aiohttp exceptions and handles the availability of the media player.
"""
def wrapper(self, *args, **kwargs):
try:
result = func(self, *args, **kwargs)
self.player_available = True
return result
except (SpotifyException, ClientError):
self.player_available = False
return wrapper
class SpotifyMediaPlayer(MediaPlayerEntity):
"""Representation of a Spotify controller."""
def __init__(
self,
session: OAuth2Session,
spotify: Spotify,
me: dict,
user_id: str,
name: str,
):
"""Initialize."""
self._id = user_id
self._me = me
self._name = f"Spotify {name}"
self._session = session
self._spotify = spotify
self._scope_ok = set(session.token["scope"].split(" ")) == set(SPOTIFY_SCOPES)
self._currently_playing: Optional[dict] = {}
self._devices: Optional[List[dict]] = []
self._playlist: Optional[dict] = None
self.player_available = False
@property
def name(self) -> str:
"""Return the name."""
return self._name
@property
def icon(self) -> str:
"""Return the icon."""
return ICON
@property
def available(self) -> bool:
"""Return True if entity is available."""
return self.player_available
@property
def unique_id(self) -> str:
"""Return the unique ID."""
return self._id
@property
def device_info(self) -> Dict[str, Any]:
"""Return device information about this entity."""
if self._me is not None:
model = self._me["product"]
return {
"identifiers": {(DOMAIN, self._id)},
"manufacturer": "Spotify AB",
"model": f"Spotify {model}".rstrip(),
"name": self._name,
}
@property
def state(self) -> Optional[str]:
"""Return the playback state."""
if not self._currently_playing:
return STATE_IDLE
if self._currently_playing["is_playing"]:
return STATE_PLAYING
return STATE_PAUSED
@property
def volume_level(self) -> Optional[float]:
"""Return the device volume."""
return self._currently_playing.get("device", {}).get("volume_percent", 0) / 100
@property
def media_content_id(self) -> Optional[str]:
"""Return the media URL."""
item = self._currently_playing.get("item") or {}
return item.get("uri")
@property
def media_content_type(self) -> Optional[str]:
"""Return the media type."""
return MEDIA_TYPE_MUSIC
@property
def media_duration(self) -> Optional[int]:
"""Duration of current playing media in seconds."""
if self._currently_playing.get("item") is None:
return None
return self._currently_playing["item"]["duration_ms"] / 1000
@property
    def media_position(self) -> Optional[float]:
"""Position of current playing media in seconds."""
if not self._currently_playing:
return None
return self._currently_playing["progress_ms"] / 1000
@property
def media_position_updated_at(self) -> Optional[dt.datetime]:
"""When was the position of the current playing media valid."""
if not self._currently_playing:
return None
return utc_from_timestamp(self._currently_playing["timestamp"] / 1000)
@property
def media_image_url(self) -> Optional[str]:
"""Return the media image URL."""
if (
self._currently_playing.get("item") is None
or not self._currently_playing["item"]["album"]["images"]
):
return None
return fetch_image_url(self._currently_playing["item"]["album"])
@property
def media_image_remotely_accessible(self) -> bool:
"""If the image url is remotely accessible."""
return False
@property
def media_title(self) -> Optional[str]:
"""Return the media title."""
item = self._currently_playing.get("item") or {}
return item.get("name")
@property
def media_artist(self) -> Optional[str]:
"""Return the media artist."""
if self._currently_playing.get("item") is None:
return None
return ", ".join(
[artist["name"] for artist in self._currently_playing["item"]["artists"]]
)
@property
def media_album_name(self) -> Optional[str]:
"""Return the media album."""
if self._currently_playing.get("item") is None:
return None
return self._currently_playing["item"]["album"]["name"]
@property
def media_track(self) -> Optional[int]:
"""Track number of current playing media, music track only."""
item = self._currently_playing.get("item") or {}
return item.get("track_number")
@property
def media_playlist(self):
"""Title of Playlist currently playing."""
if self._playlist is None:
return None
return self._playlist["name"]
@property
def source(self) -> Optional[str]:
"""Return the current playback device."""
return self._currently_playing.get("device", {}).get("name")
@property
def source_list(self) -> Optional[List[str]]:
"""Return a list of source devices."""
if not self._devices:
return None
return [device["name"] for device in self._devices]
@property
def shuffle(self) -> bool:
"""Shuffling state."""
return bool(self._currently_playing.get("shuffle_state"))
@property
def supported_features(self) -> int:
"""Return the media player features that are supported."""
if self._me["product"] != "premium":
return 0
return SUPPORT_SPOTIFY
@spotify_exception_handler
    def set_volume_level(self, volume: float) -> None:
"""Set the volume level."""
self._spotify.volume(int(volume * 100))
@spotify_exception_handler
def media_play(self) -> None:
"""Start or resume playback."""
self._spotify.start_playback()
@spotify_exception_handler
def media_pause(self) -> None:
"""Pause playback."""
self._spotify.pause_playback()
@spotify_exception_handler
def media_previous_track(self) -> None:
"""Skip to previous track."""
self._spotify.previous_track()
@spotify_exception_handler
def media_next_track(self) -> None:
"""Skip to next track."""
self._spotify.next_track()
@spotify_exception_handler
def media_seek(self, position):
"""Send seek command."""
self._spotify.seek_track(int(position * 1000))
@spotify_exception_handler
def play_media(self, media_type: str, media_id: str, **kwargs) -> None:
"""Play media."""
kwargs = {}
# Spotify can't handle URI's with query strings or anchors
# Yet, they do generate those types of URI in their official clients.
media_id = str(URL(media_id).with_query(None).with_fragment(None))
if media_type in (MEDIA_TYPE_TRACK, MEDIA_TYPE_EPISODE, MEDIA_TYPE_MUSIC):
kwargs["uris"] = [media_id]
elif media_type in PLAYABLE_MEDIA_TYPES:
kwargs["context_uri"] = media_id
else:
_LOGGER.error("Media type %s is not supported", media_type)
return
if not self._currently_playing.get("device") and self._devices:
kwargs["device_id"] = self._devices[0].get("id")
self._spotify.start_playback(**kwargs)
@spotify_exception_handler
def select_source(self, source: str) -> None:
"""Select playback device."""
for device in self._devices:
if device["name"] == source:
self._spotify.transfer_playback(
device["id"], self.state == STATE_PLAYING
)
return
@spotify_exception_handler
def set_shuffle(self, shuffle: bool) -> None:
"""Enable/Disable shuffle mode."""
self._spotify.shuffle(shuffle)
@spotify_exception_handler
def update(self) -> None:
"""Update state and attributes."""
if not self.enabled:
return
if not self._session.valid_token or self._spotify is None:
run_coroutine_threadsafe(
self._session.async_ensure_token_valid(), self.hass.loop
).result()
self._spotify = Spotify(auth=self._session.token["access_token"])
current = self._spotify.current_playback()
self._currently_playing = current or {}
self._playlist = None
context = self._currently_playing.get("context")
if context is not None and context["type"] == MEDIA_TYPE_PLAYLIST:
self._playlist = self._spotify.playlist(current["context"]["uri"])
devices = self._spotify.devices() or {}
self._devices = devices.get("devices", [])
async def async_browse_media(self, media_content_type=None, media_content_id=None):
"""Implement the websocket media browsing helper."""
if not self._scope_ok:
raise NotImplementedError
if media_content_type in [None, "library"]:
return await self.hass.async_add_executor_job(library_payload)
payload = {
"media_content_type": media_content_type,
"media_content_id": media_content_id,
}
response = await self.hass.async_add_executor_job(
build_item_response, self._spotify, self._me, payload
)
if response is None:
raise BrowseError(
f"Media not found: {media_content_type} / {media_content_id}"
)
return response
def build_item_response(spotify, user, payload):
"""Create response payload for the provided media query."""
media_content_type = payload["media_content_type"]
media_content_id = payload["media_content_id"]
title = None
image = None
if media_content_type == "current_user_playlists":
media = spotify.current_user_playlists(limit=BROWSE_LIMIT)
items = media.get("items", [])
elif media_content_type == "current_user_followed_artists":
media = spotify.current_user_followed_artists(limit=BROWSE_LIMIT)
items = media.get("artists", {}).get("items", [])
elif media_content_type == "current_user_saved_albums":
media = spotify.current_user_saved_albums(limit=BROWSE_LIMIT)
items = [item["album"] for item in media.get("items", [])]
elif media_content_type == "current_user_saved_tracks":
media = spotify.current_user_saved_tracks(limit=BROWSE_LIMIT)
items = [item["track"] for item in media.get("items", [])]
elif media_content_type == "current_user_saved_shows":
media = spotify.current_user_saved_shows(limit=BROWSE_LIMIT)
items = [item["show"] for item in media.get("items", [])]
elif media_content_type == "current_user_recently_played":
media = spotify.current_user_recently_played(limit=BROWSE_LIMIT)
items = [item["track"] for item in media.get("items", [])]
elif media_content_type == "current_user_top_artists":
media = spotify.current_user_top_artists(limit=BROWSE_LIMIT)
items = media.get("items", [])
elif media_content_type == "current_user_top_tracks":
media = spotify.current_user_top_tracks(limit=BROWSE_LIMIT)
items = media.get("items", [])
elif media_content_type == "featured_playlists":
media = spotify.featured_playlists(country=user["country"], limit=BROWSE_LIMIT)
items = media.get("playlists", {}).get("items", [])
elif media_content_type == "categories":
media = spotify.categories(country=user["country"], limit=BROWSE_LIMIT)
items = media.get("categories", {}).get("items", [])
elif media_content_type == "category_playlists":
media = spotify.category_playlists(
category_id=media_content_id,
country=user["country"],
limit=BROWSE_LIMIT,
)
category = spotify.category(media_content_id, country=user["country"])
title = category.get("name")
image = fetch_image_url(category, key="icons")
items = media.get("playlists", {}).get("items", [])
elif media_content_type == "new_releases":
media = spotify.new_releases(country=user["country"], limit=BROWSE_LIMIT)
items = media.get("albums", {}).get("items", [])
elif media_content_type == MEDIA_TYPE_PLAYLIST:
media = spotify.playlist(media_content_id)
items = [item["track"] for item in media.get("tracks", {}).get("items", [])]
elif media_content_type == MEDIA_TYPE_ALBUM:
media = spotify.album(media_content_id)
items = media.get("tracks", {}).get("items", [])
elif media_content_type == MEDIA_TYPE_ARTIST:
media = spotify.artist_albums(media_content_id, limit=BROWSE_LIMIT)
artist = spotify.artist(media_content_id)
title = artist.get("name")
image = fetch_image_url(artist)
items = media.get("items", [])
elif media_content_type == MEDIA_TYPE_SHOW:
media = spotify.show_episodes(media_content_id, limit=BROWSE_LIMIT)
show = spotify.show(media_content_id)
title = show.get("name")
image = fetch_image_url(show)
items = media.get("items", [])
else:
media = None
items = []
if media is None:
return None
try:
media_class = CONTENT_TYPE_MEDIA_CLASS[media_content_type]
except KeyError:
_LOGGER.debug("Unknown media type received: %s", media_content_type)
return None
if media_content_type == "categories":
media_item = BrowseMedia(
title=LIBRARY_MAP.get(media_content_id),
media_class=media_class["parent"],
children_media_class=media_class["children"],
media_content_id=media_content_id,
media_content_type=media_content_type,
can_play=False,
can_expand=True,
children=[],
)
for item in items:
try:
item_id = item["id"]
except KeyError:
_LOGGER.debug("Missing id for media item: %s", item)
continue
media_item.children.append(
BrowseMedia(
title=item.get("name"),
media_class=MEDIA_CLASS_PLAYLIST,
children_media_class=MEDIA_CLASS_TRACK,
media_content_id=item_id,
media_content_type="category_playlists",
thumbnail=fetch_image_url(item, key="icons"),
can_play=False,
can_expand=True,
)
)
return media_item
if title is None:
if "name" in media:
title = media.get("name")
else:
title = LIBRARY_MAP.get(payload["media_content_id"])
params = {
"title": title,
"media_class": media_class["parent"],
"children_media_class": media_class["children"],
"media_content_id": media_content_id,
"media_content_type": media_content_type,
"can_play": media_content_type in PLAYABLE_MEDIA_TYPES,
"children": [],
"can_expand": True,
}
for item in items:
try:
params["children"].append(item_payload(item))
except (MissingMediaInformation, UnknownMediaType):
continue
if "images" in media:
params["thumbnail"] = fetch_image_url(media)
elif image:
params["thumbnail"] = image
return BrowseMedia(**params)
def item_payload(item):
"""
Create response payload for a single media item.
Used by async_browse_media.
"""
try:
media_type = item["type"]
media_id = item["uri"]
except KeyError as err:
_LOGGER.debug("Missing type or uri for media item: %s", item)
raise MissingMediaInformation from err
try:
media_class = CONTENT_TYPE_MEDIA_CLASS[media_type]
except KeyError as err:
_LOGGER.debug("Unknown media type received: %s", media_type)
raise UnknownMediaType from err
can_expand = media_type not in [
MEDIA_TYPE_TRACK,
MEDIA_TYPE_EPISODE,
]
payload = {
"title": item.get("name"),
"media_class": media_class["parent"],
"children_media_class": media_class["children"],
"media_content_id": media_id,
"media_content_type": media_type,
"can_play": media_type in PLAYABLE_MEDIA_TYPES,
"can_expand": can_expand,
}
if "images" in item:
payload["thumbnail"] = fetch_image_url(item)
elif MEDIA_TYPE_ALBUM in item:
payload["thumbnail"] = fetch_image_url(item[MEDIA_TYPE_ALBUM])
return BrowseMedia(**payload)
def library_payload():
"""
Create response payload to describe contents of a specific library.
Used by async_browse_media.
"""
library_info = {
"title": "Media Library",
"media_class": MEDIA_CLASS_DIRECTORY,
"media_content_id": "library",
"media_content_type": "library",
"can_play": False,
"can_expand": True,
"children": [],
}
for item in [{"name": n, "type": t} for t, n in LIBRARY_MAP.items()]:
library_info["children"].append(
item_payload(
{"name": item["name"], "type": item["type"], "uri": item["type"]}
)
)
response = BrowseMedia(**library_info)
response.children_media_class = MEDIA_CLASS_DIRECTORY
return response
def fetch_image_url(item, key="images"):
"""Fetch image url."""
try:
return item.get(key, [])[0].get("url")
except IndexError:
return None
|
import os
import socket
import threading
from collections import deque
from contextlib import contextmanager
from functools import partial
from itertools import count
from uuid import uuid5, uuid4, uuid3, NAMESPACE_OID
from amqp import ChannelError, RecoverableConnectionError
from .entity import Exchange, Queue
from .log import get_logger
from .serialization import registry as serializers
from .utils.uuid import uuid
try:
from _thread import get_ident
except ImportError: # pragma: no cover
try: # noqa
from thread import get_ident # noqa
except ImportError: # pragma: no cover
from dummy_thread import get_ident # noqa
__all__ = ('Broadcast', 'maybe_declare', 'uuid',
'itermessages', 'send_reply',
'collect_replies', 'insured', 'drain_consumer',
'eventloop')
#: Prefetch count can't exceed an unsigned short (0xFFFF).
PREFETCH_COUNT_MAX = 0xFFFF
logger = get_logger(__name__)
_node_id = None
def get_node_id():
global _node_id
if _node_id is None:
_node_id = uuid4().int
return _node_id
def generate_oid(node_id, process_id, thread_id, instance):
ent = '{:x}-{:x}-{:x}-{:x}'.format(
node_id, process_id, thread_id, id(instance))
try:
ret = str(uuid3(NAMESPACE_OID, ent))
except ValueError:
ret = str(uuid5(NAMESPACE_OID, ent))
return ret
def oid_from(instance, threads=True):
return generate_oid(
get_node_id(),
os.getpid(),
get_ident() if threads else 0,
instance,
)
class Broadcast(Queue):
"""Broadcast queue.
Convenience class used to define broadcast queues.
Every queue instance will have a unique name,
and both the queue and the exchange are configured with auto deletion.
Arguments:
name (str): This is used as the name of the exchange.
queue (str): By default a unique id is used for the queue
name for every consumer. You can specify a custom
queue name here.
unique (bool): Always create a unique queue
even if a queue name is supplied.
**kwargs (Any): See :class:`~kombu.Queue` for a list
of additional keyword arguments supported.
"""
attrs = Queue.attrs + (('queue', None),)
def __init__(self,
name=None,
queue=None,
unique=False,
auto_delete=True,
exchange=None,
alias=None,
**kwargs):
if unique:
queue = '{}.{}'.format(queue or 'bcast', uuid())
else:
queue = queue or f'bcast.{uuid()}'
super().__init__(
alias=alias or name,
queue=queue,
name=queue,
auto_delete=auto_delete,
exchange=(exchange if exchange is not None
else Exchange(name, type='fanout')),
**kwargs
)
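# Illustrative sketch (not part of the original module): consuming from a
# Broadcast queue.  ``my_connection`` and the queue name are hypothetical.
def _example_broadcast_consume(my_connection):
    """Sketch: every consumer gets its own auto-deleted, fanout-bound queue."""
    events = Broadcast('example-events')
    def on_event(body, message):
        logger.info('broadcast received: %r', body)
        message.ack()
    # Each consumer declares a uniquely named queue bound to the fanout
    # exchange, so every consumer sees every published message.
    with my_connection.Consumer(queues=[events], callbacks=[on_event]):
        # Raises socket.timeout if nothing arrives within a second.
        my_connection.drain_events(timeout=1)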
def declaration_cached(entity, channel):
return entity in channel.connection.client.declared_entities
def maybe_declare(entity, channel=None, retry=False, **retry_policy):
"""Declare entity (cached)."""
if retry:
return _imaybe_declare(entity, channel, **retry_policy)
return _maybe_declare(entity, channel)
def _ensure_channel_is_bound(entity, channel):
"""Make sure the channel is bound to the entity.
:param entity: generic kombu nomenclature, generally an exchange or queue
:param channel: channel to bind to the entity
:return: the updated entity
"""
is_bound = entity.is_bound
if not is_bound:
if not channel:
raise ChannelError(
f"Cannot bind channel {channel} to entity {entity}")
entity = entity.bind(channel)
return entity
def _maybe_declare(entity, channel):
# _maybe_declare sets name on original for autogen queues
orig = entity
_ensure_channel_is_bound(entity, channel)
if channel is None:
if not entity.is_bound:
raise ChannelError(
f"channel is None and entity {entity} not bound.")
channel = entity.channel
declared = ident = None
if channel.connection and entity.can_cache_declaration:
declared = channel.connection.client.declared_entities
ident = hash(entity)
if ident in declared:
return False
if not channel.connection:
raise RecoverableConnectionError('channel disconnected')
entity.declare(channel=channel)
if declared is not None and ident:
declared.add(ident)
if orig is not None:
orig.name = entity.name
return True
def _imaybe_declare(entity, channel, **retry_policy):
_ensure_channel_is_bound(entity, channel)
if not entity.channel.connection:
raise RecoverableConnectionError('channel disconnected')
return entity.channel.connection.client.ensure(
entity, _maybe_declare, **retry_policy)(entity, channel)
def drain_consumer(consumer, limit=1, timeout=None, callbacks=None):
"""Drain messages from consumer instance."""
acc = deque()
def on_message(body, message):
acc.append((body, message))
consumer.callbacks = [on_message] + (callbacks or [])
with consumer:
for _ in eventloop(consumer.channel.connection.client,
limit=limit, timeout=timeout, ignore_timeouts=True):
try:
yield acc.popleft()
except IndexError:
pass
def itermessages(conn, channel, queue, limit=1, timeout=None,
callbacks=None, **kwargs):
"""Iterator over messages."""
return drain_consumer(
conn.Consumer(queues=[queue], channel=channel, **kwargs),
limit=limit, timeout=timeout, callbacks=callbacks,
)
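# Illustrative sketch (not part of the original module): draining a queue with
# ``itermessages``.  ``conn`` and ``queue`` are hypothetical arguments.
def _example_itermessages(conn, queue):
    """Sketch: log and acknowledge up to five messages from ``queue``."""
    channel = conn.channel()
    try:
        # Timeouts are ignored internally, so this simply stops yielding
        # when fewer than five messages arrive within the timeout.
        for body, message in itermessages(conn, channel, queue,
                                          limit=5, timeout=1):
            logger.info('got message: %r', body)
            message.ack()
    finally:
        channel.close()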
def eventloop(conn, limit=None, timeout=None, ignore_timeouts=False):
"""Best practice generator wrapper around ``Connection.drain_events``.
Able to drain events forever, with a limit, and optionally ignoring
timeout errors (a timeout of 1 is often used in environments where
the socket can get "stuck", and is a best practice for Kombu consumers).
``eventloop`` is a generator.
Examples:
>>> from kombu.common import eventloop
>>> def run(conn):
... it = eventloop(conn, timeout=1, ignore_timeouts=True)
... next(it) # one event consumed, or timed out.
...
... for _ in eventloop(conn, timeout=1, ignore_timeouts=True):
... pass # loop forever.
It also takes an optional limit parameter, and timeout errors
are propagated by default::
for _ in eventloop(connection, limit=1, timeout=1):
pass
See Also:
:func:`itermessages`, which is an event loop bound to one or more
consumers, that yields any messages received.
"""
for i in limit and range(limit) or count():
try:
yield conn.drain_events(timeout=timeout)
except socket.timeout:
if timeout and not ignore_timeouts: # pragma: no cover
raise
def send_reply(exchange, req, msg,
producer=None, retry=False, retry_policy=None, **props):
"""Send reply for request.
Arguments:
exchange (kombu.Exchange, str): Reply exchange
req (~kombu.Message): Original request, a message with
a ``reply_to`` property.
producer (kombu.Producer): Producer instance
retry (bool): If true, retry sending according to
    the ``retry_policy`` argument.
retry_policy (Dict): Retry settings.
**props (Any): Extra properties.
"""
return producer.publish(
msg, exchange=exchange,
retry=retry, retry_policy=retry_policy,
**dict({'routing_key': req.properties['reply_to'],
'correlation_id': req.properties.get('correlation_id'),
'serializer': serializers.type_to_name[req.content_type],
'content_encoding': req.content_encoding}, **props)
)
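# Illustrative sketch (not part of the original module): answering an
# RPC-style request.  ``producer``, ``exchange`` and ``request_message`` are
# hypothetical, and the request must carry a ``reply_to`` property.
def _example_send_reply(producer, exchange, request_message, result):
    """Sketch: publish a reply routed back to the requester's reply queue."""
    send_reply(exchange, request_message, {'result': result},
               producer=producer, retry=True,
               retry_policy={'max_retries': 3})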
def collect_replies(conn, channel, queue, *args, **kwargs):
"""Generator collecting replies from ``queue``."""
no_ack = kwargs.setdefault('no_ack', True)
received = False
try:
for body, message in itermessages(conn, channel, queue,
*args, **kwargs):
if not no_ack:
message.ack()
received = True
yield body
finally:
if received:
channel.after_reply_message_received(queue.name)
def _ensure_errback(exc, interval):
logger.error(
'Connection error: %r. Retry in %ss\n', exc, interval,
exc_info=True,
)
@contextmanager
def _ignore_errors(conn):
try:
yield
except conn.connection_errors + conn.channel_errors:
pass
def ignore_errors(conn, fun=None, *args, **kwargs):
"""Ignore connection and channel errors.
The first argument must be a connection object, or any other object
with ``connection_errors`` and ``channel_errors`` attributes.
Can be used as a function:
.. code-block:: python
def example(connection):
ignore_errors(connection, consumer.channel.close)
or as a context manager:
.. code-block:: python
def example(connection):
with ignore_errors(connection):
consumer.channel.close()
Note:
Connection and channel errors should be properly handled,
and not ignored. Using this function is only acceptable in a cleanup
phase, like when a connection is lost or at shutdown.
"""
if fun:
with _ignore_errors(conn):
return fun(*args, **kwargs)
return _ignore_errors(conn)
def revive_connection(connection, channel, on_revive=None):
if on_revive:
on_revive(channel)
def insured(pool, fun, args, kwargs, errback=None, on_revive=None, **opts):
"""Function wrapper to handle connection errors.
Ensures function performing broker commands completes
despite intermittent connection failures.
"""
errback = errback or _ensure_errback
with pool.acquire(block=True) as conn:
conn.ensure_connection(errback=errback)
# we cache the channel for subsequent calls, this has to be
# reset on revival.
channel = conn.default_channel
revive = partial(revive_connection, conn, on_revive=on_revive)
insured = conn.autoretry(fun, channel, errback=errback,
on_revive=revive, **opts)
retval, _ = insured(*args, **dict(kwargs, connection=conn))
return retval
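# Illustrative sketch (not part of the original module): publishing through
# ``insured`` so intermittent connection failures are retried.  The pool,
# routing key and payload are hypothetical.
def _example_insured_publish(pool, payload):
    """Sketch: the wrapped function receives ``channel``/``connection`` kwargs."""
    def _publish(payload, channel=None, connection=None, **kwargs):
        producer = connection.Producer(channel)
        producer.publish(payload, routing_key='example-queue')
    return insured(pool, _publish, (payload,), {})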
class QoS:
"""Thread safe increment/decrement of a channels prefetch_count.
Arguments:
callback (Callable): Function used to set new prefetch count,
e.g. ``consumer.qos`` or ``channel.basic_qos``. Will be called
with a single ``prefetch_count`` keyword argument.
initial_value (int): Initial prefetch count value.
Example:
>>> from kombu import Consumer, Connection
>>> connection = Connection('amqp://')
>>> consumer = Consumer(connection)
>>> qos = QoS(consumer.qos, initial_value=2)
>>> qos.update() # set initial
>>> qos.value
2
>>> def in_some_thread():
... qos.increment_eventually()
>>> def in_some_other_thread():
... qos.decrement_eventually()
>>> while 1:
... if qos.prev != qos.value:
... qos.update() # prefetch changed so update.
It can be used with any function supporting a ``prefetch_count`` keyword
argument::
>>> channel = connection.channel()
>>> QoS(channel.basic_qos, 10)
>>> def set_qos(prefetch_count):
... print('prefetch count now: %r' % (prefetch_count,))
>>> QoS(set_qos, 10)
"""
prev = None
def __init__(self, callback, initial_value):
self.callback = callback
self._mutex = threading.RLock()
self.value = initial_value or 0
def increment_eventually(self, n=1):
"""Increment the value, but do not update the channels QoS.
Note:
The MainThread will be responsible for calling :meth:`update`
when necessary.
"""
with self._mutex:
if self.value:
self.value = self.value + max(n, 0)
return self.value
def decrement_eventually(self, n=1):
"""Decrement the value, but do not update the channels QoS.
Note:
The MainThread will be responsible for calling :meth:`update`
when necessary.
"""
with self._mutex:
if self.value:
self.value -= n
if self.value < 1:
self.value = 1
return self.value
def set(self, pcount):
"""Set channel prefetch_count setting."""
if pcount != self.prev:
new_value = pcount
if pcount > PREFETCH_COUNT_MAX:
logger.warning('QoS: Disabled: prefetch_count exceeds %r',
PREFETCH_COUNT_MAX)
new_value = 0
logger.debug('basic.qos: prefetch_count->%s', new_value)
self.callback(prefetch_count=new_value)
self.prev = pcount
return pcount
def update(self):
"""Update prefetch count with current value."""
with self._mutex:
return self.set(self.value)
|
import os
import sys
import functools
import tempfile
import datetime
import argparse
from typing import Iterable, Optional, cast
from PyQt5.QtWidgets import QApplication, QWidget
from PyQt5.QtGui import QDesktopServices, QPixmap, QIcon
from PyQt5.QtCore import pyqtSlot, QUrl, QObject, QEvent, pyqtSignal, Qt
import qutebrowser
import qutebrowser.resources
from qutebrowser.commands import runners
from qutebrowser.config import (config, websettings, configfiles, configinit,
qtargs)
from qutebrowser.browser import (urlmarks, history, browsertab,
qtnetworkdownloads, downloads, greasemonkey)
from qutebrowser.browser.network import proxy
from qutebrowser.browser.webkit import cookies, cache
from qutebrowser.browser.webkit.network import networkmanager
from qutebrowser.extensions import loader
from qutebrowser.keyinput import macros, eventfilter
from qutebrowser.mainwindow import mainwindow, prompt, windowundo
from qutebrowser.misc import (ipc, savemanager, sessions, crashsignal,
earlyinit, sql, cmdhistory, backendproblem,
objects, quitter)
from qutebrowser.utils import (log, version, message, utils, urlutils, objreg,
usertypes, standarddir, error, qtutils)
# pylint: disable=unused-import
# We import those to run the cmdutils.register decorators.
from qutebrowser.mainwindow.statusbar import command
from qutebrowser.misc import utilcmds
# pylint: enable=unused-import
q_app = cast(QApplication, None)
def run(args):
"""Initialize everything and run the application."""
if args.temp_basedir:
args.basedir = tempfile.mkdtemp(prefix='qutebrowser-basedir-')
log.init.debug("Main process PID: {}".format(os.getpid()))
log.init.debug("Initializing directories...")
standarddir.init(args)
utils.preload_resources()
log.init.debug("Initializing config...")
configinit.early_init(args)
log.init.debug("Initializing application...")
global q_app
q_app = Application(args)
q_app.setOrganizationName("qutebrowser")
q_app.setApplicationName("qutebrowser")
q_app.setDesktopFileName("org.qutebrowser.qutebrowser")
q_app.setApplicationVersion(qutebrowser.__version__)
if args.version:
print(version.version_info())
sys.exit(usertypes.Exit.ok)
quitter.init(args)
crashsignal.init(q_app=q_app, args=args, quitter=quitter.instance)
try:
server = ipc.send_or_listen(args)
except ipc.Error:
# ipc.send_or_listen already displays the error message for us.
# We didn't really initialize much so far, so we just quit hard.
sys.exit(usertypes.Exit.err_ipc)
if server is None:
if args.backend is not None:
log.init.warning(
"Backend from the running instance will be used")
sys.exit(usertypes.Exit.ok)
else:
quitter.instance.shutting_down.connect(server.shutdown)
server.got_args.connect(lambda args, target_arg, cwd:
process_pos_args(args, cwd=cwd, via_ipc=True,
target_arg=target_arg))
init(args=args)
ret = qt_mainloop()
return ret
def qt_mainloop():
"""Simple wrapper to get a nicer stack trace for segfaults.
WARNING: misc/crashdialog.py checks the stacktrace for this function
name, so if this is changed, it should be changed there as well!
"""
return q_app.exec_()
def init(*, args: argparse.Namespace) -> None:
"""Initialize everything."""
log.init.debug("Starting init...")
crashsignal.crash_handler.init_faulthandler()
q_app.setQuitOnLastWindowClosed(False)
quitter.instance.shutting_down.connect(QApplication.closeAllWindows)
_init_icon()
_init_pulseaudio()
loader.init()
loader.load_components()
try:
_init_modules(args=args)
except (OSError, UnicodeDecodeError, browsertab.WebTabError) as e:
error.handle_fatal_exc(e, "Error while initializing!",
no_err_windows=args.no_err_windows,
pre_text="Error while initializing")
sys.exit(usertypes.Exit.err_init)
log.init.debug("Initializing eventfilter...")
eventfilter.init()
log.init.debug("Connecting signals...")
q_app.focusChanged.connect(on_focus_changed)
_process_args(args)
for scheme in ['http', 'https', 'qute']:
QDesktopServices.setUrlHandler(
scheme, open_desktopservices_url)
log.init.debug("Init done!")
crashsignal.crash_handler.raise_crashdlg()
def _init_icon():
"""Initialize the icon of qutebrowser."""
fallback_icon = QIcon()
for size in [16, 24, 32, 48, 64, 96, 128, 256, 512]:
filename = ':/icons/qutebrowser-{size}x{size}.png'.format(size=size)
pixmap = QPixmap(filename)
if pixmap.isNull():
log.init.warning("Failed to load {}".format(filename))
else:
fallback_icon.addPixmap(pixmap)
icon = QIcon.fromTheme('qutebrowser', fallback_icon)
if icon.isNull():
log.init.warning("Failed to load icon")
else:
q_app.setWindowIcon(icon)
def _init_pulseaudio():
"""Set properties for PulseAudio.
WORKAROUND for https://bugreports.qt.io/browse/QTBUG-85363
Affected Qt versions:
- Older than 5.11 (which is unsupported)
- 5.14.0 to 5.15.0 (inclusive)
However, we set this on all versions so that qutebrowser's icon gets picked
up as well.
"""
for prop in ['application.name', 'application.icon_name']:
os.environ['PULSE_PROP_OVERRIDE_' + prop] = 'qutebrowser'
def _process_args(args):
"""Open startpage etc. and process commandline args."""
if not args.override_restore:
sessions.load_default(args.session)
if not sessions.session_manager.did_load:
log.init.debug("Initializing main window...")
private = args.target == 'private-window'
if (config.val.content.private_browsing or
private) and qtutils.is_single_process():
err = Exception("Private windows are unavailable with "
"the single-process process model.")
error.handle_fatal_exc(err, 'Cannot start in private mode',
no_err_windows=args.no_err_windows)
sys.exit(usertypes.Exit.err_init)
window = mainwindow.MainWindow(private=private)
if not args.nowindow:
window.show()
q_app.setActiveWindow(window)
process_pos_args(args.command)
_open_startpage()
_open_special_pages(args)
delta = datetime.datetime.now() - earlyinit.START_TIME
log.init.debug("Init finished after {}s".format(delta.total_seconds()))
def process_pos_args(args, via_ipc=False, cwd=None, target_arg=None):
"""Process positional commandline args.
URLs to open have no prefix, commands to execute begin with a colon.
Args:
args: A list of arguments to process.
via_ipc: Whether the arguments were transmitted over IPC.
cwd: The cwd to use for fuzzy_url.
target_arg: Command line argument received by a running instance via
ipc. If the --target argument was not specified, target_arg
will be an empty string.
"""
new_window_target = ('private-window' if target_arg == 'private-window'
else 'window')
command_target = config.val.new_instance_open_target
if command_target in {'window', 'private-window'}:
command_target = 'tab-silent'
win_id: Optional[int] = None
if via_ipc and not args:
win_id = mainwindow.get_window(via_ipc=via_ipc,
target=new_window_target)
_open_startpage(win_id)
return
for cmd in args:
if cmd.startswith(':'):
if win_id is None:
win_id = mainwindow.get_window(via_ipc=via_ipc,
target=command_target)
log.init.debug("Startup cmd {!r}".format(cmd))
commandrunner = runners.CommandRunner(win_id)
commandrunner.run_safely(cmd[1:])
elif not cmd:
log.init.debug("Empty argument")
win_id = mainwindow.get_window(via_ipc=via_ipc,
target=new_window_target)
else:
if via_ipc and target_arg and target_arg != 'auto':
open_target = target_arg
else:
open_target = None
if not cwd: # could also be an empty string due to the PyQt signal
cwd = None
try:
url = urlutils.fuzzy_url(cmd, cwd, relative=True)
except urlutils.InvalidUrlError as e:
message.error("Error in startup argument '{}': {}".format(
cmd, e))
else:
win_id = open_url(url, target=open_target, via_ipc=via_ipc)
def open_url(url, target=None, no_raise=False, via_ipc=True):
"""Open a URL in new window/tab.
Args:
url: A URL to open.
target: same as new_instance_open_target (used as a default).
no_raise: suppress target window raising.
via_ipc: Whether the arguments were transmitted over IPC.
Return:
ID of a window that was used to open URL
"""
target = target or config.val.new_instance_open_target
background = target in {'tab-bg', 'tab-bg-silent'}
win_id = mainwindow.get_window(via_ipc=via_ipc, target=target,
no_raise=no_raise)
tabbed_browser = objreg.get('tabbed-browser', scope='window',
window=win_id)
log.init.debug("About to open URL: {}".format(url.toDisplayString()))
tabbed_browser.tabopen(url, background=background, related=False)
return win_id
def _open_startpage(win_id=None):
"""Open startpage.
The startpage is never opened if the given windows are not empty.
Args:
win_id: If None, open startpage in all empty windows.
If set, open the startpage in the given window.
"""
if win_id is not None:
window_ids: Iterable[int] = [win_id]
else:
window_ids = objreg.window_registry
for cur_win_id in list(window_ids): # Copying as the dict could change
tabbed_browser = objreg.get('tabbed-browser', scope='window',
window=cur_win_id)
if tabbed_browser.widget.count() == 0:
log.init.debug("Opening start pages")
for url in config.val.url.start_pages:
tabbed_browser.tabopen(url)
def _open_special_pages(args):
"""Open special notification pages which are only shown once.
Args:
args: The argparse namespace.
"""
if args.basedir is not None:
# With --basedir given, don't open anything.
return
general_sect = configfiles.state['general']
tabbed_browser = objreg.get('tabbed-browser', scope='window',
window='last-focused')
pages = [
# state, condition, URL
('quickstart-done',
True,
'https://www.qutebrowser.org/quickstart.html'),
('config-migration-shown',
os.path.exists(os.path.join(standarddir.config(),
'qutebrowser.conf')),
'qute://help/configuring.html'),
('webkit-warning-shown',
objects.backend == usertypes.Backend.QtWebKit,
'qute://warning/webkit'),
('session-warning-shown',
qtutils.version_check('5.15', compiled=False),
'qute://warning/sessions'),
]
for state, condition, url in pages:
if general_sect.get(state) != '1' and condition:
tabbed_browser.tabopen(QUrl(url), background=False)
general_sect[state] = '1'
def on_focus_changed(_old, new):
"""Register currently focused main window in the object registry."""
if new is None:
return
if not isinstance(new, QWidget):
log.misc.debug("on_focus_changed called with non-QWidget {!r}".format(
new))
return
window = new.window()
if isinstance(window, mainwindow.MainWindow):
objreg.register('last-focused-main-window', window, update=True)
# A focused window must also be visible, and in this case we should
# consider it as the most recently looked-at window
objreg.register('last-visible-main-window', window, update=True)
def open_desktopservices_url(url):
"""Handler to open a URL via QDesktopServices."""
target = config.val.new_instance_open_target
win_id = mainwindow.get_window(via_ipc=True, target=target)
tabbed_browser = objreg.get('tabbed-browser', scope='window',
window=win_id)
tabbed_browser.tabopen(url)
# This is effectively a @config.change_filter
# However, logging is initialized too early to use that decorator
def _on_config_changed(name: str) -> None:
if name.startswith('logging.'):
log.init_from_config(config.val)
def _init_modules(*, args):
"""Initialize all 'modules' which need to be initialized.
Args:
args: The argparse namespace.
"""
log.init.debug("Initializing logging from config...")
log.init_from_config(config.val)
config.instance.changed.connect(_on_config_changed)
log.init.debug("Initializing save manager...")
save_manager = savemanager.SaveManager(q_app)
objreg.register('save-manager', save_manager)
quitter.instance.shutting_down.connect(save_manager.shutdown)
configinit.late_init(save_manager)
log.init.debug("Checking backend requirements...")
backendproblem.init(args=args, save_manager=save_manager)
log.init.debug("Initializing prompts...")
prompt.init()
log.init.debug("Initializing network...")
networkmanager.init()
log.init.debug("Initializing proxy...")
proxy.init()
quitter.instance.shutting_down.connect(proxy.shutdown)
log.init.debug("Initializing downloads...")
downloads.init()
quitter.instance.shutting_down.connect(downloads.shutdown)
try:
log.init.debug("Initializing SQL...")
sql.init(os.path.join(standarddir.data(), 'history.sqlite'))
log.init.debug("Initializing web history...")
history.init(q_app)
except sql.KnownError as e:
error.handle_fatal_exc(e, 'Error initializing SQL',
pre_text='Error initializing SQL',
no_err_windows=args.no_err_windows)
sys.exit(usertypes.Exit.err_init)
log.init.debug("Initializing command history...")
cmdhistory.init()
log.init.debug("Initializing sessions...")
sessions.init(q_app)
log.init.debug("Initializing websettings...")
websettings.init(args)
quitter.instance.shutting_down.connect(websettings.shutdown)
if not args.no_err_windows:
crashsignal.crash_handler.display_faulthandler()
log.init.debug("Initializing quickmarks...")
quickmark_manager = urlmarks.QuickmarkManager(q_app)
objreg.register('quickmark-manager', quickmark_manager)
log.init.debug("Initializing bookmarks...")
bookmark_manager = urlmarks.BookmarkManager(q_app)
objreg.register('bookmark-manager', bookmark_manager)
log.init.debug("Initializing cookies...")
cookies.init(q_app)
log.init.debug("Initializing cache...")
cache.init(q_app)
log.init.debug("Initializing downloads...")
qtnetworkdownloads.init()
log.init.debug("Initializing Greasemonkey...")
greasemonkey.init()
log.init.debug("Misc initialization...")
macros.init()
windowundo.init()
# Init backend-specific stuff
browsertab.init()
class Application(QApplication):
"""Main application instance.
Attributes:
_args: ArgumentParser instance.
_last_focus_object: The last focused object's repr.
Signals:
new_window: A new window was created.
window_closing: A window is being closed.
"""
new_window = pyqtSignal(mainwindow.MainWindow)
window_closing = pyqtSignal(mainwindow.MainWindow)
def __init__(self, args):
"""Constructor.
Args:
args: Argument namespace from argparse.
"""
self._last_focus_object = None
qt_args = qtargs.qt_args(args)
log.init.debug("Commandline args: {}".format(sys.argv[1:]))
log.init.debug("Parsed: {}".format(args))
log.init.debug("Qt arguments: {}".format(qt_args[1:]))
super().__init__(qt_args)
objects.args = args
log.init.debug("Initializing application...")
self.launch_time = datetime.datetime.now()
self.focusObjectChanged.connect( # type: ignore[attr-defined]
self.on_focus_object_changed)
self.setAttribute(Qt.AA_UseHighDpiPixmaps, True)
self.setAttribute(Qt.AA_MacDontSwapCtrlAndMeta, True)
self.new_window.connect(self._on_new_window)
@pyqtSlot(mainwindow.MainWindow)
def _on_new_window(self, window):
window.tabbed_browser.shutting_down.connect(functools.partial(
self.window_closing.emit, window))
@pyqtSlot(QObject)
def on_focus_object_changed(self, obj):
"""Log when the focus object changed."""
output = repr(obj)
if self._last_focus_object != output:
log.misc.debug("Focus object changed: {}".format(output))
self._last_focus_object = output
def event(self, e):
"""Handle macOS FileOpen events."""
if e.type() != QEvent.FileOpen:
return super().event(e)
url = e.url()
if url.isValid():
open_url(url, no_raise=True)
else:
message.error("Invalid URL: {}".format(url.errorString()))
return True
def __repr__(self):
return utils.get_repr(self)
|
from simplipy.entity import EntityTypes
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_BATTERY,
DEVICE_CLASS_DOOR,
DEVICE_CLASS_GAS,
DEVICE_CLASS_MOISTURE,
DEVICE_CLASS_SMOKE,
BinarySensorEntity,
)
from homeassistant.core import callback
from . import SimpliSafeEntity
from .const import DATA_CLIENT, DOMAIN, LOGGER
SUPPORTED_BATTERY_SENSOR_TYPES = [
EntityTypes.carbon_monoxide,
EntityTypes.entry,
EntityTypes.leak,
EntityTypes.lock,
EntityTypes.smoke,
EntityTypes.temperature,
]
SUPPORTED_SENSOR_TYPES = [
EntityTypes.entry,
EntityTypes.carbon_monoxide,
EntityTypes.smoke,
EntityTypes.leak,
]
HA_SENSOR_TYPES = {
EntityTypes.entry: DEVICE_CLASS_DOOR,
EntityTypes.carbon_monoxide: DEVICE_CLASS_GAS,
EntityTypes.smoke: DEVICE_CLASS_SMOKE,
EntityTypes.leak: DEVICE_CLASS_MOISTURE,
}
SENSOR_MODELS = {
EntityTypes.entry: "Entry Sensor",
EntityTypes.carbon_monoxide: "Carbon Monoxide Detector",
EntityTypes.smoke: "Smoke Detector",
EntityTypes.leak: "Water Sensor",
}
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up SimpliSafe binary sensors based on a config entry."""
simplisafe = hass.data[DOMAIN][DATA_CLIENT][entry.entry_id]
sensors = []
for system in simplisafe.systems.values():
if system.version == 2:
LOGGER.info("Skipping sensor setup for V2 system: %s", system.system_id)
continue
for sensor in system.sensors.values():
if sensor.type in SUPPORTED_SENSOR_TYPES:
sensors.append(SimpliSafeBinarySensor(simplisafe, system, sensor))
if sensor.type in SUPPORTED_BATTERY_SENSOR_TYPES:
sensors.append(SimpliSafeSensorBattery(simplisafe, system, sensor))
async_add_entities(sensors)
class SimpliSafeBinarySensor(SimpliSafeEntity, BinarySensorEntity):
"""Define a SimpliSafe binary sensor entity."""
def __init__(self, simplisafe, system, sensor):
"""Initialize."""
super().__init__(simplisafe, system, sensor.name, serial=sensor.serial)
self._system = system
self._sensor = sensor
self._is_on = False
@property
def device_class(self):
"""Return type of sensor."""
return HA_SENSOR_TYPES[self._sensor.type]
@property
def device_info(self):
"""Return device registry information for this entity."""
info = super().device_info
info["identifiers"] = {(DOMAIN, self._sensor.serial)}
info["model"] = SENSOR_MODELS[self._sensor.type]
info["name"] = self._sensor.name
return info
@property
def is_on(self):
"""Return true if the sensor is on."""
return self._is_on
@callback
def async_update_from_rest_api(self):
"""Update the entity with the provided REST API data."""
self._is_on = self._sensor.triggered
class SimpliSafeSensorBattery(SimpliSafeEntity, BinarySensorEntity):
"""Define a SimpliSafe battery binary sensor entity."""
def __init__(self, simplisafe, system, sensor):
"""Initialize."""
super().__init__(simplisafe, system, sensor.name, serial=sensor.serial)
self._sensor = sensor
self._is_low = False
self._device_info["identifiers"] = {(DOMAIN, sensor.serial)}
self._device_info["model"] = SENSOR_MODELS[sensor.type]
self._device_info["name"] = sensor.name
@property
def device_class(self):
"""Return type of sensor."""
return DEVICE_CLASS_BATTERY
@property
def unique_id(self):
"""Return unique ID of sensor."""
return f"{self._sensor.serial}-battery"
@property
def is_on(self):
"""Return true if the battery is low."""
return self._is_low
@callback
def async_update_from_rest_api(self):
"""Update the entity with the provided REST API data."""
self._is_low = self._sensor.low_battery
|
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///app.db'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db = SQLAlchemy(app)
migrate = Migrate(app, db, compare_type=True)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
class User(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(10))
if __name__ == '__main__':
manager.run()
|
from mock import Mock
import arctic.fixtures.arctic as fix
from arctic.arctic import Arctic
def test_overlay_library_name(overlay_library_name):
assert(overlay_library_name == 'test.OVERLAY')
def test_overlay_library():
a = Mock(Arctic, autospec=True)
fix._overlay_library(a, 'test')
a.initialize_library.assert_called_with("test_RAW", "VersionStore", segment='year')
def test_tickstore_lib():
a = Mock(Arctic, autospec=True)
fix._tickstore_lib(a, "test")
a.initialize_library.assert_called_with('test', 'TickStoreV3')
a.get_library.assert_called_with('test')
|
import unittest
import numpy as np
from chainer import testing
from chainer.testing import attr
from chainercv.datasets import cub_label_names
from chainercv.datasets import CUBLabelDataset
from chainercv.utils import assert_is_bbox
from chainercv.utils import assert_is_label_dataset
@testing.parameterize(*testing.product({
'return_bbox': [True, False],
'return_prob_map': [True, False]
}))
class TestCUBLabelDataset(unittest.TestCase):
def setUp(self):
self.dataset = CUBLabelDataset(
return_bbox=self.return_bbox, return_prob_map=self.return_prob_map)
@attr.slow
def test_cub_label_dataset(self):
assert_is_label_dataset(
self.dataset, len(cub_label_names), n_example=10)
idx = np.random.choice(np.arange(10))
if self.return_bbox:
bbox = self.dataset[idx][2]
assert_is_bbox(bbox)
if self.return_prob_map:
img = self.dataset[idx][0]
prob_map = self.dataset[idx][-1]
self.assertEqual(prob_map.dtype, np.float32)
self.assertEqual(prob_map.shape, img.shape[1:])
self.assertTrue(np.min(prob_map) >= 0)
self.assertTrue(np.max(prob_map) <= 1)
testing.run_module(__name__, __file__)
|
from homeassistant.components import stt
from homeassistant.const import HTTP_NOT_FOUND
from homeassistant.setup import async_setup_component
async def test_setup_comp(hass):
"""Set up demo component."""
assert await async_setup_component(hass, stt.DOMAIN, {"stt": {}})
async def test_demo_settings_not_exists(hass, hass_client):
"""Test retrieve settings from demo provider."""
assert await async_setup_component(hass, stt.DOMAIN, {"stt": {}})
client = await hass_client()
response = await client.get("/api/stt/beer")
assert response.status == HTTP_NOT_FOUND
async def test_demo_speech_not_exists(hass, hass_client):
"""Test retrieve settings from demo provider."""
assert await async_setup_component(hass, stt.DOMAIN, {"stt": {}})
client = await hass_client()
response = await client.post("/api/stt/beer", data=b"test")
assert response.status == HTTP_NOT_FOUND
|
from abc import ABC, abstractmethod
import logging
import os
import time
from typing import Optional, cast
import voluptuous as vol
from homeassistant.components.frontend import DATA_PANELS
from homeassistant.const import CONF_FILENAME
from homeassistant.core import callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import collection, storage
from homeassistant.util.yaml import load_yaml
from .const import (
CONF_ICON,
CONF_URL_PATH,
DOMAIN,
EVENT_LOVELACE_UPDATED,
LOVELACE_CONFIG_FILE,
MODE_STORAGE,
MODE_YAML,
STORAGE_DASHBOARD_CREATE_FIELDS,
STORAGE_DASHBOARD_UPDATE_FIELDS,
ConfigNotFound,
)
CONFIG_STORAGE_KEY_DEFAULT = DOMAIN
CONFIG_STORAGE_KEY = "lovelace.{}"
CONFIG_STORAGE_VERSION = 1
DASHBOARDS_STORAGE_KEY = f"{DOMAIN}_dashboards"
DASHBOARDS_STORAGE_VERSION = 1
_LOGGER = logging.getLogger(__name__)
class LovelaceConfig(ABC):
"""Base class for Lovelace config."""
def __init__(self, hass, url_path, config):
"""Initialize Lovelace config."""
self.hass = hass
if config:
self.config = {**config, CONF_URL_PATH: url_path}
else:
self.config = None
@property
def url_path(self) -> Optional[str]:
"""Return url path."""
return self.config[CONF_URL_PATH] if self.config else None
@property
@abstractmethod
def mode(self) -> str:
"""Return mode of the lovelace config."""
@abstractmethod
async def async_get_info(self):
"""Return the config info."""
@abstractmethod
async def async_load(self, force):
"""Load config."""
async def async_save(self, config):
"""Save config."""
raise HomeAssistantError("Not supported")
async def async_delete(self):
"""Delete config."""
raise HomeAssistantError("Not supported")
@callback
def _config_updated(self):
"""Fire config updated event."""
self.hass.bus.async_fire(EVENT_LOVELACE_UPDATED, {"url_path": self.url_path})
class LovelaceStorage(LovelaceConfig):
"""Class to handle Storage based Lovelace config."""
def __init__(self, hass, config):
"""Initialize Lovelace config based on storage helper."""
if config is None:
url_path = None
storage_key = CONFIG_STORAGE_KEY_DEFAULT
else:
url_path = config[CONF_URL_PATH]
storage_key = CONFIG_STORAGE_KEY.format(config["id"])
super().__init__(hass, url_path, config)
self._store = storage.Store(hass, CONFIG_STORAGE_VERSION, storage_key)
self._data = None
@property
def mode(self) -> str:
"""Return mode of the lovelace config."""
return MODE_STORAGE
async def async_get_info(self):
"""Return the Lovelace storage info."""
if self._data is None:
await self._load()
if self._data["config"] is None:
return {"mode": "auto-gen"}
return _config_info(self.mode, self._data["config"])
async def async_load(self, force):
"""Load config."""
if self.hass.config.safe_mode:
raise ConfigNotFound
if self._data is None:
await self._load()
config = self._data["config"]
if config is None:
raise ConfigNotFound
return config
async def async_save(self, config):
"""Save config."""
if self.hass.config.safe_mode:
raise HomeAssistantError("Saving not supported in safe mode")
if self._data is None:
await self._load()
self._data["config"] = config
self._config_updated()
await self._store.async_save(self._data)
async def async_delete(self):
"""Delete config."""
if self.hass.config.safe_mode:
raise HomeAssistantError("Deleting not supported in safe mode")
await self._store.async_remove()
self._data = None
self._config_updated()
async def _load(self):
"""Load the config."""
data = await self._store.async_load()
self._data = data if data else {"config": None}
class LovelaceYAML(LovelaceConfig):
"""Class to handle YAML-based Lovelace config."""
def __init__(self, hass, url_path, config):
"""Initialize the YAML config."""
super().__init__(hass, url_path, config)
self.path = hass.config.path(
config[CONF_FILENAME] if config else LOVELACE_CONFIG_FILE
)
self._cache = None
@property
def mode(self) -> str:
"""Return mode of the lovelace config."""
return MODE_YAML
async def async_get_info(self):
"""Return the YAML storage mode."""
try:
config = await self.async_load(False)
except ConfigNotFound:
return {
"mode": self.mode,
"error": f"{self.path} not found",
}
return _config_info(self.mode, config)
async def async_load(self, force):
"""Load config."""
is_updated, config = await self.hass.async_add_executor_job(
self._load_config, force
)
if is_updated:
self._config_updated()
return config
def _load_config(self, force):
"""Load the actual config."""
# Check for a cached version of the config
if not force and self._cache is not None:
config, last_update = self._cache
modtime = os.path.getmtime(self.path)
if config and last_update > modtime:
return False, config
is_updated = self._cache is not None
try:
config = load_yaml(self.path)
except FileNotFoundError:
raise ConfigNotFound from None
self._cache = (config, time.time())
return is_updated, config
def _config_info(mode, config):
"""Generate info about the config."""
return {
"mode": mode,
"views": len(config.get("views", [])),
}
class DashboardsCollection(collection.StorageCollection):
"""Collection of dashboards."""
CREATE_SCHEMA = vol.Schema(STORAGE_DASHBOARD_CREATE_FIELDS)
UPDATE_SCHEMA = vol.Schema(STORAGE_DASHBOARD_UPDATE_FIELDS)
def __init__(self, hass):
"""Initialize the dashboards collection."""
super().__init__(
storage.Store(hass, DASHBOARDS_STORAGE_VERSION, DASHBOARDS_STORAGE_KEY),
_LOGGER,
)
async def _async_load_data(self) -> Optional[dict]:
"""Load the data."""
data = await self.store.async_load()
if data is None:
return cast(Optional[dict], data)
updated = False
for item in data["items"] or []:
if "-" not in item[CONF_URL_PATH]:
updated = True
item[CONF_URL_PATH] = f"lovelace-{item[CONF_URL_PATH]}"
if updated:
await self.store.async_save(data)
return cast(Optional[dict], data)
async def _process_create_data(self, data: dict) -> dict:
"""Validate the config is valid."""
if "-" not in data[CONF_URL_PATH]:
raise vol.Invalid("Url path needs to contain a hyphen (-)")
if data[CONF_URL_PATH] in self.hass.data[DATA_PANELS]:
raise vol.Invalid("Panel url path needs to be unique")
return self.CREATE_SCHEMA(data)
@callback
def _get_suggested_id(self, info: dict) -> str:
"""Suggest an ID based on the config."""
return info[CONF_URL_PATH]
async def _update_data(self, data: dict, update_data: dict) -> dict:
"""Return a new updated data object."""
update_data = self.UPDATE_SCHEMA(update_data)
updated = {**data, **update_data}
if CONF_ICON in updated and updated[CONF_ICON] is None:
updated.pop(CONF_ICON)
return updated
|
import pytest
from homeassistant.components.met.const import DOMAIN, HOME_LOCATION_NAME
from homeassistant.const import CONF_ELEVATION, CONF_LATITUDE, CONF_LONGITUDE
from tests.async_mock import patch
from tests.common import MockConfigEntry
@pytest.fixture(name="met_setup", autouse=True)
def met_setup_fixture():
"""Patch met setup entry."""
with patch("homeassistant.components.met.async_setup_entry", return_value=True):
yield
async def test_show_config_form(hass):
"""Test show configuration form."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
async def test_flow_with_home_location(hass):
"""Test config flow.
Test the flow when a default location is configured.
Then it should return a form with default values.
"""
hass.config.latitude = 1
hass.config.longitude = 2
hass.config.elevation = 3
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
default_data = result["data_schema"]({})
assert default_data["name"] == HOME_LOCATION_NAME
assert default_data["latitude"] == 1
assert default_data["longitude"] == 2
assert default_data["elevation"] == 3
async def test_create_entry(hass):
"""Test create entry from user input."""
test_data = {
"name": "home",
CONF_LONGITUDE: 0,
CONF_LATITUDE: 0,
CONF_ELEVATION: 0,
}
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=test_data
)
assert result["type"] == "create_entry"
assert result["title"] == "home"
assert result["data"] == test_data
async def test_flow_entry_already_exists(hass):
"""Test user input for config_entry that already exists.
The form should be shown again with an error when the user enters
a location that is already configured.
"""
first_entry = MockConfigEntry(
domain="met",
data={"name": "home", CONF_LATITUDE: 0, CONF_LONGITUDE: 0, CONF_ELEVATION: 0},
)
first_entry.add_to_hass(hass)
test_data = {
"name": "home",
CONF_LONGITUDE: 0,
CONF_LATITUDE: 0,
CONF_ELEVATION: 0,
}
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=test_data
)
assert result["type"] == "form"
assert result["errors"]["name"] == "already_configured"
async def test_onboarding_step(hass):
"""Test initializing via onboarding step."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "onboarding"}, data={}
)
assert result["type"] == "create_entry"
assert result["title"] == HOME_LOCATION_NAME
assert result["data"] == {"track_home": True}
async def test_import_step(hass):
"""Test initializing via import step."""
test_data = {
"name": "home",
CONF_LONGITUDE: None,
CONF_LATITUDE: None,
CONF_ELEVATION: 0,
"track_home": True,
}
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "import"}, data=test_data
)
assert result["type"] == "create_entry"
assert result["title"] == "home"
assert result["data"] == test_data
|
import logging
from sendgrid import SendGridAPIClient
import voluptuous as vol
from homeassistant.components.notify import (
ATTR_TITLE,
ATTR_TITLE_DEFAULT,
PLATFORM_SCHEMA,
BaseNotificationService,
)
from homeassistant.const import (
CONF_API_KEY,
CONF_RECIPIENT,
CONF_SENDER,
CONTENT_TYPE_TEXT_PLAIN,
HTTP_ACCEPTED,
)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
CONF_SENDER_NAME = "sender_name"
DEFAULT_SENDER_NAME = "Home Assistant"
# pylint: disable=no-value-for-parameter
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_SENDER): vol.Email(),
vol.Required(CONF_RECIPIENT): vol.Email(),
vol.Optional(CONF_SENDER_NAME, default=DEFAULT_SENDER_NAME): cv.string,
}
)
def get_service(hass, config, discovery_info=None):
"""Get the SendGrid notification service."""
return SendgridNotificationService(config)
class SendgridNotificationService(BaseNotificationService):
"""Implementation the notification service for email via Sendgrid."""
def __init__(self, config):
"""Initialize the service."""
self.api_key = config[CONF_API_KEY]
self.sender = config[CONF_SENDER]
self.sender_name = config[CONF_SENDER_NAME]
self.recipient = config[CONF_RECIPIENT]
self._sg = SendGridAPIClient(self.api_key)
def send_message(self, message="", **kwargs):
"""Send an email to a user via SendGrid."""
subject = kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT)
data = {
"personalizations": [
{"to": [{"email": self.recipient}], "subject": subject}
],
"from": {"email": self.sender, "name": self.sender_name},
"content": [{"type": CONTENT_TYPE_TEXT_PLAIN, "value": message}],
}
response = self._sg.client.mail.send.post(request_body=data)
if response.status_code != HTTP_ACCEPTED:
_LOGGER.error("Unable to send notification")
|
from __future__ import print_function
import argparse
import os
import shutil
import sys
def main(args):
p = argparse.ArgumentParser(description=__doc__)
p.add_argument("src", action="store", nargs="+", help="one or more source files or folders")
p.add_argument("dest", action="store", help="the destination name or folder")
ns = p.parse_args(args)
status = 0
if len(ns.src) > 1:
# Multiple source files
if os.path.exists(ns.dest):
# Destination must exist...
if os.path.isdir(ns.dest):
# ...and be a directory
for src in ns.src:
try:
# Attempt to move every source into destination
shutil.move(src, os.path.join(ns.dest, os.path.basename(src)))
except Exception as err:
print("mv: {}: {!s}".format(type(err).__name__, err), file=sys.stderr)
status = 1
else:
print("mv: {}: not a directory".format(ns.dest), file=sys.stderr)
else:
print("mv: {}: no such file or directory".format(ns.dest), file=sys.stderr)
status = 1
else:
# Single source file
src = ns.src[0]
if os.path.exists(src):
# Source must exist
if os.path.exists(ns.dest):
# If destination exists...
if os.path.isdir(ns.dest):
# ...it must be a folder
try:
# Attempt to move source into destination
shutil.move(src, os.path.join(ns.dest, os.path.basename(src)))
except Exception as err:
print("mv: {}: {!s}".format(type(err).__name__, err), file=sys.stderr)
status = 1
else:
# Won't overwrite unasked
print("mv: {}: file exists".format(ns.dest), file=sys.stderr)
else:
# Destination doesn't exist
try:
# Try to rename source to destination
shutil.move(src, ns.dest)
except Exception as err:
print("mv: {}: {!s}".format(type(err).__name__, err), file=sys.stderr)
status = 1
else:
print("mv: {}: no such file or directory".format(src), file=sys.stderr)
status = 1
sys.exit(status)
if __name__ == "__main__":
main(sys.argv[1:])
|
from typing import Any, Callable, Collection, Dict, Optional, Union
from homeassistant import core, setup
from homeassistant.const import ATTR_DISCOVERED, ATTR_SERVICE, EVENT_PLATFORM_DISCOVERED
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.loader import bind_hass
from homeassistant.util.async_ import run_callback_threadsafe
EVENT_LOAD_PLATFORM = "load_platform.{}"
ATTR_PLATFORM = "platform"
@bind_hass
def listen(
hass: core.HomeAssistant, service: Union[str, Collection[str]], callback: Callable
) -> None:
"""Set up listener for discovery of specific service.
Service can be a string or a list/tuple.
"""
run_callback_threadsafe(hass.loop, async_listen, hass, service, callback).result()
@core.callback
@bind_hass
def async_listen(
hass: core.HomeAssistant, service: Union[str, Collection[str]], callback: Callable
) -> None:
"""Set up listener for discovery of specific service.
Service can be a string or a list/tuple.
"""
if isinstance(service, str):
service = (service,)
else:
service = tuple(service)
job = core.HassJob(callback)
@core.callback
def discovery_event_listener(event: core.Event) -> None:
"""Listen for discovery events."""
if ATTR_SERVICE in event.data and event.data[ATTR_SERVICE] in service:
hass.async_add_hass_job(
job, event.data[ATTR_SERVICE], event.data.get(ATTR_DISCOVERED)
)
hass.bus.async_listen(EVENT_PLATFORM_DISCOVERED, discovery_event_listener)
@bind_hass
def discover(
hass: core.HomeAssistant,
service: str,
discovered: DiscoveryInfoType,
component: str,
hass_config: ConfigType,
) -> None:
"""Fire discovery event. Can ensure a component is loaded."""
hass.add_job(
async_discover( # type: ignore
hass, service, discovered, component, hass_config
)
)
@bind_hass
async def async_discover(
hass: core.HomeAssistant,
service: str,
discovered: Optional[DiscoveryInfoType],
component: Optional[str],
hass_config: ConfigType,
) -> None:
"""Fire discovery event. Can ensure a component is loaded."""
if component is not None and component not in hass.config.components:
await setup.async_setup_component(hass, component, hass_config)
data: Dict[str, Any] = {ATTR_SERVICE: service}
if discovered is not None:
data[ATTR_DISCOVERED] = discovered
hass.bus.async_fire(EVENT_PLATFORM_DISCOVERED, data)
@bind_hass
def listen_platform(
hass: core.HomeAssistant, component: str, callback: Callable
) -> None:
"""Register a platform loader listener."""
run_callback_threadsafe(
hass.loop, async_listen_platform, hass, component, callback
).result()
@bind_hass
def async_listen_platform(
hass: core.HomeAssistant,
component: str,
callback: Callable[[str, Optional[Dict[str, Any]]], Any],
) -> None:
"""Register a platform loader listener.
This method must be run in the event loop.
"""
service = EVENT_LOAD_PLATFORM.format(component)
job = core.HassJob(callback)
@core.callback
def discovery_platform_listener(event: core.Event) -> None:
"""Listen for platform discovery events."""
if event.data.get(ATTR_SERVICE) != service:
return
platform = event.data.get(ATTR_PLATFORM)
if not platform:
return
hass.async_run_hass_job(job, platform, event.data.get(ATTR_DISCOVERED))
hass.bus.async_listen(EVENT_PLATFORM_DISCOVERED, discovery_platform_listener)
@bind_hass
def load_platform(
hass: core.HomeAssistant,
component: str,
platform: str,
discovered: DiscoveryInfoType,
hass_config: ConfigType,
) -> None:
"""Load a component and platform dynamically.
Target components will be loaded and an EVENT_PLATFORM_DISCOVERED will be
fired to load the platform. The event will contain:
{ ATTR_SERVICE = EVENT_LOAD_PLATFORM + '.' + <<component>>
ATTR_PLATFORM = <<platform>>
ATTR_DISCOVERED = <<discovery info>> }
Use `listen_platform` to register a callback for these events.
"""
hass.add_job(
async_load_platform( # type: ignore
hass, component, platform, discovered, hass_config
)
)
@bind_hass
async def async_load_platform(
hass: core.HomeAssistant,
component: str,
platform: str,
discovered: DiscoveryInfoType,
hass_config: ConfigType,
) -> None:
"""Load a component and platform dynamically.
Target components will be loaded and an EVENT_PLATFORM_DISCOVERED will be
fired to load the platform. The event will contain:
{ ATTR_SERVICE = EVENT_LOAD_PLATFORM + '.' + <<component>>
ATTR_PLATFORM = <<platform>>
ATTR_DISCOVERED = <<discovery info>> }
Use `listen_platform` to register a callback for these events.
    Warning: Do not await this inside a setup method to avoid a deadlock.
Use `hass.async_create_task(async_load_platform(..))` instead.
This method is a coroutine.
"""
assert hass_config, "You need to pass in the real hass config"
setup_success = True
if component not in hass.config.components:
setup_success = await setup.async_setup_component(hass, component, hass_config)
# No need to fire event if we could not set up component
if not setup_success:
return
data: Dict[str, Any] = {
ATTR_SERVICE: EVENT_LOAD_PLATFORM.format(component),
ATTR_PLATFORM: platform,
}
if discovered is not None:
data[ATTR_DISCOVERED] = discovered
hass.bus.async_fire(EVENT_PLATFORM_DISCOVERED, data)
|
from homeassistant.util import aiohttp
async def test_request_json():
"""Test a JSON request."""
request = aiohttp.MockRequest(b'{"hello": 2}', mock_source="test")
assert request.status == 200
assert await request.json() == {"hello": 2}
async def test_request_text():
"""Test a JSON request."""
request = aiohttp.MockRequest(b"hello", status=201, mock_source="test")
assert request.status == 201
assert await request.text() == "hello"
async def test_request_post_query():
"""Test a JSON request."""
request = aiohttp.MockRequest(
b"hello=2&post=true", query_string="get=true", method="POST", mock_source="test"
)
assert request.method == "POST"
assert await request.post() == {"hello": "2", "post": "true"}
assert request.query == {"get": "true"}
|
from typing import Any, Dict, List
import voluptuous as vol
from homeassistant.components.automation import AutomationActionType
from homeassistant.components.device_automation.const import (
CONF_IS_OFF,
CONF_IS_ON,
CONF_TOGGLE,
CONF_TURN_OFF,
CONF_TURN_ON,
CONF_TURNED_OFF,
CONF_TURNED_ON,
)
from homeassistant.components.homeassistant.triggers import state as state_trigger
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_CONDITION,
CONF_ENTITY_ID,
CONF_FOR,
CONF_PLATFORM,
CONF_TYPE,
)
from homeassistant.core import CALLBACK_TYPE, Context, HomeAssistant, callback
from homeassistant.helpers import condition, config_validation as cv
from homeassistant.helpers.entity_registry import async_entries_for_device
from homeassistant.helpers.typing import ConfigType, TemplateVarsType
from . import TRIGGER_BASE_SCHEMA
# mypy: allow-untyped-calls, allow-untyped-defs
ENTITY_ACTIONS = [
{
# Turn entity off
CONF_TYPE: CONF_TURN_OFF
},
{
# Turn entity on
CONF_TYPE: CONF_TURN_ON
},
{
# Toggle entity
CONF_TYPE: CONF_TOGGLE
},
]
ENTITY_CONDITIONS = [
{
# True when entity is turned off
CONF_CONDITION: "device",
CONF_TYPE: CONF_IS_OFF,
},
{
# True when entity is turned on
CONF_CONDITION: "device",
CONF_TYPE: CONF_IS_ON,
},
]
ENTITY_TRIGGERS = [
{
# Trigger when entity is turned off
CONF_PLATFORM: "device",
CONF_TYPE: CONF_TURNED_OFF,
},
{
# Trigger when entity is turned on
CONF_PLATFORM: "device",
CONF_TYPE: CONF_TURNED_ON,
},
]
DEVICE_ACTION_TYPES = [CONF_TOGGLE, CONF_TURN_OFF, CONF_TURN_ON]
ACTION_SCHEMA = cv.DEVICE_ACTION_BASE_SCHEMA.extend(
{
vol.Required(CONF_ENTITY_ID): cv.entity_id,
vol.Required(CONF_TYPE): vol.In(DEVICE_ACTION_TYPES),
}
)
CONDITION_SCHEMA = cv.DEVICE_CONDITION_BASE_SCHEMA.extend(
{
vol.Required(CONF_ENTITY_ID): cv.entity_id,
vol.Required(CONF_TYPE): vol.In([CONF_IS_OFF, CONF_IS_ON]),
vol.Optional(CONF_FOR): cv.positive_time_period_dict,
}
)
TRIGGER_SCHEMA = TRIGGER_BASE_SCHEMA.extend(
{
vol.Required(CONF_ENTITY_ID): cv.entity_id,
vol.Required(CONF_TYPE): vol.In([CONF_TURNED_OFF, CONF_TURNED_ON]),
vol.Optional(CONF_FOR): cv.positive_time_period_dict,
}
)
async def async_call_action_from_config(
hass: HomeAssistant,
config: ConfigType,
variables: TemplateVarsType,
context: Context,
domain: str,
) -> None:
"""Change state based on configuration."""
action_type = config[CONF_TYPE]
if action_type == CONF_TURN_ON:
action = "turn_on"
elif action_type == CONF_TURN_OFF:
action = "turn_off"
else:
action = "toggle"
service_data = {ATTR_ENTITY_ID: config[CONF_ENTITY_ID]}
await hass.services.async_call(
domain, action, service_data, blocking=True, context=context
)
@callback
def async_condition_from_config(config: ConfigType) -> condition.ConditionCheckerType:
"""Evaluate state based on configuration."""
condition_type = config[CONF_TYPE]
if condition_type == CONF_IS_ON:
stat = "on"
else:
stat = "off"
state_config = {
condition.CONF_CONDITION: "state",
condition.CONF_ENTITY_ID: config[CONF_ENTITY_ID],
condition.CONF_STATE: stat,
}
if CONF_FOR in config:
state_config[CONF_FOR] = config[CONF_FOR]
return condition.state_from_config(state_config)
async def async_attach_trigger(
hass: HomeAssistant,
config: ConfigType,
action: AutomationActionType,
automation_info: dict,
) -> CALLBACK_TYPE:
"""Listen for state changes based on configuration."""
trigger_type = config[CONF_TYPE]
if trigger_type == CONF_TURNED_ON:
from_state = "off"
to_state = "on"
else:
from_state = "on"
to_state = "off"
state_config = {
CONF_PLATFORM: "state",
state_trigger.CONF_ENTITY_ID: config[CONF_ENTITY_ID],
state_trigger.CONF_FROM: from_state,
state_trigger.CONF_TO: to_state,
}
if CONF_FOR in config:
state_config[CONF_FOR] = config[CONF_FOR]
state_config = state_trigger.TRIGGER_SCHEMA(state_config)
return await state_trigger.async_attach_trigger(
hass, state_config, action, automation_info, platform_type="device"
)
async def _async_get_automations(
hass: HomeAssistant, device_id: str, automation_templates: List[dict], domain: str
) -> List[dict]:
"""List device automations."""
automations: List[Dict[str, Any]] = []
entity_registry = await hass.helpers.entity_registry.async_get_registry()
entries = [
entry
for entry in async_entries_for_device(entity_registry, device_id)
if entry.domain == domain
]
for entry in entries:
automations.extend(
{
**template,
"device_id": device_id,
"entity_id": entry.entity_id,
"domain": domain,
}
for template in automation_templates
)
return automations
async def async_get_actions(
hass: HomeAssistant, device_id: str, domain: str
) -> List[dict]:
"""List device actions."""
return await _async_get_automations(hass, device_id, ENTITY_ACTIONS, domain)
async def async_get_conditions(
hass: HomeAssistant, device_id: str, domain: str
) -> List[Dict[str, str]]:
"""List device conditions."""
return await _async_get_automations(hass, device_id, ENTITY_CONDITIONS, domain)
async def async_get_triggers(
hass: HomeAssistant, device_id: str, domain: str
) -> List[dict]:
"""List device triggers."""
return await _async_get_automations(hass, device_id, ENTITY_TRIGGERS, domain)
async def async_get_condition_capabilities(hass: HomeAssistant, config: dict) -> dict:
"""List condition capabilities."""
return {
"extra_fields": vol.Schema(
{vol.Optional(CONF_FOR): cv.positive_time_period_dict}
)
}
async def async_get_trigger_capabilities(hass: HomeAssistant, config: dict) -> dict:
"""List trigger capabilities."""
return {
"extra_fields": vol.Schema(
{vol.Optional(CONF_FOR): cv.positive_time_period_dict}
)
}
|
import urllib.parse
import string
import re
import attr
import pypeg2 as peg
from qutebrowser.utils import utils
class UniqueNamespace(peg.Namespace):
"""A pyPEG2 namespace which prevents setting a value twice."""
def __setitem__(self, key, value):
if key in self:
raise DuplicateParamError(key)
super().__setitem__(key, value)
# RFC 2616
ctl_chars = ''.join(chr(i) for i in range(32)) + chr(127)
# RFC 5987
attr_chars_nonalnum = '!#$&+-.^_`|~'
attr_chars = string.ascii_letters + string.digits + attr_chars_nonalnum
# RFC 5987 gives this alternative construction of the token character class
token_chars = attr_chars + "*'%" # flake8: disable=S001
# Definitions from https://tools.ietf.org/html/rfc2616#section-2.2
# token was redefined from attr_chars to avoid using AnyBut,
# which might include non-ascii octets.
token_re = '[{}]+'.format(re.escape(token_chars))
class Token(str):
"""A token (RFC 2616, Section 2.2)."""
grammar = re.compile(token_re)
# RFC 2616 says some linear whitespace (LWS) is in fact allowed in text
# and qdtext; however it also mentions folding that whitespace into
# a single SP (which isn't in CTL) before interpretation.
# Assume the caller already did that folding when parsing headers.
# NOTE: qdtext also allows non-ascii, which we choose to parse
# as ISO-8859-1; rejecting it entirely would also be permitted.
# Some broken browsers attempt encoding-sniffing, which is broken
# because the spec only allows iso, and because encoding-sniffing
# can mangle valid values.
# Everything else in this grammar (including RFC 5987 ext values)
# is in an ascii-safe encoding.
qdtext_re = r'[^"{}]'.format(re.escape(ctl_chars))
quoted_pair_re = r'\\[{}]'.format(re.escape(
''.join(chr(i) for i in range(128))))
class QuotedString(str):
"""A quoted string (RFC 2616, Section 2.2)."""
grammar = re.compile(r'"({}|{})+"'.format(quoted_pair_re, qdtext_re))
def __str__(self):
s = super().__str__()
s = s[1:-1] # remove quotes
s = re.sub(r'\\(.)', r'\1', s) # drop backslashes
return s
class Value(str):
"""A value. (RFC 2616, Section 3.6)."""
grammar = [re.compile(token_re), QuotedString]
class Charset(str):
"""A charset (RFC5987, Section 3.2.1)."""
    # Other charsets are forbidden; the spec reserves them
    # for future evolutions.
grammar = re.compile('UTF-8|ISO-8859-1', re.I)
class Language(str):
"""A language-tag (RFC 5646, Section 2.1).
FIXME: This grammar is not 100% correct yet.
https://github.com/qutebrowser/qutebrowser/issues/105
"""
grammar = re.compile('[A-Za-z0-9-]+')
attr_char_re = '[{}]'.format(re.escape(attr_chars))
hex_digit_re = '%[' + string.hexdigits + ']{2}'
class ValueChars(str):
"""A value of an attribute.
FIXME: Can we merge this with Value?
https://github.com/qutebrowser/qutebrowser/issues/105
"""
grammar = re.compile('({}|{})*'.format(attr_char_re, hex_digit_re))
class ExtValue(peg.List):
"""An ext-value of an attribute (RFC 5987, Section 3.2)."""
grammar = peg.contiguous(Charset, "'", peg.optional(Language), "'",
ValueChars)
class ExtToken(peg.Symbol):
"""A token introducing an extended value (RFC 6266, Section 4.1)."""
regex = re.compile(token_re + r'\*')
def __str__(self):
return super().__str__().lower()
class NoExtToken(peg.Symbol):
"""A token introducing a normal value (RFC 6266, Section 4.1)."""
regex = re.compile(token_re + r'(?<!\*)')
def __str__(self):
return super().__str__().lower()
class DispositionParm(str):
"""A parameter for the Disposition-Type header (RFC6266, Section 4.1)."""
grammar = peg.attr('name', NoExtToken), '=', Value
class ExtDispositionParm:
"""An extended parameter (RFC6266, Section 4.1)."""
grammar = peg.attr('name', ExtToken), '=', ExtValue
def __init__(self, value, name=None):
self.name = name
self.value = value
class DispositionType(peg.List):
"""The disposition type (RFC6266, Section 4.1)."""
grammar = [re.compile('(inline|attachment)', re.I), Token]
class DispositionParmList(UniqueNamespace):
"""A list of disposition parameters (RFC6266, Section 4.1)."""
grammar = peg.maybe_some(';', [ExtDispositionParm, DispositionParm])
class ContentDispositionValue:
"""A complete Content-Disposition value (RFC 6266, Section 4.1)."""
# Allows nonconformant final semicolon
# I've seen it in the wild, and browsers accept it
# http://greenbytes.de/tech/tc2231/#attwithasciifilenamenqs
grammar = (peg.attr('dtype', DispositionType),
peg.attr('params', DispositionParmList),
peg.optional(';'))
@attr.s
class LangTagged:
"""A string with an associated language."""
string = attr.ib()
langtag = attr.ib()
class Error(Exception):
"""Base class for RFC6266 errors."""
class DuplicateParamError(Error):
"""Exception raised when a parameter has been given twice."""
class InvalidISO8859Error(Error):
"""Exception raised when a byte is invalid in ISO-8859-1."""
class _ContentDisposition:
"""Records various indications and hints about content disposition.
These can be used to know if a file should be downloaded or
displayed directly, and to hint what filename it should have
in the download case.
"""
def __init__(self, disposition, assocs):
"""Used internally after parsing the header."""
assert len(disposition) == 1
self.disposition = disposition[0]
self.assocs = dict(assocs) # So we can change values
if 'filename*' in self.assocs:
param = self.assocs['filename*']
assert isinstance(param, ExtDispositionParm)
self.assocs['filename*'] = parse_ext_value(param.value).string
def filename(self):
"""The filename from the Content-Disposition header or None.
On safety:
This property records the intent of the sender.
        You shouldn't use this sender-controlled value as a filesystem path; it
can be insecure. Serving files with this filename can be dangerous as
well, due to a certain browser using the part after the dot for
mime-sniffing. Saving it to a database is fine by itself though.
"""
if 'filename*' in self.assocs:
return self.assocs['filename*']
elif 'filename' in self.assocs:
# XXX Reject non-ascii (parsed via qdtext) here?
return self.assocs['filename']
return None
def is_inline(self):
"""Return if the file should be handled inline.
If not, and unless your application supports other dispositions
than the standard inline and attachment, it should be handled
as an attachment.
"""
return self.disposition.lower() == 'inline'
def __repr__(self):
return utils.get_repr(self, constructor=True,
disposition=self.disposition, assocs=self.assocs)
def normalize_ws(text):
"""Do LWS (linear whitespace) folding."""
return ' '.join(text.split())
def parse_headers(content_disposition):
"""Build a _ContentDisposition from header values."""
# We allow non-ascii here (it will only be parsed inside of qdtext, and
# rejected by the grammar if it appears in other places), although parsing
# it can be ambiguous. Parsing it ensures that a non-ambiguous filename*
# value won't get dismissed because of an unrelated ambiguity in the
# filename parameter. But it does mean we occasionally give
# less-than-certain values for some legacy senders.
content_disposition = content_disposition.decode('iso-8859-1')
# Our parsing is relaxed in these regards:
# - The grammar allows a final ';' in the header;
# - We do LWS-folding, and possibly normalise other broken
# whitespace, instead of rejecting non-lws-safe text.
# XXX Would prefer to accept only the quoted whitespace
# case, rather than normalising everything.
content_disposition = normalize_ws(content_disposition)
parsed = peg.parse(content_disposition, ContentDispositionValue)
return _ContentDisposition(disposition=parsed.dtype, assocs=parsed.params)
def parse_ext_value(val):
"""Parse the value of an extended attribute."""
if len(val) == 3:
charset, langtag, coded = val
else:
charset, coded = val
langtag = None
decoded = urllib.parse.unquote(coded, charset, errors='strict')
if charset == 'iso-8859-1':
# Fail if the filename contains an invalid ISO-8859-1 char
for c in decoded:
if 0x7F <= ord(c) <= 0x9F:
raise InvalidISO8859Error(c)
return LangTagged(decoded, langtag)
|
import asyncio
from datetime import timedelta
import logging
from typing import Dict
from pyheos import Heos, HeosError, const as heos_const
import voluptuous as vol
from homeassistant.components.media_player.const import DOMAIN as MEDIA_PLAYER_DOMAIN
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, EVENT_HOMEASSISTANT_STOP
from homeassistant.exceptions import ConfigEntryNotReady
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from homeassistant.util import Throttle
from . import services
from .config_flow import format_title
from .const import (
COMMAND_RETRY_ATTEMPTS,
COMMAND_RETRY_DELAY,
DATA_CONTROLLER_MANAGER,
DATA_SOURCE_MANAGER,
DOMAIN,
SIGNAL_HEOS_UPDATED,
)
CONFIG_SCHEMA = vol.Schema(
{DOMAIN: vol.Schema({vol.Required(CONF_HOST): cv.string})}, extra=vol.ALLOW_EXTRA
)
MIN_UPDATE_SOURCES = timedelta(seconds=1)
_LOGGER = logging.getLogger(__name__)
async def async_setup(hass: HomeAssistantType, config: ConfigType):
"""Set up the HEOS component."""
if DOMAIN not in config:
return True
host = config[DOMAIN][CONF_HOST]
entries = hass.config_entries.async_entries(DOMAIN)
if not entries:
# Create new entry based on config
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": "import"}, data={CONF_HOST: host}
)
)
else:
# Check if host needs to be updated
entry = entries[0]
if entry.data[CONF_HOST] != host:
hass.config_entries.async_update_entry(
entry, title=format_title(host), data={**entry.data, CONF_HOST: host}
)
return True
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry):
"""Initialize config entry which represents the HEOS controller."""
# For backwards compat
if entry.unique_id is None:
hass.config_entries.async_update_entry(entry, unique_id=DOMAIN)
host = entry.data[CONF_HOST]
# Setting all_progress_events=False ensures that we only receive a
# media position update upon start of playback or when media changes
controller = Heos(host, all_progress_events=False)
try:
await controller.connect(auto_reconnect=True)
# Auto reconnect only operates if initial connection was successful.
except HeosError as error:
await controller.disconnect()
_LOGGER.debug("Unable to connect to controller %s: %s", host, error)
raise ConfigEntryNotReady from error
# Disconnect when shutting down
async def disconnect_controller(event):
await controller.disconnect()
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, disconnect_controller)
# Get players and sources
try:
players = await controller.get_players()
favorites = {}
if controller.is_signed_in:
favorites = await controller.get_favorites()
else:
_LOGGER.warning(
"%s is not logged in to a HEOS account and will be unable to retrieve "
"HEOS favorites: Use the 'heos.sign_in' service to sign-in to a HEOS account",
host,
)
inputs = await controller.get_input_sources()
except HeosError as error:
await controller.disconnect()
_LOGGER.debug("Unable to retrieve players and sources: %s", error)
raise ConfigEntryNotReady from error
controller_manager = ControllerManager(hass, controller)
await controller_manager.connect_listeners()
source_manager = SourceManager(favorites, inputs)
source_manager.connect_update(hass, controller)
hass.data[DOMAIN] = {
DATA_CONTROLLER_MANAGER: controller_manager,
DATA_SOURCE_MANAGER: source_manager,
MEDIA_PLAYER_DOMAIN: players,
}
services.register(hass, controller)
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, MEDIA_PLAYER_DOMAIN)
)
return True
async def async_unload_entry(hass: HomeAssistantType, entry: ConfigEntry):
"""Unload a config entry."""
controller_manager = hass.data[DOMAIN][DATA_CONTROLLER_MANAGER]
await controller_manager.disconnect()
hass.data.pop(DOMAIN)
services.remove(hass)
return await hass.config_entries.async_forward_entry_unload(
entry, MEDIA_PLAYER_DOMAIN
)
class ControllerManager:
"""Class that manages events of the controller."""
def __init__(self, hass, controller):
"""Init the controller manager."""
self._hass = hass
self._device_registry = None
self._entity_registry = None
self.controller = controller
self._signals = []
async def connect_listeners(self):
"""Subscribe to events of interest."""
self._device_registry, self._entity_registry = await asyncio.gather(
self._hass.helpers.device_registry.async_get_registry(),
self._hass.helpers.entity_registry.async_get_registry(),
)
# Handle controller events
self._signals.append(
self.controller.dispatcher.connect(
heos_const.SIGNAL_CONTROLLER_EVENT, self._controller_event
)
)
# Handle connection-related events
self._signals.append(
self.controller.dispatcher.connect(
heos_const.SIGNAL_HEOS_EVENT, self._heos_event
)
)
async def disconnect(self):
"""Disconnect subscriptions."""
for signal_remove in self._signals:
signal_remove()
self._signals.clear()
self.controller.dispatcher.disconnect_all()
await self.controller.disconnect()
async def _controller_event(self, event, data):
"""Handle controller event."""
if event == heos_const.EVENT_PLAYERS_CHANGED:
self.update_ids(data[heos_const.DATA_MAPPED_IDS])
# Update players
self._hass.helpers.dispatcher.async_dispatcher_send(SIGNAL_HEOS_UPDATED)
async def _heos_event(self, event):
"""Handle connection event."""
if event == heos_const.EVENT_CONNECTED:
try:
# Retrieve latest players and refresh status
data = await self.controller.load_players()
self.update_ids(data[heos_const.DATA_MAPPED_IDS])
except HeosError as ex:
_LOGGER.error("Unable to refresh players: %s", ex)
# Update players
self._hass.helpers.dispatcher.async_dispatcher_send(SIGNAL_HEOS_UPDATED)
def update_ids(self, mapped_ids: Dict[int, int]):
"""Update the IDs in the device and entity registry."""
# mapped_ids contains the mapped IDs (new:old)
for new_id, old_id in mapped_ids.items():
# update device registry
entry = self._device_registry.async_get_device({(DOMAIN, old_id)}, set())
new_identifiers = {(DOMAIN, new_id)}
if entry:
self._device_registry.async_update_device(
entry.id, new_identifiers=new_identifiers
)
_LOGGER.debug(
"Updated device %s identifiers to %s", entry.id, new_identifiers
)
# update entity registry
entity_id = self._entity_registry.async_get_entity_id(
MEDIA_PLAYER_DOMAIN, DOMAIN, str(old_id)
)
if entity_id:
self._entity_registry.async_update_entity(
entity_id, new_unique_id=str(new_id)
)
_LOGGER.debug("Updated entity %s unique id to %s", entity_id, new_id)
class SourceManager:
"""Class that manages sources for players."""
def __init__(
self,
favorites,
inputs,
*,
retry_delay: int = COMMAND_RETRY_DELAY,
max_retry_attempts: int = COMMAND_RETRY_ATTEMPTS,
):
"""Init input manager."""
self.retry_delay = retry_delay
self.max_retry_attempts = max_retry_attempts
self.favorites = favorites
self.inputs = inputs
self.source_list = self._build_source_list()
def _build_source_list(self):
"""Build a single list of inputs from various types."""
source_list = []
source_list.extend([favorite.name for favorite in self.favorites.values()])
source_list.extend([source.name for source in self.inputs])
return source_list
async def play_source(self, source: str, player):
"""Determine type of source and play it."""
index = next(
(
index
for index, favorite in self.favorites.items()
if favorite.name == source
),
None,
)
if index is not None:
await player.play_favorite(index)
return
input_source = next(
(
input_source
for input_source in self.inputs
if input_source.name == source
),
None,
)
if input_source is not None:
await player.play_input_source(input_source)
return
_LOGGER.error("Unknown source: %s", source)
def get_current_source(self, now_playing_media):
"""Determine current source from now playing media."""
# Match input by input_name:media_id
if now_playing_media.source_id == heos_const.MUSIC_SOURCE_AUX_INPUT:
return next(
(
input_source.name
for input_source in self.inputs
if input_source.input_name == now_playing_media.media_id
),
None,
)
# Try matching favorite by name:station or media_id:album_id
return next(
(
source.name
for source in self.favorites.values()
if source.name == now_playing_media.station
or source.media_id == now_playing_media.album_id
),
None,
)
def connect_update(self, hass, controller):
"""
Connect listener for when sources change and signal player update.
EVENT_SOURCES_CHANGED is often raised multiple times in response to a
        physical event, therefore throttle it. Retrieving sources immediately
        after the event may fail, so retry.
"""
@Throttle(MIN_UPDATE_SOURCES)
async def get_sources():
retry_attempts = 0
while True:
try:
favorites = {}
if controller.is_signed_in:
favorites = await controller.get_favorites()
inputs = await controller.get_input_sources()
return favorites, inputs
except HeosError as error:
if retry_attempts < self.max_retry_attempts:
retry_attempts += 1
_LOGGER.debug(
"Error retrieving sources and will retry: %s", error
)
await asyncio.sleep(self.retry_delay)
else:
_LOGGER.error("Unable to update sources: %s", error)
return
async def update_sources(event, data=None):
if event in (
heos_const.EVENT_SOURCES_CHANGED,
heos_const.EVENT_USER_CHANGED,
heos_const.EVENT_CONNECTED,
):
sources = await get_sources()
# If throttled, it will return None
if sources:
self.favorites, self.inputs = sources
self.source_list = self._build_source_list()
_LOGGER.debug("Sources updated due to changed event")
# Let players know to update
hass.helpers.dispatcher.async_dispatcher_send(SIGNAL_HEOS_UPDATED)
controller.dispatcher.connect(
heos_const.SIGNAL_CONTROLLER_EVENT, update_sources
)
controller.dispatcher.connect(heos_const.SIGNAL_HEOS_EVENT, update_sources)
|
class Geolocation(object):
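    """Simple value object describing an area by latitude, longitude and radius."""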
def __init__(self, latitude, longitude, radius):
self.latitude = latitude
self.longitude = longitude
self.radius = radius
def __str__(self):
return str(self.serialize())
def __eq__(self, other):
"""
This is used to compare 2 objects
:param other:
:return:
"""
return self.__dict__ == other.__dict__
|
from django.utils.translation import gettext_lazy as _
from weblate.addons.base import UpdateBaseAddon
from weblate.addons.events import EVENT_POST_COMMIT, EVENT_POST_UPDATE, EVENT_PRE_COMMIT
from weblate.trans.exceptions import FileParseError
class BaseCleanupAddon(UpdateBaseAddon):
@classmethod
def can_install(cls, component, user):
if not component.has_template():
return False
return super().can_install(component, user)
class CleanupAddon(BaseCleanupAddon):
name = "weblate.cleanup.generic"
verbose = _("Cleanup translation files")
description = _(
"Update all translation files to match the monolingual base file. "
"For most file formats, this means removing stale translation keys "
"no longer present in the base file."
)
icon = "eraser.svg"
events = (EVENT_PRE_COMMIT, EVENT_POST_UPDATE)
def update_translations(self, component, previous_head):
for translation in self.iterate_translations(component):
filenames = translation.store.cleanup_unused()
self.extra_files.extend(filenames)
def pre_commit(self, translation, author):
try:
filenames = translation.store.cleanup_unused()
except FileParseError:
return
self.extra_files.extend(filenames)
class RemoveBlankAddon(BaseCleanupAddon):
name = "weblate.cleanup.blank"
verbose = _("Remove blank strings")
description = _("Removes strings without a translation from translation files.")
events = (EVENT_POST_COMMIT, EVENT_POST_UPDATE)
icon = "eraser.svg"
def update_translations(self, component, previous_head):
for translation in self.iterate_translations(component):
filenames = translation.store.cleanup_blank()
self.extra_files.extend(filenames)
def post_commit(self, component):
self.post_update(component, None, skip_push=True)
|
from typing import List
import voluptuous as vol
from homeassistant.components.automation import AutomationActionType
from homeassistant.components.device_automation import TRIGGER_BASE_SCHEMA
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_DEVICE_ID,
CONF_DOMAIN,
CONF_ENTITY_ID,
CONF_PLATFORM,
CONF_TYPE,
)
from homeassistant.core import CALLBACK_TYPE, Event, HassJob, HomeAssistant, callback
from homeassistant.helpers import config_validation as cv, entity_registry
from homeassistant.helpers.typing import ConfigType
from .const import DOMAIN, EVENT_TURN_ON
TRIGGER_TYPES = {"turn_on"}
TRIGGER_SCHEMA = TRIGGER_BASE_SCHEMA.extend(
{
vol.Required(CONF_ENTITY_ID): cv.entity_id,
vol.Required(CONF_TYPE): vol.In(TRIGGER_TYPES),
}
)
async def async_get_triggers(hass: HomeAssistant, device_id: str) -> List[dict]:
"""List device triggers for Arcam FMJ Receiver control devices."""
registry = await entity_registry.async_get_registry(hass)
triggers = []
# Get all the integrations entities for this device
for entry in entity_registry.async_entries_for_device(registry, device_id):
if entry.domain == "media_player":
triggers.append(
{
CONF_PLATFORM: "device",
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "turn_on",
}
)
return triggers
async def async_attach_trigger(
hass: HomeAssistant,
config: ConfigType,
action: AutomationActionType,
automation_info: dict,
) -> CALLBACK_TYPE:
"""Attach a trigger."""
config = TRIGGER_SCHEMA(config)
job = HassJob(action)
if config[CONF_TYPE] == "turn_on":
entity_id = config[CONF_ENTITY_ID]
@callback
def _handle_event(event: Event):
if event.data[ATTR_ENTITY_ID] == entity_id:
hass.async_run_hass_job(
job,
{"trigger": {**config, "description": f"{DOMAIN} - {entity_id}"}},
event.context,
)
return hass.bus.async_listen(EVENT_TURN_ON, _handle_event)
return lambda: None
|
import discord
from redbot.core.bot import Red
from redbot.core.config import Config
async def is_allowed_by_hierarchy(
bot: Red, config: Config, guild: discord.Guild, mod: discord.Member, user: discord.Member
):
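    # A mod may act on a user only if their top role outranks the user's top role,
    # unless the mod is the guild owner or the bot owner, or the guild has
    # respect_hierarchy disabled.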
if not await config.guild(guild).respect_hierarchy():
return True
is_special = mod == guild.owner or await bot.is_owner(mod)
return mod.top_role.position > user.top_role.position or is_special
|
import json
from homematicip.aio.class_maps import (
TYPE_CLASS_MAP,
TYPE_GROUP_MAP,
TYPE_SECURITY_EVENT_MAP,
)
from homematicip.aio.device import AsyncDevice
from homematicip.aio.group import AsyncGroup
from homematicip.aio.home import AsyncHome
from homematicip.home import Home
from homeassistant import config_entries
from homeassistant.components.homematicip_cloud import DOMAIN as HMIPC_DOMAIN
from homeassistant.components.homematicip_cloud.generic_entity import (
ATTR_IS_GROUP,
ATTR_MODEL_TYPE,
)
from homeassistant.components.homematicip_cloud.hap import HomematicipHAP
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.setup import async_setup_component
from tests.async_mock import Mock, patch
from tests.common import load_fixture
HAPID = "3014F7110000000000000001"
HAPPIN = "5678"
AUTH_TOKEN = "1234"
HOME_JSON = "homematicip_cloud.json"
FIXTURE_DATA = load_fixture(HOME_JSON)
def get_and_check_entity_basics(hass, mock_hap, entity_id, entity_name, device_model):
"""Get and test basic device."""
ha_state = hass.states.get(entity_id)
assert ha_state is not None
if device_model:
assert ha_state.attributes[ATTR_MODEL_TYPE] == device_model
assert ha_state.name == entity_name
hmip_device = mock_hap.hmip_device_by_entity_id.get(entity_id)
if hmip_device:
if isinstance(hmip_device, AsyncDevice):
assert ha_state.attributes[ATTR_IS_GROUP] is False
elif isinstance(hmip_device, AsyncGroup):
assert ha_state.attributes[ATTR_IS_GROUP]
return ha_state, hmip_device
async def async_manipulate_test_data(
hass, hmip_device, attribute, new_value, channel=1, fire_device=None
):
"""Set new value on hmip device."""
if channel == 1:
setattr(hmip_device, attribute, new_value)
if hasattr(hmip_device, "functionalChannels"):
functional_channel = hmip_device.functionalChannels[channel]
setattr(functional_channel, attribute, new_value)
fire_target = hmip_device if fire_device is None else fire_device
if isinstance(fire_target, AsyncHome):
fire_target.fire_update_event(
fire_target._rawJSONData # pylint: disable=protected-access
)
else:
fire_target.fire_update_event()
await hass.async_block_till_done()
class HomeFactory:
"""Factory to create a HomematicIP Cloud Home."""
def __init__(
self,
hass: HomeAssistantType,
mock_connection,
hmip_config_entry: config_entries.ConfigEntry,
):
"""Initialize the Factory."""
self.hass = hass
self.mock_connection = mock_connection
self.hmip_config_entry = hmip_config_entry
async def async_get_mock_hap(
self, test_devices=[], test_groups=[]
) -> HomematicipHAP:
"""Create a mocked homematic access point."""
home_name = self.hmip_config_entry.data["name"]
mock_home = (
HomeTemplate(
connection=self.mock_connection,
home_name=home_name,
test_devices=test_devices,
test_groups=test_groups,
)
.init_home()
.get_async_home_mock()
)
self.hmip_config_entry.add_to_hass(self.hass)
with patch(
"homeassistant.components.homematicip_cloud.hap.HomematicipHAP.get_hap",
return_value=mock_home,
):
assert await async_setup_component(self.hass, HMIPC_DOMAIN, {})
await self.hass.async_block_till_done()
hap = self.hass.data[HMIPC_DOMAIN][HAPID]
mock_home.on_update(hap.async_update)
mock_home.on_create(hap.async_create_entity)
return hap
class HomeTemplate(Home):
"""
Home template as builder for home mock.
    It is based on the upstream lib's Home class to generate hmip devices
and groups based on the given homematicip_cloud.json.
All further testing activities should be done by using the AsyncHome mock,
that is generated by get_async_home_mock(self).
    The class also generates mocks of devices and groups for further testing.
"""
_typeClassMap = TYPE_CLASS_MAP
_typeGroupMap = TYPE_GROUP_MAP
_typeSecurityEventMap = TYPE_SECURITY_EVENT_MAP
def __init__(self, connection=None, home_name="", test_devices=[], test_groups=[]):
"""Init template with connection."""
super().__init__(connection=connection)
self.label = "Access Point"
self.name = home_name
self.model_type = "HmIP-HAP"
self.init_json_state = None
self.test_devices = test_devices
self.test_groups = test_groups
def _cleanup_json(self, json):
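        # Filter the fixture JSON down to only the devices and groups whose
        # labels were requested for this test.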
if self.test_devices is not None:
new_devices = {}
for json_device in json["devices"].items():
if json_device[1]["label"] in self.test_devices:
new_devices.update([json_device])
json["devices"] = new_devices
if self.test_groups is not None:
new_groups = {}
for json_group in json["groups"].items():
if json_group[1]["label"] in self.test_groups:
new_groups.update([json_group])
json["groups"] = new_groups
return json
def init_home(self):
"""Init template with json."""
self.init_json_state = self._cleanup_json(json.loads(FIXTURE_DATA))
self.update_home(json_state=self.init_json_state, clearConfig=True)
return self
def update_home(self, json_state, clearConfig: bool = False):
"""Update home and ensure that mocks are created."""
result = super().update_home(json_state, clearConfig)
self._generate_mocks()
return result
def _generate_mocks(self):
"""Generate mocks for groups and devices."""
mock_devices = []
for device in self.devices:
mock_devices.append(_get_mock(device))
self.devices = mock_devices
mock_groups = []
for group in self.groups:
mock_groups.append(_get_mock(group))
self.groups = mock_groups
def download_configuration(self):
"""Return the initial json config."""
return self.init_json_state
def get_async_home_mock(self):
"""
        Create a Mock for AsyncHome based on this template, to be used for testing.
It adds collections of mocked devices and groups to the home objects,
and sets required attributes.
"""
mock_home = Mock(
spec=AsyncHome, wraps=self, label="Access Point", modelType="HmIP-HAP"
)
mock_home.__dict__.update(self.__dict__)
return mock_home
def _get_mock(instance):
"""Create a mock and copy instance attributes over mock."""
if isinstance(instance, Mock):
instance.__dict__.update(
instance._mock_wraps.__dict__ # pylint: disable=protected-access
)
return instance
mock = Mock(spec=instance, wraps=instance)
mock.__dict__.update(instance.__dict__)
return mock
|
from os import path
import paramiko
from paramiko.ssh_exception import AuthenticationException, NoValidConnectionsError
from flask import current_app
from lemur.plugins import lemur_sftp
from lemur.common.defaults import common_name
from lemur.common.utils import parse_certificate
from lemur.plugins.bases import DestinationPlugin
class SFTPDestinationPlugin(DestinationPlugin):
title = "SFTP"
slug = "sftp-destination"
description = "Allow the uploading of certificates to SFTP"
version = lemur_sftp.VERSION
author = "Dmitry Zykov"
author_url = "https://github.com/DmitryZykov"
options = [
{
"name": "host",
"type": "str",
"required": True,
"helpMessage": "The SFTP host.",
},
{
"name": "port",
"type": "int",
"required": True,
"helpMessage": "The SFTP port, default is 22.",
"validation": r"^(6553[0-5]|655[0-2][0-9]\d|65[0-4](\d){2}|6[0-4](\d){3}|[1-5](\d){4}|[1-9](\d){0,3})",
"default": "22",
},
{
"name": "user",
"type": "str",
"required": True,
"helpMessage": "The SFTP user. Default is root.",
"default": "root",
},
{
"name": "password",
"type": "str",
"required": False,
"helpMessage": "The SFTP password (optional when the private key is used).",
"default": None,
},
{
"name": "privateKeyPath",
"type": "str",
"required": False,
"helpMessage": "The path to the RSA private key on the Lemur server (optional).",
"default": None,
},
{
"name": "privateKeyPass",
"type": "str",
"required": False,
"helpMessage": "The password for the encrypted RSA private key (optional).",
"default": None,
},
{
"name": "destinationPath",
"type": "str",
"required": True,
"helpMessage": "The SFTP path where certificates will be uploaded.",
"default": "/etc/nginx/certs",
},
{
"name": "exportFormat",
"required": True,
"value": "NGINX",
"helpMessage": "The export format for certificates.",
"type": "select",
"available": ["NGINX", "Apache"],
},
]
def open_sftp_connection(self, options):
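        # Open an SSH connection (password or RSA private key auth) and return the
        # (sftp, ssh) session pair; callers are responsible for closing ssh.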
host = self.get_option("host", options)
port = self.get_option("port", options)
user = self.get_option("user", options)
password = self.get_option("password", options)
ssh_priv_key = self.get_option("privateKeyPath", options)
ssh_priv_key_pass = self.get_option("privateKeyPass", options)
        # connect and authenticate
try:
current_app.logger.debug(
"Connecting to {0}@{1}:{2}".format(user, host, port)
)
ssh = paramiko.SSHClient()
# allow connection to the new unknown host
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# open the ssh connection
if password:
current_app.logger.debug("Using password")
ssh.connect(host, username=user, port=port, password=password)
elif ssh_priv_key:
current_app.logger.debug("Using RSA private key")
pkey = paramiko.RSAKey.from_private_key_file(
ssh_priv_key, ssh_priv_key_pass
)
ssh.connect(host, username=user, port=port, pkey=pkey)
else:
current_app.logger.error(
"No password or private key provided. Can't proceed"
)
raise AuthenticationException
# open the sftp session inside the ssh connection
return ssh.open_sftp(), ssh
except AuthenticationException as e:
current_app.logger.error("ERROR in {0}: {1}".format(e.__class__, e))
            raise AuthenticationException("Couldn't connect to {0}, due to an Authentication exception.".format(host))
except NoValidConnectionsError as e:
current_app.logger.error("ERROR in {0}: {1}".format(e.__class__, e))
            raise NoValidConnectionsError("Couldn't connect to {0}, possible timeout or invalid hostname".format(host))
# this is called when using this as a default destination plugin
def upload(self, name, body, private_key, cert_chain, options, **kwargs):
current_app.logger.debug("SFTP destination plugin is started")
cn = common_name(parse_certificate(body))
dst_path = self.get_option("destinationPath", options)
dst_path_cn = dst_path + "/" + cn
export_format = self.get_option("exportFormat", options)
# prepare files for upload
files = {cn + ".key": private_key, cn + ".pem": body}
if cert_chain:
if export_format == "NGINX":
                # assemble body + chain in a single file
files[cn + ".pem"] += "\n" + cert_chain
elif export_format == "Apache":
                # store chain in a separate file
files[cn + ".ca.bundle.pem"] = cert_chain
self.upload_file(dst_path_cn, files, options)
# this is called from the acme http challenge
def upload_acme_token(self, token_path, token, options, **kwargs):
current_app.logger.debug("SFTP destination plugin is started for HTTP-01 challenge")
dst_path = self.get_option("destinationPath", options)
_, filename = path.split(token_path)
# prepare files for upload
files = {filename: token}
self.upload_file(dst_path, files, options)
# this is called from the acme http challenge
def delete_acme_token(self, token_path, options, **kwargs):
dst_path = self.get_option("destinationPath", options)
_, filename = path.split(token_path)
        # prepare files for deletion
files = {filename: None}
self.delete_file(dst_path, files, options)
# here the file is deleted
def delete_file(self, dst_path, files, options):
try:
# open the ssh and sftp sessions
sftp, ssh = self.open_sftp_connection(options)
# delete files
for filename, _ in files.items():
current_app.logger.debug(
"Deleting {0} from {1}".format(filename, dst_path)
)
try:
sftp.remove(path.join(dst_path, filename))
except PermissionError as permerror:
if permerror.errno == 13:
current_app.logger.debug(
"Deleting {0} from {1} returned Permission Denied Error, making file writable and retrying".format(
filename, dst_path)
)
sftp.chmod(path.join(dst_path, filename), 0o600)
sftp.remove(path.join(dst_path, filename))
ssh.close()
except (AuthenticationException, NoValidConnectionsError) as e:
raise e
except Exception as e:
current_app.logger.error("ERROR in {0}: {1}".format(e.__class__, e))
try:
ssh.close()
except BaseException:
pass
# here the file is uploaded for real, this helps to keep this class DRY
def upload_file(self, dst_path, files, options):
try:
# open the ssh and sftp sessions
sftp, ssh = self.open_sftp_connection(options)
            # split the path into its segments, so we can create it recursively
allparts = []
path_copy = dst_path
while True:
parts = path.split(path_copy)
if parts[0] == path_copy: # sentinel for absolute paths
allparts.insert(0, parts[0])
break
elif parts[1] == path_copy: # sentinel for relative paths
allparts.insert(0, parts[1])
break
else:
path_copy = parts[0]
allparts.insert(0, parts[1])
# make sure that the destination path exists, recursively
remote_path = allparts[0]
for part in allparts:
try:
if part != "/" and part != "":
remote_path = path.join(remote_path, part)
sftp.stat(remote_path)
except IOError:
current_app.logger.debug("{0} doesn't exist, trying to create it".format(remote_path))
try:
sftp.mkdir(remote_path)
except IOError as ioerror:
current_app.logger.debug(
"Couldn't create {0}, error message: {1}".format(remote_path, ioerror))
# upload certificate files to the sftp destination
for filename, data in files.items():
current_app.logger.debug(
"Uploading {0} to {1}".format(filename, dst_path)
)
try:
with sftp.open(path.join(dst_path, filename), "w") as f:
f.write(data)
except PermissionError as permerror:
if permerror.errno == 13:
current_app.logger.debug(
"Uploading {0} to {1} returned Permission Denied Error, making file writable and retrying".format(
filename, dst_path)
)
sftp.chmod(path.join(dst_path, filename), 0o600)
with sftp.open(path.join(dst_path, filename), "w") as f:
f.write(data)
                    # the upload user most likely isn't the web user, so make the file world-readable (-rw-r--r--)
sftp.chmod(path.join(dst_path, filename), 0o644)
ssh.close()
except (AuthenticationException, NoValidConnectionsError) as e:
raise e
except Exception as e:
current_app.logger.error("ERROR in {0}: {1}".format(e.__class__, e))
try:
ssh.close()
except BaseException:
pass
message = ''
if hasattr(e, 'errors'):
for _, error in e.errors.items():
message = error.strerror
raise Exception(
'Couldn\'t upload file to {}, error message: {}'.format(self.get_option("host", options), message))
|
import os
import unittest
import mock
from perfkitbenchmarker import test_util
from perfkitbenchmarker.linux_benchmarks import mlperf_benchmark
from perfkitbenchmarker.sample import Sample
class MlperfBenchmarkTestCase(unittest.TestCase, test_util.SamplesTestMixin):
def setUp(self):
super(MlperfBenchmarkTestCase, self).setUp()
path = os.path.join(os.path.dirname(__file__), '..', 'data',
'mlperf_output.txt')
with open(path) as fp:
self.contents = fp.read()
@mock.patch('time.time', mock.MagicMock(return_value=1550279509.59))
def testTrainResults(self):
samples = mlperf_benchmark.MakeSamplesFromOutput({'version': 'v0.6.0'},
self.contents,
use_tpu=True,
model='resnet')
golden = [
Sample('Eval Accuracy', 32.322001457214355, '%', {
'epoch': 4,
'times': 0.0,
'version': 'v0.6.0'
}),
Sample('Eval Accuracy', 40.342000126838684, '%', {
'epoch': 8,
'times': 164.16299986839294,
'version': 'v0.6.0'
}),
Sample('Eval Accuracy', 48.21600019931793, '%', {
'epoch': 12,
'times': 328.239000082016,
'version': 'v0.6.0'
}),
Sample('Eval Accuracy', 51.749998331069946, '%', {
'epoch': 16,
'times': 492.335000038147,
'version': 'v0.6.0'
}),
Sample('Eval Accuracy', 52.851998805999756, '%', {
'epoch': 20,
'times': 656.4279999732971,
'version': 'v0.6.0'
}),
Sample('Eval Accuracy', 52.99599766731262, '%', {
'epoch': 24,
'times': 820.5209999084473,
'version': 'v0.6.0'
}),
Sample('Eval Accuracy', 60.44999957084656, '%', {
'epoch': 28,
'times': 984.6259999275208,
'version': 'v0.6.0'
}),
Sample('Eval Accuracy', 62.775999307632446, '%', {
'epoch': 32,
'times': 1148.7119998931885,
'version': 'v0.6.0'
}),
Sample('Eval Accuracy', 66.22400283813477, '%', {
'epoch': 36,
'times': 1312.8050000667572,
'version': 'v0.6.0'
}),
Sample('Eval Accuracy', 67.34600067138672, '%', {
'epoch': 40,
'times': 1476.9070000648499,
'version': 'v0.6.0'
}),
Sample('Eval Accuracy', 70.77400088310242, '%', {
'epoch': 44,
'times': 1640.994999885559,
'version': 'v0.6.0'
}),
Sample('Eval Accuracy', 72.40599989891052, '%', {
'epoch': 48,
'times': 1805.085000038147,
'version': 'v0.6.0'
}),
Sample('Eval Accuracy', 73.85799884796143, '%', {
'epoch': 52,
'times': 1969.1849999427795,
'version': 'v0.6.0'
}),
Sample('Eval Accuracy', 75.26000142097473, '%', {
'epoch': 56,
'times': 2133.2750000953674,
'version': 'v0.6.0'
}),
Sample('Eval Accuracy', 76.0420024394989, '%', {
'epoch': 60,
'times': 2297.3669998645782,
'version': 'v0.6.0'
})
]
self.assertEqual(samples, golden)
if __name__ == '__main__':
unittest.main()
|
import urwid
import urwid.raw_display
import urwid.web_display
def main():
text_header = (u"Welcome to the urwid tour! "
u"UP / DOWN / PAGE UP / PAGE DOWN scroll. F8 exits.")
text_intro = [('important', u"Text"),
u" widgets are the most common in "
u"any urwid program. This Text widget was created "
u"without setting the wrap or align mode, so it "
u"defaults to left alignment with wrapping on space "
u"characters. ",
('important', u"Change the window width"),
u" to see how the widgets on this page react. "
u"This Text widget is wrapped with a ",
('important', u"Padding"),
u" widget to keep it indented on the left and right."]
text_right = (u"This Text widget is right aligned. Wrapped "
u"words stay to the right as well. ")
text_center = u"This one is center aligned."
text_clip = (u"Text widgets may be clipped instead of wrapped.\n"
u"Extra text is discarded instead of wrapped to the next line. "
u"65-> 70-> 75-> 80-> 85-> 90-> 95-> 100>\n"
u"Newlines embedded in the string are still respected.")
text_right_clip = (u"This is a right aligned and clipped Text widget.\n"
u"<100 <-95 <-90 <-85 <-80 <-75 <-70 <-65 "
u"Text will be cut off at the left of this widget.")
text_center_clip = (u"Center aligned and clipped widgets will have "
u"text cut off both sides.")
text_ellipsis = (u"Text can be clipped using the ellipsis character (…)\n"
u"Extra text is discarded and a … mark is shown."
u"50-> 55-> 60-> 65-> 70-> 75-> 80-> 85-> 90-> 95-> 100>\n"
)
text_any = (u"The 'any' wrap mode will wrap on any character. This "
u"mode will not collapse space characters at the end of the "
u"line but it still honors embedded newline characters.\n"
u"Like this one.")
text_padding = (u"Padding widgets have many options. This "
u"is a standard Text widget wrapped with a Padding widget "
u"with the alignment set to relative 20% and with its width "
u"fixed at 40.")
text_divider = [u"The ", ('important', u"Divider"),
u" widget repeats the same character across the whole line. "
u"It can also add blank lines above and below."]
text_edit = [u"The ", ('important', u"Edit"),
u" widget is a simple text editing widget. It supports cursor "
u"movement and tries to maintain the current column when focus "
u"moves to another edit widget. It wraps and aligns the same "
u"way as Text widgets." ]
text_edit_cap1 = ('editcp', u"This is a caption. Edit here: ")
text_edit_text1 = u"editable stuff"
text_edit_cap2 = ('editcp', u"This one supports newlines: ")
text_edit_text2 = (u"line one starts them all\n"
u"== line 2 == with some more text to edit.. words.. whee..\n"
u"LINE III, the line to end lines one and two, unless you "
u"change something.")
text_edit_cap3 = ('editcp', u"This one is clipped, try "
u"editing past the edge: ")
text_edit_text3 = u"add some text here -> -> -> ...."
text_edit_alignments = u"Different Alignments:"
text_edit_left = u"left aligned (default)"
text_edit_center = u"center aligned"
text_edit_right = u"right aligned"
text_intedit = ('editcp', [('important', u"IntEdit"),
u" allows only numbers: "])
text_edit_padding = ('editcp', u"Edit widget within a Padding widget ")
text_columns1 = [('important', u"Columns"),
u" are used to share horizontal screen space. "
u"This one splits the space into two parts with "
u"three characters between each column. The "
u"contents of each column is a single widget."]
text_columns2 = [u"When you need to put more than one "
u"widget into a column you can use a ",('important',
u"Pile"), u" to combine two or more widgets."]
text_col_columns = u"Columns may be placed inside other columns."
text_col_21 = u"Col 2.1"
text_col_22 = u"Col 2.2"
text_col_23 = u"Col 2.3"
text_column_widths = (u"Columns may also have uneven relative "
u"weights or fixed widths. Use a minimum width so that "
u"columns don't become too small.")
text_weight = u"Weight %d"
text_fixed_9 = u"<Fixed 9>" # should be 9 columns wide
text_fixed_14 = u"<--Fixed 14-->" # should be 14 columns wide
text_edit_col_cap1 = ('editcp', u"Edit widget within Columns")
text_edit_col_text1 = u"here's\nsome\ninfo"
text_edit_col_cap2 = ('editcp', u"and within Pile ")
text_edit_col_text2 = u"more"
text_edit_col_cap3 = ('editcp', u"another ")
text_edit_col_text3 = u"still more"
text_gridflow = [u"A ",('important', u"GridFlow"), u" widget "
u"may be used to display a list of flow widgets with equal "
u"widths. Widgets that don't fit on the first line will "
u"flow to the next. This is useful for small widgets that "
u"you want to keep together such as ", ('important', u"Button"),
u", ",('important', u"CheckBox"), u" and ",
('important', u"RadioButton"), u" widgets." ]
text_button_list = [u"Yes", u"No", u"Perhaps", u"Certainly", u"Partially",
u"Tuesdays Only", u"Help"]
text_cb_list = [u"Wax", u"Wash", u"Buff", u"Clear Coat", u"Dry",
u"Racing Stripe"]
text_rb_list = [u"Morning", u"Afternoon", u"Evening", u"Weekend"]
text_listbox = [u"All these widgets have been displayed "
u"with the help of a ", ('important', u"ListBox"), u" widget. "
u"ListBox widgets handle scrolling and changing focus. A ",
('important', u"Frame"), u" widget is used to keep the "
u"instructions at the top of the screen."]
def button_press(button):
frame.footer = urwid.AttrWrap(urwid.Text(
[u"Pressed: ", button.get_label()]), 'header')
radio_button_group = []
blank = urwid.Divider()
listbox_content = [
blank,
urwid.Padding(urwid.Text(text_intro), left=2, right=2, min_width=20),
blank,
urwid.Text(text_right, align='right'),
blank,
urwid.Text(text_center, align='center'),
blank,
urwid.Text(text_clip, wrap='clip'),
blank,
urwid.Text(text_right_clip, align='right', wrap='clip'),
blank,
urwid.Text(text_center_clip, align='center', wrap='clip'),
blank,
urwid.Text(text_ellipsis, wrap='ellipsis'),
blank,
urwid.Text(text_any, wrap='any'),
blank,
urwid.Padding(urwid.Text(text_padding), ('relative', 20), 40),
blank,
urwid.AttrWrap(urwid.Divider("=", 1), 'bright'),
urwid.Padding(urwid.Text(text_divider), left=2, right=2, min_width=20),
urwid.AttrWrap(urwid.Divider("-", 0, 1), 'bright'),
blank,
urwid.Padding(urwid.Text(text_edit), left=2, right=2, min_width=20),
blank,
urwid.AttrWrap(urwid.Edit(text_edit_cap1, text_edit_text1),
'editbx', 'editfc'),
blank,
urwid.AttrWrap(urwid.Edit(text_edit_cap2, text_edit_text2,
multiline=True ), 'editbx', 'editfc'),
blank,
urwid.AttrWrap(urwid.Edit(text_edit_cap3, text_edit_text3,
wrap='clip' ), 'editbx', 'editfc'),
blank,
urwid.Text(text_edit_alignments),
urwid.AttrWrap(urwid.Edit("", text_edit_left, align='left'),
'editbx', 'editfc' ),
urwid.AttrWrap(urwid.Edit("", text_edit_center,
align='center'), 'editbx', 'editfc' ),
urwid.AttrWrap(urwid.Edit("", text_edit_right, align='right'),
'editbx', 'editfc' ),
blank,
urwid.AttrWrap(urwid.IntEdit(text_intedit, 123),
'editbx', 'editfc' ),
blank,
urwid.Padding(urwid.AttrWrap(urwid.Edit(text_edit_padding, ""),
'editbx','editfc' ), left=10, width=50),
blank,
blank,
urwid.AttrWrap(urwid.Columns([
urwid.Divider("."),
urwid.Divider(","),
urwid.Divider("."),
]), 'bright'),
blank,
urwid.Columns([
urwid.Padding(urwid.Text(text_columns1), left=2, right=0,
min_width=20),
urwid.Pile([
urwid.Divider("~"),
urwid.Text(text_columns2),
urwid.Divider("_")])
], 3),
blank,
blank,
urwid.Columns([
urwid.Text(text_col_columns),
urwid.Columns([
urwid.Text(text_col_21),
urwid.Text(text_col_22),
urwid.Text(text_col_23),
], 1),
], 2),
blank,
urwid.Padding(urwid.Text(text_column_widths), left=2, right=2,
min_width=20),
blank,
urwid.Columns( [
urwid.AttrWrap(urwid.Text(text_weight % 1),'reverse'),
('weight', 2, urwid.Text(text_weight % 2)),
('weight', 3, urwid.AttrWrap(urwid.Text(
text_weight % 3), 'reverse')),
('weight', 4, urwid.Text(text_weight % 4)),
('weight', 5, urwid.AttrWrap(urwid.Text(
text_weight % 5), 'reverse')),
('weight', 6, urwid.Text(text_weight % 6)),
], 0, min_width=8),
blank,
urwid.Columns([
('weight', 2, urwid.AttrWrap(urwid.Text(
text_weight % 2), 'reverse')),
('fixed', 9, urwid.Text(text_fixed_9)),
('weight', 3, urwid.AttrWrap(urwid.Text(
text_weight % 2), 'reverse')),
('fixed', 14, urwid.Text(text_fixed_14)),
], 0, min_width=8),
blank,
urwid.Columns([
urwid.AttrWrap(urwid.Edit(text_edit_col_cap1,
text_edit_col_text1, multiline=True),
'editbx','editfc'),
urwid.Pile([
urwid.AttrWrap(urwid.Edit(
text_edit_col_cap2,
text_edit_col_text2),
'editbx','editfc'),
blank,
urwid.AttrWrap(urwid.Edit(
text_edit_col_cap3,
text_edit_col_text3),
'editbx','editfc'),
]),
], 1),
blank,
urwid.AttrWrap(urwid.Columns([
urwid.Divider("'"),
urwid.Divider('"'),
urwid.Divider("~"),
urwid.Divider('"'),
urwid.Divider("'"),
]), 'bright'),
blank,
blank,
urwid.Padding(urwid.Text(text_gridflow), left=2, right=2,
min_width=20),
blank,
urwid.Padding(urwid.GridFlow(
[urwid.AttrWrap(urwid.Button(txt, button_press),
'buttn','buttnf') for txt in text_button_list],
13, 3, 1, 'left'),
left=4, right=3, min_width=13),
blank,
urwid.Padding(urwid.GridFlow(
[urwid.AttrWrap(urwid.CheckBox(txt),'buttn','buttnf')
for txt in text_cb_list],
10, 3, 1, 'left') ,
left=4, right=3, min_width=10),
blank,
urwid.Padding(urwid.GridFlow(
[urwid.AttrWrap(urwid.RadioButton(radio_button_group,
txt), 'buttn','buttnf')
for txt in text_rb_list],
13, 3, 1, 'left') ,
left=4, right=3, min_width=13),
blank,
blank,
urwid.Padding(urwid.Text(text_listbox), left=2, right=2,
min_width=20),
blank,
blank,
]
header = urwid.AttrWrap(urwid.Text(text_header), 'header')
listbox = urwid.ListBox(urwid.SimpleListWalker(listbox_content))
frame = urwid.Frame(urwid.AttrWrap(listbox, 'body'), header=header)
palette = [
('body','black','light gray', 'standout'),
('reverse','light gray','black'),
('header','white','dark red', 'bold'),
('important','dark blue','light gray',('standout','underline')),
('editfc','white', 'dark blue', 'bold'),
('editbx','light gray', 'dark blue'),
('editcp','black','light gray', 'standout'),
('bright','dark gray','light gray', ('bold','standout')),
('buttn','black','dark cyan'),
('buttnf','white','dark blue','bold'),
]
# use appropriate Screen class
if urwid.web_display.is_web_request():
screen = urwid.web_display.Screen()
else:
screen = urwid.raw_display.Screen()
def unhandled(key):
if key == 'f8':
raise urwid.ExitMainLoop()
urwid.MainLoop(frame, palette, screen,
unhandled_input=unhandled).run()
def setup():
urwid.web_display.set_preferences("Urwid Tour")
# try to handle short web requests quickly
if urwid.web_display.handle_short_request():
return
main()
if '__main__'==__name__ or urwid.web_display.is_web_request():
setup()
|
import pytest
import pytz
from datetime import datetime
from decimal import Decimal
from uuid import uuid4
from unittest.mock import MagicMock, Mock
from kombu.utils.encoding import str_to_bytes
from kombu.utils.json import _DecodeError, dumps, loads
class Custom:
def __init__(self, data):
self.data = data
def __json__(self):
return self.data
class test_JSONEncoder:
def test_datetime(self):
now = datetime.utcnow()
now_utc = now.replace(tzinfo=pytz.utc)
stripped = datetime(*now.timetuple()[:3])
serialized = loads(dumps({
'datetime': now,
'tz': now_utc,
'date': now.date(),
'time': now.time()},
))
assert serialized == {
'datetime': now.isoformat(),
'tz': '{}Z'.format(now_utc.isoformat().split('+', 1)[0]),
'time': now.time().isoformat(),
'date': stripped.isoformat(),
}
def test_Decimal(self):
d = Decimal('3314132.13363235235324234123213213214134')
        assert loads(dumps({'d': d})) == {'d': str(d)}
def test_UUID(self):
id = uuid4()
        assert loads(dumps({'u': id})) == {'u': str(id)}
def test_default(self):
with pytest.raises(TypeError):
dumps({'o': object()})
class test_dumps_loads:
def test_dumps_custom_object(self):
x = {'foo': Custom({'a': 'b'})}
assert loads(dumps(x)) == {'foo': x['foo'].__json__()}
def test_dumps_custom_object_no_json(self):
x = {'foo': object()}
with pytest.raises(TypeError):
dumps(x)
def test_loads_memoryview(self):
assert loads(
memoryview(bytearray(dumps({'x': 'z'}), encoding='utf-8'))
) == {'x': 'z'}
def test_loads_bytearray(self):
assert loads(
bytearray(dumps({'x': 'z'}), encoding='utf-8')
) == {'x': 'z'}
def test_loads_bytes(self):
assert loads(
str_to_bytes(dumps({'x': 'z'})),
decode_bytes=True) == {'x': 'z'}
def test_loads_DecodeError(self):
_loads = Mock(name='_loads')
_loads.side_effect = _DecodeError(
MagicMock(), MagicMock(), MagicMock())
assert loads(dumps({'x': 'z'}), _loads=_loads) == {'x': 'z'}
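# Illustrative sketch (not part of the original test suite): any object exposing a
# __json__ method, like Custom above, is serialized by dumps() through that hook,
# so a dumps()/loads() round-trip yields the plain data __json__ returned.
if __name__ == '__main__':
    payload = {'custom': Custom({'a': 'b'}), 'plain': [1, 2, 3]}
    assert loads(dumps(payload)) == {'custom': {'a': 'b'}, 'plain': [1, 2, 3]}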
|
from typing import Any, Dict, Optional
from xknx.devices import BinarySensor as XknxBinarySensor
from homeassistant.components.binary_sensor import DEVICE_CLASSES, BinarySensorEntity
from .const import ATTR_COUNTER, DOMAIN
from .knx_entity import KnxEntity
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up binary sensor(s) for KNX platform."""
entities = []
for device in hass.data[DOMAIN].xknx.devices:
if isinstance(device, XknxBinarySensor):
entities.append(KNXBinarySensor(device))
async_add_entities(entities)
class KNXBinarySensor(KnxEntity, BinarySensorEntity):
"""Representation of a KNX binary sensor."""
def __init__(self, device: XknxBinarySensor):
"""Initialize of KNX binary sensor."""
super().__init__(device)
@property
def device_class(self):
"""Return the class of this sensor."""
if self._device.device_class in DEVICE_CLASSES:
return self._device.device_class
return None
@property
def is_on(self):
"""Return true if the binary sensor is on."""
return self._device.is_on()
@property
def device_state_attributes(self) -> Optional[Dict[str, Any]]:
"""Return device specific state attributes."""
return {ATTR_COUNTER: self._device.counter}
|
from homeassistant.helpers.entity import Entity
from homeassistant.util.dt import as_local, utcnow
from .const import (
ATTR_ELEMENT,
ATTR_MODALITY,
DOMAIN,
ELEMENT_AIR,
ELEMENT_EARTH,
ELEMENT_FIRE,
ELEMENT_WATER,
MODALITY_CARDINAL,
MODALITY_FIXED,
MODALITY_MUTABLE,
SIGN_AQUARIUS,
SIGN_ARIES,
SIGN_CANCER,
SIGN_CAPRICORN,
SIGN_GEMINI,
SIGN_LEO,
SIGN_LIBRA,
SIGN_PISCES,
SIGN_SAGITTARIUS,
SIGN_SCORPIO,
SIGN_TAURUS,
SIGN_VIRGO,
)
ZODIAC_BY_DATE = (
(
(21, 3),
(20, 4),
SIGN_ARIES,
{
ATTR_ELEMENT: ELEMENT_FIRE,
ATTR_MODALITY: MODALITY_CARDINAL,
},
),
(
(21, 4),
(20, 5),
SIGN_TAURUS,
{
ATTR_ELEMENT: ELEMENT_EARTH,
ATTR_MODALITY: MODALITY_FIXED,
},
),
(
(21, 5),
(21, 6),
SIGN_GEMINI,
{
ATTR_ELEMENT: ELEMENT_AIR,
ATTR_MODALITY: MODALITY_MUTABLE,
},
),
(
(22, 6),
(22, 7),
SIGN_CANCER,
{
ATTR_ELEMENT: ELEMENT_WATER,
ATTR_MODALITY: MODALITY_CARDINAL,
},
),
(
(23, 7),
(22, 8),
SIGN_LEO,
{
ATTR_ELEMENT: ELEMENT_FIRE,
ATTR_MODALITY: MODALITY_FIXED,
},
),
(
(23, 8),
(21, 9),
SIGN_VIRGO,
{
ATTR_ELEMENT: ELEMENT_EARTH,
ATTR_MODALITY: MODALITY_MUTABLE,
},
),
(
(22, 9),
(22, 10),
SIGN_LIBRA,
{
ATTR_ELEMENT: ELEMENT_AIR,
ATTR_MODALITY: MODALITY_CARDINAL,
},
),
(
(23, 10),
(22, 11),
SIGN_SCORPIO,
{
ATTR_ELEMENT: ELEMENT_WATER,
ATTR_MODALITY: MODALITY_FIXED,
},
),
(
(23, 11),
(21, 12),
SIGN_SAGITTARIUS,
{
ATTR_ELEMENT: ELEMENT_FIRE,
ATTR_MODALITY: MODALITY_MUTABLE,
},
),
(
(22, 12),
(20, 1),
SIGN_CAPRICORN,
{
ATTR_ELEMENT: ELEMENT_EARTH,
ATTR_MODALITY: MODALITY_CARDINAL,
},
),
(
(21, 1),
(19, 2),
SIGN_AQUARIUS,
{
ATTR_ELEMENT: ELEMENT_AIR,
ATTR_MODALITY: MODALITY_FIXED,
},
),
(
(20, 2),
(20, 3),
SIGN_PISCES,
{
ATTR_ELEMENT: ELEMENT_WATER,
ATTR_MODALITY: MODALITY_MUTABLE,
},
),
)
ZODIAC_ICONS = {
SIGN_ARIES: "mdi:zodiac-aries",
SIGN_TAURUS: "mdi:zodiac-taurus",
SIGN_GEMINI: "mdi:zodiac-gemini",
SIGN_CANCER: "mdi:zodiac-cancer",
SIGN_LEO: "mdi:zodiac-leo",
SIGN_VIRGO: "mdi:zodiac-virgo",
SIGN_LIBRA: "mdi:zodiac-libra",
SIGN_SCORPIO: "mdi:zodiac-scorpio",
SIGN_SAGITTARIUS: "mdi:zodiac-sagittarius",
SIGN_CAPRICORN: "mdi:zodiac-capricorn",
SIGN_AQUARIUS: "mdi:zodiac-aquarius",
SIGN_PISCES: "mdi:zodiac-pisces",
}
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Zodiac sensor platform."""
if discovery_info is None:
return
async_add_entities([ZodiacSensor()], True)
class ZodiacSensor(Entity):
"""Representation of a Zodiac sensor."""
def __init__(self):
"""Initialize the zodiac sensor."""
self._attrs = None
self._state = None
@property
def unique_id(self):
"""Return a unique ID."""
return DOMAIN
@property
def name(self):
"""Return the name of the entity."""
return "Zodiac"
@property
def device_class(self):
"""Return the device class of the entity."""
return "zodiac__sign"
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return ZODIAC_ICONS.get(self._state)
@property
def device_state_attributes(self):
"""Return the state attributes."""
return self._attrs
async def async_update(self):
"""Get the time and updates the state."""
today = as_local(utcnow()).date()
month = int(today.month)
day = int(today.day)
for sign in ZODIAC_BY_DATE:
if (month == sign[0][1] and day >= sign[0][0]) or (
month == sign[1][1] and day <= sign[1][0]
):
self._state = sign[2]
self._attrs = sign[3]
break
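# Illustrative sketch (not part of the original platform): the date-to-sign lookup
# performed in async_update above, expressed as a plain helper function. The helper
# name and signature are hypothetical; only ZODIAC_BY_DATE from this module is used.
def _zodiac_sign_for(day, month):
    """Return the (sign, attributes) pair matching a day/month, or (None, None)."""
    for (start_day, start_month), (end_day, end_month), sign, attrs in ZODIAC_BY_DATE:
        if (month == start_month and day >= start_day) or (
            month == end_month and day <= end_day
        ):
            return sign, attrs
    return None, None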
|
from pytest import fixture
from surepy import SurePetcare
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from tests.async_mock import AsyncMock, patch
@fixture()
def surepetcare(hass):
"""Mock the SurePetcare for easier testing."""
with patch("homeassistant.components.surepetcare.SurePetcare") as mock_surepetcare:
instance = mock_surepetcare.return_value = SurePetcare(
"test-username",
"test-password",
hass.loop,
async_get_clientsession(hass),
api_timeout=1,
)
instance.get_data = AsyncMock(return_value=None)
yield mock_surepetcare
|
from urllib.parse import urlparse
from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.validators import validate_ipv46_address
from django.http import Http404, HttpResponsePermanentRedirect
from django.urls import is_valid_path, reverse
from django.utils.http import escape_leading_slashes
from weblate.lang.models import Language
from weblate.trans.models import Change, Component, Project
from weblate.utils.errors import report_error
from weblate.utils.site import get_site_url
CSP_TEMPLATE = (
"default-src 'self'; style-src {0}; img-src {1}; script-src {2}; "
"connect-src {3}; object-src 'none'; font-src {4};"
"frame-src 'none'; frame-ancestors 'none';"
)
# URLs requiring inline javascript
INLINE_PATHS = {"social:begin", "djangosaml2idp:saml_login_process"}
class ProxyMiddleware:
"""Middleware that updates REMOTE_ADDR from proxy.
Note that this can have security implications and settings have to match your actual
proxy setup.
"""
def __init__(self, get_response=None):
self.get_response = get_response
def __call__(self, request):
# Fake HttpRequest attribute to inject configured
# site name into build_absolute_uri
request._current_scheme_host = get_site_url()
# Actual proxy handling
proxy = None
if settings.IP_BEHIND_REVERSE_PROXY:
proxy = request.META.get(settings.IP_PROXY_HEADER)
if proxy:
# X_FORWARDED_FOR returns client1, proxy1, proxy2,...
address = proxy.split(", ")[settings.IP_PROXY_OFFSET].strip()
try:
validate_ipv46_address(address)
request.META["REMOTE_ADDR"] = address
except ValidationError:
report_error(cause="Invalid IP address")
return self.get_response(request)
class RedirectMiddleware:
"""
Middleware that handles URL redirecting.
    This is used for fuzzy lookups of projects, for example case-insensitive
    matches or matches after renaming.
"""
def __init__(self, get_response=None):
self.get_response = get_response
def __call__(self, request):
response = self.get_response(request)
# This is based on APPEND_SLASH handling in Django
if response.status_code == 404:
if self.should_redirect_with_slash(request):
new_path = request.get_full_path(force_append_slash=True)
# Prevent construction of scheme relative urls.
new_path = escape_leading_slashes(new_path)
return HttpResponsePermanentRedirect(new_path)
return response
def should_redirect_with_slash(self, request):
path = request.path_info
# Avoid redirecting non GET requests, these would fail anyway
if path.endswith("/") or request.method != "GET":
return False
urlconf = getattr(request, "urlconf", None)
slash_path = f"{path}/"
return not is_valid_path(path, urlconf) and is_valid_path(slash_path, urlconf)
def fixup_language(self, lang):
return Language.objects.fuzzy_get(code=lang, strict=True)
def fixup_project(self, slug, request):
try:
project = Project.objects.get(slug__iexact=slug)
except Project.DoesNotExist:
try:
project = (
Change.objects.filter(
action=Change.ACTION_RENAME_PROJECT,
old=slug,
)
.order()[0]
.project
)
except IndexError:
return None
request.user.check_access(project)
return project
def fixup_component(self, slug, request, project):
try:
component = Component.objects.get(project=project, slug__iexact=slug)
except Component.DoesNotExist:
try:
component = (
Change.objects.filter(
action=Change.ACTION_RENAME_COMPONENT, old=slug
)
.order()[0]
.component
)
except IndexError:
return None
request.user.check_access_component(component)
return component
def process_exception(self, request, exception):
if not isinstance(exception, Http404):
return None
try:
resolver_match = request.resolver_match
except AttributeError:
return None
kwargs = dict(resolver_match.kwargs)
if "lang" in kwargs:
language = self.fixup_language(kwargs["lang"])
if language is None:
return None
kwargs["lang"] = language.code
if "project" in kwargs:
project = self.fixup_project(kwargs["project"], request)
if project is None:
return None
kwargs["project"] = project.slug
if "component" in kwargs:
component = self.fixup_component(kwargs["component"], request, project)
if component is None:
return None
kwargs["component"] = component.slug
if kwargs != resolver_match.kwargs:
query = request.META["QUERY_STRING"]
if query:
query = f"?{query}"
return HttpResponsePermanentRedirect(
reverse(resolver_match.url_name, kwargs=kwargs) + query
)
return None
class SecurityMiddleware:
"""Middleware that sets Content-Security-Policy."""
def __init__(self, get_response=None):
self.get_response = get_response
def __call__(self, request):
response = self.get_response(request)
# No CSP for debug mode (to allow djdt or error pages)
if settings.DEBUG:
return response
style = {"'self'", "'unsafe-inline'"} | set(settings.CSP_STYLE_SRC)
script = {"'self'"} | set(settings.CSP_SCRIPT_SRC)
image = {"'self'"} | set(settings.CSP_IMG_SRC)
connect = {"'self'"} | set(settings.CSP_CONNECT_SRC)
font = {"'self'"} | set(settings.CSP_FONT_SRC)
if request.resolver_match and request.resolver_match.view_name in INLINE_PATHS:
script.add("'unsafe-inline'")
# Support form
if request.resolver_match and request.resolver_match.view_name == "manage":
script.add("'care.weblate.org'")
# Rollbar client errors reporting
if (
hasattr(settings, "ROLLBAR")
and "client_token" in settings.ROLLBAR
and "environment" in settings.ROLLBAR
and response.status_code == 500
):
script.add("'unsafe-inline'")
script.add("cdnjs.cloudflare.com")
connect.add("api.rollbar.com")
# Sentry user feedback
if settings.SENTRY_DSN and response.status_code == 500:
domain = urlparse(settings.SENTRY_DSN).hostname
script.add(domain)
script.add("sentry.io")
connect.add(domain)
connect.add("sentry.io")
script.add("'unsafe-inline'")
image.add("data:")
# Matomo (Piwik) analytics
if settings.MATOMO_URL:
domain = urlparse(settings.MATOMO_URL).hostname
script.add(domain)
image.add(domain)
connect.add(domain)
# Google Analytics
if settings.GOOGLE_ANALYTICS_ID:
script.add("'unsafe-inline'")
script.add("www.google-analytics.com")
image.add("www.google-analytics.com")
# External media URL
if "://" in settings.MEDIA_URL:
domain = urlparse(settings.MEDIA_URL).hostname
image.add(domain)
# External static URL
if "://" in settings.STATIC_URL:
domain = urlparse(settings.STATIC_URL).hostname
script.add(domain)
image.add(domain)
style.add(domain)
font.add(domain)
# CDN for fonts
if settings.FONTS_CDN_URL:
domain = urlparse(settings.FONTS_CDN_URL).hostname
style.add(domain)
font.add(domain)
# When using external image for Auth0 provider, add it here
if "://" in settings.SOCIAL_AUTH_AUTH0_IMAGE:
domain = urlparse(settings.SOCIAL_AUTH_AUTH0_IMAGE).hostname
image.add(domain)
response["Content-Security-Policy"] = CSP_TEMPLATE.format(
" ".join(style),
" ".join(image),
" ".join(script),
" ".join(connect),
" ".join(font),
)
if settings.SENTRY_SECURITY:
response["Content-Security-Policy"] += " report-uri {}".format(
settings.SENTRY_SECURITY
)
response["Expect-CT"] = 'max-age=86400, enforce, report-uri="{}"'.format(
settings.SENTRY_SECURITY
)
return response
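# Illustrative sketch (not part of Weblate): how CSP_TEMPLATE is filled in from the
# per-directive source sets built in SecurityMiddleware.__call__ above. The sample
# sets are made up; sorted() is only used here to make the output deterministic.
def _example_csp_header():
    style = {"'self'", "'unsafe-inline'"}
    image = {"'self'", "data:"}
    script = {"'self'", "cdnjs.cloudflare.com"}
    connect = {"'self'"}
    font = {"'self'"}
    return CSP_TEMPLATE.format(
        " ".join(sorted(style)),
        " ".join(sorted(image)),
        " ".join(sorted(script)),
        " ".join(sorted(connect)),
        " ".join(sorted(font)),
    )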
|
import hashlib
import binascii
import uuid
def is_empty(s):
    '''Return True if the string is None or empty.'''
if s is None or s == '':
return True
return False
def is_true(s):
    '''Return True if s is the boolean True or the string 'true'.'''
if s is True:
return True
if s == 'true':
return True
return False
def md5_salt(s, salt="webhook"):
    '''
    MD5 with salt: even if two users use the same password, the salt generated
    for each of them differs, so their resulting hash values differ as well.
    '''
if s:
return md5(s + salt)
else:
return ''
def md5(s):
    '''
    Return the hex MD5 digest of s.
    '''
    m = hashlib.md5()
    # hashlib requires bytes; accept str transparently
    if isinstance(s, str):
        s = s.encode('utf-8')
    m.update(s)
return m.hexdigest()
def crc32_hash(v):
"""
    Generates the crc32 hash of v.
    @return: str, the hex string for the crc32 of v (crc32b)
    """
    return '%x' % (binascii.crc32(v) & 0xffffffff)  # keep the low 32 bits; %x formats as hexadecimal
def md5_token(salt=None):
s = str(uuid.uuid1())
if salt:
return md5_salt(s, salt)
return md5(s)
# Get a new token, guaranteed to be unique
def crc32_token():
return crc32_hash(str(uuid.uuid1()).encode('utf-8'))
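# Illustrative sketch (not part of the original module): salting the same input with
# different salt values yields different digests, which is what md5_salt() above is
# for. The sample values are made up.
if __name__ == '__main__':
    print(md5_salt('secret', salt='user-a'))  # digest for salt "user-a"
    print(md5_salt('secret', salt='user-b'))  # a different digest for salt "user-b"
    print(md5_token())                        # random, uuid1-based token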
|
from datetime import timedelta
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_COLOR_TEMP,
ATTR_HS_COLOR,
DOMAIN as SENSOR_DOMAIN,
ENTITY_ID_FORMAT,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_COLOR_TEMP,
LightEntity,
)
from homeassistant.const import CONF_PLATFORM
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.util import color as colorutil
from . import TuyaDevice
from .const import (
CONF_BRIGHTNESS_RANGE_MODE,
CONF_MAX_KELVIN,
CONF_MIN_KELVIN,
CONF_SUPPORT_COLOR,
CONF_TUYA_MAX_COLTEMP,
DEFAULT_TUYA_MAX_COLTEMP,
DOMAIN,
SIGNAL_CONFIG_ENTITY,
TUYA_DATA,
TUYA_DISCOVERY_NEW,
)
SCAN_INTERVAL = timedelta(seconds=15)
TUYA_BRIGHTNESS_RANGE0 = (1, 255)
TUYA_BRIGHTNESS_RANGE1 = (10, 1000)
BRIGHTNESS_MODES = {
0: TUYA_BRIGHTNESS_RANGE0,
1: TUYA_BRIGHTNESS_RANGE1,
}
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up tuya sensors dynamically through tuya discovery."""
platform = config_entry.data[CONF_PLATFORM]
async def async_discover_sensor(dev_ids):
"""Discover and add a discovered tuya sensor."""
if not dev_ids:
return
entities = await hass.async_add_executor_job(
_setup_entities,
hass,
dev_ids,
platform,
)
async_add_entities(entities)
async_dispatcher_connect(
hass, TUYA_DISCOVERY_NEW.format(SENSOR_DOMAIN), async_discover_sensor
)
devices_ids = hass.data[DOMAIN]["pending"].pop(SENSOR_DOMAIN)
await async_discover_sensor(devices_ids)
def _setup_entities(hass, dev_ids, platform):
"""Set up Tuya Light device."""
tuya = hass.data[DOMAIN][TUYA_DATA]
entities = []
for dev_id in dev_ids:
device = tuya.get_device_by_id(dev_id)
if device is None:
continue
entities.append(TuyaLight(device, platform))
return entities
class TuyaLight(TuyaDevice, LightEntity):
"""Tuya light device."""
def __init__(self, tuya, platform):
"""Init Tuya light device."""
super().__init__(tuya, platform)
self.entity_id = ENTITY_ID_FORMAT.format(tuya.object_id())
self._min_kelvin = tuya.max_color_temp()
self._max_kelvin = tuya.min_color_temp()
@callback
def _process_config(self):
"""Set device config parameter."""
config = self._get_device_config()
if not config:
return
# support color config
supp_color = config.get(CONF_SUPPORT_COLOR, False)
if supp_color:
self._tuya.force_support_color()
# brightness range config
self._tuya.brightness_white_range = BRIGHTNESS_MODES.get(
config.get(CONF_BRIGHTNESS_RANGE_MODE, 0),
TUYA_BRIGHTNESS_RANGE0,
)
# color set temp range
min_tuya = self._tuya.max_color_temp()
min_kelvin = config.get(CONF_MIN_KELVIN, min_tuya)
max_tuya = self._tuya.min_color_temp()
max_kelvin = config.get(CONF_MAX_KELVIN, max_tuya)
self._min_kelvin = min(max(min_kelvin, min_tuya), max_tuya)
self._max_kelvin = min(max(max_kelvin, self._min_kelvin), max_tuya)
# color shown temp range
max_color_temp = max(
config.get(CONF_TUYA_MAX_COLTEMP, DEFAULT_TUYA_MAX_COLTEMP),
DEFAULT_TUYA_MAX_COLTEMP,
)
self._tuya.color_temp_range = (1000, max_color_temp)
async def async_added_to_hass(self):
"""Set config parameter when add to hass."""
await super().async_added_to_hass()
self._process_config()
self.async_on_remove(
async_dispatcher_connect(
self.hass, SIGNAL_CONFIG_ENTITY, self._process_config
)
)
return
@property
def brightness(self):
"""Return the brightness of the light."""
if self._tuya.brightness() is None:
return None
return int(self._tuya.brightness())
@property
def hs_color(self):
"""Return the hs_color of the light."""
return tuple(map(int, self._tuya.hs_color()))
@property
def color_temp(self):
"""Return the color_temp of the light."""
        color_temp = self._tuya.color_temp()
        if color_temp is None:
            return None
        return colorutil.color_temperature_kelvin_to_mired(int(color_temp))
@property
def is_on(self):
"""Return true if light is on."""
return self._tuya.state()
@property
def min_mireds(self):
"""Return color temperature min mireds."""
return colorutil.color_temperature_kelvin_to_mired(self._max_kelvin)
@property
def max_mireds(self):
"""Return color temperature max mireds."""
return colorutil.color_temperature_kelvin_to_mired(self._min_kelvin)
def turn_on(self, **kwargs):
"""Turn on or control the light."""
if (
ATTR_BRIGHTNESS not in kwargs
and ATTR_HS_COLOR not in kwargs
and ATTR_COLOR_TEMP not in kwargs
):
self._tuya.turn_on()
if ATTR_BRIGHTNESS in kwargs:
self._tuya.set_brightness(kwargs[ATTR_BRIGHTNESS])
if ATTR_HS_COLOR in kwargs:
self._tuya.set_color(kwargs[ATTR_HS_COLOR])
if ATTR_COLOR_TEMP in kwargs:
color_temp = colorutil.color_temperature_mired_to_kelvin(
kwargs[ATTR_COLOR_TEMP]
)
self._tuya.set_color_temp(color_temp)
def turn_off(self, **kwargs):
"""Instruct the light to turn off."""
self._tuya.turn_off()
@property
def supported_features(self):
"""Flag supported features."""
supports = SUPPORT_BRIGHTNESS
if self._tuya.support_color():
supports = supports | SUPPORT_COLOR
if self._tuya.support_color_temp():
supports = supports | SUPPORT_COLOR_TEMP
return supports
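# Illustrative sketch (not part of the original platform): min_mireds/max_mireds
# above rely on the usual mired/kelvin relationship, mired = 1,000,000 / kelvin,
# which is why the maximum kelvin value maps to the minimum mired value and vice
# versa. A simplified standalone equivalent (rounding may differ from Home
# Assistant's helper):
def _kelvin_to_mired_example(kelvin):
    """Convert a color temperature in kelvin to mireds."""
    return int(round(1_000_000 / kelvin))
# e.g. _kelvin_to_mired_example(6500) -> 154 (cool white),
#      _kelvin_to_mired_example(2700) -> 370 (warm white)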
|
import sys
from itertools import count
from unittest.mock import Mock, patch
from case import mock
from kombu import Connection
from kombu.transport import pyamqp
def test_amqps_connection():
conn = Connection('amqps://')
assert conn.transport # evaluate transport, don't connect
assert conn.ssl
class MockConnection(dict):
def __setattr__(self, key, value):
self[key] = value
def connect(self):
pass
class test_Channel:
def setup(self):
class Channel(pyamqp.Channel):
wait_returns = []
def _x_open(self, *args, **kwargs):
pass
def wait(self, *args, **kwargs):
return self.wait_returns
def _send_method(self, *args, **kwargs):
pass
self.conn = Mock()
self.conn._get_free_channel_id.side_effect = count(0).__next__
self.conn.channels = {}
self.channel = Channel(self.conn, 0)
def test_init(self):
assert not self.channel.no_ack_consumers
def test_prepare_message(self):
assert self.channel.prepare_message(
'foobar', 10, 'application/data', 'utf-8',
properties={},
)
def test_message_to_python(self):
message = Mock()
message.headers = {}
message.properties = {}
assert self.channel.message_to_python(message)
def test_close_resolves_connection_cycle(self):
assert self.channel.connection is not None
self.channel.close()
assert self.channel.connection is None
def test_basic_consume_registers_ack_status(self):
self.channel.wait_returns = ['my-consumer-tag']
self.channel.basic_consume('foo', no_ack=True)
assert 'my-consumer-tag' in self.channel.no_ack_consumers
self.channel.wait_returns = ['other-consumer-tag']
self.channel.basic_consume('bar', no_ack=False)
assert 'other-consumer-tag' not in self.channel.no_ack_consumers
self.channel.basic_cancel('my-consumer-tag')
assert 'my-consumer-tag' not in self.channel.no_ack_consumers
class test_Transport:
def setup(self):
self.connection = Connection('pyamqp://')
self.transport = self.connection.transport
def test_create_channel(self):
connection = Mock()
self.transport.create_channel(connection)
connection.channel.assert_called_with()
def test_ssl_cert_passed(self):
ssl_dict = {
'ca_certs': '/etc/pki/tls/certs/something.crt',
'cert_reqs': "ssl.CERT_REQUIRED",
}
ssl_dict_copy = {k: ssl_dict[k] for k in ssl_dict}
connection = Connection('amqps://', ssl=ssl_dict_copy)
assert connection.transport.client.ssl == ssl_dict
def test_driver_version(self):
assert self.transport.driver_version()
def test_drain_events(self):
connection = Mock()
self.transport.drain_events(connection, timeout=10.0)
connection.drain_events.assert_called_with(timeout=10.0)
def test_dnspython_localhost_resolve_bug(self):
class Conn:
def __init__(self, **kwargs):
vars(self).update(kwargs)
def connect(self):
pass
self.transport.Connection = Conn
self.transport.client.hostname = 'localhost'
conn1 = self.transport.establish_connection()
assert conn1.host == '127.0.0.1:5672'
self.transport.client.hostname = 'example.com'
conn2 = self.transport.establish_connection()
assert conn2.host == 'example.com:5672'
def test_close_connection(self):
connection = Mock()
connection.client = Mock()
self.transport.close_connection(connection)
assert connection.client is None
connection.close.assert_called_with()
@mock.mask_modules('ssl')
def test_import_no_ssl(self):
        pm = sys.modules.pop('amqp.connection', None)
try:
from amqp.connection import SSLError
assert SSLError.__module__ == 'amqp.connection'
finally:
if pm is not None:
sys.modules['amqp.connection'] = pm
class test_pyamqp:
def test_default_port(self):
class Transport(pyamqp.Transport):
Connection = MockConnection
c = Connection(port=None, transport=Transport).connect()
assert c['host'] == f'127.0.0.1:{Transport.default_port}'
def test_custom_port(self):
class Transport(pyamqp.Transport):
Connection = MockConnection
c = Connection(port=1337, transport=Transport).connect()
assert c['host'] == '127.0.0.1:1337'
def test_register_with_event_loop(self):
t = pyamqp.Transport(Mock())
conn = Mock(name='conn')
loop = Mock(name='loop')
t.register_with_event_loop(conn, loop)
loop.add_reader.assert_called_with(
conn.sock, t.on_readable, conn, loop,
)
def test_heartbeat_check(self):
t = pyamqp.Transport(Mock())
conn = Mock()
t.heartbeat_check(conn, rate=4.331)
conn.heartbeat_tick.assert_called_with(rate=4.331)
def test_get_manager(self):
with patch('kombu.transport.pyamqp.get_manager') as get_manager:
t = pyamqp.Transport(Mock())
t.get_manager(1, kw=2)
get_manager.assert_called_with(t.client, 1, kw=2)
|
from datetime import timedelta
from typing import Optional
from pyessent import PyEssent
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, ENERGY_KILO_WATT_HOUR
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
SCAN_INTERVAL = timedelta(hours=1)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string}
)
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Set up the Essent platform."""
username = config[CONF_USERNAME]
password = config[CONF_PASSWORD]
essent = EssentBase(username, password)
meters = []
for meter in essent.retrieve_meters():
data = essent.retrieve_meter_data(meter)
for tariff in data["values"]["LVR"]:
meters.append(
EssentMeter(
essent,
meter,
data["type"],
tariff,
data["values"]["LVR"][tariff]["unit"],
)
)
if not meters:
hass.components.persistent_notification.create(
"Couldn't find any meter readings. "
"Please ensure Verbruiks Manager is enabled in Mijn Essent "
"and at least one reading has been logged to Meterstanden.",
title="Essent",
notification_id="essent_notification",
)
return
add_devices(meters, True)
class EssentBase:
"""Essent Base."""
def __init__(self, username, password):
"""Initialize the Essent API."""
self._username = username
self._password = password
self._meter_data = {}
self.update()
def retrieve_meters(self):
"""Retrieve the list of meters."""
return self._meter_data.keys()
def retrieve_meter_data(self, meter):
"""Retrieve the data for this meter."""
return self._meter_data[meter]
@Throttle(timedelta(minutes=30))
def update(self):
"""Retrieve the latest meter data from Essent."""
essent = PyEssent(self._username, self._password)
eans = set(essent.get_EANs())
for possible_meter in eans:
meter_data = essent.read_meter(possible_meter, only_last_meter_reading=True)
if meter_data:
self._meter_data[possible_meter] = meter_data
class EssentMeter(Entity):
"""Representation of Essent measurements."""
def __init__(self, essent_base, meter, meter_type, tariff, unit):
"""Initialize the sensor."""
self._state = None
self._essent_base = essent_base
self._meter = meter
self._type = meter_type
self._tariff = tariff
self._unit = unit
@property
def unique_id(self) -> Optional[str]:
"""Return a unique ID."""
return f"{self._meter}-{self._type}-{self._tariff}"
@property
def name(self):
"""Return the name of the sensor."""
return f"Essent {self._type} ({self._tariff})"
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
if self._unit.lower() == "kwh":
return ENERGY_KILO_WATT_HOUR
return self._unit
def update(self):
"""Fetch the energy usage."""
# Ensure our data isn't too old
self._essent_base.update()
# Retrieve our meter
data = self._essent_base.retrieve_meter_data(self._meter)
# Set our value
self._state = next(
iter(data["values"]["LVR"][self._tariff]["records"].values())
)
|
import socket
import string
import os
from queue import Empty
from kombu.utils.encoding import bytes_to_str, safe_str
from kombu.utils.json import loads, dumps
from kombu.utils.objects import cached_property
from . import virtual
try:
from softlayer_messaging import get_client
from softlayer_messaging.errors import ResponseError
except ImportError: # pragma: no cover
get_client = ResponseError = None # noqa
# all punctuation except the underscore is replaced by an underscore.
CHARS_REPLACE_TABLE = {
ord(c): 0x5f for c in string.punctuation if c not in '_'
}
class Channel(virtual.Channel):
"""SLMQ Channel."""
default_visibility_timeout = 1800 # 30 minutes.
domain_format = 'kombu%(vhost)s'
_slmq = None
_queue_cache = {}
_noack_queues = set()
def __init__(self, *args, **kwargs):
if get_client is None:
raise ImportError(
'SLMQ transport requires the softlayer_messaging library',
)
super().__init__(*args, **kwargs)
queues = self.slmq.queues()
for queue in queues:
self._queue_cache[queue] = queue
def basic_consume(self, queue, no_ack, *args, **kwargs):
if no_ack:
self._noack_queues.add(queue)
return super().basic_consume(queue, no_ack,
*args, **kwargs)
def basic_cancel(self, consumer_tag):
if consumer_tag in self._consumers:
queue = self._tag_to_queue[consumer_tag]
self._noack_queues.discard(queue)
return super().basic_cancel(consumer_tag)
def entity_name(self, name, table=CHARS_REPLACE_TABLE):
"""Format AMQP queue name into a valid SLQS queue name."""
return str(safe_str(name)).translate(table)
def _new_queue(self, queue, **kwargs):
"""Ensure a queue exists in SLQS."""
queue = self.entity_name(self.queue_name_prefix + queue)
try:
return self._queue_cache[queue]
except KeyError:
try:
self.slmq.create_queue(
queue, visibility_timeout=self.visibility_timeout)
except ResponseError:
pass
q = self._queue_cache[queue] = self.slmq.queue(queue)
return q
def _delete(self, queue, *args, **kwargs):
"""Delete queue by name."""
queue_name = self.entity_name(queue)
self._queue_cache.pop(queue_name, None)
self.slmq.queue(queue_name).delete(force=True)
super()._delete(queue_name)
def _put(self, queue, message, **kwargs):
"""Put message onto queue."""
q = self._new_queue(queue)
q.push(dumps(message))
def _get(self, queue):
"""Try to retrieve a single message off ``queue``."""
q = self._new_queue(queue)
rs = q.pop(1)
if rs['items']:
m = rs['items'][0]
payload = loads(bytes_to_str(m['body']))
if queue in self._noack_queues:
q.message(m['id']).delete()
else:
payload['properties']['delivery_info'].update({
'slmq_message_id': m['id'], 'slmq_queue_name': q.name})
return payload
raise Empty()
def basic_ack(self, delivery_tag):
delivery_info = self.qos.get(delivery_tag).delivery_info
try:
queue = delivery_info['slmq_queue_name']
except KeyError:
pass
else:
self.delete_message(queue, delivery_info['slmq_message_id'])
super().basic_ack(delivery_tag)
def _size(self, queue):
"""Return the number of messages in a queue."""
return self._new_queue(queue).detail()['message_count']
def _purge(self, queue):
"""Delete all current messages in a queue."""
q = self._new_queue(queue)
n = 0
results = q.pop(10)
while results['items']:
for m in results['items']:
self.delete_message(queue, m['id'])
n += 1
results = q.pop(10)
return n
def delete_message(self, queue, message_id):
q = self.slmq.queue(self.entity_name(queue))
return q.message(message_id).delete()
@property
def slmq(self):
if self._slmq is None:
conninfo = self.conninfo
account = os.environ.get('SLMQ_ACCOUNT', conninfo.virtual_host)
user = os.environ.get('SL_USERNAME', conninfo.userid)
api_key = os.environ.get('SL_API_KEY', conninfo.password)
host = os.environ.get('SLMQ_HOST', conninfo.hostname)
port = os.environ.get('SLMQ_PORT', conninfo.port)
secure = bool(os.environ.get(
'SLMQ_SECURE', self.transport_options.get('secure')) or True,
)
endpoint = '{}://{}{}'.format(
'https' if secure else 'http', host,
f':{port}' if port else '',
)
self._slmq = get_client(account, endpoint=endpoint)
self._slmq.authenticate(user, api_key)
return self._slmq
@property
def conninfo(self):
return self.connection.client
@property
def transport_options(self):
return self.connection.client.transport_options
@cached_property
def visibility_timeout(self):
return (self.transport_options.get('visibility_timeout') or
self.default_visibility_timeout)
@cached_property
def queue_name_prefix(self):
return self.transport_options.get('queue_name_prefix', '')
class Transport(virtual.Transport):
"""SLMQ Transport."""
Channel = Channel
polling_interval = 1
default_port = None
connection_errors = (
virtual.Transport.connection_errors + (
ResponseError, socket.error
)
)
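# Illustrative sketch (not part of the original transport): CHARS_REPLACE_TABLE maps
# every punctuation character except the underscore to an underscore, so AMQP-style
# names become valid SLMQ queue names. The sample name below is made up.
def _entity_name_example():
    sample = 'celery.pidbox@worker-1'
    translated = sample.translate(CHARS_REPLACE_TABLE)
    assert translated == 'celery_pidbox_worker_1'
    return translated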
|
import pandas as pd
import pytest
from mock import Mock, sentinel, ANY, call
from pymongo.errors import OperationFailure
from arctic.exceptions import ConcurrentModificationException, NoDataFoundException
from arctic.store.audit import ArcticTransaction, DataChange
from arctic.store.version_store import VersionedItem, VersionStore
def test_data_change():
d = DataChange(sentinel, sentinel)
assert(d.date_range == sentinel)
assert(d.new_data == sentinel)
def test_ArcticTransaction_simple():
vs = Mock(spec=VersionStore)
ts1 = pd.DataFrame(index=[1, 2], data={'a': [1.0, 2.0]})
vs.read.return_value = VersionedItem(symbol=sentinel.symbol, library=sentinel.library, version=1, metadata=None,
data=ts1, host=sentinel.host)
vs.write.return_value = VersionedItem(symbol=sentinel.symbol, library=sentinel.library, version=2,
metadata=None, data=None, host=sentinel.host)
vs.list_versions.return_value = [{'version': 2}, {'version': 1}]
with ArcticTransaction(vs, sentinel.symbol, sentinel.user, sentinel.log) as cwb:
cwb.write(sentinel.symbol, pd.DataFrame(index=[3, 4], data={'a': [1.0, 2.0]}), metadata=sentinel.meta)
assert not vs._delete_version.called
assert vs.write.call_args_list == [call(sentinel.symbol, ANY, prune_previous_version=True, metadata=sentinel.meta)]
assert vs.list_versions.call_args_list == [call(sentinel.symbol)]
assert vs._write_audit.call_args_list == [call(sentinel.user, sentinel.log, ANY)]
def test_ArcticTransaction_no_audit():
vs = Mock(spec=VersionStore)
ts1 = pd.DataFrame(index=[1, 2], data={'a': [1.0, 2.0]})
vs.read.return_value = VersionedItem(symbol=sentinel.symbol, library=sentinel.library, version=1, metadata=None,
data=ts1, host=sentinel.host)
vs.write.return_value = VersionedItem(symbol=sentinel.symbol, library=sentinel.library, version=2,
metadata=None, data=None, host=sentinel.host)
vs.list_versions.return_value = [{'version': 2}, {'version': 1}]
with ArcticTransaction(vs, sentinel.symbol, sentinel.user, sentinel.log, audit=False) as cwb:
cwb.write(sentinel.symbol, pd.DataFrame(index=[3, 4], data={'a': [1.0, 2.0]}), metadata=sentinel.meta)
assert vs.write.call_count == 1
assert vs._write_audit.call_count == 0
def test_ArcticTransaction_writes_if_metadata_changed():
vs = Mock(spec=VersionStore)
ts1 = pd.DataFrame(index=[1, 2], data={'a': [1.0, 2.0]})
vs.read.return_value = VersionedItem(symbol=sentinel.symbol, library=sentinel.library, version=1, metadata=None,
data=ts1, host=sentinel.host)
vs.write.return_value = VersionedItem(symbol=sentinel.symbol, library=sentinel.library, version=2, metadata=None,
data=None, host=sentinel.host)
vs.list_versions.return_value = [{'version': 2},
{'version': 1}]
with ArcticTransaction(vs, sentinel.symbol, sentinel.user, sentinel.log) as cwb:
assert cwb._do_write is False
cwb.write(sentinel.symbol, ts1, metadata={1: 2})
assert cwb._do_write is True
assert not vs._delete_version.called
vs.write.assert_called_once_with(sentinel.symbol, ANY, prune_previous_version=True, metadata={1: 2})
vs.list_versions.assert_called_once_with(sentinel.symbol)
# Won't write on exit with same data and metadata
vs.read.return_value = VersionedItem(symbol=sentinel.symbol, library=sentinel.library, version=2, metadata={1: 2},
data=ts1, host=sentinel.host)
with ArcticTransaction(vs, sentinel.symbol, sentinel.user, sentinel.log) as cwb:
assert cwb._do_write is False
cwb.write(sentinel.symbol, ts1, metadata={1: 2})
assert cwb._do_write is False
def test_ArcticTransaction_writes_if_base_data_corrupted():
vs = Mock(spec=VersionStore)
ts1 = pd.DataFrame(index=[1, 2], data={'a': [1.0, 2.0]})
vs.read.side_effect = OperationFailure('some failure')
vs.write.return_value = VersionedItem(symbol=sentinel.symbol, library=sentinel.library, version=2,
metadata=None, data=None, host=sentinel.host)
vs.read_metadata.return_value = VersionedItem(symbol=sentinel.symbol, library=sentinel.library, version=1,
metadata=None, data=None, host=sentinel.host)
vs.list_versions.return_value = [{'version': 2}, {'version': 1}]
with ArcticTransaction(vs, sentinel.symbol, sentinel.user, sentinel.log) as cwb:
cwb.write(sentinel.symbol, ts1, metadata={1: 2})
vs.write.assert_called_once_with(sentinel.symbol, ANY, prune_previous_version=True, metadata={1: 2})
assert vs.list_versions.call_args_list == [call(sentinel.symbol)]
def test_ArcticTransaction_writes_no_data_found():
vs = Mock(spec=VersionStore)
ts1 = pd.DataFrame(index=[1, 2], data={'a': [1.0, 2.0]})
vs.read.side_effect = NoDataFoundException('no data')
vs.write.return_value = VersionedItem(symbol=sentinel.symbol, library=sentinel.library, version=1,
metadata=None, data=None, host=sentinel.host)
vs.list_versions.side_effect = [[],
[{'version': 1}],
]
with ArcticTransaction(vs, sentinel.symbol, sentinel.user, sentinel.log) as cwb:
cwb.write(sentinel.symbol, ts1, metadata={1: 2})
assert vs.write.call_args_list == [call(sentinel.symbol, ANY, prune_previous_version=True, metadata={1: 2})]
assert vs.list_versions.call_args_list == [call(sentinel.symbol, latest_only=True),
call(sentinel.symbol)]
def test_ArcticTransaction_writes_no_data_found_deleted():
vs = Mock(spec=VersionStore)
ts1 = pd.DataFrame(index=[1, 2], data={'a': [1.0, 2.0]})
vs.read.side_effect = NoDataFoundException('no data')
vs.write.return_value = VersionedItem(symbol=sentinel.symbol, library=sentinel.library, version=3,
metadata=None, data=None, host=sentinel.host)
vs.list_versions.side_effect = [[{'version': 2}, {'version': 1}],
[{'version': 3}, {'version': 2}],
]
with ArcticTransaction(vs, sentinel.symbol, sentinel.user, sentinel.log) as cwb:
cwb.write(sentinel.symbol, ts1, metadata={1: 2})
assert vs.write.call_args_list == [call(sentinel.symbol, ANY, prune_previous_version=True, metadata={1: 2})]
assert vs.list_versions.call_args_list == [call(sentinel.symbol, latest_only=True),
call(sentinel.symbol)]
def test_ArcticTransaction_does_nothing_when_data_not_modified():
vs = Mock(spec=VersionStore)
ts1 = pd.DataFrame(index=[1, 2], data={'a': [1.0, 2.0]})
vs.read.return_value = VersionedItem(symbol=sentinel.symbol, library=sentinel.library, version=1, metadata=None,
data=ts1, host=sentinel.host)
vs.write.return_value = VersionedItem(symbol=sentinel.symbol, library=sentinel.library, version=2, metadata=None,
data=None, host=sentinel.host)
vs.list_versions.side_effect = [{'version': 2}, {'version': 1}]
with ArcticTransaction(vs, sentinel.symbol, sentinel.user, sentinel.log) as cwb:
cwb.write(sentinel.symbol, pd.DataFrame(index=[1, 2], data={'a': [1.0, 2.0]}))
assert not vs._delete_version.called
assert not vs.write.called
def test_ArcticTransaction_does_nothing_when_data_is_None():
vs = Mock(spec=VersionStore)
ts1 = pd.DataFrame(index=[1, 2], data={'a': [1.0, 2.0]})
vs.read.return_value = VersionedItem(symbol=sentinel.symbol, library=sentinel.library, version=1, metadata=None,
data=ts1, host=sentinel.host)
vs.write.return_value = VersionedItem(symbol=sentinel.symbol, library=sentinel.library, version=2,
metadata=None, data=None, host=sentinel.host)
vs.list_versions.return_value = [{'version': 1}, {'version': 2}]
with ArcticTransaction(vs, sentinel.symbol, sentinel.user, sentinel.log) as cwb:
pass
assert not vs._delete_version.called
assert not vs.write.called
def test_ArcticTransaction_guards_against_inconsistent_ts():
vs = Mock(spec=VersionStore)
ts1 = pd.DataFrame(index=[1, 2], data={'a': [1.0, 2.0]})
vs.read.return_value = VersionedItem(symbol=sentinel.symbol, library=sentinel.library, version=1, metadata=None,
data=ts1, host=sentinel.host)
vs.write.return_value = VersionedItem(symbol=sentinel.symbol, library=sentinel.library, version=2, metadata=None,
data=None, host=sentinel.host)
vs.list_versions.side_effect = [{'version': 2}, {'version': 1}]
ts1 = pd.DataFrame(index=[1, 2], data={'a': [2.0, 3.0]})
with pytest.raises(ConcurrentModificationException):
with ArcticTransaction(vs, sentinel.symbol, sentinel.user, sentinel.log, modify_timeseries=ts1) as cwb:
pass
def test_ArcticTransaction_detects_concurrent_writes():
vs = Mock(spec=VersionStore)
ts1 = pd.DataFrame(index=[1, 2], data={'a': [1.0, 2.0]})
vs.read.return_value = VersionedItem(symbol=sentinel.symbol, library=sentinel.library, version=1, metadata=None,
data=ts1, host=sentinel.host)
vs.write.side_effect = [VersionedItem(symbol=sentinel.symbol, library=sentinel.library, version=2, metadata=None,
data=None, host=sentinel.host),
VersionedItem(symbol=sentinel.symbol, library=sentinel.library, version=3, metadata=None,
data=None, host=sentinel.host)]
# note that we return some extra version 5, it is possible that we have a write coming in after our own write that gets picked up
vs.list_versions.side_effect = [[{'version': 5}, {'version': 2}, {'version': 1}, ],
[{'version': 5}, {'version': 3}, {'version': 2}, {'version': 1}, ]]
from threading import Event, Thread
e1 = Event()
e2 = Event()
def losing_writer():
# will attempt to write version 2, should find that version 2 is there and it ends up writing version 3
        with pytest.raises(ConcurrentModificationException):
with ArcticTransaction(vs, sentinel.symbol, sentinel.user, sentinel.log) as cwb:
cwb.write(sentinel.symbol, pd.DataFrame([1.0, 2.0], [3, 4]))
e1.wait()
def winning_writer():
# will attempt to write version 2 as well
with ArcticTransaction(vs, sentinel.symbol, sentinel.user, sentinel.log) as cwb:
cwb.write(sentinel.symbol, pd.DataFrame([1.0, 2.0], [5, 6]))
e2.wait()
t1 = Thread(target=losing_writer)
t2 = Thread(target=winning_writer)
t1.start()
t2.start()
# both read the same timeseries and are locked doing some 'work'
e2.set()
# t2 should now be able to finish
t2.join()
e1.set()
t1.join()
# we're expecting the losing_writer to undo its write once it realises that it wrote v3 instead of v2
vs._delete_version.assert_called_once_with(sentinel.symbol, 3)
|
from . import pprint
def parser(subparsers, _):
"""Adds the remote parser to the given subparsers object."""
desc = 'list, create, edit or delete remotes'
remote_parser = subparsers.add_parser(
'remote', help=desc, description=desc.capitalize(), aliases=['rt'])
remote_parser.add_argument(
'-c', '--create', nargs='?', help='create remote', dest='remote_name',
metavar='remote')
remote_parser.add_argument(
'remote_url', nargs='?',
help='the url of the remote (only relevant if a new remote is created)')
remote_parser.add_argument(
      '-d', '--delete', nargs='+', help='delete remote(s)', dest='delete_r',
metavar='remote')
remote_parser.add_argument(
'-rn', '--rename', nargs='+',
help='renames the specified remote: accepts two arguments '
'(current remote name and new remote name)',
dest='rename_r')
remote_parser.set_defaults(func=main)
def main(args, repo):
ret = True
remotes = repo.remotes
if args.remote_name:
if not args.remote_url:
raise ValueError('Missing url')
ret = _do_create(args.remote_name, args.remote_url, remotes)
elif args.delete_r:
ret = _do_delete(args.delete_r, remotes)
elif args.rename_r:
ret = _do_rename(args.rename_r, remotes)
else:
ret = _do_list(remotes)
return ret
def _do_list(remotes):
pprint.msg('List of remotes:')
pprint.exp(
'do gl remote -c r r_url to add a new remote r mapping to r_url')
pprint.exp('do gl remote -d r to delete remote r')
pprint.blank()
if not len(remotes):
pprint.item('There are no remotes to list')
else:
for r in remotes:
pprint.item(r.name, opt_text=' (maps to {0})'.format(r.url))
return True
def _do_create(rn, ru, remotes):
remotes.create(rn, ru)
pprint.ok('Remote {0} mapping to {1} created successfully'.format(rn, ru))
pprint.exp('to list existing remotes do gl remote')
pprint.exp('to remove {0} do gl remote -d {1}'.format(rn, rn))
return True
def _do_delete(delete_r, remotes):
errors_found = False
for r in delete_r:
try:
remotes.delete(r)
pprint.ok('Remote {0} removed successfully'.format(r))
except KeyError:
pprint.err('Remote \'{0}\' doesn\'t exist'.format(r))
errors_found = True
return not errors_found
def _do_rename(rename_r, remotes):
errors_found = False
if len(rename_r) != 2:
pprint.err(
        'Expected 2 arguments in the following format: '
'gl remote -rn current_remote_name new_remote_name')
errors_found = True
else:
try:
remotes.rename(rename_r[0], rename_r[1])
pprint.ok('Renamed remote {0} to {1}'.format(rename_r[0], rename_r[1]))
except KeyError:
pprint.err('Remote \'{0}\' doesn\'t exist'.format(rename_r[0]))
errors_found = True
return not errors_found
|
from __future__ import division
import chainer
import chainer.functions as F
import chainer.links as L
from chainercv.links.connection import Conv2DBNActiv
from chainercv.links.connection import SeparableConv2DBNActiv
from chainercv.links.model.deeplab.aspp import SeparableASPP
from chainercv.links.model.deeplab.xception import Xception65
from chainercv.transforms import resize
from chainercv import utils
import numpy as np
class Decoder(chainer.Chain):
"""Decoder for DeepLab V3+.
Args:
in_channels (int): Number of channels of input arrays.
out_channels (int): Number of channels of output arrays.
proj_channels (int): Number of channels of output of
first 1x1 convolution.
depth_channels (int): Number of channels of output of
convolution after concatenation.
        bn_kwargs (dict): Keyword arguments passed to initialize the batch
normalization layers of :class:`chainercv.links.Conv2DBNActiv` and
:class:`chainercv.links.SeparableConv2DBNActiv`.
"""
def __init__(self, in_channels, out_channels, proj_channels,
depth_channels, bn_kwargs={}):
super(Decoder, self).__init__()
with self.init_scope():
self.feature_proj = Conv2DBNActiv(in_channels, proj_channels, 1)
concat_channels = in_channels+proj_channels
self.conv1 = SeparableConv2DBNActiv(
concat_channels, depth_channels, 3, 1, 1, 1, nobias=True,
dw_activ=F.relu, pw_activ=F.relu, bn_kwargs=bn_kwargs)
self.conv2 = SeparableConv2DBNActiv(
depth_channels, depth_channels, 3, 1, 1, 1, nobias=True,
dw_activ=F.relu, pw_activ=F.relu, bn_kwargs=bn_kwargs)
self.conv_logits = L.Convolution2D(
depth_channels, out_channels, 1, 1, 0)
def forward(self, x, pool):
x = self.feature_proj(x)
pool = F.resize_images(pool, x.shape[2:])
h = F.concat((pool, x), axis=1)
h = self.conv1(h)
h = self.conv2(h)
logits = self.conv_logits(h)
return logits
class DeepLabV3plus(chainer.Chain):
"""Base class of DeepLab V3+.
Args:
        feature_extractor (callable): Feature extractor network.
This network should return lowlevel and highlevel feature maps
as :obj:`(lowlevel, highlevel)`.
aspp (callable): ASPP network.
decoder (callable): Decoder network.
min_input_size (int or tuple of ints): Minimum image size of inputs.
            If the height or width is smaller than these values, input images
            are padded to this shape. The default value is :obj:`(513, 513)`.
scales (tuple of floats): Scales for multi-scale prediction.
Final outputs are averaged after softmax activation.
The default value is :obj:`(1.0,)`.
        flip (bool): When this is true, left-right flipped images are
            also input and the results are averaged. When :obj:`len(scales)`
            is more than 1, flipped prediction is performed for each scale.
            The default value is :obj:`False`.
"""
def __init__(self, feature_extractor, aspp, decoder,
min_input_size, scales=(1.0,), flip=False):
super(DeepLabV3plus, self).__init__()
if not isinstance(min_input_size, (list, tuple)):
min_input_size = (int(min_input_size), int(min_input_size))
self.min_input_size = min_input_size
self.scales = scales
self.flip = flip
with self.init_scope():
self.feature_extractor = feature_extractor
self.aspp = aspp
self.decoder = decoder
def prepare(self, image):
"""Preprocess an image for feature extraction.
        1. The image is padded with the mean pixel value defined by the
           feature extractor so that its height and width are at least
           :obj:`min_input_size`.
        2. The result is scaled to :math:`[-1.0, 1.0]`.
Args:
image (~numpy.ndarray): An image. This is in CHW and RGB format.
The range of its value is :math:`[0, 255]`.
Returns:
~numpy.ndarray:
A preprocessed image.
"""
_, H, W = image.shape
# Pad image and label to have dimensions >= min_input_size
h = max(self.min_input_size[0], H)
w = max(self.min_input_size[1], W)
# Pad image with mean pixel value.
mean = self.feature_extractor.mean
bg = np.zeros((3, h, w), dtype=np.float32) + mean
bg[:, :H, :W] = image
image = bg
# scale to [-1.0, 1.0]
image = image / 127.5 - 1.0
return image
def forward(self, x):
lowlevel, highlevel = self.feature_extractor(x)
highlevel = self.aspp(highlevel)
h = self.decoder(lowlevel, highlevel)
return h
def _get_proba(self, img, scale, flip):
if flip:
img = img[:, :, ::-1]
_, H, W = img.shape
if scale == 1.0:
h, w = H, W
else:
h, w = int(H * scale), int(W * scale)
img = resize(img, (h, w))
img = self.prepare(img)
x = chainer.Variable(self.xp.asarray(img[np.newaxis]))
x = self.forward(x)
x = F.softmax(x, axis=1)
score = F.resize_images(x, img.shape[1:])[0, :, :h, :w].array
score = chainer.backends.cuda.to_cpu(score)
if scale != 1.0:
score = resize(score, (H, W))
if flip:
score = score[:, :, ::-1]
return score
def predict(self, imgs):
"""Conduct semantic segmentation from images.
Args:
imgs (iterable of numpy.ndarray): Arrays holding images.
All images are in CHW and RGB format
and the range of their values are :math:`[0, 255]`.
Returns:
list of numpy.ndarray:
List of integer labels predicted from each image in the input list.
"""
with chainer.using_config('train', False), \
chainer.function.no_backprop_mode():
labels = []
            n_aug = len(self.scales) * 2 if self.flip else len(self.scales)
for img in imgs:
score = 0
for scale in self.scales:
score += self._get_proba(img, scale, False) / n_aug
if self.flip:
score += self._get_proba(img, scale, True) / n_aug
label = np.argmax(score, axis=0).astype(np.int32)
labels.append(label)
return labels
class DeepLabV3plusXception65(DeepLabV3plus):
_models = {
'voc': {
'param': {
'n_class': 21,
'min_input_size': (513, 513),
'scales': (1.0,),
'flip': False,
'extractor_kwargs': {
'bn_kwargs': {'decay': 0.9997, 'eps': 1e-3},
},
'aspp_kwargs': {
'bn_kwargs': {'decay': 0.9997, 'eps': 1e-5},
},
'decoder_kwargs': {
'bn_kwargs': {'decay': 0.9997, 'eps': 1e-5},
},
},
'overwritable': ('scales', 'flip'),
'url': 'https://chainercv-models.preferred.jp/'
'deeplabv3plus_xception65_voc_converted_2019_02_15.npz',
},
'cityscapes': {
'param': {
'n_class': 19,
'min_input_size': (1025, 2049),
'scales': (1.0,),
'flip': False,
'extractor_kwargs': {
'bn_kwargs': {'decay': 0.9997, 'eps': 1e-3},
},
'aspp_kwargs': {
'bn_kwargs': {'decay': 0.9997, 'eps': 1e-5},
},
'decoder_kwargs': {
'bn_kwargs': {'decay': 0.9997, 'eps': 1e-5},
},
},
'overwritable': ('scales', 'flip'),
'url': 'https://chainercv-models.preferred.jp/'
'deeplabv3plus_xception65_cityscapes_converted_2019_02_15.npz',
},
'ade20k': {
'param': {
'n_class': 150,
'min_input_size': (513, 513),
'scales': (1.0,),
'flip': False,
'extractor_kwargs': {
'bn_kwargs': {'decay': 0.9997, 'eps': 1e-3},
},
'aspp_kwargs': {
'bn_kwargs': {'decay': 0.9997, 'eps': 1e-5},
},
'decoder_kwargs': {
'bn_kwargs': {'decay': 0.9997, 'eps': 1e-5},
},
},
'overwritable': ('scales', 'flip'),
'url': 'https://chainercv-models.preferred.jp/'
'deeplabv3plus_xception65_ade20k_converted_2019_03_08.npz',
}
}
def __init__(self, n_class=None, pretrained_model=None,
min_input_size=None, scales=None, flip=None,
extractor_kwargs=None, aspp_kwargs=None, decoder_kwargs=None):
param, path = utils.prepare_pretrained_model(
{'n_class': n_class, 'min_input_size': min_input_size,
'scales': scales, 'flip': flip,
'extractor_kwargs': extractor_kwargs,
'aspp_kwargs': aspp_kwargs, 'decoder_kwargs': decoder_kwargs},
pretrained_model, self._models,
default={
'min_input_size': (513, 513),
'scales': (1.0,), 'flip': False,
'extractor_kwargs': {},
'aspp_kwargs': {}, 'decoder_kwargs': {}})
super(DeepLabV3plusXception65, self).__init__(
Xception65(**param['extractor_kwargs']),
SeparableASPP(2048, 256, **param['aspp_kwargs']),
Decoder(256, param['n_class'], 48, 256, **param['decoder_kwargs']),
min_input_size=param['min_input_size'], scales=param['scales'],
flip=param['flip'])
if path:
chainer.serializers.load_npz(path, self)
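# Illustrative sketch (not part of ChainerCV): the padding and scaling performed
# by DeepLabV3plus.prepare() above, written as a standalone NumPy function. The
# scalar mean used here is a made-up placeholder; the real model uses the mean
# pixel defined by its feature extractor.
def _prepare_example(image, min_input_size=(513, 513), mean=127.5):
    """Pad a CHW float image with the mean pixel, then scale it to [-1, 1]."""
    _, height, width = image.shape
    out_h = max(min_input_size[0], height)
    out_w = max(min_input_size[1], width)
    canvas = np.zeros((3, out_h, out_w), dtype=np.float32) + mean
    canvas[:, :height, :width] = image
    return canvas / 127.5 - 1.0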
|
from bisect import bisect_left, bisect_right
from pygal.graph.graph import Graph
from pygal.util import alter, decorate
class Box(Graph):
"""
Box plot
For each series, shows the median value, the 25th and 75th percentiles,
and the values within
1.5 times the interquartile range of the 25th and 75th percentiles.
See http://en.wikipedia.org/wiki/Box_plot
"""
_series_margin = .06
def _value_format(self, value, serie):
"""
Format value for dual value display.
"""
if self.box_mode == "extremes":
return (
'Min: %s\nQ1 : %s\nQ2 : %s\nQ3 : %s\nMax: %s' %
tuple(map(self._y_format, serie.points[1:6]))
)
elif self.box_mode in ["tukey", "stdev", "pstdev"]:
return (
'Min: %s\nLower Whisker: %s\nQ1: %s\nQ2: %s\nQ3: %s\n'
'Upper Whisker: %s\nMax: %s' %
tuple(map(self._y_format, serie.points))
)
elif self.box_mode == '1.5IQR':
# 1.5IQR mode
return 'Q1: %s\nQ2: %s\nQ3: %s' % tuple(
map(self._y_format, serie.points[2:5])
)
else:
return self._y_format(serie.points)
def _compute(self):
"""
Compute parameters necessary for later steps
within the rendering process
"""
for serie in self.series:
serie.points, serie.outliers = \
self._box_points(serie.values, self.box_mode)
self._x_pos = [(i + .5) / self._order for i in range(self._order)]
if self._min:
self._box.ymin = min(self._min, self.zero)
if self._max:
self._box.ymax = max(self._max, self.zero)
def _plot(self):
"""Plot the series data"""
for serie in self.series:
self._boxf(serie)
@property
def _len(self):
"""Len is always 7 here"""
return 7
def _boxf(self, serie):
"""For a specific series, draw the box plot."""
serie_node = self.svg.serie(serie)
# Note: q0 and q4 do not literally mean the zero-th quartile
# and the fourth quartile, but rather the distance from 1.5 times
# the inter-quartile range to Q1 and Q3, respectively.
boxes = self.svg.node(serie_node['plot'], class_="boxes")
metadata = serie.metadata.get(0)
box = decorate(self.svg, self.svg.node(boxes, class_='box'), metadata)
val = self._format(serie, 0)
x_center, y_center = self._draw_box(
box, serie.points[1:6], serie.outliers, serie.index, metadata
)
self._tooltip_data(
box, val, x_center, y_center, "centered",
self._get_x_label(serie.index)
)
self._static_value(serie_node, val, x_center, y_center, metadata)
def _draw_box(self, parent_node, quartiles, outliers, box_index, metadata):
"""
Return the center of a bounding box defined by a box plot.
Draws a box plot on self.svg.
"""
width = (self.view.x(1) - self.view.x(0)) / self._order
series_margin = width * self._series_margin
left_edge = self.view.x(0) + width * box_index + series_margin
width -= 2 * series_margin
# draw lines for whiskers - bottom, median, and top
for i, whisker in enumerate((quartiles[0], quartiles[2],
quartiles[4])):
whisker_width = width if i == 1 else width / 2
shift = (width - whisker_width) / 2
xs = left_edge + shift
xe = left_edge + width - shift
alter(
self.svg.line(
parent_node,
coords=[(xs, self.view.y(whisker)),
(xe, self.view.y(whisker))],
class_='reactive tooltip-trigger',
attrib={'stroke-width': 3}
), metadata
)
# draw lines connecting whiskers to box (Q1 and Q3)
alter(
self.svg.line(
parent_node,
coords=[(left_edge + width / 2, self.view.y(quartiles[0])),
(left_edge + width / 2, self.view.y(quartiles[1]))],
class_='reactive tooltip-trigger',
attrib={'stroke-width': 2}
), metadata
)
alter(
self.svg.line(
parent_node,
coords=[(left_edge + width / 2, self.view.y(quartiles[4])),
(left_edge + width / 2, self.view.y(quartiles[3]))],
class_='reactive tooltip-trigger',
attrib={'stroke-width': 2}
), metadata
)
# box, bounded by Q1 and Q3
alter(
self.svg.node(
parent_node,
tag='rect',
x=left_edge,
y=self.view.y(quartiles[1]),
height=self.view.y(quartiles[3]) - self.view.y(quartiles[1]),
width=width,
class_='subtle-fill reactive tooltip-trigger'
), metadata
)
# draw outliers
for o in outliers:
alter(
self.svg.node(
parent_node,
tag='circle',
cx=left_edge + width / 2,
cy=self.view.y(o),
r=3,
class_='subtle-fill reactive tooltip-trigger'
), metadata
)
return (
left_edge + width / 2,
self.view.y(sum(quartiles) / len(quartiles))
)
@staticmethod
def _box_points(values, mode='extremes'):
"""
Default mode: (mode='extremes' or unset)
Return a 7-tuple of 2x minimum, Q1, Median, Q3,
and 2x maximum for a list of numeric values.
1.5IQR mode: (mode='1.5IQR')
Return a 7-tuple of min, Q1 - 1.5 * IQR, Q1, Median, Q3,
Q3 + 1.5 * IQR and max for a list of numeric values.
Tukey mode: (mode='tukey')
Return a 7-tuple of min, q[0..4], max and a list of outliers
        Outliers are considered values x: x < q1 - 1.5 * IQR or x > q3 + 1.5 * IQR
SD mode: (mode='stdev')
Return a 7-tuple of min, q[0..4], max and a list of outliers
Outliers are considered values x: x < q2 - SD or x > q2 + SD
SDp mode: (mode='pstdev')
Return a 7-tuple of min, q[0..4], max and a list of outliers
Outliers are considered values x: x < q2 - SDp or x > q2 + SDp
The iterator values may include None values.
Uses quartile definition from Mendenhall, W. and
Sincich, T. L. Statistics for Engineering and the
Sciences, 4th ed. Prentice-Hall, 1995.
"""
def median(seq):
n = len(seq)
if n % 2 == 0: # seq has an even length
return (seq[n // 2] + seq[n // 2 - 1]) / 2
else: # seq has an odd length
return seq[n // 2]
def mean(seq):
return sum(seq) / len(seq)
def stdev(seq):
m = mean(seq)
l = len(seq)
v = sum((n - m)**2 for n in seq) / (l - 1) # variance
return v**0.5 # sqrt
def pstdev(seq):
m = mean(seq)
l = len(seq)
v = sum((n - m)**2 for n in seq) / l # variance
return v**0.5 # sqrt
outliers = []
# sort the copy in case the originals must stay in original order
s = sorted([x for x in values if x is not None])
n = len(s)
if not n:
return (0, 0, 0, 0, 0, 0, 0), []
elif n == 1:
return (s[0], s[0], s[0], s[0], s[0], s[0], s[0]), []
else:
q2 = median(s)
# See 'Method 3' in http://en.wikipedia.org/wiki/Quartile
if n % 2 == 0: # even
q1 = median(s[:n // 2])
q3 = median(s[n // 2:])
else: # odd
if n == 1: # special case
q1 = s[0]
q3 = s[0]
                elif n % 4 == 1:  # n is of the form 4m + 1 where m >= 1
m = (n - 1) // 4
q1 = 0.25 * s[m - 1] + 0.75 * s[m]
q3 = 0.75 * s[3 * m] + 0.25 * s[3 * m + 1]
                else:  # n is of the form 4m + 3 where m >= 1
m = (n - 3) // 4
q1 = 0.75 * s[m] + 0.25 * s[m + 1]
q3 = 0.25 * s[3 * m + 1] + 0.75 * s[3 * m + 2]
iqr = q3 - q1
min_s = s[0]
max_s = s[-1]
if mode == 'extremes':
q0 = min_s
q4 = max_s
elif mode == 'tukey':
                # the lowest datum still within 1.5 IQR of the lower quartile,
                # and the highest datum still within 1.5 IQR of the upper
                # quartile [Tukey box plot, Wikipedia]
b0 = bisect_left(s, q1 - 1.5 * iqr)
b4 = bisect_right(s, q3 + 1.5 * iqr)
q0 = s[b0]
q4 = s[b4 - 1]
outliers = s[:b0] + s[b4:]
elif mode == 'stdev':
                # one standard deviation above and below the median (q2)
sd = stdev(s)
b0 = bisect_left(s, q2 - sd)
b4 = bisect_right(s, q2 + sd)
q0 = s[b0]
q4 = s[b4 - 1]
outliers = s[:b0] + s[b4:]
elif mode == 'pstdev':
                # one population standard deviation above and below
                # the median (q2) of the data
sdp = pstdev(s)
b0 = bisect_left(s, q2 - sdp)
b4 = bisect_right(s, q2 + sdp)
q0 = s[b0]
q4 = s[b4 - 1]
outliers = s[:b0] + s[b4:]
elif mode == '1.5IQR':
# 1.5IQR mode
q0 = q1 - 1.5 * iqr
q4 = q3 + 1.5 * iqr
return (min_s, q0, q1, q2, q3, q4, max_s), outliers
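# --- Illustrative sketch (not part of pygal itself) --------------------------
# A minimal standalone example of the quartile/IQR arithmetic performed by
# _box_points for an even-length sample in '1.5IQR' mode. The helper name and
# the sample values below are hypothetical.
def _demo_iqr_whiskers(values):
    """Return (q0, q1, q2, q3, q4) using median-of-halves quartiles."""
    def _median(seq):
        mid = len(seq) // 2
        return seq[mid] if len(seq) % 2 else (seq[mid - 1] + seq[mid]) / 2.0
    s = sorted(values)
    q1 = _median(s[:len(s) // 2])
    q2 = _median(s)
    q3 = _median(s[len(s) // 2:])
    iqr = q3 - q1
    return (q1 - 1.5 * iqr, q1, q2, q3, q3 + 1.5 * iqr)
# Example: _demo_iqr_whiskers([1, 2, 3, 4, 5, 100]) == (-2.5, 2, 3.5, 5, 9.5)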
|
import logging
from blebox_uniapi.error import BadOnValueError
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_HS_COLOR,
ATTR_WHITE_VALUE,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_WHITE_VALUE,
LightEntity,
)
from homeassistant.util.color import (
color_hs_to_RGB,
color_rgb_to_hex,
color_RGB_to_hs,
rgb_hex_to_rgb_list,
)
from . import BleBoxEntity, create_blebox_entities
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up a BleBox entry."""
create_blebox_entities(
hass, config_entry, async_add_entities, BleBoxLightEntity, "lights"
)
class BleBoxLightEntity(BleBoxEntity, LightEntity):
"""Representation of BleBox lights."""
@property
def supported_features(self):
"""Return supported features."""
white = SUPPORT_WHITE_VALUE if self._feature.supports_white else 0
color = SUPPORT_COLOR if self._feature.supports_color else 0
brightness = SUPPORT_BRIGHTNESS if self._feature.supports_brightness else 0
return white | color | brightness
@property
def is_on(self):
"""Return if light is on."""
return self._feature.is_on
@property
def brightness(self):
"""Return the name."""
return self._feature.brightness
@property
def white_value(self):
"""Return the white value."""
return self._feature.white_value
@property
def hs_color(self):
"""Return the hue and saturation."""
rgbw_hex = self._feature.rgbw_hex
if rgbw_hex is None:
return None
rgb = rgb_hex_to_rgb_list(rgbw_hex)[0:3]
return color_RGB_to_hs(*rgb)
async def async_turn_on(self, **kwargs):
"""Turn the light on."""
white = kwargs.get(ATTR_WHITE_VALUE)
hs_color = kwargs.get(ATTR_HS_COLOR)
brightness = kwargs.get(ATTR_BRIGHTNESS)
feature = self._feature
value = feature.sensible_on_value
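        # Start from the device's sensible "on" value and layer the requested
        # brightness, white level and colour on top of it, in that order.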
if brightness is not None:
value = feature.apply_brightness(value, brightness)
if white is not None:
value = feature.apply_white(value, white)
if hs_color is not None:
raw_rgb = color_rgb_to_hex(*color_hs_to_RGB(*hs_color))
value = feature.apply_color(value, raw_rgb)
try:
await self._feature.async_on(value)
except BadOnValueError as ex:
_LOGGER.error(
"Turning on '%s' failed: Bad value %s (%s)", self.name, value, ex
)
async def async_turn_off(self, **kwargs):
"""Turn the light off."""
await self._feature.async_off()
|
import asyncio
from datetime import timedelta
import logging
from sense_energy import (
ASyncSenseable,
SenseAPITimeoutException,
SenseAuthenticationException,
)
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, CONF_TIMEOUT
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from .const import (
ACTIVE_UPDATE_RATE,
DEFAULT_TIMEOUT,
DOMAIN,
SENSE_DATA,
SENSE_DEVICE_UPDATE,
SENSE_DEVICES_DATA,
SENSE_DISCOVERED_DEVICES_DATA,
SENSE_TIMEOUT_EXCEPTIONS,
SENSE_TRENDS_COORDINATOR,
)
_LOGGER = logging.getLogger(__name__)
PLATFORMS = ["binary_sensor", "sensor"]
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_EMAIL): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
}
)
},
extra=vol.ALLOW_EXTRA,
)
class SenseDevicesData:
"""Data for each sense device."""
def __init__(self):
"""Create."""
self._data_by_device = {}
def set_devices_data(self, devices):
"""Store a device update."""
self._data_by_device = {}
for device in devices:
self._data_by_device[device["id"]] = device
def get_device_by_id(self, sense_device_id):
"""Get the latest device data."""
return self._data_by_device.get(sense_device_id)
async def async_setup(hass: HomeAssistant, config: dict):
"""Set up the Sense component."""
hass.data.setdefault(DOMAIN, {})
conf = config.get(DOMAIN)
if not conf:
return True
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={
CONF_EMAIL: conf[CONF_EMAIL],
CONF_PASSWORD: conf[CONF_PASSWORD],
CONF_TIMEOUT: conf[CONF_TIMEOUT],
},
)
)
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Set up Sense from a config entry."""
entry_data = entry.data
email = entry_data[CONF_EMAIL]
password = entry_data[CONF_PASSWORD]
timeout = entry_data[CONF_TIMEOUT]
gateway = ASyncSenseable(api_timeout=timeout, wss_timeout=timeout)
gateway.rate_limit = ACTIVE_UPDATE_RATE
try:
await gateway.authenticate(email, password)
except SenseAuthenticationException:
_LOGGER.error("Could not authenticate with sense server")
return False
except SENSE_TIMEOUT_EXCEPTIONS as err:
raise ConfigEntryNotReady from err
sense_devices_data = SenseDevicesData()
try:
sense_discovered_devices = await gateway.get_discovered_device_data()
await gateway.update_realtime()
except SENSE_TIMEOUT_EXCEPTIONS as err:
raise ConfigEntryNotReady from err
trends_coordinator = DataUpdateCoordinator(
hass,
_LOGGER,
name=f"Sense Trends {email}",
update_method=gateway.update_trend_data,
update_interval=timedelta(seconds=300),
)
    # This can take longer than 60s, and we already know Sense is online
    # because get_discovered_device_data succeeded, so we schedule the
    # refresh as a background task instead of awaiting it here.
hass.loop.create_task(trends_coordinator.async_request_refresh())
hass.data[DOMAIN][entry.entry_id] = {
SENSE_DATA: gateway,
SENSE_DEVICES_DATA: sense_devices_data,
SENSE_TRENDS_COORDINATOR: trends_coordinator,
SENSE_DISCOVERED_DEVICES_DATA: sense_discovered_devices,
}
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
async def async_sense_update(_):
"""Retrieve latest state."""
try:
await gateway.update_realtime()
except SenseAPITimeoutException:
_LOGGER.error("Timeout retrieving data")
data = gateway.get_realtime()
if "devices" in data:
sense_devices_data.set_devices_data(data["devices"])
async_dispatcher_send(hass, f"{SENSE_DEVICE_UPDATE}-{gateway.sense_monitor_id}")
hass.data[DOMAIN][entry.entry_id][
"track_time_remove_callback"
] = async_track_time_interval(
hass, async_sense_update, timedelta(seconds=ACTIVE_UPDATE_RATE)
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
track_time_remove_callback = hass.data[DOMAIN][entry.entry_id][
"track_time_remove_callback"
]
track_time_remove_callback()
if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
|
import keras
import typing
from matchzoo.engine import hyper_spaces
from matchzoo.engine.param_table import ParamTable
from matchzoo.engine.param import Param
from matchzoo.engine.base_model import BaseModel
class HBMP(BaseModel):
"""
HBMP model.
Examples:
>>> model = HBMP()
>>> model.guess_and_fill_missing_params(verbose=0)
>>> model.params['embedding_input_dim'] = 200
>>> model.params['embedding_output_dim'] = 100
>>> model.params['embedding_trainable'] = True
>>> model.params['alpha'] = 0.1
>>> model.params['mlp_num_layers'] = 3
>>> model.params['mlp_num_units'] = [10, 10]
>>> model.params['lstm_num_units'] = 5
>>> model.params['dropout_rate'] = 0.1
>>> model.build()
"""
@classmethod
def get_default_params(cls) -> ParamTable:
""":return: model default parameters."""
params = super().get_default_params(with_embedding=True)
params['optimizer'] = 'adam'
params.add(Param(name='alpha', value=0.1,
desc="Negative slope coefficient of LeakyReLU "
"function."))
        params.add(Param(name='mlp_num_layers', value=3,
                         desc="The number of layers of the MLP."))
        params.add(Param(name='mlp_num_units', value=[10, 10],
                         desc="The hidden sizes of the FC layers, not "
                              "including the final layer."))
params.add(Param(name='lstm_num_units', value=5,
desc="The hidden size of the LSTM layer."))
params.add(Param(
name='dropout_rate', value=0.1,
hyper_space=hyper_spaces.quniform(
low=0.0, high=0.8, q=0.01),
desc="The dropout rate."
))
return params
def build(self):
"""Build model structure."""
input_left, input_right = self._make_inputs()
embedding = self._make_embedding_layer()
embed_left = embedding(input_left)
embed_right = embedding(input_right)
# Get sentence embedding
embed_sen_left = self._sentence_encoder(
embed_left,
lstm_num_units=self._params['lstm_num_units'],
drop_rate=self._params['dropout_rate'])
embed_sen_right = self._sentence_encoder(
embed_right,
lstm_num_units=self._params['lstm_num_units'],
drop_rate=self._params['dropout_rate'])
# Concatenate two sentence embedding: [embed_sen_left, embed_sen_right,
# |embed_sen_left-embed_sen_right|, embed_sen_left*embed_sen_right]
embed_minus = keras.layers.Subtract()(
[embed_sen_left, embed_sen_right])
embed_minus_abs = keras.layers.Lambda(lambda x: abs(x))(embed_minus)
embed_multiply = keras.layers.Multiply()(
[embed_sen_left, embed_sen_right])
concat = keras.layers.Concatenate(axis=1)(
[embed_sen_left, embed_sen_right, embed_minus_abs, embed_multiply])
        # Multi-layer perceptron classifier
mlp_out = self._classifier(
concat,
mlp_num_layers=self._params['mlp_num_layers'],
mlp_num_units=self._params['mlp_num_units'],
drop_rate=self._params['dropout_rate'],
            leaky_relu_alpha=self._params['alpha'])
out = self._make_output_layer()(mlp_out)
self._backend = keras.Model(
inputs=[input_left, input_right], outputs=out)
def _classifier(
self,
input_: typing.Any,
mlp_num_layers: int,
mlp_num_units: list,
drop_rate: float,
        leaky_relu_alpha: float
) -> typing.Any:
for i in range(mlp_num_layers - 1):
input_ = keras.layers.Dropout(rate=drop_rate)(input_)
input_ = keras.layers.Dense(mlp_num_units[i])(input_)
            input_ = keras.layers.LeakyReLU(alpha=leaky_relu_alpha)(input_)
return input_
def _sentence_encoder(
self,
input_: typing.Any,
lstm_num_units: int,
drop_rate: float
) -> typing.Any:
"""
        Stack three BiLSTM-MaxPooling blocks as a hierarchical structure.
        Concatenate the outputs of the three blocks as the sentence embedding.
        Each BiLSTM layer reads the same input sentence.
        Each BiLSTM layer except the first one is initialized (both the
        initial hidden state and the cell state) with the final state of the
        previous layer.
"""
emb1 = keras.layers.Bidirectional(
keras.layers.LSTM(
units=lstm_num_units,
return_sequences=True,
return_state=True,
dropout=drop_rate,
recurrent_dropout=drop_rate),
merge_mode='concat')(input_)
emb1_maxpooling = keras.layers.GlobalMaxPooling1D()(emb1[0])
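        # With return_state=True, the Bidirectional LSTM above returns
        # [sequence_output, fwd_h, fwd_c, bwd_h, bwd_c]; emb1[0] is the
        # sequence output and emb1[1:5] are the four state tensors reused as
        # initial_state for the next block below.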
emb2 = keras.layers.Bidirectional(
keras.layers.LSTM(
units=lstm_num_units,
return_sequences=True,
return_state=True,
dropout=drop_rate,
recurrent_dropout=drop_rate),
merge_mode='concat')(input_, initial_state=emb1[1:5])
emb2_maxpooling = keras.layers.GlobalMaxPooling1D()(emb2[0])
emb3 = keras.layers.Bidirectional(
keras.layers.LSTM(
units=lstm_num_units,
return_sequences=True,
return_state=True,
dropout=drop_rate,
recurrent_dropout=drop_rate),
merge_mode='concat')(input_, initial_state=emb2[1:5])
emb3_maxpooling = keras.layers.GlobalMaxPooling1D()(emb3[0])
emb = keras.layers.Concatenate(axis=1)(
[emb1_maxpooling, emb2_maxpooling, emb3_maxpooling])
return emb
|
from ReText import globalSettings
from ReText.highlighter import ReTextHighlighter
from PyQt5.QtWidgets import QCheckBox, QDialog, QDialogButtonBox, \
QLabel, QLineEdit, QTextEdit, QVBoxLayout
class HtmlDialog(QDialog):
def __init__(self, parent=None):
QDialog.__init__(self, parent)
self.resize(700, 600)
verticalLayout = QVBoxLayout(self)
self.textEdit = QTextEdit(self)
self.textEdit.setReadOnly(True)
self.textEdit.setFont(globalSettings.editorFont)
self.hl = ReTextHighlighter(self.textEdit.document())
self.hl.docType = 'html'
verticalLayout.addWidget(self.textEdit)
buttonBox = QDialogButtonBox(self)
buttonBox.setStandardButtons(QDialogButtonBox.Close)
buttonBox.rejected.connect(self.close)
verticalLayout.addWidget(buttonBox)
class LocaleDialog(QDialog):
def __init__(self, parent, defaultText=None):
QDialog.__init__(self, parent)
verticalLayout = QVBoxLayout(self)
self.label = QLabel(self)
self.label.setText(self.tr('Enter locale name (example: en_US)'))
verticalLayout.addWidget(self.label)
self.localeEdit = QLineEdit(self)
if defaultText:
self.localeEdit.setText(defaultText)
verticalLayout.addWidget(self.localeEdit)
self.checkBox = QCheckBox(self.tr('Set as default'), self)
verticalLayout.addWidget(self.checkBox)
buttonBox = QDialogButtonBox(self)
buttonBox.setStandardButtons(QDialogButtonBox.Cancel | QDialogButtonBox.Ok)
verticalLayout.addWidget(buttonBox)
buttonBox.accepted.connect(self.accept)
buttonBox.rejected.connect(self.reject)
|
from __future__ import print_function
import errno
import fcntl
import optparse
import os
import shutil
import signal
import sys
import threading
import time
WAIT_TIMEOUT_IN_SEC = 120.0
WAIT_SLEEP_IN_SEC = 5.0
RETRYABLE_SSH_RETCODE = 255
def main():
p = optparse.OptionParser()
p.add_option('-o', '--stdout', dest='stdout',
help="""Read stdout from FILE.""", metavar='FILE')
p.add_option('-e', '--stderr', dest='stderr',
help="""Read stderr from FILE.""", metavar='FILE')
p.add_option('-s', '--status', dest='status', metavar='FILE',
help='Get process exit status from FILE. '
'Will block until a shared lock is acquired on FILE.')
p.add_option('-d', '--delete', dest='delete', action='store_true',
help='Delete stdout, stderr, and status files when finished.')
p.add_option(
'-x',
'--exclusive',
dest='exclusive',
help='Will block until FILE exists to ensure that status is ready to be '
'read. Required.',
metavar='FILE')
options, args = p.parse_args()
if args:
sys.stderr.write('Unexpected arguments: {0}\n'.format(args))
return 1
missing = []
for option in ('status', 'exclusive'):
if getattr(options, option) is None:
missing.append(option)
if missing:
p.print_usage()
msg = 'Missing required flag(s): {0}\n'.format(
', '.join('--' + i for i in missing))
sys.stderr.write(msg)
return 1
start = time.time()
return_code_str = None
while time.time() < WAIT_TIMEOUT_IN_SEC + start:
try:
with open(options.exclusive, 'r'):
with open(options.status, 'r'):
break
except IOError as e:
print('WARNING: file doesn\'t exist, retrying: %s' % e, file=sys.stderr)
time.sleep(WAIT_SLEEP_IN_SEC)
signal.signal(signal.SIGALRM, lambda signum, frame: None)
signal.alarm(int(WAIT_TIMEOUT_IN_SEC))
with open(options.status, 'r') as status:
try:
fcntl.lockf(status, fcntl.LOCK_SH)
except IOError as e:
if e.errno == errno.EINTR:
print('Wait timed out. This will be retried with a subsequent wait.')
return 0
elif e.errno == errno.ECONNREFUSED:
print('Connection refused during wait. '
'This will be retried with a subsequent wait.')
return 0
elif e.errno in (errno.EAGAIN, errno.EACCES):
print('Status currently being modified and cannot be read right now. '
'This will be retried with a subsequent wait.')
return 0
raise e
signal.alarm(0)
return_code_str = status.read()
if not (options.stdout and options.stderr):
print('Command finished.')
return 0
with open(options.stdout, 'r') as stdout:
with open(options.stderr, 'r') as stderr:
if return_code_str:
return_code = int(return_code_str)
else:
print('WARNING: wrapper script interrupted.', file=sys.stderr)
return_code = 1
      # RemoteCommand treats a return code of 255 as a temporary SSH failure
      # and retries it. Here the long-running command itself returned 255, so
      # map it to 1 to avoid a spurious retry.
if return_code == RETRYABLE_SSH_RETCODE:
print('WARNING: command returned 255.', file=sys.stderr)
return_code = 1
stderr_copier = threading.Thread(target=shutil.copyfileobj,
args=[stderr, sys.stderr],
name='stderr-copier')
stderr_copier.daemon = True
stderr_copier.start()
try:
shutil.copyfileobj(stdout, sys.stdout)
finally:
stderr_copier.join()
if options.delete:
for f in [options.stdout, options.stderr, options.status]:
os.unlink(f)
return return_code
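# --- Illustrative sketch (not part of the wrapper itself) --------------------
# Distils the SIGALRM-bounded shared-lock pattern used in main() into a
# standalone helper. The helper name is hypothetical; it reuses the fcntl,
# signal and errno imports at the top of this module.
def _read_status_with_timeout(path, timeout_sec=int(WAIT_TIMEOUT_IN_SEC)):
  """Return the contents of the status file, or None if the lock timed out."""
  signal.signal(signal.SIGALRM, lambda signum, frame: None)
  signal.alarm(timeout_sec)
  try:
    with open(path, 'r') as status:
      try:
        fcntl.lockf(status, fcntl.LOCK_SH)
      except IOError as e:
        if e.errno == errno.EINTR:
          return None  # The alarm fired while waiting to acquire the lock.
        raise
      return status.read()
  finally:
    signal.alarm(0)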
if __name__ == '__main__':
sys.exit(main())
|
import logging
from pykodi import CannotConnectError, InvalidAuthError, Kodi, get_kodi_connection
import voluptuous as vol
from homeassistant import config_entries, core, exceptions
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_SSL,
CONF_TIMEOUT,
CONF_USERNAME,
)
from homeassistant.core import callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.typing import DiscoveryInfoType, Optional
from .const import (
CONF_WS_PORT,
DEFAULT_PORT,
DEFAULT_SSL,
DEFAULT_TIMEOUT,
DEFAULT_WS_PORT,
)
from .const import DOMAIN # pylint:disable=unused-import
_LOGGER = logging.getLogger(__name__)
async def validate_http(hass: core.HomeAssistant, data):
"""Validate the user input allows us to connect over HTTP."""
host = data[CONF_HOST]
port = data[CONF_PORT]
username = data.get(CONF_USERNAME)
password = data.get(CONF_PASSWORD)
ssl = data.get(CONF_SSL)
session = async_get_clientsession(hass)
_LOGGER.debug("Connecting to %s:%s over HTTP", host, port)
khc = get_kodi_connection(
host, port, None, username, password, ssl, session=session
)
kodi = Kodi(khc)
try:
await kodi.ping()
except CannotConnectError as error:
raise CannotConnect from error
except InvalidAuthError as error:
raise InvalidAuth from error
async def validate_ws(hass: core.HomeAssistant, data):
"""Validate the user input allows us to connect over WS."""
ws_port = data.get(CONF_WS_PORT)
if not ws_port:
return
host = data[CONF_HOST]
port = data[CONF_PORT]
username = data.get(CONF_USERNAME)
password = data.get(CONF_PASSWORD)
ssl = data.get(CONF_SSL)
session = async_get_clientsession(hass)
_LOGGER.debug("Connecting to %s:%s over WebSocket", host, ws_port)
kwc = get_kodi_connection(
host, port, ws_port, username, password, ssl, session=session
)
try:
await kwc.connect()
if not kwc.connected:
_LOGGER.warning("Cannot connect to %s:%s over WebSocket", host, ws_port)
raise WSCannotConnect()
kodi = Kodi(kwc)
await kodi.ping()
except CannotConnectError as error:
raise WSCannotConnect from error
class KodiConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Kodi."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL
def __init__(self):
"""Initialize flow."""
self._host: Optional[str] = None
self._port: Optional[int] = DEFAULT_PORT
self._ws_port: Optional[int] = DEFAULT_WS_PORT
self._name: Optional[str] = None
self._username: Optional[str] = None
self._password: Optional[str] = None
self._ssl: Optional[bool] = DEFAULT_SSL
self._discovery_name: Optional[str] = None
async def async_step_zeroconf(self, discovery_info: DiscoveryInfoType):
"""Handle zeroconf discovery."""
self._host = discovery_info["host"]
self._port = int(discovery_info["port"])
self._name = discovery_info["hostname"][: -len(".local.")]
uuid = discovery_info["properties"]["uuid"]
self._discovery_name = discovery_info["name"]
await self.async_set_unique_id(uuid)
self._abort_if_unique_id_configured(
updates={
CONF_HOST: self._host,
CONF_PORT: self._port,
CONF_NAME: self._name,
}
)
# pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167
self.context.update({"title_placeholders": {CONF_NAME: self._name}})
try:
await validate_http(self.hass, self._get_data())
await validate_ws(self.hass, self._get_data())
except InvalidAuth:
return await self.async_step_credentials()
except WSCannotConnect:
return await self.async_step_ws_port()
except CannotConnect:
return self.async_abort(reason="cannot_connect")
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
return self.async_abort(reason="unknown")
return await self.async_step_discovery_confirm()
async def async_step_discovery_confirm(self, user_input=None):
"""Handle user-confirmation of discovered node."""
if user_input is None:
return self.async_show_form(
step_id="discovery_confirm",
description_placeholders={"name": self._name},
)
return self._create_entry()
async def async_step_user(self, user_input=None):
"""Handle the initial step."""
errors = {}
if user_input is not None:
self._host = user_input[CONF_HOST]
self._port = user_input[CONF_PORT]
self._ssl = user_input[CONF_SSL]
try:
await validate_http(self.hass, self._get_data())
await validate_ws(self.hass, self._get_data())
except InvalidAuth:
return await self.async_step_credentials()
except WSCannotConnect:
return await self.async_step_ws_port()
except CannotConnect:
errors["base"] = "cannot_connect"
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return self._create_entry()
return self._show_user_form(errors)
async def async_step_credentials(self, user_input=None):
"""Handle username and password input."""
errors = {}
if user_input is not None:
self._username = user_input.get(CONF_USERNAME)
self._password = user_input.get(CONF_PASSWORD)
try:
await validate_http(self.hass, self._get_data())
await validate_ws(self.hass, self._get_data())
except InvalidAuth:
errors["base"] = "invalid_auth"
except WSCannotConnect:
return await self.async_step_ws_port()
except CannotConnect:
errors["base"] = "cannot_connect"
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return self._create_entry()
return self._show_credentials_form(errors)
async def async_step_ws_port(self, user_input=None):
"""Handle websocket port of discovered node."""
errors = {}
if user_input is not None:
self._ws_port = user_input.get(CONF_WS_PORT)
# optional ints return 0 rather than None when empty
if self._ws_port == 0:
self._ws_port = None
try:
await validate_ws(self.hass, self._get_data())
except WSCannotConnect:
errors["base"] = "cannot_connect"
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return self._create_entry()
return self._show_ws_port_form(errors)
async def async_step_import(self, data):
"""Handle import from YAML."""
reason = None
try:
await validate_http(self.hass, data)
await validate_ws(self.hass, data)
except InvalidAuth:
_LOGGER.exception("Invalid Kodi credentials")
reason = "invalid_auth"
except CannotConnect:
_LOGGER.exception("Cannot connect to Kodi")
reason = "cannot_connect"
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
reason = "unknown"
else:
return self.async_create_entry(title=data[CONF_NAME], data=data)
return self.async_abort(reason=reason)
@callback
def _show_credentials_form(self, errors=None):
schema = vol.Schema(
{
vol.Optional(
CONF_USERNAME, description={"suggested_value": self._username}
): str,
vol.Optional(
CONF_PASSWORD, description={"suggested_value": self._password}
): str,
}
)
return self.async_show_form(
step_id="credentials", data_schema=schema, errors=errors or {}
)
@callback
def _show_user_form(self, errors=None):
default_port = self._port or DEFAULT_PORT
default_ssl = self._ssl or DEFAULT_SSL
schema = vol.Schema(
{
vol.Required(CONF_HOST, default=self._host): str,
vol.Required(CONF_PORT, default=default_port): int,
vol.Required(CONF_SSL, default=default_ssl): bool,
}
)
return self.async_show_form(
step_id="user", data_schema=schema, errors=errors or {}
)
@callback
def _show_ws_port_form(self, errors=None):
suggestion = self._ws_port or DEFAULT_WS_PORT
schema = vol.Schema(
{
vol.Optional(
CONF_WS_PORT, description={"suggested_value": suggestion}
): int
}
)
return self.async_show_form(
step_id="ws_port", data_schema=schema, errors=errors or {}
)
@callback
def _create_entry(self):
return self.async_create_entry(
title=self._name or self._host,
data=self._get_data(),
)
@callback
def _get_data(self):
data = {
CONF_NAME: self._name,
CONF_HOST: self._host,
CONF_PORT: self._port,
CONF_WS_PORT: self._ws_port,
CONF_USERNAME: self._username,
CONF_PASSWORD: self._password,
CONF_SSL: self._ssl,
CONF_TIMEOUT: DEFAULT_TIMEOUT,
}
return data
class CannotConnect(exceptions.HomeAssistantError):
"""Error to indicate we cannot connect."""
class InvalidAuth(exceptions.HomeAssistantError):
"""Error to indicate there is invalid auth."""
class WSCannotConnect(exceptions.HomeAssistantError):
"""Error to indicate we cannot connect to websocket."""
|
from collections import OrderedDict
from datetime import datetime, timezone
from pathlib import Path
import numpy as np
from ...utils import fill_doc, logger, verbose, warn
from ..base import BaseRaw
from ..meas_info import create_info
from ...annotations import Annotations
from ..utils import _mult_cal_one
def _ensure_path(fname):
out = fname
if not isinstance(out, Path):
out = Path(out)
return out
@fill_doc
def read_raw_nihon(fname, preload=False, verbose=None):
"""Reader for an Nihon Kohden EEG file.
Parameters
----------
fname : str
Path to the Nihon Kohden data file (``.EEG``).
preload : bool
If True, all data are loaded at initialization.
%(verbose)s
Returns
-------
raw : instance of RawNihon
A Raw object containing Nihon Kohden data.
See Also
--------
mne.io.Raw : Documentation of attribute and methods.
"""
return RawNihon(fname, preload, verbose)
_valid_headers = [
'EEG-1100A V01.00',
'EEG-1100B V01.00',
'EEG-1100C V01.00',
'QI-403A V01.00',
'QI-403A V02.00',
'EEG-2100 V01.00',
'EEG-2100 V02.00',
'DAE-2100D V01.30',
'DAE-2100D V02.00',
# 'EEG-1200A V01.00', # Not working for the moment.
]
def _read_nihon_metadata(fname):
metadata = {}
fname = _ensure_path(fname)
pnt_fname = fname.with_suffix('.PNT')
if not pnt_fname.exists():
warn('No PNT file exists. Metadata will be blank')
return metadata
logger.info('Found PNT file, reading metadata.')
with open(pnt_fname, 'r') as fid:
version = np.fromfile(fid, '|S16', 1).astype('U16')[0]
if version not in _valid_headers:
raise ValueError(f'Not a valid Nihon Kohden PNT file ({version})')
metadata['version'] = version
# Read timestamp
fid.seek(0x40)
meas_str = np.fromfile(fid, '|S14', 1).astype('U14')[0]
meas_date = datetime.strptime(meas_str, '%Y%m%d%H%M%S')
meas_date = meas_date.replace(tzinfo=timezone.utc)
metadata['meas_date'] = meas_date
return metadata
_default_chan_labels = [
'FP1', 'FP2', 'F3', 'F4', 'C3', 'C4', 'P3', 'P4', 'O1', 'O2', 'F7', 'F8',
'T3', 'T4', 'T5', 'T6', 'FZ', 'CZ', 'PZ', 'E', 'PG1', 'PG2', 'A1', 'A2',
'T1', 'T2'
]
_default_chan_labels += [f'X{i}' for i in range(1, 12)]
_default_chan_labels += [f'NA{i}' for i in range(1, 6)]
_default_chan_labels += [f'DC{i:02}' for i in range(1, 33)]
_default_chan_labels += ['BN1', 'BN2', 'Mark1', 'Mark2']
_default_chan_labels += [f'NA{i}' for i in range(6, 28)]
_default_chan_labels += ['X12/BP1', 'X13/BP2', 'X14/BP3', 'X15/BP4']
_default_chan_labels += [f'X{i}' for i in range(16, 166)]
_default_chan_labels += ['NA28', 'Z']
def _read_21e_file(fname):
fname = _ensure_path(fname)
e_fname = fname.with_suffix('.21E')
_chan_labels = [x for x in _default_chan_labels]
if e_fname.exists():
# Read the 21E file and update the labels accordingly.
logger.info('Found 21E file, reading channel names.')
with open(e_fname, 'r') as fid:
keep_parsing = False
for line in fid:
if line.startswith('['):
if 'ELECTRODE' in line or 'REFERENCE' in line:
keep_parsing = True
else:
keep_parsing = False
elif keep_parsing is True:
idx, name = line.split('=')
idx = int(idx)
if idx >= len(_chan_labels):
n = idx - len(_chan_labels) + 1
_chan_labels.extend(['UNK'] * n)
_chan_labels[idx] = name.strip()
return _chan_labels
def _read_nihon_header(fname):
# Read the Nihon Kohden EEG file header
fname = _ensure_path(fname)
_chan_labels = _read_21e_file(fname)
header = {}
logger.info(f'Reading header from {fname}')
with open(fname, 'r') as fid:
version = np.fromfile(fid, '|S16', 1).astype('U16')[0]
if version not in _valid_headers:
raise ValueError(
'Not a valid Nihon Kohden EEG file ({})'.format(version))
fid.seek(0x0081)
control_block = np.fromfile(fid, '|S16', 1).astype('U16')[0]
if control_block not in _valid_headers:
raise ValueError('Not a valid Nihon Kohden EEG file '
                             '(control block {})'.format(control_block))
fid.seek(0x17fe)
waveform_sign = np.fromfile(fid, np.uint8, 1)[0]
if waveform_sign != 1:
raise ValueError('Not a valid Nihon Kohden EEG file '
'(waveform block)')
header['version'] = version
fid.seek(0x0091)
n_ctlblocks = np.fromfile(fid, np.uint8, 1)[0]
header['n_ctlblocks'] = n_ctlblocks
controlblocks = []
for i_ctl_block in range(n_ctlblocks):
t_controlblock = {}
fid.seek(0x0092 + i_ctl_block * 20)
t_ctl_address = np.fromfile(fid, np.uint32, 1)[0]
t_controlblock['address'] = t_ctl_address
fid.seek(t_ctl_address + 17)
n_datablocks = np.fromfile(fid, np.uint8, 1)[0]
t_controlblock['n_datablocks'] = n_datablocks
t_controlblock['datablocks'] = []
for i_data_block in range(n_datablocks):
t_datablock = {}
fid.seek(t_ctl_address + i_data_block * 20 + 18)
t_data_address = np.fromfile(fid, np.uint32, 1)[0]
t_datablock['address'] = t_data_address
fid.seek(t_data_address + 0x26)
t_n_channels = np.fromfile(fid, np.uint8, 1)[0]
t_datablock['n_channels'] = t_n_channels
t_channels = []
for i_ch in range(t_n_channels):
fid.seek(t_data_address + 0x27 + (i_ch * 10))
t_idx = np.fromfile(fid, np.uint8, 1)[0]
t_channels.append(_chan_labels[t_idx])
t_datablock['channels'] = t_channels
fid.seek(t_data_address + 0x1C)
t_record_duration = np.fromfile(fid, np.uint32, 1)[0]
t_datablock['duration'] = t_record_duration
fid.seek(t_data_address + 0x1a)
sfreq = np.fromfile(fid, np.uint16, 1)[0] & 0x3FFF
t_datablock['sfreq'] = sfreq
t_datablock['n_samples'] = int(t_record_duration * sfreq / 10)
t_controlblock['datablocks'].append(t_datablock)
controlblocks.append(t_controlblock)
header['controlblocks'] = controlblocks
# Now check that every data block has the same channels and sfreq
chans = []
sfreqs = []
nsamples = []
for t_ctl in header['controlblocks']:
for t_dtb in t_ctl['datablocks']:
chans.append(t_dtb['channels'])
sfreqs.append(t_dtb['sfreq'])
nsamples.append(t_dtb['n_samples'])
for i_elem in range(1, len(chans)):
if chans[0] != chans[i_elem]:
raise ValueError('Channel names in datablocks do not match')
if sfreqs[0] != sfreqs[i_elem]:
            raise ValueError('Sample frequencies in datablocks do not match')
header['ch_names'] = chans[0]
header['sfreq'] = sfreqs[0]
header['n_samples'] = np.sum(nsamples)
# TODO: Support more than one controlblock and more than one datablock
if header['n_ctlblocks'] != 1:
        raise NotImplementedError("I don't know how to read more than one "
                                  "control block for this type of file :(")
    if header['controlblocks'][0]['n_datablocks'] != 1:
        raise NotImplementedError("I don't know how to read more than one "
                                  "data block for this type of file :(")
return header
def _read_nihon_annotations(fname, orig_time):
fname = _ensure_path(fname)
annotations = None
log_fname = fname.with_suffix('.LOG')
if not log_fname.exists():
warn('No LOG file exists. Annotations will not be read')
return annotations
logger.info('Found LOG file, reading events.')
with open(log_fname, 'r') as fid:
version = np.fromfile(fid, '|S16', 1).astype('U16')[0]
if version not in _valid_headers:
raise ValueError(
'Not a valid Nihon Kohden LOG file ({})'.format(version))
fid.seek(0x91)
n_logblocks = np.fromfile(fid, np.uint8, 1)[0]
all_onsets = []
all_descriptions = []
for t_block in range(n_logblocks):
fid.seek(0x92 + t_block * 20)
t_blk_address = np.fromfile(fid, np.uint32, 1)[0]
fid.seek(t_blk_address + 0x12)
n_logs = np.fromfile(fid, np.uint8, 1)[0]
fid.seek(t_blk_address + 0x14)
t_logs = np.fromfile(fid, '|S45', n_logs).astype('U45')
for t_log in t_logs:
t_desc = t_log[:20].strip('\x00')
t_onset = datetime.strptime(t_log[20:26], '%H%M%S')
t_onset = (t_onset.hour * 3600 + t_onset.minute * 60 +
t_onset.second)
all_onsets.append(t_onset)
all_descriptions.append(t_desc)
annotations = Annotations(all_onsets, 0.0, all_descriptions, orig_time)
return annotations
def _map_ch_to_type(ch_name):
ch_type_pattern = OrderedDict([
('stim', ('Mark',)), ('misc', ('DC', 'NA', 'Z')), ('bio', ('X',))])
for key, kinds in ch_type_pattern.items():
if any(kind in ch_name for kind in kinds):
return key
return 'eeg'
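# Illustrative mapping examples for _map_ch_to_type (hypothetical names):
#   'Mark1' -> 'stim', 'DC05' -> 'misc', 'X23' -> 'bio', 'FP1' -> 'eeg'.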
def _map_ch_to_specs(ch_name):
unit_mult = 1e-3
phys_min = -12002.9
phys_max = 12002.56
dig_min = -32768
if ch_name.upper() in _default_chan_labels:
idx = _default_chan_labels.index(ch_name.upper())
if (idx < 42 or idx > 73) and idx not in [76, 77]:
unit_mult = 1e-6
phys_min = -3200
phys_max = 3199.902
t_range = phys_max - phys_min
cal = t_range / 65535
offset = phys_min - (dig_min * cal)
out = dict(unit=unit_mult, phys_min=phys_min, phys_max=phys_max,
dig_min=dig_min, cal=cal, offset=offset)
return out
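# Illustrative numbers for a standard EEG channel such as 'FP1' (using the
# defaults above): cal = (3199.902 - (-3200)) / 65535 ~= 0.09766 and
# offset = -3200 - (-32768 * cal) ~= 0.0, so digital counts are rescaled to
# physical values before the 1e-6 unit multiplier is applied.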
@fill_doc
class RawNihon(BaseRaw):
"""Raw object from a Nihon Kohden EEG file.
Parameters
----------
fname : str
Path to the Nihon Kohden data file (.eeg).
preload : bool
If True, all data are loaded at initialization.
%(verbose)s
See Also
--------
mne.io.Raw : Documentation of attribute and methods.
"""
@verbose
def __init__(self, fname, preload=False, verbose=None):
fname = _ensure_path(fname)
data_name = fname.name
logger.info('Loading %s' % data_name)
header = _read_nihon_header(fname)
metadata = _read_nihon_metadata(fname)
# n_chan = len(header['ch_names']) + 1
sfreq = header['sfreq']
# data are multiplexed int16
ch_names = header['ch_names']
ch_types = [_map_ch_to_type(x) for x in ch_names]
info = create_info(ch_names, sfreq, ch_types)
n_samples = header['n_samples']
if 'meas_date' in metadata:
info['meas_date'] = metadata['meas_date']
chs = {x: _map_ch_to_specs(x) for x in ch_names}
orig_ch_names = header['ch_names']
cal = np.array(
[chs[x]['cal'] for x in orig_ch_names], float)[:, np.newaxis]
offsets = np.array(
[chs[x]['offset'] for x in orig_ch_names], float)[:, np.newaxis]
gains = np.array(
[chs[x]['unit'] for x in orig_ch_names], float)[:, np.newaxis]
raw_extras = dict(
cal=cal, offsets=offsets, gains=gains, header=header)
self._header = header
for i_ch, ch_name in enumerate(info['ch_names']):
t_range = (chs[ch_name]['phys_max'] - chs[ch_name]['phys_min'])
info['chs'][i_ch]['range'] = t_range
info['chs'][i_ch]['cal'] = 1 / t_range
super(RawNihon, self).__init__(
info, preload=preload, last_samps=(n_samples - 1,),
filenames=[fname.as_posix()], orig_format='short',
raw_extras=[raw_extras])
# Get annotations from LOG file
annots = _read_nihon_annotations(fname, orig_time=info['meas_date'])
self.set_annotations(annots)
def _read_segment_file(self, data, idx, fi, start, stop, cals, mult):
"""Read a chunk of raw data."""
# For now we assume one control block and one data block.
header = self._raw_extras[fi]['header']
# Get the original cal, offsets and gains
cal = self._raw_extras[fi]['cal']
offsets = self._raw_extras[fi]['offsets']
gains = self._raw_extras[fi]['gains']
datablock = header['controlblocks'][0]['datablocks'][0]
n_channels = datablock['n_channels'] + 1
datastart = (datablock['address'] + 0x27 +
(datablock['n_channels'] * 10))
with open(self._filenames[fi], 'rb') as fid:
start_offset = datastart + start * n_channels * 2
to_read = (stop - start) * n_channels
fid.seek(start_offset)
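            # Samples appear to be stored as offset-binary uint16; adding
            # 0x8000 (with uint16 wrap-around) and casting to int16 recovers
            # the signed values.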
block_data = np.fromfile(fid, '<u2', to_read) + 0x8000
block_data = block_data.astype(np.int16)
block_data = block_data.reshape(n_channels, -1, order='F')
block_data = block_data[:-1] * cal # cast to float64
block_data += offsets
block_data *= gains
_mult_cal_one(data, block_data, idx, cals, mult)
|
from dataclasses import dataclass
from datetime import datetime
import pytest
import homeassistant.components.google_pubsub as google_pubsub
from homeassistant.components.google_pubsub import DateTimeJSONEncoder as victim
from homeassistant.const import EVENT_STATE_CHANGED
from homeassistant.core import split_entity_id
from homeassistant.setup import async_setup_component
import tests.async_mock as mock
GOOGLE_PUBSUB_PATH = "homeassistant.components.google_pubsub"
@dataclass
class FilterTest:
"""Class for capturing a filter test."""
id: str
should_pass: bool
async def test_datetime():
"""Test datetime encoding."""
time = datetime(2019, 1, 13, 12, 30, 5)
assert victim().encode(time) == '"2019-01-13T12:30:05"'
async def test_no_datetime():
"""Test integer encoding."""
assert victim().encode(42) == "42"
async def test_nested():
"""Test dictionary encoding."""
assert victim().encode({"foo": "bar"}) == '{"foo": "bar"}'
@pytest.fixture(autouse=True, name="mock_client")
def mock_client_fixture():
"""Mock the pubsub client."""
with mock.patch(f"{GOOGLE_PUBSUB_PATH}.pubsub_v1") as client:
client.PublisherClient = mock.MagicMock()
setattr(
client.PublisherClient,
"from_service_account_json",
mock.MagicMock(return_value=mock.MagicMock()),
)
yield client
@pytest.fixture(autouse=True, name="mock_os")
def mock_os_fixture():
"""Mock the OS cli."""
with mock.patch(f"{GOOGLE_PUBSUB_PATH}.os") as os_cli:
os_cli.path = mock.MagicMock()
setattr(os_cli.path, "join", mock.MagicMock(return_value="path"))
yield os_cli
@pytest.fixture(autouse=True)
def mock_bus_and_json(hass, monkeypatch):
"""Mock the event bus listener and os component."""
hass.bus.listen = mock.MagicMock()
monkeypatch.setattr(
f"{GOOGLE_PUBSUB_PATH}.json.dumps", mock.Mock(return_value=mock.MagicMock())
)
async def test_minimal_config(hass, mock_client):
"""Test the minimal config and defaults of component."""
config = {
google_pubsub.DOMAIN: {
"project_id": "proj",
"topic_name": "topic",
"credentials_json": "creds",
"filter": {},
}
}
assert await async_setup_component(hass, google_pubsub.DOMAIN, config)
await hass.async_block_till_done()
assert hass.bus.listen.called
assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0]
assert mock_client.PublisherClient.from_service_account_json.call_count == 1
assert (
mock_client.PublisherClient.from_service_account_json.call_args[0][0] == "path"
)
async def test_full_config(hass, mock_client):
"""Test the full config of the component."""
config = {
google_pubsub.DOMAIN: {
"project_id": "proj",
"topic_name": "topic",
"credentials_json": "creds",
"filter": {
"include_domains": ["light"],
"include_entity_globs": ["sensor.included_*"],
"include_entities": ["binary_sensor.included"],
"exclude_domains": ["light"],
"exclude_entity_globs": ["sensor.excluded_*"],
"exclude_entities": ["binary_sensor.excluded"],
},
}
}
assert await async_setup_component(hass, google_pubsub.DOMAIN, config)
await hass.async_block_till_done()
assert hass.bus.listen.called
assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0]
assert mock_client.PublisherClient.from_service_account_json.call_count == 1
assert (
mock_client.PublisherClient.from_service_account_json.call_args[0][0] == "path"
)
def make_event(entity_id):
"""Make a mock event for test."""
domain = split_entity_id(entity_id)[0]
state = mock.MagicMock(
state="not blank",
domain=domain,
entity_id=entity_id,
object_id="entity",
attributes={},
)
return mock.MagicMock(data={"new_state": state}, time_fired=12345)
async def _setup(hass, filter_config):
"""Shared set up for filtering tests."""
config = {
google_pubsub.DOMAIN: {
"project_id": "proj",
"topic_name": "topic",
"credentials_json": "creds",
"filter": filter_config,
}
}
assert await async_setup_component(hass, google_pubsub.DOMAIN, config)
await hass.async_block_till_done()
return hass.bus.listen.call_args_list[0][0][1]
async def test_allowlist(hass, mock_client):
"""Test an allowlist only config."""
handler_method = await _setup(
hass,
{
"include_domains": ["light"],
"include_entity_globs": ["sensor.included_*"],
"include_entities": ["binary_sensor.included"],
},
)
publish_client = mock_client.PublisherClient.from_service_account_json("path")
tests = [
FilterTest("climate.excluded", False),
FilterTest("light.included", True),
FilterTest("sensor.excluded_test", False),
FilterTest("sensor.included_test", True),
FilterTest("binary_sensor.included", True),
FilterTest("binary_sensor.excluded", False),
]
for test in tests:
event = make_event(test.id)
handler_method(event)
was_called = publish_client.publish.call_count == 1
assert test.should_pass == was_called
publish_client.publish.reset_mock()
async def test_denylist(hass, mock_client):
"""Test a denylist only config."""
handler_method = await _setup(
hass,
{
"exclude_domains": ["climate"],
"exclude_entity_globs": ["sensor.excluded_*"],
"exclude_entities": ["binary_sensor.excluded"],
},
)
publish_client = mock_client.PublisherClient.from_service_account_json("path")
tests = [
FilterTest("climate.excluded", False),
FilterTest("light.included", True),
FilterTest("sensor.excluded_test", False),
FilterTest("sensor.included_test", True),
FilterTest("binary_sensor.included", True),
FilterTest("binary_sensor.excluded", False),
]
for test in tests:
event = make_event(test.id)
handler_method(event)
was_called = publish_client.publish.call_count == 1
assert test.should_pass == was_called
publish_client.publish.reset_mock()
async def test_filtered_allowlist(hass, mock_client):
"""Test an allowlist config with a filtering denylist."""
handler_method = await _setup(
hass,
{
"include_domains": ["light"],
"include_entity_globs": ["*.included_*"],
"exclude_domains": ["climate"],
"exclude_entity_globs": ["*.excluded_*"],
"exclude_entities": ["light.excluded"],
},
)
publish_client = mock_client.PublisherClient.from_service_account_json("path")
tests = [
FilterTest("light.included", True),
FilterTest("light.excluded_test", False),
FilterTest("light.excluded", False),
FilterTest("sensor.included_test", True),
FilterTest("climate.included_test", False),
]
for test in tests:
event = make_event(test.id)
handler_method(event)
was_called = publish_client.publish.call_count == 1
assert test.should_pass == was_called
publish_client.publish.reset_mock()
async def test_filtered_denylist(hass, mock_client):
"""Test a denylist config with a filtering allowlist."""
handler_method = await _setup(
hass,
{
"include_entities": ["climate.included", "sensor.excluded_test"],
"exclude_domains": ["climate"],
"exclude_entity_globs": ["*.excluded_*"],
"exclude_entities": ["light.excluded"],
},
)
publish_client = mock_client.PublisherClient.from_service_account_json("path")
tests = [
FilterTest("climate.excluded", False),
FilterTest("climate.included", True),
FilterTest("switch.excluded_test", False),
FilterTest("sensor.excluded_test", True),
FilterTest("light.excluded", False),
FilterTest("light.included", True),
]
for test in tests:
event = make_event(test.id)
handler_method(event)
was_called = publish_client.publish.call_count == 1
assert test.should_pass == was_called
publish_client.publish.reset_mock()
|
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
# pylint: disable=unused-import
from . import DOMAIN
CONF_STRING = "string"
CONF_BOOLEAN = "bool"
CONF_INT = "int"
CONF_SELECT = "select"
CONF_MULTISELECT = "multi"
class DemoConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Demo configuration flow."""
VERSION = 1
@staticmethod
@callback
def async_get_options_flow(config_entry):
"""Get the options flow for this handler."""
return OptionsFlowHandler(config_entry)
async def async_step_import(self, import_info):
"""Set the config entry up from yaml."""
return self.async_create_entry(title="Demo", data={})
class OptionsFlowHandler(config_entries.OptionsFlow):
"""Handle options."""
def __init__(self, config_entry):
"""Initialize options flow."""
self.config_entry = config_entry
self.options = dict(config_entry.options)
async def async_step_init(self, user_input=None):
"""Manage the options."""
return await self.async_step_options_1()
async def async_step_options_1(self, user_input=None):
"""Manage the options."""
if user_input is not None:
self.options.update(user_input)
return await self.async_step_options_2()
return self.async_show_form(
step_id="options_1",
data_schema=vol.Schema(
{
vol.Required("constant"): "Constant Value",
vol.Optional(
CONF_BOOLEAN,
default=self.config_entry.options.get(CONF_BOOLEAN, False),
): bool,
vol.Optional(
CONF_INT,
default=self.config_entry.options.get(CONF_INT, 10),
): int,
}
),
)
async def async_step_options_2(self, user_input=None):
"""Manage the options 2."""
if user_input is not None:
self.options.update(user_input)
return await self._update_options()
return self.async_show_form(
step_id="options_2",
data_schema=vol.Schema(
{
vol.Optional(
CONF_STRING,
default=self.config_entry.options.get(
CONF_STRING,
"Default",
),
): str,
vol.Optional(
CONF_SELECT,
default=self.config_entry.options.get(CONF_SELECT, "default"),
): vol.In(["default", "other"]),
vol.Optional(
CONF_MULTISELECT,
default=self.config_entry.options.get(
CONF_MULTISELECT, ["default"]
),
): cv.multi_select({"default": "Default", "other": "Other"}),
}
),
)
async def _update_options(self):
"""Update config entry options."""
return self.async_create_entry(title="", data=self.options)
|
import os
import sys
import platform
from psdash.log import LogReader
import socket
import tempfile
import unittest2
import time
import psutil
from psdash.node import LocalNode
class TestNode(unittest2.TestCase):
def setUp(self):
self.node = LocalNode()
self.service = self.node.get_service()
def test_get_uptime(self):
sysinfo = self.service.get_sysinfo()
uptime = int(time.time() - psutil.boot_time())
self.assertEqual(sysinfo['uptime'], uptime)
def test_get_hostname(self):
sysinfo = self.service.get_sysinfo()
self.assertEqual(sysinfo['hostname'], socket.gethostname())
def test_get_os_info(self):
sysinfo = self.service.get_sysinfo()
self.assertEqual(sysinfo['os'], platform.platform())
def test_get_load_avg(self):
sysinfo = self.service.get_sysinfo()
self.assertEqual(len(sysinfo['load_avg']), 3)
self.assertTrue(isinstance(sysinfo['load_avg'][0], float))
self.assertTrue(isinstance(sysinfo['load_avg'][1], float))
self.assertTrue(isinstance(sysinfo['load_avg'][2], float))
def test_get_cpu_count(self):
sysinfo = self.service.get_sysinfo()
self.assertEqual(sysinfo['num_cpus'], psutil.cpu_count())
def test_get_memory_total(self):
mem = self.service.get_memory()
self.assertEqual(mem['total'], psutil.virtual_memory().total)
def test_get_memory_free(self):
mem = self.service.get_memory()
self.assertIn('free', mem)
def test_get_memory_available(self):
mem = self.service.get_memory()
self.assertIn('available', mem)
def test_get_memory_used(self):
mem = self.service.get_memory()
self.assertIn('used', mem)
def test_get_memory_percent(self):
mem = self.service.get_memory()
self.assertIn('percent', mem)
self.assertLessEqual(mem['percent'], 100)
self.assertGreaterEqual(mem['percent'], 0)
self.assertIsInstance(mem['percent'], float)
def test_get_swap_total(self):
swap = self.service.get_swap_space()
self.assertEqual(swap['total'], psutil.swap_memory().total)
def test_get_swap_free(self):
swap = self.service.get_swap_space()
self.assertEqual(swap['free'], psutil.swap_memory().free)
def test_get_swap_used(self):
swap = self.service.get_swap_space()
self.assertEqual(swap['used'], psutil.swap_memory().used)
def test_get_swap_percent(self):
swap = self.service.get_swap_space()
self.assertEqual(swap['percent'], psutil.swap_memory().percent)
self.assertLessEqual(swap['percent'], 100)
self.assertGreaterEqual(swap['percent'], 0)
self.assertIsInstance(swap['percent'], float)
def test_get_swap_swapped_in(self):
swap = self.service.get_swap_space()
self.assertEqual(swap['swapped_in'], psutil.swap_memory().sin)
def test_get_swap_swapped_out(self):
swap = self.service.get_swap_space()
self.assertEqual(swap['swapped_out'], psutil.swap_memory().sout)
def test_get_cpu(self):
cpu = self.service.get_cpu()
asserts = ['user', 'system', 'idle', 'iowait', 'irq', 'nice']
for a in asserts:
self.assertIn(a, cpu)
def test_get_cpu_cores(self):
cores = self.service.get_cpu_cores()
self.assertIsInstance(cores, list)
asserts = ['user', 'system', 'idle', 'iowait', 'irq', 'nice']
for a in asserts:
self.assertIn(a, cores[0])
@unittest2.skipIf('TRAVIS' in os.environ, 'Functionality not supported on Travis CI')
def test_get_disks(self):
disks = self.service.get_disks()
self.assertIsInstance(disks, list)
asserts = ['device', 'mountpoint', 'type', 'options', 'space_total',
'space_used', 'space_used_percent', 'space_free']
for a in asserts:
self.assertIn(a, disks[0])
@unittest2.skipIf('TRAVIS' in os.environ, 'Functionality not supported on Travis CI')
def test_get_disks_counters(self):
counters = self.service.get_disks_counters()
self.assertIsInstance(counters, dict)
dev, c = counters.popitem()
self.assertTrue(len(dev))
self.assertIsInstance(dev, str)
asserts = ['read_count', 'read_bytes', 'read_time',
'write_count', 'write_bytes', 'write_time']
for a in asserts:
self.assertIn(a, c)
def test_get_users(self):
users = self.service.get_users()
self.assertIsInstance(users, list)
def test_get_network_interfaces(self):
self.node.net_io_counters.update()
netifs = self.service.get_network_interfaces()
self.assertIsInstance(netifs, dict)
name, netif = netifs.popitem()
self.assertGreater(len(name), 0)
asserts = ['ip', 'bytes_sent', 'bytes_recv', 'packets_sent',
'packets_recv', 'errors_in', 'errors_out', 'dropped_in',
'dropped_out', 'send_rate', 'recv_rate']
for a in asserts:
self.assertIn(a, netif)
def test_get_process_list(self):
process_list = self.service.get_process_list()
self.assertIsInstance(process_list, list)
proc = process_list.pop()
asserts = ['pid', 'name', 'cmdline', 'user', 'status', 'created',
'mem_rss', 'mem_vms', 'mem_percent', 'cpu_percent']
for a in asserts:
self.assertIn(a, proc)
@unittest2.skipIf(os.environ.get('USER') != 'root', 'os.setuid requires privileged user')
def test_get_process_list_anonymous_process(self):
os.setuid(12345)
process_list = self.service.get_process_list()
self.assertIsInstance(process_list, list)
def test_get_process(self):
proc = self.service.get_process(os.getpid())
self.assertIsInstance(proc, dict)
asserts = ['pid', 'ppid', 'parent_name', 'name', 'cmdline',
'user', 'uid_real', 'uid_effective', 'uid_saved',
'gid_real', 'gid_effective', 'gid_saved', 'status',
'created', 'mem_rss', 'mem_shared', 'mem_text',
'mem_lib', 'mem_data', 'mem_dirty', 'mem_percent',
'terminal', 'nice', 'io_nice_class', 'io_nice_value',
'num_threads', 'num_files', 'num_children', 'cwd',
'num_ctx_switches_invol', 'num_ctx_switches_vol',
'cpu_times_user', 'cpu_times_system', 'cpu_affinity',
'cpu_percent']
for a in asserts:
self.assertIn(a, proc)
@unittest2.skipIf('TRAVIS' in os.environ, 'Functionality not supported on Travis CI')
def test_get_process_limits(self):
limits = self.service.get_process_limits(os.getpid())
self.assertIsInstance(limits, dict)
asserts = ['RLIMIT_AS', 'RLIMIT_CORE', 'RLIMIT_CPU', 'RLIMIT_DATA',
'RLIMIT_FSIZE', 'RLIMIT_LOCKS', 'RLIMIT_MEMLOCK',
'RLIMIT_MSGQUEUE', 'RLIMIT_NICE', 'RLIMIT_NOFILE',
'RLIMIT_NPROC', 'RLIMIT_RSS', 'RLIMIT_RTPRIO',
'RLIMIT_RTTIME', 'RLIMIT_SIGPENDING', 'RLIMIT_STACK']
for a in asserts:
self.assertIn(a, limits)
def test_get_process_environment(self):
env = self.service.get_process_environment(os.getpid())
self.assertIsInstance(env, dict)
def test_get_process_threads(self):
threads = self.service.get_process_threads(os.getpid())
self.assertIsInstance(threads, list)
asserts = ['id', 'cpu_time_user', 'cpu_time_system']
for a in asserts:
self.assertIn(a, threads[0])
def test_get_process_open_files(self):
tempfile.mkstemp()
files = self.service.get_process_open_files(os.getpid())
self.assertIsInstance(files, list)
asserts = ['fd', 'path']
for a in asserts:
self.assertIn(a, files[0])
def test_get_process_connections(self):
s = socket.socket()
s.bind(('', 5555))
s.listen(1)
conns = self.service.get_process_connections(os.getpid())
self.assertIsInstance(conns, list)
asserts = ['fd', 'family', 'type', 'local_addr_host', 'local_addr_port',
'remote_addr_host', 'remote_addr_port', 'state']
for a in asserts:
self.assertIn(a, conns[0])
s.close()
def test_get_process_memory_maps(self):
memmaps = self.service.get_process_memory_maps(os.getpid())
self.assertIsInstance(memmaps, list)
m = memmaps[0]
asserts = ['path', 'rss', 'size', 'pss', 'shared_clean', 'shared_dirty',
'private_clean', 'referenced', 'anonymous', 'swap']
for a in asserts:
self.assertIn(a, m)
def test_get_process_children(self):
children = self.service.get_process_children(os.getppid())
self.assertIsInstance(children, list)
c = children[0]
asserts = ['pid', 'name', 'cmdline', 'status']
for a in asserts:
self.assertIn(a, c)
def test_get_connections(self):
conns = self.service.get_connections()
self.assertIsInstance(conns, list)
c = conns[0]
asserts = ['fd', 'pid', 'family', 'type', 'local_addr_host', 'local_addr_port',
'remote_addr_host', 'remote_addr_port', 'state']
for a in asserts:
self.assertIn(a, c)
def test_get_logs(self):
_, filename = tempfile.mkstemp()
self.node.logs.add_patterns([filename])
logs = self.service.get_logs()
self.assertIsInstance(logs, list)
log = logs[0]
asserts = ['path', 'size', 'atime', 'mtime']
for a in asserts:
self.assertIn(a, log)
def test_read_log(self):
fd, filename = tempfile.mkstemp()
os.write(fd, 'FOOBAR\n' * 10000)
num_added = self.node.logs.add_patterns([filename])
self.assertEqual(num_added, 1)
content = self.service.read_log(filename, seek_tail=True)
self.assertEqual(len(content), LogReader.BUFFER_SIZE)
os.close(fd)
def test_search_log(self):
fd, filename = tempfile.mkstemp()
os.write(fd, 'FOOBAR\n' * 100)
os.write(fd, 'NEEDLE\n')
os.write(fd, 'FOOBAR\n' * 100)
os.fsync(fd)
self.node.logs.add_patterns([filename])
result = self.service.search_log(filename, 'NEEDLE')
self.assertIsInstance(result, dict)
asserts = ['position', 'buffer_pos', 'filesize', 'content']
for a in asserts:
self.assertIn(a, result)
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from mock import call
from collections import Iterator
from diamond.collector import Collector
from sockstat import SockstatCollector
##########################################################################
class TestSockstatCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('SockstatCollector', {
'interval': 10
})
self.collector = SockstatCollector(config, None)
def test_import(self):
self.assertTrue(SockstatCollector)
@patch('__builtin__.open')
@patch('os.access', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_should_open_proc_net_sockstat(self, publish_mock, open_mock):
class Klass(Iterator):
def close(self):
pass
def next(self):
raise StopIteration
open_mock.return_value = Klass()
self.collector.collect()
calls = [call('/proc/net/sockstat'), call('/proc/net/sockstat6')]
open_mock.assert_has_calls(calls)
@patch.object(Collector, 'publish')
def test_should_work_with_real_data(self, publish_mock):
SockstatCollector.PROCS = [self.getFixturePath('proc_net_sockstat'),
self.getFixturePath('proc_net_sockstat6')]
self.collector.collect()
metrics = {
'used': 118,
'tcp_inuse': 61,
'tcp_orphan': 0,
'tcp_tw': 1,
'tcp_alloc': 13,
'tcp_mem': 1,
'udp_inuse': 6,
'udp_mem': 0
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
##########################################################################
if __name__ == "__main__":
unittest.main()
|
import logging
from perfkitbenchmarker import errors
from perfkitbenchmarker import os_types
# Location of the EPEL RPM.
_EPEL_URL = 'https://dl.fedoraproject.org/pub/epel/epel-release-latest-{}.noarch.rpm'
# Dict of vm.OS_TYPE to the yum RPM to install. Must include all OSes that
# can install EPEL.
_EPEL_URLS = {
os_types.CENTOS7: None, # RPM already installed
os_types.CENTOS8: _EPEL_URL.format(8),
os_types.RHEL7: _EPEL_URL.format(7),
os_types.RHEL8: _EPEL_URL.format(8),
os_types.AMAZONLINUX2: _EPEL_URL.format(7),
}
# Additional commands to run after installing the RPM.
_EPEL_CMDS = {
os_types.CENTOS7: 'sudo yum install -y epel-release',
os_types.CENTOS8: 'sudo dnf config-manager --set-enabled PowerTools'
}
# The ids of the EPEL yum repo
_EPEL_REPO_IDS = frozenset(['epel', 'epel/x86_64'])
def AptInstall(vm):
del vm
raise NotImplementedError()
def YumInstall(vm):
"""Installs epel-release repo."""
if vm.OS_TYPE not in _EPEL_URLS:
raise errors.Setup.InvalidConfigurationError(
'os_type {} not in {}'.format(vm.OS_TYPE, sorted(_EPEL_URLS)))
if IsEpelRepoInstalled(vm):
logging.info('EPEL repo already installed')
return
url = _EPEL_URLS[vm.OS_TYPE]
if url:
vm.InstallPackages(url)
if vm.OS_TYPE in _EPEL_CMDS:
vm.RemoteCommand(_EPEL_CMDS[vm.OS_TYPE])
vm.InstallPackages('yum-utils')
vm.RemoteCommand('sudo yum-config-manager --enable epel')
if not IsEpelRepoInstalled(vm):
raise ValueError('EPEL repos {} not in {}'.format(
sorted(_EPEL_REPO_IDS), sorted(Repolist(vm))))
def IsEpelRepoInstalled(vm):
return bool(Repolist(vm).intersection(_EPEL_REPO_IDS))
def Repolist(vm, enabled=True):
"""Returns a frozenset of the yum repos ids."""
txt, _ = vm.RemoteCommand(
'sudo yum repolist {}'.format('enabled' if enabled else 'all'))
hit_repo_id = False
repos = set()
for line in txt.splitlines():
if hit_repo_id:
repo_id = line.split()[0]
if repo_id[0] == '*': # Repo metadata is not local, still okay to use
repo_id = repo_id[1:]
if repo_id != 'repolist:':
repos.add(repo_id)
else:
hit_repo_id = line.startswith('repo id')
if not repos:
raise ValueError('Could not find repo ids in {}'.format(txt))
return frozenset(repos)
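# Minimal self-contained sketch of the parsing logic in Repolist above, run on
# canned `yum repolist` output (the sample text is illustrative, not captured
# from a real VM):
def _example_parse_repolist():
  sample = '\n'.join([
      'Loaded plugins: fastestmirror',
      'repo id                repo name                                 status',
      '*epel/x86_64           Extra Packages for Enterprise Linux 7     13,500',
      'base/7/x86_64          CentOS-7 - Base                           10,072',
      'repolist: 23,572',
  ])
  hit_repo_id = False
  repos = set()
  for line in sample.splitlines():
    if hit_repo_id:
      repo_id = line.split()[0]
      if repo_id[0] == '*':  # metadata not local; still usable
        repo_id = repo_id[1:]
      if repo_id != 'repolist:':
        repos.add(repo_id)
    else:
      hit_repo_id = line.startswith('repo id')
  return frozenset(repos)  # frozenset({'epel/x86_64', 'base/7/x86_64'})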
|
from homeassistant.const import DEVICE_CLASS_BATTERY, PERCENTAGE
from .base_class import TradfriBaseDevice
from .const import CONF_GATEWAY_ID, DEVICES, DOMAIN, KEY_API
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up a Tradfri config entry."""
gateway_id = config_entry.data[CONF_GATEWAY_ID]
tradfri_data = hass.data[DOMAIN][config_entry.entry_id]
api = tradfri_data[KEY_API]
devices = tradfri_data[DEVICES]
    # Build a list (not a generator) so the emptiness check below works.
    sensors = [
        dev
        for dev in devices
        if not dev.has_light_control
        and not dev.has_socket_control
        and not dev.has_blind_control
        and not dev.has_signal_repeater_control
    ]
if sensors:
async_add_entities(TradfriSensor(sensor, api, gateway_id) for sensor in sensors)
class TradfriSensor(TradfriBaseDevice):
"""The platform class required by Home Assistant."""
def __init__(self, device, api, gateway_id):
"""Initialize the device."""
super().__init__(device, api, gateway_id)
self._unique_id = f"{gateway_id}-{device.id}"
@property
def device_class(self):
"""Return the devices' state attributes."""
return DEVICE_CLASS_BATTERY
@property
def state(self):
"""Return the current state of the device."""
return self._device.device_info.battery_level
@property
def unit_of_measurement(self):
"""Return the unit_of_measurement of the device."""
return PERCENTAGE
|
from __future__ import absolute_import
import unittest
import sys
from .common_imports import StringIO, etree, HelperTestCase, _str, _bytes, _chr
try:
unicode
except NameError:
unicode = str
ascii_uni = _bytes('a').decode('utf8')
klingon = _bytes("\\uF8D2").decode("unicode_escape") # not valid for XML names
invalid_tag = _bytes("test").decode('utf8') + klingon
uni = _bytes('\\xc3\\u0680\\u3120').decode("unicode_escape") # some non-ASCII characters
uxml = _bytes("<test><title>test \\xc3\\xa1\\u3120</title><h1>page \\xc3\\xa1\\u3120 title</h1></test>"
).decode("unicode_escape")
class UnicodeTestCase(HelperTestCase):
def test__str(self):
# test the testing framework, namely _str from common_imports
self.assertEqual(_str('\x10'), _str('\u0010'))
self.assertEqual(_str('\x10'), _str('\U00000010'))
self.assertEqual(_str('\u1234'), _str('\U00001234'))
def test_unicode_xml(self):
tree = etree.XML('<p>%s</p>' % uni)
self.assertEqual(uni, tree.text)
def test_wide_unicode_xml(self):
if sys.maxunicode < 1114111:
return # skip test
tree = etree.XML(_bytes('<p>\\U00026007</p>').decode('unicode_escape'))
self.assertEqual(1, len(tree.text))
self.assertEqual(_bytes('\\U00026007').decode('unicode_escape'),
tree.text)
def test_unicode_xml_broken(self):
uxml = ('<?xml version="1.0" encoding="UTF-8"?>' +
'<p>%s</p>' % uni)
self.assertRaises(ValueError, etree.XML, uxml)
def test_unicode_tag(self):
el = etree.Element(uni)
self.assertEqual(uni, el.tag)
def test_unicode_tag_invalid(self):
# sadly, Klingon is not well-formed
self.assertRaises(ValueError, etree.Element, invalid_tag)
def test_unicode_nstag(self):
tag = "{http://abc/}%s" % uni
el = etree.Element(tag)
self.assertEqual(tag, el.tag)
def test_unicode_ns_invalid(self):
# namespace URIs must conform to RFC 3986
tag = "{http://%s/}abc" % uni
self.assertRaises(ValueError, etree.Element, tag)
def test_unicode_nstag_invalid(self):
# sadly, Klingon is not well-formed
tag = "{http://abc/}%s" % invalid_tag
self.assertRaises(ValueError, etree.Element, tag)
def test_unicode_qname(self):
qname = etree.QName(uni, uni)
tag = "{%s}%s" % (uni, uni)
self.assertEqual(qname.text, tag)
self.assertEqual(unicode(qname), tag)
def test_unicode_qname_invalid(self):
self.assertRaises(ValueError, etree.QName, invalid_tag)
def test_unicode_attr(self):
el = etree.Element('foo', {'bar': uni})
self.assertEqual(uni, el.attrib['bar'])
def test_unicode_comment(self):
el = etree.Comment(uni)
self.assertEqual(uni, el.text)
def test_unicode_repr1(self):
x = etree.Element(_str('å'))
# must not raise UnicodeEncodeError
repr(x)
def test_unicode_repr2(self):
x = etree.Comment(_str('ö'))
repr(x)
def test_unicode_repr3(self):
x = etree.ProcessingInstruction(_str('Å'), _str('\u0131'))
repr(x)
def test_unicode_repr4(self):
x = etree.Entity(_str('ä'))
repr(x)
def test_unicode_text(self):
e = etree.Element('e')
def settext(text):
e.text = text
self.assertRaises(ValueError, settext, _str('ab\ufffe'))
        self.assertRaises(ValueError, settext, _str('ö\uffff'))
self.assertRaises(ValueError, settext, _str('\u0123\ud800'))
self.assertRaises(ValueError, settext, _str('x\ud8ff'))
self.assertRaises(ValueError, settext, _str('\U00010000\udfff'))
self.assertRaises(ValueError, settext, _str('abd\x00def'))
# should not Raise
settext(_str('\ud7ff\ue000\U00010000\U0010FFFFäöas'))
for char_val in range(0xD800, 0xDFFF+1):
self.assertRaises(ValueError, settext, 'abc' + _chr(char_val))
self.assertRaises(ValueError, settext, _chr(char_val))
self.assertRaises(ValueError, settext, _chr(char_val) + 'abc')
self.assertRaises(ValueError, settext, _bytes('\xe4'))
self.assertRaises(ValueError, settext, _bytes('\x80'))
self.assertRaises(ValueError, settext, _bytes('\xff'))
self.assertRaises(ValueError, settext, _bytes('\x08'))
self.assertRaises(ValueError, settext, _bytes('\x19'))
self.assertRaises(ValueError, settext, _bytes('\x20\x00'))
# should not Raise
settext(_bytes('\x09\x0A\x0D\x20\x60\x7f'))
def test_uniname(self):
Element = etree.Element
def el(name):
return Element(name)
self.assertRaises(ValueError, el, ':')
self.assertRaises(ValueError, el, '0a')
self.assertRaises(ValueError, el, _str('\u203f'))
# should not Raise
el(_str('\u0132'))
def test_unicode_parse_stringio(self):
el = etree.parse(StringIO('<p>%s</p>' % uni)).getroot()
self.assertEqual(uni, el.text)
## def test_parse_fileobject_unicode(self):
## # parse unicode from unnamed file object (not supported by ElementTree)
## f = SillyFileLike(uxml)
## root = etree.parse(f).getroot()
## self.assertEqual(unicode(etree.tostring(root, 'UTF-8'), 'UTF-8'),
## uxml)
class EncodingsTestCase(HelperTestCase):
def test_illegal_utf8(self):
data = _bytes('<test>\x80\x80\x80</test>', encoding='iso8859-1')
self.assertRaises(etree.XMLSyntaxError, etree.fromstring, data)
def test_illegal_utf8_recover(self):
data = _bytes('<test>\x80\x80\x80</test>', encoding='iso8859-1')
parser = etree.XMLParser(recover=True)
self.assertRaises(etree.XMLSyntaxError, etree.fromstring, data, parser)
def _test_encoding(self, encoding, xml_encoding_name=None):
foo = """<?xml version='1.0' encoding='%s'?>\n<tag attrib='123'></tag>""" % (
xml_encoding_name or encoding)
root = etree.fromstring(foo.encode(encoding))
self.assertEqual('tag', root.tag)
doc_encoding = root.getroottree().docinfo.encoding
        self.assertEqual(
            doc_encoding.lower().rstrip('lbe'),
            (xml_encoding_name or encoding).lower().rstrip('lbe'))
def test_utf8_fromstring(self):
self._test_encoding('utf-8')
def test_utf8sig_fromstring(self):
self._test_encoding('utf_8_sig', 'utf-8')
def test_utf16_fromstring(self):
self._test_encoding('utf-16')
def test_utf16LE_fromstring(self):
self._test_encoding('utf-16le', 'utf-16')
def test_utf16BE_fromstring(self):
self._test_encoding('utf-16be', 'utf-16')
def test_utf32_fromstring(self):
self._test_encoding('utf-32', 'utf-32')
def test_utf32LE_fromstring(self):
self._test_encoding('utf-32le', 'utf-32')
def test_utf32BE_fromstring(self):
self._test_encoding('utf-32be', 'utf-32')
def test_suite():
suite = unittest.TestSuite()
suite.addTests([unittest.makeSuite(UnicodeTestCase)])
suite.addTests([unittest.makeSuite(EncodingsTestCase)])
return suite
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import sys
from google.protobuf import text_format
from tensorflow.core.framework import graph_pb2
from tensorflow.core.protobuf import saver_pb2
from tensorflow.python import pywrap_tensorflow
from tensorflow.python.client import session
from tensorflow.python.framework import graph_util
from tensorflow.python.framework import importer
from tensorflow.python.platform import app
from tensorflow.python.platform import gfile
from tensorflow.python.training import saver as saver_lib
FLAGS = None
def freeze_graph(input_graph,
input_saver,
input_binary,
input_checkpoint,
output_node_names,
restore_op_name,
filename_tensor_name,
output_graph,
clear_devices,
initializer_nodes,
variable_names_blacklist=""):
"""Converts all variables in a graph and checkpoint into constants."""
del restore_op_name, filename_tensor_name # Unused by updated loading code.
if not gfile.Exists(input_graph):
print("Input graph file '" + input_graph + "' does not exist!")
return -1
if input_saver and not gfile.Exists(input_saver):
print("Input saver file '" + input_saver + "' does not exist!")
return -1
# 'input_checkpoint' may be a prefix if we're using Saver V2 format
if not saver_lib.checkpoint_exists(input_checkpoint):
print("Input checkpoint '" + input_checkpoint + "' doesn't exist!")
return -1
if not output_node_names:
print("You need to supply the name of a node to --output_node_names.")
return -1
input_graph_def = graph_pb2.GraphDef()
mode = "rb" if input_binary else "r"
with gfile.FastGFile(input_graph, mode) as f:
if input_binary:
input_graph_def.ParseFromString(f.read())
else:
text_format.Merge(f.read(), input_graph_def)
# Remove all the explicit device specifications for this node. This helps to
# make the graph more portable.
if clear_devices:
for node in input_graph_def.node:
node.device = ""
_ = importer.import_graph_def(input_graph_def, name="")
with session.Session() as sess:
if input_saver:
with gfile.FastGFile(input_saver, mode) as f:
saver_def = saver_pb2.SaverDef()
if input_binary:
saver_def.ParseFromString(f.read())
else:
text_format.Merge(f.read(), saver_def)
saver = saver_lib.Saver(saver_def=saver_def)
saver.restore(sess, input_checkpoint)
else:
var_list = {}
reader = pywrap_tensorflow.NewCheckpointReader(input_checkpoint)
var_to_shape_map = reader.get_variable_to_shape_map()
for key in var_to_shape_map:
try:
tensor = sess.graph.get_tensor_by_name(key + ":0")
except KeyError:
# This tensor doesn't exist in the graph (for example it's
# 'global_step' or a similar housekeeping element) so skip it.
continue
var_list[key] = tensor
saver = saver_lib.Saver(var_list=var_list)
saver.restore(sess, input_checkpoint)
if initializer_nodes:
sess.run(initializer_nodes)
variable_names_blacklist = (variable_names_blacklist.split(",") if
variable_names_blacklist else None)
output_graph_def = graph_util.convert_variables_to_constants(
sess,
input_graph_def,
output_node_names.split(","),
variable_names_blacklist=variable_names_blacklist)
with gfile.GFile(output_graph, "wb") as f:
f.write(output_graph_def.SerializeToString())
print("%d ops in the final graph." % len(output_graph_def.node))
def main(unused_args):
freeze_graph(FLAGS.input_graph, FLAGS.input_saver, FLAGS.input_binary,
FLAGS.input_checkpoint, FLAGS.output_node_names,
FLAGS.restore_op_name, FLAGS.filename_tensor_name,
FLAGS.output_graph, FLAGS.clear_devices, FLAGS.initializer_nodes,
FLAGS.variable_names_blacklist)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.register("type", "bool", lambda v: v.lower() == "true")
parser.add_argument(
"--input_graph",
type=str,
default="",
help="TensorFlow \'GraphDef\' file to load.")
parser.add_argument(
"--input_saver",
type=str,
default="",
help="TensorFlow saver file to load.")
parser.add_argument(
"--input_checkpoint",
type=str,
default="",
help="TensorFlow variables file to load.")
parser.add_argument(
"--output_graph",
type=str,
default="",
help="Output \'GraphDef\' file name.")
parser.add_argument(
"--input_binary",
nargs="?",
const=True,
type="bool",
default=False,
help="Whether the input files are in binary format.")
parser.add_argument(
"--output_node_names",
type=str,
default="",
help="The name of the output nodes, comma separated.")
parser.add_argument(
"--restore_op_name",
type=str,
default="save/restore_all",
help="The name of the master restore operator.")
parser.add_argument(
"--filename_tensor_name",
type=str,
default="save/Const:0",
help="The name of the tensor holding the save path.")
parser.add_argument(
"--clear_devices",
nargs="?",
const=True,
type="bool",
default=True,
help="Whether to remove device specifications.")
parser.add_argument(
"--initializer_nodes",
type=str,
default="",
help="comma separated list of initializer nodes to run before freezing.")
parser.add_argument(
"--variable_names_blacklist",
type=str,
default="",
help="""\
comma separated list of variables to skip converting to constants\
""")
FLAGS, unparsed = parser.parse_known_args()
app.run(main=main, argv=[sys.argv[0]] + unparsed)
|
from homeassistant.components import emulated_roku
from homeassistant.setup import async_setup_component
from tests.async_mock import AsyncMock, Mock, patch
async def test_config_required_fields(hass):
"""Test that configuration is successful with required fields."""
with patch.object(emulated_roku, "configured_servers", return_value=[]), patch(
"homeassistant.components.emulated_roku.binding.EmulatedRokuServer",
return_value=Mock(start=AsyncMock(), close=AsyncMock()),
):
assert (
await async_setup_component(
hass,
emulated_roku.DOMAIN,
{
emulated_roku.DOMAIN: {
emulated_roku.CONF_SERVERS: [
{
emulated_roku.CONF_NAME: "Emulated Roku Test",
emulated_roku.CONF_LISTEN_PORT: 8060,
}
]
}
},
)
is True
)
async def test_config_already_registered_not_configured(hass):
"""Test that an already registered name causes the entry to be ignored."""
with patch(
"homeassistant.components.emulated_roku.binding.EmulatedRokuServer",
return_value=Mock(start=AsyncMock(), close=AsyncMock()),
) as instantiate, patch.object(
emulated_roku, "configured_servers", return_value=["Emulated Roku Test"]
):
assert (
await async_setup_component(
hass,
emulated_roku.DOMAIN,
{
emulated_roku.DOMAIN: {
emulated_roku.CONF_SERVERS: [
{
emulated_roku.CONF_NAME: "Emulated Roku Test",
emulated_roku.CONF_LISTEN_PORT: 8060,
}
]
}
},
)
is True
)
assert len(instantiate.mock_calls) == 0
async def test_setup_entry_successful(hass):
"""Test setup entry is successful."""
entry = Mock()
entry.data = {
emulated_roku.CONF_NAME: "Emulated Roku Test",
emulated_roku.CONF_LISTEN_PORT: 8060,
emulated_roku.CONF_HOST_IP: "1.2.3.5",
emulated_roku.CONF_ADVERTISE_IP: "1.2.3.4",
emulated_roku.CONF_ADVERTISE_PORT: 8071,
emulated_roku.CONF_UPNP_BIND_MULTICAST: False,
}
with patch(
"homeassistant.components.emulated_roku.binding.EmulatedRokuServer",
return_value=Mock(start=AsyncMock(), close=AsyncMock()),
) as instantiate:
assert await emulated_roku.async_setup_entry(hass, entry) is True
assert len(instantiate.mock_calls) == 1
assert hass.data[emulated_roku.DOMAIN]
roku_instance = hass.data[emulated_roku.DOMAIN]["Emulated Roku Test"]
assert roku_instance.roku_usn == "Emulated Roku Test"
assert roku_instance.host_ip == "1.2.3.5"
assert roku_instance.listen_port == 8060
assert roku_instance.advertise_ip == "1.2.3.4"
assert roku_instance.advertise_port == 8071
assert roku_instance.bind_multicast is False
async def test_unload_entry(hass):
"""Test being able to unload an entry."""
entry = Mock()
entry.data = {"name": "Emulated Roku Test", "listen_port": 8060}
with patch(
"homeassistant.components.emulated_roku.binding.EmulatedRokuServer",
return_value=Mock(start=AsyncMock(), close=AsyncMock()),
):
assert await emulated_roku.async_setup_entry(hass, entry) is True
assert emulated_roku.DOMAIN in hass.data
await hass.async_block_till_done()
assert await emulated_roku.async_unload_entry(hass, entry)
assert len(hass.data[emulated_roku.DOMAIN]) == 0
|
import re
import sys
import itertools as it
import fcntl
from math import isnan
from functools import partial, wraps
from operator import itemgetter
from os import O_NONBLOCK, path as p
from io import BytesIO, StringIO, TextIOBase
from urllib.error import HTTPError, URLError
from urllib.request import urlopen, Request
from werkzeug.local import LocalProxy
import requests
import pygogo as gogo
import mezmorize
try:
import __builtin__ as _builtins
except ImportError:
import builtins as _builtins
from meza import compat
from meza.io import reencode
from meza.fntools import SleepyDict
from mezmorize.utils import get_cache_type
from riko import ENCODING, __version__
from riko.cast import cast
logger = gogo.Gogo(__name__, verbose=False, monolog=True).logger
DEF_NS = 'https://github.com/nerevu/riko'
def get_abspath(url):
url = 'http://%s' % url if url and '://' not in url else url
if url and url.startswith('file:///'):
# already have an abspath
pass
elif url and url.startswith('file://'):
parent = p.dirname(p.dirname(__file__))
rel_path = url[7:]
abspath = p.abspath(p.join(parent, rel_path))
url = 'file://%s' % abspath
return compat.decode(url)
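# Illustrative behaviour of get_abspath (the paths are hypothetical):
#   get_abspath('example.com/feed')      -> 'http://example.com/feed'
#   get_abspath('file://data/feed.xml')  -> 'file:///<riko parent dir>/data/feed.xml'
#   get_abspath('file:///tmp/feed.xml')  -> 'file:///tmp/feed.xml' (already absolute)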
# https://trac.edgewall.org/ticket/2066#comment:1
# http://stackoverflow.com/a/22675049/408556
def make_blocking(f):
fd = f.fileno()
flags = fcntl.fcntl(fd, fcntl.F_GETFL)
if flags & O_NONBLOCK:
blocking = flags & ~O_NONBLOCK
fcntl.fcntl(fd, fcntl.F_SETFL, blocking)
if 'nose' in sys.modules:
logger.debug('Running in nose environment...')
make_blocking(sys.stderr)
def default_user_agent(name="riko"):
"""
Return a string representing the default user agent.
:rtype: str
"""
return '%s/%s' % (name, __version__)
class Chainable(object):
def __init__(self, data, method=None):
self.data = data
self.method = method
self.list = list(data)
def __getattr__(self, name):
funcs = (partial(getattr, x) for x in [self.data, _builtins, it])
zipped = zip(funcs, it.repeat(AttributeError))
method = multi_try(name, zipped, default=None)
return Chainable(self.data, method)
def __call__(self, *args, **kwargs):
try:
return Chainable(self.method(self.data, *args, **kwargs))
except TypeError:
return Chainable(self.method(args[0], self.data, **kwargs))
def invert_dict(d):
return {v: k for k, v in d.items()}
def multi_try(source, zipped, default=None):
value = None
for func, error in zipped:
try:
value = func(source)
except error:
pass
else:
return value
else:
return default
def get_response_content_type(response):
try:
content_type = response.getheader('Content-Type', '')
except AttributeError:
content_type = response.headers.get('Content-Type', '')
return content_type.lower()
def get_response_encoding(response, def_encoding=ENCODING):
info = response.info()
try:
encoding = info.getencoding()
except AttributeError:
encoding = info.get_charset()
encoding = None if encoding == '7bit' else encoding
if not encoding and hasattr(info, 'get_content_charset'):
encoding = info.get_content_charset()
if not encoding:
content_type = get_response_content_type(response)
if 'charset' in content_type:
ctype = content_type.split('=')[1]
encoding = ctype.strip().strip('"').strip("'")
extracted = encoding or def_encoding
assert extracted
return extracted
# https://docs.python.org/3.3/reference/expressions.html#examples
def auto_close(stream, f):
try:
for record in stream:
yield record
finally:
f.close()
def opener(url, memoize=False, delay=0, encoding=ENCODING, params=None, **kwargs):
params = params or {}
timeout = kwargs.get('timeout')
decode = kwargs.get('decode')
if url.startswith('http') and params:
r = requests.get(url, params=params, stream=True)
r.raw.decode_content = decode
response = r.text if memoize else r.raw
else:
req = Request(url, headers={'User-Agent': default_user_agent()})
context = SleepyDict(delay=delay) if delay else None
try:
r = urlopen(req, context=context, timeout=timeout)
except TypeError:
r = urlopen(req, timeout=timeout)
except HTTPError as e:
raise URLError(f'{url} returned {e.code}: {e.reason}')
except URLError as e:
raise URLError(f'{url}: {e.reason}')
text = r.read() if memoize else None
if decode:
encoding = get_response_encoding(r, encoding)
if text:
response = compat.decode(text, encoding)
else:
response = reencode(r.fp, encoding, decode=True)
response.r = r
else:
response = text or r
content_type = get_response_content_type(r)
return (response, content_type)
def get_opener(memoize=False, **kwargs):
wrapper = partial(opener, memoize=memoize, **kwargs)
current_opener = wraps(opener)(wrapper)
if memoize:
kwargs.setdefault('cache_type', get_cache_type(spread=False))
memoizer = mezmorize.memoize(**kwargs)
current_opener = memoizer(current_opener)
return current_opener
class fetch(TextIOBase):
# http://stackoverflow.com/a/22836333/408556
def __init__(self, url=None, memoize=False, **kwargs):
# TODO: need to use separate timeouts for memoize and urlopen
if memoize:
self.opener = LocalProxy(lambda: get_opener(memoize=True, **kwargs))
else:
self.opener = get_opener(**kwargs)
responses = self.opener(get_abspath(url))
try:
response, self.content_type = responses
except ValueError:
# HACK: This happens for memoized responses. Not sure why though!
response, self.content_type = responses, 'application/json'
if memoize:
wrapper = StringIO if kwargs.get('decode') else BytesIO
f = wrapper(response)
else:
f = response
self.close = f.close
self.read = f.read
self.readline = f.readline
try:
self.seek = f.seek
except AttributeError:
pass
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
@property
def ext(self):
if not self.content_type:
ext = None
elif 'xml' in self.content_type:
ext = 'xml'
elif 'json' in self.content_type:
ext = 'json'
else:
ext = self.content_type.split('/')[1].split(';')[0]
return ext
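# Minimal usage sketch (not part of the original module; the URL is made up and
# network access is assumed):
def _example_fetch():
    with fetch(url='http://example.com/feed.xml', decode=True) as f:
        return f.ext, f.read(200)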
def def_itemgetter(attr, default=0, _type=None):
# like operator.itemgetter but fills in missing keys with a default value
def keyfunc(item):
value = item.get(attr, default)
casted = cast(value, _type) if _type else value
try:
is_nan = isnan(casted)
except TypeError:
is_nan = False
return default if is_nan else casted
return keyfunc
# TODO: move this to meza.process.group
def group_by(iterable, attr, default=None):
keyfunc = def_itemgetter(attr, default)
data = list(iterable)
order = unique_everseen(data, keyfunc)
sorted_iterable = sorted(data, key=keyfunc)
grouped = it.groupby(sorted_iterable, keyfunc)
groups = {str(k): list(v) for k, v in grouped}
# return groups in original order
return ((key, groups[key]) for key in order)
def unique_everseen(iterable, key=None):
# List unique elements, preserving order. Remember all elements ever seen
# unique_everseen('ABBCcAD', str.lower) --> a b c d
seen = set()
for element in iterable:
k = str(key(element))
if k not in seen:
seen.add(k)
yield k
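# Illustrative sketch (made-up records): grouping a list of dicts on the
# 'color' field with group_by; first-seen key order is preserved.
def _example_group_by():
    records = [
        {'color': 'red', 'n': 1},
        {'color': 'blue', 'n': 2},
        {'color': 'red', 'n': 3},
    ]
    # -> [('red', [{'color': 'red', 'n': 1}, {'color': 'red', 'n': 3}]),
    #     ('blue', [{'color': 'blue', 'n': 2}])]
    return list(group_by(records, 'color'))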
def betwix(iterable, start=None, stop=None, inc=False):
""" Extract selected elements from an iterable. But unlike `islice`,
extract based on the element's value instead of its position.
Args:
iterable (iter): The initial sequence
start (str): The fragment to begin with (inclusive)
stop (str): The fragment to finish at (exclusive)
inc (bool): Make stop operate inclusively (useful if reading a file and
the start and stop fragments are on the same line)
Returns:
        Iter: The elements of `iterable` from `start` up to `stop`
Examples:
>>> from io import StringIO
>>>
>>> list(betwix('ABCDEFG', stop='C')) == ['A', 'B']
True
>>> list(betwix('ABCDEFG', 'C', 'E')) == ['C', 'D']
True
>>> list(betwix('ABCDEFG', 'C')) == ['C', 'D', 'E', 'F', 'G']
True
>>> f = StringIO('alpha\\n<beta>\\ngamma\\n')
>>> list(betwix(f, '<', '>', True)) == ['<beta>\\n']
True
>>> list(betwix('ABCDEFG', 'C', 'E', True)) == ['C', 'D', 'E']
True
"""
def inc_takewhile(predicate, _iter):
for x in _iter:
yield x
if not predicate(x):
break
get_pred = lambda sentinel: lambda x: sentinel not in x
pred = get_pred(stop)
first = it.dropwhile(get_pred(start), iterable) if start else iterable
if stop and inc:
last = inc_takewhile(pred, first)
elif stop:
last = it.takewhile(pred, first)
else:
last = first
return last
def dispatch(split, *funcs):
"""takes a tuple of items and delivers each item to a different function
/--> item1 --> double(item1) -----> \
/ \
split ----> item2 --> triple(item2) -----> _OUTPUT
\\ /
\\--> item3 --> quadruple(item3) --> /
One way to construct such a flow in code would be::
split = ('bar', 'baz', 'qux')
double = lambda word: word * 2
triple = lambda word: word * 3
quadruple = lambda word: word * 4
_OUTPUT = dispatch(split, double, triple, quadruple)
_OUTPUT == ('barbar', 'bazbazbaz', 'quxquxquxqux')
"""
return [func(item) for item, func in zip(split, funcs)]
def broadcast(item, *funcs):
"""delivers the same item to different functions
/--> item --> double(item) -----> \
/ \
item -----> item --> triple(item) -----> _OUTPUT
\\ /
\\--> item --> quadruple(item) --> /
One way to construct such a flow in code would be::
double = lambda word: word * 2
triple = lambda word: word * 3
quadruple = lambda word: word * 4
_OUTPUT = broadcast('bar', double, triple, quadruple)
        _OUTPUT == ['barbar', 'barbarbar', 'barbarbarbar']
"""
return [func(item) for func in funcs]
def _gen_words(match, splits):
groups = list(it.dropwhile(lambda x: not x, match.groups()))
for s in splits:
try:
num = int(s)
except ValueError:
word = s
else:
word = next(it.islice(groups, num, num + 1))
yield word
def multi_substitute(word, rules):
""" Apply multiple regex rules to 'word'
http://code.activestate.com/recipes/
576710-multi-regex-single-pass-replace-of-multiple-regexe/
"""
flags = rules[0]['flags']
# Create a combined regex from the rules
tuples = ((p, r['match']) for p, r in enumerate(rules))
regexes = ('(?P<match_%i>%s)' % (p, r) for p, r in tuples)
pattern = '|'.join(regexes)
regex = re.compile(pattern, flags)
resplit = re.compile('\\$(\\d+)')
# For each match, look-up corresponding replace value in dictionary
rules_in_series = filter(itemgetter('series'), rules)
rules_in_parallel = (r for r in rules if not r['series'])
try:
has_parallel = [next(rules_in_parallel)]
except StopIteration:
has_parallel = []
# print('================')
# pprint(rules)
# print('word:', word)
# print('pattern', pattern)
# print('flags', flags)
for _ in it.chain(rules_in_series, has_parallel):
# print('~~~~~~~~~~~~~~~~')
# print('new round')
# print('word:', word)
# found = list(regex.finditer(word))
# matchitems = [match.groupdict().items() for match in found]
# pprint(matchitems)
prev_name = None
prev_is_series = None
i = 0
for match in regex.finditer(word):
items = match.groupdict().items()
item = next(filter(itemgetter(1), items))
# print('----------------')
# print('groupdict:', match.groupdict().items())
# print('item:', item)
if not item:
continue
name = item[0]
rule = rules[int(name[6:])]
series = rule.get('series')
kwargs = {'count': rule['count'], 'series': series}
is_previous = name == prev_name
singlematch = kwargs['count'] == 1
is_series = prev_is_series or kwargs['series']
isnt_previous = bool(prev_name) and not is_previous
if (is_previous and singlematch) or (isnt_previous and is_series):
continue
prev_name = name
prev_is_series = series
if resplit.findall(rule['replace']):
splits = resplit.split(rule['replace'])
words = _gen_words(match, splits)
else:
splits = rule['replace']
start = match.start() + i
end = match.end() + i
words = [word[:start], splits, word[end:]]
i += rule['offset']
word = ''.join(words)
# print('name:', name)
# print('prereplace:', rule['replace'])
# print('splits:', splits)
# print('resplits:', resplit.findall(rule['replace']))
# print('groups:', filter(None, match.groups()))
# print('i:', i)
# print('words:', words)
# print('range:', match.start(), '-', match.end())
# print('replace:', word)
# print('substitution:', word)
return word
def substitute(word, rule):
if word:
result = rule['match'].subn(rule['replace'], word, rule['count'])
replaced, replacements = result
if rule.get('default') is not None and not replacements:
replaced = rule.get('default')
else:
replaced = word
return replaced
def get_new_rule(rule, recompile=False):
flags = 0 if rule.get('casematch') else re.IGNORECASE
if not rule.get('singlelinematch'):
flags |= re.MULTILINE
flags |= re.DOTALL
count = 1 if rule.get('singlematch') else 0
if recompile and '$' in rule['replace']:
replace = re.sub(r'\$(\d+)', r'\\\1', rule['replace'], 0)
else:
replace = rule['replace']
match = re.compile(rule['match'], flags) if recompile else rule['match']
nrule = {
'match': match,
'replace': replace,
'default': rule.get('default'),
'field': rule.get('field'),
'count': count,
'flags': flags,
'series': rule.get('seriesmatch', True),
'offset': int(rule.get('offset') or 0),
}
return nrule
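# Minimal sketch (assumed inputs) of compiling a raw rule with get_new_rule and
# applying it with substitute:
def _example_substitute():
    raw = {'match': 'cat', 'replace': 'dog', 'singlematch': True}
    rule = get_new_rule(raw, recompile=True)
    return substitute('cat hat cat', rule)  # -> 'dog hat cat'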
def multiplex(sources):
"""Combine multiple generators into one"""
return it.chain.from_iterable(sources)
def gen_entries(parsed):
if parsed.get('bozo_exception'):
raise Exception(parsed['bozo_exception'])
for entry in parsed['entries']:
# prevent feedparser deprecation warnings
if 'published_parsed' in entry:
updated = entry['published_parsed']
else:
updated = entry.get('updated_parsed')
entry['pubDate'] = updated
entry['y:published'] = updated
entry['dc:creator'] = entry.get('author')
        entry['author.uri'] = entry.get('author_detail', {}).get('href')
        entry['author.name'] = entry.get('author_detail', {}).get('name')
entry['y:title'] = entry.get('title')
entry['y:id'] = entry.get('id')
yield entry
def gen_items(content, key=None):
if hasattr(content, 'append'):
for nested in content:
for i in gen_items(nested, key):
yield i
elif content:
yield {key: content} if key else content
|
from __future__ import absolute_import
from __future__ import print_function
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation
from keras.optimizers import SGD
from keras.utils import np_utils
from elephas.spark_model import SparkModel
from elephas.utils.rdd_utils import to_simple_rdd
import pytest
pytestmark = pytest.mark.usefixtures("spark_context")
def test_async_mode(spark_context):
# Define basic parameters
batch_size = 64
nb_classes = 10
epochs = 1
# Load data
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train = x_train.reshape(60000, 784)
x_test = x_test.reshape(10000, 784)
x_train = x_train.astype("float32")
x_test = x_test.astype("float32")
x_train /= 255
x_test /= 255
print(x_train.shape[0], 'train samples')
print(x_test.shape[0], 'test samples')
# Convert class vectors to binary class matrices
y_train = np_utils.to_categorical(y_train, nb_classes)
y_test = np_utils.to_categorical(y_test, nb_classes)
model = Sequential()
model.add(Dense(128, input_dim=784))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(128))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(10))
model.add(Activation('softmax'))
sgd = SGD(lr=0.1)
model.compile(sgd, 'categorical_crossentropy', ['acc'])
# Build RDD from numpy features and labels
rdd = to_simple_rdd(spark_context, x_train, y_train)
# Initialize SparkModel from Keras model and Spark context
spark_model = SparkModel(model, frequency='epoch', mode='asynchronous')
# Train Spark model
spark_model.fit(rdd, epochs=epochs, batch_size=batch_size,
verbose=0, validation_split=0.1)
# Evaluate Spark model by evaluating the underlying model
score = spark_model.master_network.evaluate(x_test, y_test, verbose=2)
assert score[1] >= 0.7
if __name__ == '__main__':
pytest.main([__file__])
|
from datetime import datetime
import logging
from tellduslive import BATTERY_LOW, BATTERY_OK, BATTERY_UNKNOWN
from homeassistant.const import ATTR_BATTERY_LEVEL, DEVICE_DEFAULT_NAME
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from .const import SIGNAL_UPDATE_ENTITY
_LOGGER = logging.getLogger(__name__)
ATTR_LAST_UPDATED = "time_last_updated"
class TelldusLiveEntity(Entity):
"""Base class for all Telldus Live entities."""
def __init__(self, client, device_id):
"""Initialize the entity."""
self._id = device_id
self._client = client
self._name = self.device.name
self._async_unsub_dispatcher_connect = None
async def async_added_to_hass(self):
"""Call when entity is added to hass."""
_LOGGER.debug("Created device %s", self)
self._async_unsub_dispatcher_connect = async_dispatcher_connect(
self.hass, SIGNAL_UPDATE_ENTITY, self._update_callback
)
async def async_will_remove_from_hass(self):
"""Disconnect dispatcher listener when removed."""
if self._async_unsub_dispatcher_connect:
self._async_unsub_dispatcher_connect()
@callback
def _update_callback(self):
"""Return the property of the device might have changed."""
if self.device.name:
self._name = self.device.name
self.async_write_ha_state()
@property
def device_id(self):
"""Return the id of the device."""
return self._id
@property
def device(self):
"""Return the representation of the device."""
return self._client.device(self.device_id)
@property
def _state(self):
"""Return the state of the device."""
return self.device.state
@property
def should_poll(self):
"""Return the polling state."""
return False
@property
def assumed_state(self):
"""Return true if unable to access real state of entity."""
return True
@property
def name(self):
"""Return name of device."""
return self._name or DEVICE_DEFAULT_NAME
@property
def available(self):
"""Return true if device is not offline."""
return self._client.is_available(self.device_id)
@property
def device_state_attributes(self):
"""Return the state attributes."""
attrs = {}
if self._battery_level:
attrs[ATTR_BATTERY_LEVEL] = self._battery_level
if self._last_updated:
attrs[ATTR_LAST_UPDATED] = self._last_updated
return attrs
@property
def _battery_level(self):
"""Return the battery level of a device."""
if self.device.battery == BATTERY_LOW:
return 1
if self.device.battery == BATTERY_UNKNOWN:
return None
if self.device.battery == BATTERY_OK:
return 100
return self.device.battery # Percentage
@property
def _last_updated(self):
"""Return the last update of a device."""
return (
str(datetime.fromtimestamp(self.device.lastUpdated))
if self.device.lastUpdated
else None
)
@property
def unique_id(self) -> str:
"""Return a unique ID."""
return self._id
@property
def device_info(self):
"""Return device info."""
device = self._client.device_info(self.device.device_id)
device_info = {
"identifiers": {("tellduslive", self.device.device_id)},
"name": self.device.name,
}
model = device.get("model")
if model is not None:
device_info["model"] = model.title()
protocol = device.get("protocol")
if protocol is not None:
device_info["manufacturer"] = protocol.title()
client = device.get("client")
if client is not None:
device_info["via_device"] = ("tellduslive", client)
return device_info
|
import logging
import unittest
import numpy as np
from gensim.topic_coherence import segmentation
from numpy import array
class TestSegmentation(unittest.TestCase):
def setUp(self):
self.topics = [
array([9, 4, 6]),
array([9, 10, 7]),
array([5, 2, 7])
]
def testSOnePre(self):
"""Test s_one_pre segmentation."""
actual = segmentation.s_one_pre(self.topics)
expected = [
[(4, 9), (6, 9), (6, 4)],
[(10, 9), (7, 9), (7, 10)],
[(2, 5), (7, 5), (7, 2)]
]
self.assertTrue(np.allclose(actual, expected))
def testSOneOne(self):
"""Test s_one_one segmentation."""
actual = segmentation.s_one_one(self.topics)
expected = [
[(9, 4), (9, 6), (4, 9), (4, 6), (6, 9), (6, 4)],
[(9, 10), (9, 7), (10, 9), (10, 7), (7, 9), (7, 10)],
[(5, 2), (5, 7), (2, 5), (2, 7), (7, 5), (7, 2)]
]
self.assertTrue(np.allclose(actual, expected))
def testSOneSet(self):
"""Test s_one_set segmentation."""
actual = segmentation.s_one_set(self.topics)
expected = [
[(9, array([9, 4, 6])), (4, array([9, 4, 6])), (6, array([9, 4, 6]))],
[(9, array([9, 10, 7])), (10, array([9, 10, 7])), (7, array([9, 10, 7]))],
[(5, array([5, 2, 7])), (2, array([5, 2, 7])), (7, array([5, 2, 7]))]
]
for s_i in range(len(actual)):
for j in range(len(actual[s_i])):
self.assertEqual(actual[s_i][j][0], expected[s_i][j][0])
self.assertTrue(np.allclose(actual[s_i][j][1], expected[s_i][j][1]))
if __name__ == '__main__':
logging.root.setLevel(logging.WARNING)
unittest.main()
|
import abc
import datetime
import logging
import posixpath
from typing import Dict, List
from absl import flags
from dataclasses import dataclass
from perfkitbenchmarker import errors
from perfkitbenchmarker import resource
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.linux_packages import hadoop
flags.DEFINE_string(
'static_dpb_service_instance', None,
'If set, the name of the pre created dpb implementation,'
'assumed to be ready.')
flags.DEFINE_string('dpb_log_level', 'INFO', 'Manipulate service log level')
flags.DEFINE_string('dpb_job_jarfile', None,
'Executable Jarfile containing workload implementation')
flags.DEFINE_string('dpb_job_classname', None, 'Classname of the job '
'implementation in the jar file')
flags.DEFINE_string('dpb_service_zone', None, 'The zone for provisioning the '
'dpb_service instance.')
flags.DEFINE_list('dpb_job_properties', [], 'A list of strings of the form '
'"key=vale" to be passed into DBP jobs.')
FLAGS = flags.FLAGS
# List of supported data processing backend services
DATAPROC = 'dataproc'
DATAFLOW = 'dataflow'
EMR = 'emr'
UNMANAGED_DPB_SVC_YARN_CLUSTER = 'unmanaged_dpb_svc_yarn_cluster'
# Default number of workers to be used in the dpb service implementation
DEFAULT_WORKER_COUNT = 2
# List of supported applications that can be enabled on the dpb service
FLINK = 'flink'
HIVE = 'hive'
# Metrics and Status related metadata
# TODO(pclay): Remove these after migrating all callers to SubmitJob
SUCCESS = 'success'
RUNTIME = 'running_time'
WAITING = 'pending_time'
class JobSubmissionError(errors.Benchmarks.RunError):
"""Thrown by all implementations if SubmitJob fails."""
pass
@dataclass
class JobResult:
"""Data class for the timing of a successful DPB job."""
# Service reported execution time
run_time: float
# Service reported pending time (0 if service does not report).
pending_time: float = 0
@property
def wall_time(self) -> float:
"""The total time the service reported it took to execute."""
return self.run_time + self.pending_time
def GetDpbServiceClass(dpb_service_type):
"""Gets the Data Processing Backend class corresponding to 'service_type'.
Args:
dpb_service_type: String service type as specified in configuration
Returns:
Implementation class corresponding to the argument dpb_service_type
Raises:
Exception: An invalid data processing backend service type was provided
"""
return resource.GetResourceClass(
BaseDpbService, SERVICE_TYPE=dpb_service_type)
class BaseDpbService(resource.BaseResource):
"""Object representing a Data Processing Backend Service."""
REQUIRED_ATTRS = ['SERVICE_TYPE']
RESOURCE_TYPE = 'BaseDpbService'
SERVICE_TYPE = 'abstract'
HDFS_FS = 'hdfs'
GCS_FS = 'gs'
S3_FS = 's3'
# Job types that are supported on the dpb service backends
PYSPARK_JOB_TYPE = 'pyspark'
SPARKSQL_JOB_TYPE = 'spark-sql'
SPARK_JOB_TYPE = 'spark'
HADOOP_JOB_TYPE = 'hadoop'
DATAFLOW_JOB_TYPE = 'dataflow'
BEAM_JOB_TYPE = 'beam'
JOB_JARS = {
SPARK_JOB_TYPE: {
'pi': 'file:///usr/lib/spark/examples/jars/spark-examples.jar'
}
}
def __init__(self, dpb_service_spec):
"""Initialize the Dpb service object.
Args:
dpb_service_spec: spec of the dpb service.
"""
is_user_managed = dpb_service_spec.static_dpb_service_instance is not None
# Hand over the actual creation to the resource module which treats the
# user_managed resources in a special manner and skips creation attempt
super(BaseDpbService, self).__init__(user_managed=is_user_managed)
self.spec = dpb_service_spec
self.dpb_hdfs_type = None
if dpb_service_spec.static_dpb_service_instance:
self.cluster_id = dpb_service_spec.static_dpb_service_instance
else:
self.cluster_id = 'pkb-' + FLAGS.run_uri
self.dpb_service_zone = FLAGS.dpb_service_zone
self.dpb_version = dpb_service_spec.version
self.dpb_service_type = 'unknown'
self.storage_service = None
@abc.abstractmethod
def SubmitJob(self,
jarfile: str = None,
classname: str = None,
pyspark_file: str = None,
query_file: str = None,
job_poll_interval: float = None,
job_stdout_file: str = None,
job_arguments: List[str] = None,
job_files: List[str] = None,
job_jars: List[str] = None,
job_type: str = None,
properties: Dict[str, str] = None) -> JobResult:
"""Submit a data processing job to the backend.
Args:
jarfile: Jar file to execute.
classname: Name of the main class.
pyspark_file: Comma separated list of Python files to be provided to the
job. Must be one of the following file formats ".py, .zip, or .egg".
query_file: HCFS URI of file containing Spark SQL script to execute as the
job.
job_poll_interval: integer saying how often to poll for job completion.
Not used by providers for which submit job is a synchronous operation.
job_stdout_file: String giving the location of the file in which to put
the standard out of the job.
job_arguments: List of string arguments to pass to driver application.
These are not the arguments passed to the wrapper that submits the job.
job_files: Files passed to a Spark Application to be distributed to
executors.
job_jars: Jars to pass to the application
job_type: Spark or Hadoop job
properties: Dict of properties to pass with the job.
Returns:
A JobResult with the timing of the successful job.
Raises:
JobSubmissionError if job fails.
"""
pass
def GetMetadata(self):
"""Return a dictionary of the metadata for this cluster."""
pretty_version = self.dpb_version or 'default'
basic_data = {
'dpb_service': self.dpb_service_type,
'dpb_version': pretty_version,
'dpb_service_version':
'{}_{}'.format(self.dpb_service_type, pretty_version),
'dpb_cluster_id': self.cluster_id,
'dpb_cluster_shape': self.spec.worker_group.vm_spec.machine_type,
'dpb_cluster_size': self.spec.worker_count,
'dpb_hdfs_type': self.dpb_hdfs_type,
'dpb_service_zone': self.dpb_service_zone,
'dpb_job_properties': ','.join(
'{}={}'.format(k, v) for k, v in self.GetJobProperties().items()),
}
return basic_data
def _Create(self):
"""Creates the underlying resource."""
raise NotImplementedError()
def _Delete(self):
"""Deletes the underlying resource.
Implementations of this method should be idempotent since it may
be called multiple times, even if the resource has already been
deleted.
"""
raise NotImplementedError()
def _ProcessWallTime(self, start_time, end_time):
"""Compute the wall time from the given start and end processing time.
Args:
start_time: Datetime value when the processing was started.
end_time: Datetime value when the processing completed.
Returns:
Wall time in seconds.
Raises:
ValueError: Exception raised when invalid input is provided.
"""
if start_time > end_time:
raise ValueError('start_time cannot be later than the end_time')
return (end_time - start_time).total_seconds()
def GetJobProperties(self):
"""Parse the dpb_job_properties_flag."""
return dict(pair.split('=') for pair in FLAGS.dpb_job_properties)
def GetExecutionJar(self, job_category, job_type):
"""Retrieve execution jar corresponding to the job_category and job_type.
Args:
job_category: String category of the job for eg. hadoop, spark, hive, etc.
job_type: String name of the type of workload to executed on the cluster,
for eg. word_count, terasort, etc.
Returns:
      The path to the execution jar on the cluster.
Raises:
NotImplementedError: Exception: An unsupported combination of
job_category
and job_type was provided for execution on the cluster.
"""
if job_category not in self.JOB_JARS or job_type not in self.JOB_JARS[
job_category]:
raise NotImplementedError()
return self.JOB_JARS[job_category][job_type]
def SubmitSparkJob(self, spark_application_jar, spark_application_classname,
spark_application_args):
"""Submit a SparkJob to the service instance, returning performance stats.
Args:
spark_application_jar: String path to the spark application executable
that containing workload implementation.
spark_application_classname: Classname of the spark job's implementation
in the spark_application_jar file.
spark_application_args: Arguments to pass to spark application. These are
not the arguments passed to the wrapper that submits the job.
Returns:
JobResult of the Spark Job
Raises:
JobSubmissionError if the job fails.
"""
return self.SubmitJob(
jarfile=spark_application_jar,
job_type='spark',
classname=spark_application_classname,
job_arguments=spark_application_args
)
def CreateBucket(self, source_bucket):
"""Creates an object-store bucket used during persistent data processing.
Default behaviour is a no-op as concrete implementations will have native
implementations.
Args:
source_bucket: String, name of the bucket to create.
"""
pass
def DeleteBucket(self, source_bucket):
"""Deletes an object-store bucket used during persistent data processing.
Default behaviour is a no-op as concrete implementations will have native
implementations.
Args:
source_bucket: String, name of the bucket to delete.
"""
pass
class UnmanagedDpbService(BaseDpbService):
"""Object representing an un-managed dpb service."""
@abc.abstractmethod
def SubmitJob(self,
jarfile=None,
classname=None,
pyspark_file=None,
query_file=None,
job_poll_interval=None,
job_stdout_file=None,
job_arguments=None,
job_files=None,
job_jars=None,
job_type=None,
properties=None):
"""Submit a data processing job to the backend."""
pass
class UnmanagedDpbServiceYarnCluster(UnmanagedDpbService):
"""Object representing an un-managed dpb service yarn cluster."""
SERVICE_TYPE = UNMANAGED_DPB_SVC_YARN_CLUSTER
JOB_JARS = {
'hadoop': {
'terasort':
'/opt/pkb/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-examples-*.jar'
},
}
def __init__(self, dpb_service_spec):
super(UnmanagedDpbServiceYarnCluster, self).__init__(dpb_service_spec)
# Dictionary to hold the cluster vms.
self.vms = {}
self.dpb_service_type = UNMANAGED_DPB_SVC_YARN_CLUSTER
def _Create(self):
"""Create an un-managed yarn cluster."""
logging.info('Should have created vms by now.')
logging.info(str(self.vms))
# need to fix this to install spark
def InstallHadoop(vm):
vm.Install('hadoop')
vm_util.RunThreaded(InstallHadoop, self.vms['worker_group'] +
self.vms['master_group'])
self.leader = self.vms['master_group'][0]
hadoop.ConfigureAndStart(self.leader,
self.vms['worker_group'])
def SubmitJob(self,
jarfile=None,
classname=None,
pyspark_file=None,
query_file=None,
job_poll_interval=None,
job_stdout_file=None,
job_arguments=None,
job_files=None,
job_jars=None,
job_type=None,
properties=None):
"""Submit a data processing job to the backend."""
if job_type != self.HADOOP_JOB_TYPE:
raise NotImplementedError
cmd_list = [posixpath.join(hadoop.HADOOP_BIN, 'hadoop')]
# Order is important
if jarfile:
cmd_list += ['jar', jarfile]
# Specifying classname only works if jarfile is omitted or if it has no
# main class.
if classname:
cmd_list += [classname]
all_properties = self.GetJobProperties()
all_properties.update(properties or {})
cmd_list += ['-D{}={}'.format(k, v) for k, v in all_properties.items()]
if job_arguments:
cmd_list += job_arguments
cmd_string = ' '.join(cmd_list)
start_time = datetime.datetime.now()
stdout, stderr, retcode = self.leader.RemoteCommandWithReturnCode(
cmd_string)
if retcode:
raise JobSubmissionError(stderr)
end_time = datetime.datetime.now()
if job_stdout_file:
with open(job_stdout_file, 'w') as f:
f.write(stdout)
return JobResult(run_time=(end_time - start_time).total_seconds())
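  # For reference, a hadoop job submission above assembles a command roughly
  # along these lines (jar name, class and arguments are illustrative only):
  #   <hadoop bin dir>/hadoop jar hadoop-mapreduce-examples.jar terasort \
  #       -Dmapreduce.job.reduces=2 /input /output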
def _Delete(self):
pass
def GetExecutionJar(self, job_category, job_type):
"""Retrieve execution jar corresponding to the job_category and job_type."""
if (job_category not in self.JOB_JARS or
job_type not in self.JOB_JARS[job_category]):
raise NotImplementedError()
return self.JOB_JARS[job_category][job_type]
|
import pytest
from nikola import __main__
from .helper import cd, patch_config
from .test_check_absolute_subfolder import test_index_in_sitemap # NOQA
from .test_demo_build import prepare_demo_site
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
)
@pytest.fixture(scope="module")
def build(target_dir):
"""Fill the site with demo content and build it."""
prepare_demo_site(target_dir)
patch_config(
target_dir,
('SITE_URL = "https://example.com/"', 'SITE_URL = "https://example.com/foo/"'),
("# URL_TYPE = 'rel_path'", "URL_TYPE = 'full_path'"),
)
with cd(target_dir):
__main__.main(["build"])
|
import logging
import unittest
import numpy as np
from gensim.corpora.mmcorpus import MmCorpus
from gensim.models import rpmodel
from gensim import matutils
from gensim.test.utils import datapath, get_tmpfile
class TestRpModel(unittest.TestCase):
def setUp(self):
self.corpus = MmCorpus(datapath('testcorpus.mm'))
def testTransform(self):
# create the transformation model
# HACK; set fixed seed so that we always get the same random matrix (and can compare against expected results)
np.random.seed(13)
model = rpmodel.RpModel(self.corpus, num_topics=2)
# transform one document
doc = list(self.corpus)[0]
transformed = model[doc]
vec = matutils.sparse2full(transformed, 2) # convert to dense vector, for easier equality tests
expected = np.array([-0.70710677, 0.70710677])
self.assertTrue(np.allclose(vec, expected)) # transformed entries must be equal up to sign
def testPersistence(self):
fname = get_tmpfile('gensim_models.tst')
model = rpmodel.RpModel(self.corpus, num_topics=2)
model.save(fname)
model2 = rpmodel.RpModel.load(fname)
self.assertEqual(model.num_topics, model2.num_topics)
self.assertTrue(np.allclose(model.projection, model2.projection))
tstvec = []
self.assertTrue(np.allclose(model[tstvec], model2[tstvec])) # try projecting an empty vector
def testPersistenceCompressed(self):
fname = get_tmpfile('gensim_models.tst.gz')
model = rpmodel.RpModel(self.corpus, num_topics=2)
model.save(fname)
model2 = rpmodel.RpModel.load(fname, mmap=None)
self.assertEqual(model.num_topics, model2.num_topics)
self.assertTrue(np.allclose(model.projection, model2.projection))
tstvec = []
self.assertTrue(np.allclose(model[tstvec], model2[tstvec])) # try projecting an empty vector
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.DEBUG)
unittest.main()
|
import flatbuffers
class Abort(object):
__slots__ = ['_tab']
@classmethod
def GetRootAsAbort(cls, buf, offset):
n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
x = Abort()
x.Init(buf, n + offset)
return x
# Abort
def Init(self, buf, pos):
self._tab = flatbuffers.table.Table(buf, pos)
# Abort
def Reason(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
if o != 0:
return self._tab.String(o + self._tab.Pos)
return None
# Abort
def Message(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
if o != 0:
return self._tab.String(o + self._tab.Pos)
return None
def AbortStart(builder): builder.StartObject(2)
def AbortAddReason(builder, reason): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(reason), 0)
def AbortAddMessage(builder, message): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(message), 0)
def AbortEnd(builder): return builder.EndObject()
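# Minimal round-trip sketch using the generated helpers above (the reason and
# message strings are made up; assumes the flatbuffers runtime is installed):
def _example_build_abort():
    builder = flatbuffers.Builder(0)
    reason = builder.CreateString("wamp.error.not_authorized")
    message = builder.CreateString("session is not allowed to publish")
    AbortStart(builder)
    AbortAddReason(builder, reason)
    AbortAddMessage(builder, message)
    builder.Finish(AbortEnd(builder))
    buf = builder.Output()
    return Abort.GetRootAsAbort(buf, 0).Reason()  # b'wamp.error.not_authorized'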
|
from collections import OrderedDict
from django import template
from django.conf import settings
from django.template import Node, TemplateSyntaxError
from django.template.loader import select_template
from django.utils import formats
from django.utils.encoding import force_str
from django.utils.safestring import mark_safe
from django.utils.dateformat import format, time_format
from django.utils.timezone import datetime
from shop.conf import app_settings
from shop.models.cart import CartModel
from shop.serializers.cart import CartSerializer, CartItems
from shop.rest.money import JSONRenderer
register = template.Library()
class CartIcon(Node):
"""
Inclusion tag for displaying cart summary.
"""
def __init__(self, with_items):
self.with_items = with_items
def get_template(self):
return select_template([
'{}/templatetags/cart-icon.html'.format(app_settings.APP_LABEL),
'shop/templatetags/cart-icon.html',
]).template
def render(self, context):
try:
cart = CartModel.objects.get_from_request(context['request'])
serializer = CartSerializer(instance=cart, context=context, label='dropdown', with_items=self.with_items)
cart_data = JSONRenderer().render(serializer.data)
except CartModel.DoesNotExist:
            cart_data = JSONRenderer().render({'total_quantity': 0, 'num_items': 0})
context.update({
'cart_as_json': mark_safe(force_str(cart_data)),
'has_dropdown': self.with_items != CartItems.without,
})
return self.get_template().render(context)
@register.tag
def cart_icon(parser, token):
def raise_syntax_error():
choices = '|'.join([item.name for item in CartItems])
raise TemplateSyntaxError("Template tag '{}' takes one optional argument: {}".format(bits[0], choices))
bits = token.split_contents()
if len(bits) > 2:
raise_syntax_error()
if len(bits) == 2:
try:
with_items = CartItems(bits[1])
except ValueError:
raise_syntax_error()
else:
with_items = CartItems.without
return CartIcon(with_items)
def from_iso8601(value):
try:
return datetime.strptime(value, "%Y-%m-%dT%H:%M:%S.%fZ")
except ValueError:
return datetime.strptime(value, "%Y-%m-%dT%H:%M:%SZ")
@register.filter(expects_localtime=True, is_safe=False)
def date(value, arg=None):
"""
Alternative implementation to the built-in `date` template filter which also accepts the
date string in iso-8601 as passed in by the REST serializers.
"""
if value in (None, ''):
return ''
if not isinstance(value, datetime):
value = from_iso8601(value)
if arg is None:
arg = settings.DATE_FORMAT
try:
return formats.date_format(value, arg)
except AttributeError:
try:
return format(value, arg)
except AttributeError:
return ''
@register.filter(expects_localtime=True, is_safe=False)
def time(value, arg=None):
"""
Alternative implementation to the built-in `time` template filter which also accepts the
date string in iso-8601 as passed in by the REST serializers.
"""
if value in (None, ''):
return ''
if not isinstance(value, datetime):
value = from_iso8601(value)
if arg is None:
arg = settings.TIME_FORMAT
try:
return formats.time_format(value, arg)
except AttributeError:
try:
return time_format(value, arg)
except AttributeError:
return ''
@register.filter
def rest_json(value, arg=None):
"""
Renders a `ReturnDict` as used by the REST framework into a safe JSON string.
"""
if isinstance(value, (dict, OrderedDict, list, tuple)):
data = JSONRenderer().render(value)
elif not value:
data = '{}'
else:
msg = "Given value must be of type dict, OrderedDict, list or tuple but it is {}."
raise ValueError(msg.format(value.__class__.__name__))
return mark_safe(force_str(data))
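# Illustrative template usage of the tag and filters above (variable names are
# hypothetical, and the {% load %} argument depends on this module's file name):
#
#   {% load shop_tags %}
#   {% cart_icon %}
#   {{ order.created_at|date:"DATETIME_FORMAT" }} at {{ order.created_at|time }}
#   {{ cart_serializer.data|rest_json }}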
|
import sys
import os
from six.moves import range
from logilab.common.compat import StringIO
from logilab.common.testlib import TestCase, unittest_main
from logilab.common.table import Table, TableStyleSheet, DocbookTableWriter, \
DocbookRenderer, TableStyle, TableWriter, TableCellRenderer
class TableTC(TestCase):
"""Table TestCase class"""
def setUp(self):
"""Creates a default table"""
# from logilab.common import table
# reload(table)
self.table = Table()
self.table.create_rows(['row1', 'row2', 'row3'])
self.table.create_columns(['col1', 'col2'])
def test_valeur_scalaire(self):
tab = Table()
tab.create_columns(['col1'])
tab.append_row([1])
self.assertEqual(tab, [[1]])
tab.append_row([2])
self.assertEqual(tab[0, 0], 1)
self.assertEqual(tab[1, 0], 2)
def test_valeur_ligne(self):
tab = Table()
tab.create_columns(['col1', 'col2'])
tab.append_row([1, 2])
self.assertEqual(tab, [[1, 2]])
def test_valeur_colonne(self):
tab = Table()
tab.create_columns(['col1'])
tab.append_row([1])
tab.append_row([2])
self.assertEqual(tab, [[1], [2]])
self.assertEqual(tab[:, 0], [1, 2])
def test_indexation(self):
"""we should be able to use [] to access rows"""
self.assertEqual(self.table[0], self.table.data[0])
self.assertEqual(self.table[1], self.table.data[1])
def test_iterable(self):
"""test iter(table)"""
it = iter(self.table)
self.assertEqual(next(it), self.table.data[0])
self.assertEqual(next(it), self.table.data[1])
def test_get_rows(self):
"""tests Table.get_rows()"""
self.assertEqual(self.table, [[0, 0], [0, 0], [0, 0]])
self.assertEqual(self.table[:], [[0, 0], [0, 0], [0, 0]])
self.table.insert_column(1, range(3), 'supp')
self.assertEqual(self.table, [[0, 0, 0], [0, 1, 0], [0, 2, 0]])
self.assertEqual(self.table[:], [[0, 0, 0], [0, 1, 0], [0, 2, 0]])
def test_get_cells(self):
self.table.insert_column(1, range(3), 'supp')
self.assertEqual(self.table[0, 1], 0)
self.assertEqual(self.table[1, 1], 1)
self.assertEqual(self.table[2, 1], 2)
self.assertEqual(self.table['row1', 'supp'], 0)
self.assertEqual(self.table['row2', 'supp'], 1)
self.assertEqual(self.table['row3', 'supp'], 2)
self.assertRaises(KeyError, self.table.__getitem__, ('row1', 'foo'))
self.assertRaises(KeyError, self.table.__getitem__, ('foo', 'bar'))
def test_shape(self):
"""tests table shape"""
self.assertEqual(self.table.shape, (3, 2))
self.table.insert_column(1, range(3), 'supp')
self.assertEqual(self.table.shape, (3, 3))
def test_set_column(self):
"""Tests that table.set_column() works fine.
"""
self.table.set_column(0, range(3))
self.assertEqual(self.table[0, 0], 0)
self.assertEqual(self.table[1, 0], 1)
self.assertEqual(self.table[2, 0], 2)
def test_set_column_by_id(self):
"""Tests that table.set_column_by_id() works fine.
"""
self.table.set_column_by_id('col1', range(3))
self.assertEqual(self.table[0, 0], 0)
self.assertEqual(self.table[1, 0], 1)
self.assertEqual(self.table[2, 0], 2)
self.assertRaises(KeyError, self.table.set_column_by_id, 'col123', range(3))
def test_cells_ids(self):
"""tests that we can access cells by giving row/col ids"""
self.assertRaises(KeyError, self.table.set_cell_by_ids, 'row12', 'col1', 12)
self.assertRaises(KeyError, self.table.set_cell_by_ids, 'row1', 'col12', 12)
self.assertEqual(self.table[0, 0], 0)
self.table.set_cell_by_ids('row1', 'col1', 'DATA')
self.assertEqual(self.table[0, 0], 'DATA')
self.assertRaises(KeyError, self.table.set_row_by_id, 'row12', [])
self.table.set_row_by_id('row1', ['1.0', '1.1'])
self.assertEqual(self.table[0, 0], '1.0')
def test_insert_row(self):
"""tests a row insertion"""
tmp_data = ['tmp1', 'tmp2']
self.table.insert_row(1, tmp_data, 'tmprow')
self.assertEqual(self.table[1], tmp_data)
self.assertEqual(self.table['tmprow'], tmp_data)
self.table.delete_row_by_id('tmprow')
self.assertRaises(KeyError, self.table.delete_row_by_id, 'tmprow')
self.assertEqual(self.table[1], [0, 0])
self.assertRaises(KeyError, self.table.__getitem__, 'tmprow')
def test_get_column(self):
"""Tests that table.get_column() works fine.
"""
self.table.set_cell(0, 1, 12)
self.table.set_cell(2, 1, 13)
self.assertEqual(self.table[:, 1], [12, 0, 13])
self.assertEqual(self.table[:, 'col2'], [12, 0, 13])
def test_get_columns(self):
"""Tests if table.get_columns() works fine.
"""
self.table.set_cell(0, 1, 12)
self.table.set_cell(2, 1, 13)
self.assertEqual(self.table.get_columns(), [[0, 0, 0], [12, 0, 13]])
def test_insert_column(self):
"""Tests that table.insert_column() works fine.
"""
self.table.insert_column(1, range(3), "inserted_column")
self.assertEqual(self.table[:, 1], [0, 1, 2])
self.assertEqual(self.table.col_names,
['col1', 'inserted_column', 'col2'])
def test_delete_column(self):
"""Tests that table.delete_column() works fine.
"""
self.table.delete_column(1)
self.assertEqual(self.table.col_names, ['col1'])
self.assertEqual(self.table[:, 0], [0, 0, 0])
self.assertRaises(KeyError, self.table.delete_column_by_id, 'col2')
self.table.delete_column_by_id('col1')
self.assertEqual(self.table.col_names, [])
def test_transpose(self):
"""Tests that table.transpose() works fine.
"""
self.table.append_column(range(5, 8), 'col3')
ttable = self.table.transpose()
self.assertEqual(ttable.row_names, ['col1', 'col2', 'col3'])
self.assertEqual(ttable.col_names, ['row1', 'row2', 'row3'])
self.assertEqual(ttable.data, [[0, 0, 0], [0, 0, 0], [5, 6, 7]])
def test_sort_table(self):
"""Tests the table sort by column
"""
self.table.set_column(0, [3, 1, 2])
self.table.set_column(1, [1, 2, 3])
self.table.sort_by_column_index(0)
self.assertEqual(self.table.row_names, ['row2', 'row3', 'row1'])
self.assertEqual(self.table.data, [[1, 2], [2, 3], [3, 1]])
self.table.sort_by_column_index(1, 'desc')
self.assertEqual(self.table.row_names, ['row3', 'row2', 'row1'])
self.assertEqual(self.table.data, [[2, 3], [1, 2], [3, 1]])
def test_sort_by_id(self):
"""tests sort_by_column_id()"""
self.table.set_column_by_id('col1', [3, 1, 2])
self.table.set_column_by_id('col2', [1, 2, 3])
self.table.sort_by_column_id('col1')
self.assertRaises(KeyError, self.table.sort_by_column_id, 'col123')
self.assertEqual(self.table.row_names, ['row2', 'row3', 'row1'])
self.assertEqual(self.table.data, [[1, 2], [2, 3], [3, 1]])
self.table.sort_by_column_id('col2', 'desc')
self.assertEqual(self.table.row_names, ['row3', 'row2', 'row1'])
self.assertEqual(self.table.data, [[2, 3], [1, 2], [3, 1]])
def test_pprint(self):
"""only tests pprint doesn't raise an exception"""
self.table.pprint()
str(self.table)
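# The cases above rely on Table's flexible indexing: an int returns a row, a
# slice returns rows, a (row, col) pair returns a cell, and row/column ids can
# stand in for indices. A minimal sketch of the same API (cells default to 0):
#
#   t = Table()
#   t.create_rows(['r1', 'r2'])
#   t.create_columns(['c1', 'c2'])
#   t.set_cell_by_ids('r1', 'c2', 42)
#   assert t[0] == [0, 42]            # row by index
#   assert t[0, 1] == 42              # cell by (row, col)
#   assert t['r1', 'c2'] == 42        # cell by ids
#   assert t[:, 'c2'] == [42, 0]      # column by id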
class GroupByTC(TestCase):
"""specific test suite for groupby()"""
def setUp(self):
t = Table()
t.create_columns(['date', 'res', 'task', 'usage'])
t.append_row(['date1', 'ing1', 'task1', 0.3])
t.append_row(['date1', 'ing2', 'task2', 0.3])
t.append_row(['date2', 'ing3', 'task3', 0.3])
t.append_row(['date3', 'ing4', 'task2', 0.3])
t.append_row(['date1', 'ing1', 'task3', 0.3])
t.append_row(['date3', 'ing1', 'task3', 0.3])
self.table = t
def test_single_groupby(self):
"""tests groupby() on several columns"""
grouped = self.table.groupby('date')
self.assertEqual(len(grouped), 3)
self.assertEqual(len(grouped['date1']), 3)
self.assertEqual(len(grouped['date2']), 1)
self.assertEqual(len(grouped['date3']), 2)
self.assertEqual(grouped['date1'], [
('date1', 'ing1', 'task1', 0.3),
('date1', 'ing2', 'task2', 0.3),
('date1', 'ing1', 'task3', 0.3),
])
self.assertEqual(grouped['date2'], [('date2', 'ing3', 'task3', 0.3)])
self.assertEqual(grouped['date3'], [
('date3', 'ing4', 'task2', 0.3),
('date3', 'ing1', 'task3', 0.3),
])
def test_multiple_groupby(self):
"""tests groupby() on several columns"""
grouped = self.table.groupby('date', 'task')
self.assertEqual(len(grouped), 3)
self.assertEqual(len(grouped['date1']), 3)
self.assertEqual(len(grouped['date2']), 1)
self.assertEqual(len(grouped['date3']), 2)
self.assertEqual(grouped['date1']['task1'], [('date1', 'ing1', 'task1', 0.3)])
self.assertEqual(grouped['date2']['task3'], [('date2', 'ing3', 'task3', 0.3)])
self.assertEqual(grouped['date3']['task2'], [('date3', 'ing4', 'task2', 0.3)])
date3 = grouped['date3']
self.assertRaises(KeyError, date3.__getitem__, 'task1')
def test_select(self):
"""tests Table.select() method"""
rows = self.table.select('date', 'date1')
self.assertEqual(rows, [
('date1', 'ing1', 'task1', 0.3),
('date1', 'ing2', 'task2', 0.3),
('date1', 'ing1', 'task3', 0.3),
])
class TableStyleSheetTC(TestCase):
"""The Stylesheet test case
"""
def setUp(self):
"""Builds a simple table to test the stylesheet
"""
self.table = Table()
self.table.create_row('row1')
self.table.create_columns(['a', 'b', 'c'])
self.stylesheet = TableStyleSheet()
# We don't want anything to be printed
self.stdout_backup = sys.stdout
sys.stdout = StringIO()
def tearDown(self):
sys.stdout = self.stdout_backup
def test_add_rule(self):
"""Tests that the regex pattern works as expected.
"""
rule = '0_2 = sqrt(0_0**2 + 0_1**2)'
self.stylesheet.add_rule(rule)
self.table.set_row(0, [3, 4, 0])
self.table.apply_stylesheet(self.stylesheet)
self.assertEqual(self.table[0], [3, 4, 5])
self.assertEqual(len(self.stylesheet.rules), 1)
self.stylesheet.add_rule('some bad rule with bad syntax')
self.assertEqual(len(self.stylesheet.rules), 1, "Ill-formed rule mustn't be added")
self.assertEqual(len(self.stylesheet.instructions), 1, "Ill-formed rule mustn't be added")
def test_stylesheet_init(self):
"""tests Stylesheet.__init__"""
rule = '0_2 = 1'
sheet = TableStyleSheet([rule, 'bad rule'])
self.assertEqual(len(sheet.rules), 1, "Ill-formed rule mustn't be added")
self.assertEqual(len(sheet.instructions), 1, "Ill-formed rule mustn't be added")
def test_rowavg_rule(self):
"""Tests that add_rowavg_rule works as expected
"""
self.table.set_row(0, [10, 20, 0])
self.stylesheet.add_rowavg_rule((0, 2), 0, 0, 1)
self.table.apply_stylesheet(self.stylesheet)
val = self.table[0, 2]
self.assertEqual(int(val), 15)
def test_rowsum_rule(self):
"""Tests that add_rowsum_rule works as expected
"""
self.table.set_row(0, [10, 20, 0])
self.stylesheet.add_rowsum_rule((0, 2), 0, 0, 1)
self.table.apply_stylesheet(self.stylesheet)
val = self.table[0, 2]
self.assertEqual(val, 30)
def test_colavg_rule(self):
"""Tests that add_colavg_rule works as expected
"""
self.table.set_row(0, [10, 20, 0])
self.table.append_row([12, 8, 3], 'row2')
self.table.create_row('row3')
self.stylesheet.add_colavg_rule((2, 0), 0, 0, 1)
self.table.apply_stylesheet(self.stylesheet)
val = self.table[2, 0]
self.assertEqual(int(val), 11)
def test_colsum_rule(self):
"""Tests that add_colsum_rule works as expected
"""
self.table.set_row(0, [10, 20, 0])
self.table.append_row([12, 8, 3], 'row2')
self.table.create_row('row3')
self.stylesheet.add_colsum_rule((2, 0), 0, 0, 1)
self.table.apply_stylesheet(self.stylesheet)
val = self.table[2, 0]
self.assertEqual(val, 22)
class TableStyleTC(TestCase):
"""Test suite for TableSuite"""
def setUp(self):
self.table = Table()
self.table.create_rows(['row1', 'row2', 'row3'])
self.table.create_columns(['col1', 'col2'])
self.style = TableStyle(self.table)
self._tested_attrs = (('size', '1*'),
('alignment', 'right'),
('unit', ''))
def test_getset(self):
"""tests style's get and set methods"""
for attrname, default_value in self._tested_attrs:
getter = getattr(self.style, 'get_%s' % attrname)
setter = getattr(self.style, 'set_%s' % attrname)
self.assertRaises(KeyError, getter, 'badcol')
self.assertEqual(getter('col1'), default_value)
setter('FOO', 'col1')
self.assertEqual(getter('col1'), 'FOO')
def test_getset_index(self):
"""tests style's get and set by index methods"""
for attrname, default_value in self._tested_attrs:
getter = getattr(self.style, 'get_%s' % attrname)
setter = getattr(self.style, 'set_%s' % attrname)
igetter = getattr(self.style, 'get_%s_by_index' % attrname)
isetter = getattr(self.style, 'set_%s_by_index' % attrname)
self.assertEqual(getter('__row_column__'), default_value)
isetter('FOO', 0)
self.assertEqual(getter('__row_column__'), 'FOO')
self.assertEqual(igetter(0), 'FOO')
self.assertEqual(getter('col1'), default_value)
isetter('FOO', 1)
self.assertEqual(getter('col1'), 'FOO')
self.assertEqual(igetter(1), 'FOO')
class RendererTC(TestCase):
"""Test suite for DocbookRenderer"""
def setUp(self):
self.renderer = DocbookRenderer(alignment = True)
self.table = Table()
self.table.create_rows(['row1', 'row2', 'row3'])
self.table.create_columns(['col1', 'col2'])
self.style = TableStyle(self.table)
self.base_renderer = TableCellRenderer()
def test_cell_content(self):
"""test how alignment is rendered"""
entry_xml = self.renderer._render_cell_content('data', self.style, 1)
self.assertEqual(entry_xml, "<entry align='right'>data</entry>\n")
self.style.set_alignment_by_index('left', 1)
entry_xml = self.renderer._render_cell_content('data', self.style, 1)
self.assertEqual(entry_xml, "<entry align='left'>data</entry>\n")
def test_default_content_rendering(self):
"""tests that default rendering just prints the cell's content"""
rendered_cell = self.base_renderer._render_cell_content('data', self.style, 1)
self.assertEqual(rendered_cell, "data")
def test_replacement_char(self):
"""tests that 0 is replaced when asked for"""
cell_content = self.base_renderer._make_cell_content(0, self.style, 1)
self.assertEqual(cell_content, 0)
self.base_renderer.properties['skip_zero'] = '---'
cell_content = self.base_renderer._make_cell_content(0, self.style, 1)
self.assertEqual(cell_content, '---')
def test_unit(self):
"""tests if units are added"""
self.base_renderer.properties['units'] = True
self.style.set_unit_by_index('EUR', 1)
cell_content = self.base_renderer._make_cell_content(12, self.style, 1)
self.assertEqual(cell_content, '12 EUR')
class DocbookTableWriterTC(TestCase):
"""TestCase for table's writer"""
def setUp(self):
self.stream = StringIO()
self.table = Table()
self.table.create_rows(['row1', 'row2', 'row3'])
self.table.create_columns(['col1', 'col2'])
self.writer = DocbookTableWriter(self.stream, self.table, None)
self.writer.set_renderer(DocbookRenderer())
def test_write_table(self):
"""make sure write_table() doesn't raise any exception"""
self.writer.write_table()
def test_abstract_writer(self):
"""tests that Abstract Writers can't be used !"""
writer = TableWriter(self.stream, self.table, None)
self.assertRaises(NotImplementedError, writer.write_table)
if __name__ == '__main__':
unittest_main()
|
from pprint import pprint
from functools import partial
from riko import get_path
from riko.bado import coroutine
from riko.collections import SyncPipe, AsyncPipe
BR = {'find': '<br>'}
DEF_CUR_CODE = 'USD'
odesk_conf = {'url': get_path('odesk.json'), 'path': 'items'}
guru_conf = {'url': get_path('guru.json'), 'path': 'items'}
elance_conf = {'url': get_path('elance.json'), 'path': 'items'}
freelancer_conf = {'url': get_path('freelancer.json'), 'path': 'items'}
def make_regex(field, match, replace, default=None):
result = {
'field': field, 'match': match, 'replace': replace, 'default': default}
return result
def make_simplemath(other, op):
return {'other': {'subkey': other, 'type': 'number'}, 'op': op}
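# For reference, these helpers just build riko conf dicts, e.g.
#   make_regex('summary', r'\d+', '')
#     -> {'field': 'summary', 'match': '\\d+', 'replace': '', 'default': None}
#   make_simplemath('k:rate', 'multiply')
#     -> {'other': {'subkey': 'k:rate', 'type': 'number'}, 'op': 'multiply'}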
def add_source(source):
subelement_conf = {'path': 'k:source.content.1', 'token_key': None}
sourced = (source
.urlparse(field='link', assign='k:source')
.subelement(conf=subelement_conf, emit=False, assign='k:source'))
return sourced
def add_id(source, rule, field='link'):
make_id_part = [{'subkey': 'k:source'}, {'value': '-'}, {'subkey': 'id'}]
ideed = (source
.strfind(conf={'rule': rule}, field=field, assign='id')
.strconcat(conf={'part': make_id_part}, assign='id'))
return ideed
def add_posted(source, rule='', field='summary'):
if rule:
conf = {'rule': rule}
source = source.strfind(conf=conf, field=field, assign='k:posted')
else:
rule = {'field': 'updated', 'newval': 'k:posted'}
source = source.rename(conf={'rule': rule})
return source
def add_tags(source, rule, field='summary', assign='k:tags'):
tokenizer_conf = {'dedupe': True, 'sort': True}
no_tags = {'field': assign}
tag_strreplace_rule = [
{'find': ' ', 'replace': ','},
{'find': '>', 'replace': ','},
{'find': '&', 'replace': '&'},
{'find': 'Other -', 'replace': ''},
# {'find': '-', 'replace': ''},
]
tagged = (source
.strfind(conf={'rule': rule}, field=field, assign=assign)
.strreplace(
conf={'rule': tag_strreplace_rule}, field=assign,
assign=assign, skip_if=no_tags)
.strtransform(
conf={'rule': {'transform': 'lower'}}, field=assign,
assign=assign, skip_if=no_tags)
.tokenizer(
conf=tokenizer_conf, field=assign, assign=assign, skip_if=no_tags)
)
return tagged
def add_budget(source, budget_text, fixed_text='', hourly_text='', double=True):
codes = '$£€₹'
no_raw_budget = {'field': 'k:budget_raw'}
has_code = {'field': 'k:cur_code', 'include': True}
is_def_cur = {'field': 'k:cur_code', 'text': DEF_CUR_CODE}
not_def_cur = {'field': 'k:cur_code', 'text': DEF_CUR_CODE, 'include': True}
isnt_fixed = {'field': 'summary', 'text': fixed_text, 'include': True}
isnt_hourly = {'field': 'summary', 'text': hourly_text, 'include': True}
no_symbol = {
'field': 'k:budget_raw', 'text': codes, 'op': 'intersection',
'include': True
}
code_or_no_raw_budget = [has_code, no_raw_budget]
def_cur_or_no_raw_budget = [is_def_cur, no_raw_budget]
not_def_cur_or_no_raw_budget = [not_def_cur, no_raw_budget]
first_num_rule = {'find': r'\d+', 'location': 'at'}
last_num_rule = {'find': r'\d+', 'location': 'at', 'param': 'last'}
cur_rule = {'find': r'\b[A-Z]{3}\b', 'location': 'at'}
sym_rule = {'find': '[%s]' % codes, 'location': 'at'}
# make_regex('k:budget_raw', r'[(),.\s]', ''),
invalid_budgets = [
{'find': 'Less than', 'replace': '0-'},
{'find': 'Under', 'replace': '0-'},
{'find': 'Upto', 'replace': '0-'},
{'find': 'or less', 'replace': '-0'},
{'find': 'k', 'replace': '000'},
{'find': 'Not Sure', 'replace': ''},
{'find': 'Not sure', 'replace': ''},
{'find': '(', 'replace': ''},
{'find': ')', 'replace': ''},
{'find': '.', 'replace': ''},
{'find': ',', 'replace': ''},
{'find': ' ', 'replace': ''},
]
cur_strreplace_rule = [
{'find': '$', 'replace': 'USD'},
{'find': '£', 'replace': 'GBP'},
{'find': '€', 'replace': 'EUR'},
{'find': '₹', 'replace': 'INR'},
]
converted_budget_part = [
{'subkey': 'k:budget_w_sym'},
{'value': ' ('},
{'subkey': 'k:budget_converted_w_sym'},
{'value': ')'}
]
def_full_budget_part = {'subkey': 'k:budget_w_sym'}
hourly_budget_part = [{'subkey': 'k:budget_full'}, {'value': ' / hr'}]
exchangerate_conf = {'url': get_path('quote.json')}
native_currencyformat_conf = {'currency': {'subkey': 'k:cur_code'}}
def_currencyformat_conf = {'currency': DEF_CUR_CODE}
ave_budget_conf = make_simplemath('k:budget_raw2_num', 'mean')
convert_budget_conf = make_simplemath('k:rate', 'multiply')
if fixed_text:
source = source.strconcat(
conf={'part': {'value': 'fixed'}}, assign='k:job_type',
skip_if=isnt_fixed)
if hourly_text:
source = source.strconcat(
conf={'part': {'value': 'hourly'}}, assign='k:job_type',
skip_if=isnt_hourly)
source = (source
.refind(
conf={'rule': cur_rule}, field='k:budget_raw',
assign='k:cur_code', skip_if=no_raw_budget)
.strreplace(
conf={'rule': invalid_budgets}, field='k:budget_raw',
assign='k:budget_raw', skip_if=no_raw_budget))
if double:
source = (source
.refind(
conf={'rule': first_num_rule}, field='k:budget_raw',
assign='k:budget_raw_num', skip_if=no_raw_budget)
.refind(
conf={'rule': last_num_rule}, field='k:budget_raw',
assign='k:budget_raw2_num', skip_if=no_raw_budget)
.simplemath(
conf=ave_budget_conf, field='k:budget_raw_num',
assign='k:budget', skip_if=no_raw_budget)
)
else:
source = source.refind(
conf={'rule': first_num_rule}, field='k:budget_raw',
assign='k:budget', skip_if=no_raw_budget)
source = (source
.refind(
conf={'rule': sym_rule}, field='k:budget_raw',
assign='k:budget_raw_sym', skip_if=no_symbol)
.strreplace(
conf={'rule': cur_strreplace_rule}, field='k:budget_raw_sym',
assign='k:cur_code', skip_if=code_or_no_raw_budget)
.currencyformat(
conf=native_currencyformat_conf, field='k:budget',
assign='k:budget_w_sym', skip_if=no_raw_budget)
.exchangerate(
conf=exchangerate_conf, field='k:cur_code', assign='k:rate',
skip_if=def_cur_or_no_raw_budget)
.simplemath(
conf=convert_budget_conf, field='k:budget',
assign='k:budget_converted', skip_if=def_cur_or_no_raw_budget)
.currencyformat(
conf=def_currencyformat_conf, field='k:budget_converted',
assign='k:budget_converted_w_sym', skip_if=def_cur_or_no_raw_budget)
.strconcat(
conf={'part': converted_budget_part}, assign='k:budget_full',
skip_if=def_cur_or_no_raw_budget)
.strconcat(
conf={'part': def_full_budget_part}, assign='k:budget_full',
skip_if=not_def_cur_or_no_raw_budget)
)
if hourly_text:
source = (source
.strconcat(
conf={'part': hourly_budget_part}, assign='k:budget_full',
skip_if=isnt_hourly)
)
return source
def clean_locations(source):
no_client_loc = {'field': 'k:client_location'}
no_work_loc = {'field': 'k:work_location'}
rule = {'find': ', ', 'replace': ''}
cleaned = (source
.strreplace(
conf={'rule': rule}, field='k:client_location',
assign='k:client_location', skip_if=no_client_loc)
.strreplace(
conf={'rule': rule}, field='k:work_location',
assign='k:work_location', skip_if=no_work_loc)
)
return cleaned
def remove_cruft(source):
remove_rule = [
{'field': 'author'},
{'field': 'content'},
{'field': 'dc:creator'},
{'field': 'links'},
{'field': 'pubDate'},
{'field': 'summary'},
{'field': 'updated'},
{'field': 'updated_parsed'},
{'field': 'y:id'},
{'field': 'y:title'},
{'field': 'y:published'},
{'field': 'k:budget_raw'},
{'field': 'k:budget_raw2_num'},
{'field': 'k:budget_raw_num'},
{'field': 'k:budget_raw_sym'},
]
return source.rename(conf={'rule': remove_rule})
def parse_odesk(source, stream=True):
budget_text = 'Budget</b>:'
no_budget = {'field': 'summary', 'text': budget_text, 'include': True}
raw_budget_rule = [{'find': budget_text, 'location': 'after'}, BR]
title_rule = {'find': '- oDesk'}
find_id_rule = [{'find': 'ID</b>:', 'location': 'after'}, BR]
categ_rule = [{'find': 'Category</b>:', 'location': 'after'}, BR]
skills_rule = [{'find': 'Skills</b>:', 'location': 'after'}, BR]
client_loc_rule = [{'find': 'Country</b>:', 'location': 'after'}, BR]
posted_rule = [{'find': 'Posted On</b>:', 'location': 'after'}, BR]
desc_rule = [{'find': '<p>', 'location': 'after'}, {'find': '<br><br><b>'}]
source = (source
.strfind(conf={'rule': title_rule}, field='title', assign='title')
.strfind(
conf={'rule': client_loc_rule}, field='summary',
assign='k:client_location')
.strfind(
conf={'rule': desc_rule}, field='summary', assign='description')
.strfind(
conf={'rule': raw_budget_rule}, field='summary',
assign='k:budget_raw', skip_if=no_budget)
)
source = add_source(source)
source = add_posted(source, posted_rule)
source = add_id(source, find_id_rule, field='summary')
source = add_budget(source, budget_text, double=False)
source = add_tags(source, skills_rule)
source = add_tags(source, categ_rule, assign='k:categories')
source = clean_locations(source)
source = remove_cruft(source)
return source.output if stream else source
def parse_guru(source, stream=True):
budget_text = 'budget:</b>'
fixed_text = 'Fixed Price budget:</b>'
hourly_text = 'Hourly budget:</b>'
no_budget = {'field': 'summary', 'text': budget_text, 'include': True}
isnt_hourly = {'field': 'summary', 'text': hourly_text, 'include': True}
raw_budget_rule = [{'find': budget_text, 'location': 'after'}, BR]
after_hourly = {'rule': {'find': 'Rate:', 'location': 'after'}}
find_id_rule = {'find': '/', 'location': 'after', 'param': 'last'}
categ_rule = [{'find': 'Category:</b>', 'location': 'after'}, BR]
skills_rule = [{'find': 'Required skills:</b>', 'location': 'after'}, BR]
job_loc_conf = {
'rule': [{'find': 'Freelancer Location:</b>', 'location': 'after'}, BR]}
desc_conf = {
'rule': [{'find': 'Description:</b>', 'location': 'after'}, BR]}
source = (source
.strfind(conf=job_loc_conf, field='summary', assign='k:work_location')
.strfind(conf=desc_conf, field='summary', assign='description')
.strfind(
conf={'rule': raw_budget_rule}, field='summary',
assign='k:budget_raw', skip_if=no_budget)
.strfind(
conf=after_hourly, field='k:budget_raw', assign='k:budget_raw',
skip_if=isnt_hourly)
)
kwargs = {'fixed_text': fixed_text, 'hourly_text': hourly_text}
source = add_source(source)
source = add_posted(source)
source = add_id(source, find_id_rule)
source = add_budget(source, budget_text, **kwargs)
source = add_tags(source, skills_rule)
source = add_tags(source, categ_rule, assign='k:categories')
source = clean_locations(source)
source = remove_cruft(source)
return source.output if stream else source
def parse_elance(source, stream=True):
budget_text = 'Budget:</b>'
fixed_text = 'Budget:</b> Fixed Price'
hourly_text = 'Budget:</b> Hourly'
no_job_loc = {'field': 'summary', 'text': 'Preferred Job Location', 'include': True}
no_client_loc = {'field': 'summary', 'text': 'Client Location', 'include': True}
no_budget = {'field': 'summary', 'text': budget_text, 'include': True}
isnt_fixed = {'field': 'summary', 'text': fixed_text, 'include': True}
isnt_hourly = {'field': 'summary', 'text': hourly_text, 'include': True}
raw_budget_rule = [{'find': budget_text, 'location': 'after'}, BR]
after_hourly = {'rule': {'find': 'Hourly', 'location': 'after'}}
after_fixed = {'rule': {'find': 'Fixed Price', 'location': 'after'}}
title_conf = {'rule': {'find': '| Elance Job'}}
find_id_rule = [
{'find': '/', 'param': 'last'},
{'find': '/', 'location': 'after', 'param': 'last'}]
categ_rule = [{'find': 'Category:</b>', 'location': 'after'}, BR]
skills_rule = [{'find': 'Desired Skills:</b>', 'location': 'after'}, BR]
job_loc_conf = {
'rule': [
{'find': 'Preferred Job Location:</b>', 'location': 'after'}, BR]}
client_loc_conf = {
'rule': [{'find': 'Client Location:</b>', 'location': 'after'}, BR]}
desc_rule = [
{'find': '<p>', 'location': 'after'}, {'find': '...\n <br>'}]
proposals_conf = {
'rule': [
{'find': 'Proposals:</b>', 'location': 'after'}, {'find': '('}]}
jobs_posted_conf = {
'rule': [
{'find': 'Client:</b> Client (', 'location': 'after'},
{'find': 'jobs posted'}]}
jobs_awarded_conf = {
'rule': [
{'find': 'jobs posted,', 'location': 'after'},
{'find': 'awarded'}]}
purchased_conf = {
'rule': [
{'find': 'total purchased'},
{'find': ',', 'location': 'after', 'param': 'last'}]}
ends_conf = {
'rule': [
{'find': 'Time Left:</b>', 'location': 'after'},
{'find': ') <br>'},
{'find': 'h (Ends', 'location': 'after'}]}
source = (source
.strfind(conf=title_conf, field='title', assign='title')
.strfind(conf=proposals_conf, field='summary', assign='k:submissions')
.strfind(conf=jobs_posted_conf, field='summary', assign='k:num_jobs')
.strfind(
conf=jobs_awarded_conf, field='summary', assign='k:per_awarded')
.strfind(conf=purchased_conf, field='summary', assign='k:tot_purchased')
.strfind(conf=ends_conf, field='summary', assign='k:due')
.strfind(
conf=job_loc_conf, field='summary', assign='k:work_location',
skip_if=no_job_loc)
.strfind(
conf=client_loc_conf, field='summary', assign='k:client_location',
skip_if=no_client_loc)
.strfind(
conf={'rule': desc_rule}, field='summary', assign='description')
.strfind(
conf={'rule': raw_budget_rule}, field='summary',
assign='k:budget_raw', skip_if=no_budget)
.strfind(
conf=after_hourly, field='k:budget_raw', assign='k:budget_raw',
skip_if=isnt_hourly)
.strfind(
conf=after_fixed, field='k:budget_raw', assign='k:budget_raw',
skip_if=isnt_fixed)
)
kwargs = {'fixed_text': fixed_text, 'hourly_text': hourly_text}
source = add_source(source)
source = add_posted(source)
source = add_id(source, find_id_rule)
source = add_budget(source, budget_text, **kwargs)
source = add_tags(source, skills_rule)
source = add_tags(source, categ_rule, assign='k:categories')
source = clean_locations(source)
source = remove_cruft(source)
return source.output if stream else source
def parse_freelancer(source, stream=True):
budget_text = '(Budget:'
no_budget = {'field': 'summary', 'text': budget_text, 'include': True}
raw_budget_rule = [
{'find': budget_text, 'location': 'after'}, {'find': ','}]
title_rule = {'find': ' by '}
skills_rule = [{'find': ', Jobs:', 'location': 'after'}, {'find': ')</p>'}]
desc_rule = [{'find': '<p>', 'location': 'after'}, {'find': '(Budget:'}]
source = (source
.strfind(conf={'rule': title_rule}, field='title', assign='title')
.strfind(
conf={'rule': desc_rule}, field='summary', assign='description')
.strfind(
conf={'rule': raw_budget_rule}, field='summary',
assign='k:budget_raw', skip_if=no_budget)
)
source = add_source(source)
source = add_posted(source)
source = add_budget(source, budget_text)
source = add_tags(source, skills_rule)
source = clean_locations(source)
source = remove_cruft(source)
return source.output if stream else source
def pipe(test=False, parallel=False, threads=False):
kwargs = {'parallel': parallel, 'threads': threads}
Pipe = partial(SyncPipe, 'fetchdata', **kwargs)
odesk_source = Pipe(conf=odesk_conf)
guru_source = Pipe(conf=guru_conf)
freelancer_source = Pipe(conf=freelancer_conf)
elance_source = Pipe(conf=elance_conf)
# odesk_source = SyncPipe('fetchdata', conf=odesk_conf, **kwargs)
# guru_source = SyncPipe('fetchdata', conf=guru_conf, **kwargs)
# elance_source = SyncPipe('fetchdata', conf=elance_conf, **kwargs)
# freelancer_source = SyncPipe('fetchdata', conf=freelancer_conf, **kwargs)
odesk_pipe = parse_odesk(odesk_source, stream=False)
guru_stream = parse_guru(guru_source)
elance_stream = parse_elance(elance_source)
freelancer_stream = parse_freelancer(freelancer_source)
others = [guru_stream, freelancer_stream, elance_stream]
stream = odesk_pipe.union(others=others).list
pprint(stream[-1])
return stream
@coroutine
def async_pipe(reactor, test=None):
Pipe = partial(AsyncPipe, 'fetchdata')
odesk_source = Pipe(conf=odesk_conf)
guru_source = Pipe(conf=guru_conf)
freelancer_source = Pipe(conf=freelancer_conf)
elance_source = Pipe(conf=elance_conf)
odesk_pipe = yield parse_odesk(odesk_source, stream=False)
guru_stream = yield parse_guru(guru_source)
elance_stream = yield parse_elance(elance_source)
freelancer_stream = yield parse_freelancer(freelancer_source)
others = [guru_stream, freelancer_stream, elance_stream]
stream = odesk_pipe.union(others=others).list
pprint(stream[-1])
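# A rough way to run these pipelines, mirroring riko's demo scripts (this
# guard is not part of the original module):
#
#   if __name__ == '__main__':
#       pipe()  # synchronous version
#       # For the async variant riko wraps Twisted's reactor:
#       # from riko.bado import react
#       # react(async_pipe)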
|
from typing import List
import numpy as np
import tensornetwork as tn
def add_fft(inputs: List[tn.Edge],) -> List[tn.Edge]:
"""Creates output node axes corresponding to the Fourier transform of inputs.
  Uses the Cooley-Tukey FFT algorithm. All axes are expected to have length 2.
  The input axes must be (and output axes will be) binary.
Args:
inputs: The node axes to act upon.
Returns:
A list of `Edges` containing the result.
"""
if not all(e.is_dangling() for e in inputs):
raise ValueError("Inputs must be dangling edges.")
hadamard = np.array([[1, 1], [1, -1]], dtype=np.complex128) / np.sqrt(2)
def cz(p: int) -> np.ndarray:
result = np.eye(4, dtype=np.complex128)
result[3, 3] = np.exp(-1j * np.pi / 2**p)
return result.reshape((2,) * 4)
def inline_stitch(targets: List[int], tensor: np.ndarray, name: str):
"""Applies an operation to the targeted axis indices."""
op_node = tn.Node(tensor, name)
for k, t in enumerate(targets):
incoming_state = state[t]
receiving_port = op_node[k]
output_port = op_node[k + len(targets)]
incoming_state ^ receiving_port
state[t] = output_port
state = list(inputs)
# Mix "n twiddle.
n = len(state)
for i in range(n):
for j in range(1, i + 1):
inline_stitch([i - j, i], cz(j), "TWIDDLE_{}_{}".format(j, i))
inline_stitch([i], hadamard, "MIX_{}".format(i))
# FFT reverses bit order.
return state[::-1]
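# Hedged usage sketch (names below are illustrative, not from the original
# module): feed one dangling edge per qubit-like axis into add_fft and use the
# returned edges as the transformed output axes.
#
#   n = 3
#   state_nodes = [tn.Node(np.array([1.0, 0.0], dtype=np.complex128))
#                  for _ in range(n)]
#   fft_edges = add_fft([node[0] for node in state_nodes])
#   # fft_edges now lists n dangling output edges, in bit-reversed order.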
|
import os
import os.path
import contextlib
from typing import Sequence
from PyQt5.QtCore import pyqtSlot, pyqtSignal, QObject
from qutebrowser.utils import log, utils, qtutils
from qutebrowser.config import config
class BaseLineParser(QObject):
"""A LineParser without any real data.
Attributes:
_configdir: Directory to read the config from, or None.
_configfile: The config file path.
_fname: Filename of the config.
_binary: Whether to open the file in binary mode.
Signals:
changed: Emitted when the history was changed.
"""
changed = pyqtSignal()
def __init__(self, configdir, fname, *, binary=False, parent=None):
"""Constructor.
Args:
configdir: Directory to read the config from.
fname: Filename of the config file.
binary: Whether to open the file in binary mode.
_opened: Whether the underlying file is open
"""
super().__init__(parent)
self._configdir = configdir
self._configfile = os.path.join(self._configdir, fname)
self._fname = fname
self._binary = binary
self._opened = False
def __repr__(self):
return utils.get_repr(self, constructor=True,
configdir=self._configdir, fname=self._fname,
binary=self._binary)
def _prepare_save(self):
"""Prepare saving of the file.
Return:
True if the file should be saved, False otherwise.
"""
os.makedirs(self._configdir, 0o755, exist_ok=True)
return True
def _after_save(self):
"""Log a message after saving is done."""
log.destroy.debug("Saved to {}".format(self._configfile))
@contextlib.contextmanager
def _open(self, mode):
"""Open self._configfile for reading.
Args:
mode: The mode to use ('a'/'r'/'w')
Raises:
IOError: if the file is already open
Yields:
a file object for the config file
"""
assert self._configfile is not None
if self._opened:
raise IOError("Refusing to double-open LineParser.")
self._opened = True
try:
if self._binary:
with open(self._configfile, mode + 'b') as f:
yield f
else:
with open(self._configfile, mode, encoding='utf-8') as f:
yield f
finally:
self._opened = False
def _write(self, fp, data):
"""Write the data to a file.
Args:
fp: A file object to write the data to.
data: The data to write.
"""
if not data:
return
if self._binary:
fp.write(b'\n'.join(data))
fp.write(b'\n')
else:
fp.write('\n'.join(data))
fp.write('\n')
def save(self):
"""Save the history to disk."""
raise NotImplementedError
def clear(self):
"""Clear the contents of the file."""
raise NotImplementedError
class LineParser(BaseLineParser):
"""Parser for configuration files which are simply line-based.
Attributes:
data: A list of lines.
"""
def __init__(self, configdir, fname, *, binary=False, parent=None):
"""Constructor.
Args:
configdir: Directory to read the config from.
fname: Filename of the config file.
binary: Whether to open the file in binary mode.
"""
super().__init__(configdir, fname, binary=binary, parent=parent)
if not os.path.isfile(self._configfile):
self.data: Sequence[str] = []
else:
log.init.debug("Reading {}".format(self._configfile))
self._read()
def __iter__(self):
return iter(self.data)
def __getitem__(self, key):
return self.data[key]
def _read(self):
"""Read the data from self._configfile."""
with self._open('r') as f:
if self._binary:
self.data = [line.rstrip(b'\n') for line in f]
else:
self.data = [line.rstrip('\n') for line in f]
def save(self):
"""Save the config file."""
if self._opened:
raise IOError("Refusing to double-open LineParser.")
do_save = self._prepare_save()
if not do_save:
return
self._opened = True
try:
assert self._configfile is not None
with qtutils.savefile_open(self._configfile, self._binary) as f:
self._write(f, self.data)
finally:
self._opened = False
self._after_save()
def clear(self):
self.data = []
self.save()
class LimitLineParser(LineParser):
"""A LineParser with a limited count of lines.
Attributes:
_limit: The config option used to limit the maximum number of lines.
"""
def __init__(self, configdir, fname, *, limit, binary=False, parent=None):
"""Constructor.
Args:
configdir: Directory to read the config from, or None.
fname: Filename of the config file.
limit: Config option which contains a limit.
binary: Whether to open the file in binary mode.
"""
super().__init__(configdir, fname, binary=binary, parent=parent)
self._limit = limit
if limit is not None and configdir is not None:
config.instance.changed.connect(self._cleanup_file)
def __repr__(self):
return utils.get_repr(self, constructor=True,
configdir=self._configdir, fname=self._fname,
limit=self._limit, binary=self._binary)
@pyqtSlot(str)
def _cleanup_file(self, option):
"""Delete the file if the limit was changed to 0."""
assert self._configfile is not None
if option != self._limit:
return
value = config.instance.get(option)
if value == 0:
if os.path.exists(self._configfile):
os.remove(self._configfile)
def save(self):
"""Save the config file."""
limit = config.instance.get(self._limit)
if limit == 0:
return
do_save = self._prepare_save()
if not do_save:
return
assert self._configfile is not None
with qtutils.savefile_open(self._configfile, self._binary) as f:
self._write(f, self.data[-limit:])
self._after_save()
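# Hedged usage sketch (directory, file name and limit option are placeholders;
# qutebrowser's config machinery must already be initialised):
#
#   parser = LineParser('/tmp/qb-example', 'quickmarks')
#   parser.data = list(parser.data) + ['example line']
#   parser.save()
#
# LimitLineParser behaves the same way, but save() only writes the last
# `limit` lines, where `limit` names a config option holding an int.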
|
import numpy as np
import os.path as op
from numpy.testing import assert_array_almost_equal, assert_allclose
from scipy.signal import welch
import pytest
from mne import pick_types, Epochs, read_events
from mne.io import RawArray, read_raw_fif
from mne.utils import catch_logging
from mne.time_frequency import psd_welch, psd_multitaper, psd_array_welch
base_dir = op.join(op.dirname(__file__), '..', '..', 'io', 'tests', 'data')
raw_fname = op.join(base_dir, 'test_raw.fif')
event_fname = op.join(base_dir, 'test-eve.fif')
def test_psd_nan():
"""Test handling of NaN in psd_array_welch."""
n_samples, n_fft, n_overlap = 2048, 1024, 512
x = np.random.RandomState(0).randn(1, n_samples)
psds, freqs = psd_array_welch(x[:, :n_fft + n_overlap], float(n_fft),
n_fft=n_fft, n_overlap=n_overlap)
x[:, n_fft + n_overlap:] = np.nan # what Raw.get_data() will give us
psds_2, freqs_2 = psd_array_welch(x, float(n_fft), n_fft=n_fft,
n_overlap=n_overlap)
assert_allclose(freqs, freqs_2)
assert_allclose(psds, psds_2)
# 1-d
psds_2, freqs_2 = psd_array_welch(
x[0], float(n_fft), n_fft=n_fft, n_overlap=n_overlap)
assert_allclose(freqs, freqs_2)
assert_allclose(psds[0], psds_2)
# defaults
with catch_logging() as log:
psd_array_welch(x, float(n_fft), verbose='debug')
log = log.getvalue()
assert 'using 256-point FFT on 256 samples with 0 overlap' in log
assert 'hamming window' in log
def test_psd():
"""Tests the welch and multitaper PSD."""
raw = read_raw_fif(raw_fname)
picks_psd = [0, 1]
# Populate raw with sinusoids
rng = np.random.RandomState(40)
data = 0.1 * rng.randn(len(raw.ch_names), raw.n_times)
freqs_sig = [8., 50.]
for ix, freq in zip(picks_psd, freqs_sig):
data[ix, :] += 2 * np.sin(np.pi * 2. * freq * raw.times)
first_samp = raw._first_samps[0]
raw = RawArray(data, raw.info)
    tmin, tmax = 0, 20  # use the first 20 s of data
fmin, fmax = 2, 70 # look at frequencies between 2 and 70Hz
n_fft = 128
# -- Raw --
kws_psd = dict(tmin=tmin, tmax=tmax, fmin=fmin, fmax=fmax,
picks=picks_psd) # Common to all
kws_welch = dict(n_fft=n_fft)
kws_mt = dict(low_bias=True)
funcs = [(psd_welch, kws_welch),
(psd_multitaper, kws_mt)]
for func, kws in funcs:
kws = kws.copy()
kws.update(kws_psd)
kws.update(verbose='debug')
if func is psd_welch:
kws.update(window='hann')
with catch_logging() as log:
psds, freqs = func(raw, proj=False, **kws)
log = log.getvalue()
if func is psd_welch:
assert f'{n_fft}-point FFT on {n_fft} samples with 0 overl' in log
assert 'hann window' in log
psds_proj, freqs_proj = func(raw, proj=True, **kws)
assert psds.shape == (len(kws['picks']), len(freqs))
assert np.sum(freqs < 0) == 0
assert np.sum(psds < 0) == 0
# Is power found where it should be
ixs_max = np.argmax(psds, axis=1)
for ixmax, ifreq in zip(ixs_max, freqs_sig):
# Find nearest frequency to the "true" freq
ixtrue = np.argmin(np.abs(ifreq - freqs))
assert (np.abs(ixmax - ixtrue) < 2)
# Make sure the projection doesn't change channels it shouldn't
assert_array_almost_equal(psds, psds_proj)
# Array input shouldn't work
pytest.raises(ValueError, func, raw[:3, :20][0])
# test n_per_seg in psd_welch (and padding)
psds1, freqs1 = psd_welch(raw, proj=False, n_fft=128, n_per_seg=128,
**kws_psd)
psds2, freqs2 = psd_welch(raw, proj=False, n_fft=256, n_per_seg=128,
**kws_psd)
assert (len(freqs1) == np.floor(len(freqs2) / 2.))
assert (psds1.shape[-1] == np.floor(psds2.shape[-1] / 2.))
kws_psd.update(dict(n_fft=tmax * 1.1 * raw.info['sfreq']))
with pytest.raises(ValueError, match='n_fft is not allowed to be > n_tim'):
psd_welch(raw, proj=False, n_per_seg=None,
**kws_psd)
kws_psd.update(dict(n_fft=128, n_per_seg=64, n_overlap=90))
with pytest.raises(ValueError, match='n_overlap cannot be greater'):
psd_welch(raw, proj=False, **kws_psd)
with pytest.raises(ValueError, match='No frequencies found'):
psd_array_welch(np.zeros((1, 1000)), 1000., fmin=10, fmax=1)
# -- Epochs/Evoked --
events = read_events(event_fname)
events[:, 0] -= first_samp
tmin, tmax, event_id = -0.5, 0.5, 1
epochs = Epochs(raw, events[:10], event_id, tmin, tmax, picks=picks_psd,
proj=False, preload=True, baseline=None)
evoked = epochs.average()
tmin_full, tmax_full = -1, 1
epochs_full = Epochs(raw, events[:10], event_id, tmin_full, tmax_full,
picks=picks_psd, proj=False, preload=True,
baseline=None)
kws_psd = dict(tmin=tmin, tmax=tmax, fmin=fmin, fmax=fmax,
picks=picks_psd) # Common to all
funcs = [(psd_welch, kws_welch),
(psd_multitaper, kws_mt)]
for func, kws in funcs:
kws = kws.copy()
kws.update(kws_psd)
psds, freqs = func(
epochs[:1], proj=False, **kws)
psds_proj, freqs_proj = func(
epochs[:1], proj=True, **kws)
psds_f, freqs_f = func(
epochs_full[:1], proj=False, **kws)
# this one will fail if you add for example 0.1 to tmin
assert_array_almost_equal(psds, psds_f, 27)
# Make sure the projection doesn't change channels it shouldn't
assert_array_almost_equal(psds, psds_proj, 27)
# Is power found where it should be
ixs_max = np.argmax(psds.mean(0), axis=1)
for ixmax, ifreq in zip(ixs_max, freqs_sig):
# Find nearest frequency to the "true" freq
ixtrue = np.argmin(np.abs(ifreq - freqs))
assert (np.abs(ixmax - ixtrue) < 2)
assert (psds.shape == (1, len(kws['picks']), len(freqs)))
assert (np.sum(freqs < 0) == 0)
assert (np.sum(psds < 0) == 0)
# Array input shouldn't work
pytest.raises(ValueError, func, epochs.get_data())
# Testing evoked (doesn't work w/ compute_epochs_psd)
psds_ev, freqs_ev = func(
evoked, proj=False, **kws)
psds_ev_proj, freqs_ev_proj = func(
evoked, proj=True, **kws)
# Is power found where it should be
ixs_max = np.argmax(psds_ev, axis=1)
for ixmax, ifreq in zip(ixs_max, freqs_sig):
# Find nearest frequency to the "true" freq
ixtrue = np.argmin(np.abs(ifreq - freqs_ev))
assert (np.abs(ixmax - ixtrue) < 2)
# Make sure the projection doesn't change channels it shouldn't
assert_array_almost_equal(psds_ev, psds_ev_proj, 27)
assert (psds_ev.shape == (len(kws['picks']), len(freqs)))
@pytest.mark.parametrize('kind', ('raw', 'epochs', 'evoked'))
def test_psd_welch_average_kwarg(kind):
"""Test `average` kwarg of psd_welch()."""
raw = read_raw_fif(raw_fname)
picks_psd = [0, 1]
# Populate raw with sinusoids
rng = np.random.RandomState(40)
data = 0.1 * rng.randn(len(raw.ch_names), raw.n_times)
freqs_sig = [8., 50.]
for ix, freq in zip(picks_psd, freqs_sig):
data[ix, :] += 2 * np.sin(np.pi * 2. * freq * raw.times)
first_samp = raw._first_samps[0]
raw = RawArray(data, raw.info)
tmin, tmax = -0.5, 0.5
fmin, fmax = 0, np.inf
n_fft = 256
n_per_seg = 128
n_overlap = 0
event_id = 2
events = read_events(event_fname)
events[:, 0] -= first_samp
kws = dict(fmin=fmin, fmax=fmax, tmin=tmin, tmax=tmax, n_fft=n_fft,
n_per_seg=n_per_seg, n_overlap=n_overlap, picks=picks_psd)
if kind == 'raw':
inst = raw
elif kind == 'epochs':
inst = Epochs(raw, events[:10], event_id, tmin, tmax, picks=picks_psd,
proj=False, preload=True, baseline=None)
elif kind == 'evoked':
inst = Epochs(raw, events[:10], event_id, tmin, tmax, picks=picks_psd,
proj=False, preload=True, baseline=None).average()
else:
raise ValueError('Unknown parametrization passed to test, check test '
'for typos.')
psds_mean, freqs_mean = psd_welch(inst=inst, average='mean', **kws)
psds_median, freqs_median = psd_welch(inst=inst, average='median', **kws)
psds_unagg, freqs_unagg = psd_welch(inst=inst, average=None, **kws)
# Frequencies should be equal across all "average" types, as we feed in
# the exact same data.
assert_allclose(freqs_mean, freqs_median)
assert_allclose(freqs_mean, freqs_unagg)
# For `average=None`, the last dimension contains the un-aggregated
# segments.
assert psds_mean.shape == psds_median.shape
assert psds_mean.shape == psds_unagg.shape[:-1]
assert_allclose(psds_mean, psds_unagg.mean(axis=-1))
# Compare with manual median calculation
assert_allclose(psds_median, np.median(psds_unagg, axis=-1))
@pytest.mark.slowtest
def test_compares_psd():
"""Test PSD estimation on raw for plt.psd and scipy.signal.welch."""
raw = read_raw_fif(raw_fname)
exclude = raw.info['bads'] + ['MEG 2443', 'EEG 053'] # bads + 2 more
# picks MEG gradiometers
picks = pick_types(raw.info, meg='grad', eeg=False, stim=False,
exclude=exclude)[:2]
    tmin, tmax = 0, 10  # use the first 10 s of data
    fmin, fmax = 2, 70  # look at frequencies between 2 and 70 Hz
n_fft = 2048
# Compute psds with the new implementation using Welch
psds_welch, freqs_welch = psd_welch(raw, tmin=tmin, tmax=tmax, fmin=fmin,
fmax=fmax, proj=False, picks=picks,
n_fft=n_fft, n_jobs=1)
# Compute psds with plt.psd
start, stop = raw.time_as_index([tmin, tmax])
data, times = raw[picks, start:(stop + 1)]
out = [welch(d, fs=raw.info['sfreq'], nperseg=n_fft, noverlap=0)
for d in data]
freqs_mpl = out[0][0]
psds_mpl = np.array([o[1] for o in out])
mask = (freqs_mpl >= fmin) & (freqs_mpl <= fmax)
freqs_mpl = freqs_mpl[mask]
psds_mpl = psds_mpl[:, mask]
assert_array_almost_equal(psds_welch, psds_mpl)
assert_array_almost_equal(freqs_welch, freqs_mpl)
assert (psds_welch.shape == (len(picks), len(freqs_welch)))
assert (psds_mpl.shape == (len(picks), len(freqs_mpl)))
assert (np.sum(freqs_welch < 0) == 0)
assert (np.sum(freqs_mpl < 0) == 0)
assert (np.sum(psds_welch < 0) == 0)
assert (np.sum(psds_mpl < 0) == 0)
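# Stand-alone illustration (not part of the MNE test suite): psd_array_welch
# accepts a plain ndarray, so the Welch behaviour exercised above can be poked
# at without constructing a Raw object.
#
#   rng = np.random.RandomState(0)
#   sig = rng.randn(2, 2048)  # 2 "channels", 2048 samples
#   psds, freqs = psd_array_welch(sig, sfreq=1000., n_fft=1024, n_overlap=512)
#   assert psds.shape == (2, len(freqs))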
|
import diamond.collector
import json
import subprocess
class OpenvzCollector(diamond.collector.Collector):
_FIELDS = (
'laverage',
'uptime'
)
def get_default_config_help(self):
config_help = super(OpenvzCollector, self).get_default_config_help()
config_help.update({
            'bin': 'The path to the vzlist binary',
            'keyname': 'Key used to name each container in metric paths (default: hostname)',
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(OpenvzCollector, self).get_default_config()
config.update({
'path': 'openvz',
'bin': '/usr/sbin/vzlist',
'keyname': 'hostname'
})
return config
def collect(self):
output = self.poll()
instances_infos = json.loads(output)
if not instances_infos:
return
for instance_values in instances_infos:
serverkey = instance_values[self.config['keyname']].replace(
'.', '_'
)
for keyvalue in instance_values:
sfield = ['held', 'maxheld', 'usage']
# Get Array values
if isinstance(instance_values[keyvalue], dict):
for subkey in instance_values[keyvalue]:
stat_name = '%s.%s.%s' % (
serverkey,
keyvalue,
subkey
)
if subkey in sfield:
try:
metric_value = float(
instance_values[keyvalue][subkey]
)
except ValueError:
continue
self.publish(
stat_name,
metric_value,
precision=5
)
else:
# Get field value
if keyvalue in self._FIELDS:
# Get Load average values
if keyvalue == 'laverage':
submetric_name = ['01', '05', '15']
for idx in range(0, 3):
try:
metric_value = float(
instance_values[keyvalue][idx]
)
except ValueError:
continue
stat_name = '%s.%s.%s' % (
serverkey,
keyvalue,
submetric_name[idx]
)
self.publish(
stat_name,
metric_value,
precision=5
)
else:
# Field value
try:
metric_value = float(
instance_values[keyvalue]
)
except ValueError:
continue
stat_name = '%s.%s' % (serverkey, keyvalue)
self.publish(stat_name, metric_value, precision=5)
def poll(self):
try:
command = [self.config['bin'], '-j']
output = subprocess.Popen(command,
stdout=subprocess.PIPE).communicate()[0]
except OSError:
output = ""
return output
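# For reference, `vzlist -j` emits a JSON list of containers; a trimmed,
# purely illustrative record that collect() above understands could look like:
#
#   [{"hostname": "web01.example.com",
#     "laverage": [0.01, 0.05, 0.10],
#     "uptime": 123456.0,
#     "kmemsize": {"held": 2048, "maxheld": 4096}}]
#
# which would publish metrics such as web01_example_com.laverage.01,
# web01_example_com.uptime and web01_example_com.kmemsize.held.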
|
from typing import Dict, List
import voluptuous as vol
from homeassistant.components.device_automation import toggle_entity
from homeassistant.const import CONF_DOMAIN
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.condition import ConditionCheckerType
from homeassistant.helpers.typing import ConfigType
from . import DOMAIN
CONDITION_SCHEMA = toggle_entity.CONDITION_SCHEMA.extend(
{vol.Required(CONF_DOMAIN): DOMAIN}
)
@callback
def async_condition_from_config(
config: ConfigType, config_validation: bool
) -> ConditionCheckerType:
"""Evaluate state based on configuration."""
if config_validation:
config = CONDITION_SCHEMA(config)
return toggle_entity.async_condition_from_config(config)
async def async_get_conditions(
hass: HomeAssistant, device_id: str
) -> List[Dict[str, str]]:
"""List device conditions."""
return await toggle_entity.async_get_conditions(hass, device_id, DOMAIN)
async def async_get_condition_capabilities(hass: HomeAssistant, config: dict) -> dict:
"""List condition capabilities."""
return await toggle_entity.async_get_condition_capabilities(hass, config)
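# Illustrative condition config this module validates and turns into a checker
# (all field values below are hypothetical):
#
#   {
#       "condition": "device",
#       "domain": DOMAIN,
#       "device_id": "abc123",
#       "entity_id": "switch.kitchen",
#       "type": "is_on",
#   }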
|
import argparse
from django.core.management.base import CommandError
from weblate.memory.models import Memory, MemoryImportError
from weblate.utils.management.base import BaseCommand
class Command(BaseCommand):
"""Command for importing translation memory."""
help = "imports translation memory"
def add_arguments(self, parser):
super().add_arguments(parser)
parser.add_argument(
"--language-map",
help="Map language codes in the TMX to Weblate, for example en_US:en",
)
parser.add_argument(
"file", type=argparse.FileType("rb"), help="File to import (TMX or JSON)"
)
def handle(self, *args, **options):
"""Translation memory import."""
langmap = None
if options["language_map"]:
langmap = dict(z.split(":", 1) for z in options["language_map"].split(","))
try:
Memory.objects.import_file(None, options["file"], langmap)
except MemoryImportError as error:
raise CommandError(f"Import failed: {error}")
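# Typical invocation (assuming the usual import_memory command name; the file
# path and language codes are placeholders):
#
#   ./manage.py import_memory --language-map en_US:en,pt_BR:pt /path/to/memory.tmx
#
# The --language-map value becomes {'en_US': 'en', 'pt_BR': 'pt'} via the
# split on "," and ":" in handle() above.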
|