import io
import os
import lxml.html
import pytest
from nikola import __main__
from .helper import cd, patch_config
from .test_demo_build import prepare_demo_site
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
)
def test_relative_links(build, output_dir):
"""Check that the links in output/index.html are correct"""
test_path = os.path.join(output_dir, "index.html")
with io.open(test_path, "rb") as inf:
data = inf.read()
assert not any(
url.startswith("..")
for _, _, url, _ in lxml.html.iterlinks(data)
if url.endswith("css")
)
def test_index_in_sitemap(build, output_dir):
"""Test that the correct path is in sitemap, and not the wrong one."""
sitemap_path = os.path.join(output_dir, "sitemap.xml")
with io.open(sitemap_path, "r", encoding="utf8") as inf:
sitemap_data = inf.read()
assert "<loc>https://example.com/</loc>" not in sitemap_data
assert "<loc>https://example.com/foo/bar/</loc>" in sitemap_data
@pytest.fixture(scope="module")
def build(target_dir):
"""Fill the site with demo content and build it."""
prepare_demo_site(target_dir)
# Set the SITE_URL to have a path with subfolder
patch_config(
target_dir,
(
'SITE_URL = "https://example.com/"',
'SITE_URL = "https://example.com/foo/bar/"',
),
)
with cd(target_dir):
__main__.main(["build"])
|
from homeassistant.components.roku.const import DOMAIN
from homeassistant.config_entries import SOURCE_SSDP, SOURCE_USER
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_SOURCE
from homeassistant.data_entry_flow import (
RESULT_TYPE_ABORT,
RESULT_TYPE_CREATE_ENTRY,
RESULT_TYPE_FORM,
)
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
from tests.components.roku import (
HOST,
MOCK_SSDP_DISCOVERY_INFO,
UPNP_FRIENDLY_NAME,
mock_connection,
setup_integration,
)
from tests.test_util.aiohttp import AiohttpClientMocker
async def test_duplicate_error(
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test that errors are shown when duplicates are added."""
await setup_integration(hass, aioclient_mock, skip_entry_setup=True)
mock_connection(aioclient_mock)
user_input = {CONF_HOST: HOST}
result = await hass.config_entries.flow.async_init(
DOMAIN, context={CONF_SOURCE: SOURCE_USER}, data=user_input
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
user_input = {CONF_HOST: HOST}
result = await hass.config_entries.flow.async_init(
DOMAIN, context={CONF_SOURCE: SOURCE_USER}, data=user_input
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
discovery_info = MOCK_SSDP_DISCOVERY_INFO.copy()
result = await hass.config_entries.flow.async_init(
DOMAIN, context={CONF_SOURCE: SOURCE_SSDP}, data=discovery_info
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_form(
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test the user step."""
await async_setup_component(hass, "persistent_notification", {})
mock_connection(aioclient_mock)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={CONF_SOURCE: SOURCE_USER}
)
assert result["type"] == RESULT_TYPE_FORM
assert result["errors"] == {}
user_input = {CONF_HOST: HOST}
with patch(
"homeassistant.components.roku.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.roku.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result = await hass.config_entries.flow.async_configure(
flow_id=result["flow_id"], user_input=user_input
)
await hass.async_block_till_done()
assert result["type"] == RESULT_TYPE_CREATE_ENTRY
assert result["title"] == UPNP_FRIENDLY_NAME
assert result["data"]
assert result["data"][CONF_HOST] == HOST
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_cannot_connect(
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test we handle cannot connect roku error."""
mock_connection(aioclient_mock, error=True)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={CONF_SOURCE: SOURCE_USER}
)
result = await hass.config_entries.flow.async_configure(
flow_id=result["flow_id"], user_input={CONF_HOST: HOST}
)
assert result["type"] == RESULT_TYPE_FORM
assert result["errors"] == {"base": "cannot_connect"}
async def test_form_unknown_error(hass: HomeAssistantType) -> None:
"""Test we handle unknown error."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={CONF_SOURCE: SOURCE_USER}
)
user_input = {CONF_HOST: HOST}
with patch(
"homeassistant.components.roku.config_flow.Roku.update",
side_effect=Exception,
) as mock_validate_input:
result = await hass.config_entries.flow.async_configure(
flow_id=result["flow_id"], user_input=user_input
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "unknown"
await hass.async_block_till_done()
assert len(mock_validate_input.mock_calls) == 1
async def test_ssdp_cannot_connect(
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test we abort SSDP flow on connection error."""
mock_connection(aioclient_mock, error=True)
discovery_info = MOCK_SSDP_DISCOVERY_INFO.copy()
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={CONF_SOURCE: SOURCE_SSDP},
data=discovery_info,
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "cannot_connect"
async def test_ssdp_unknown_error(
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test we abort SSDP flow on unknown error."""
discovery_info = MOCK_SSDP_DISCOVERY_INFO.copy()
with patch(
"homeassistant.components.roku.config_flow.Roku.update",
side_effect=Exception,
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={CONF_SOURCE: SOURCE_SSDP},
data=discovery_info,
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "unknown"
async def test_ssdp_discovery(
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test the SSDP discovery flow."""
mock_connection(aioclient_mock)
discovery_info = MOCK_SSDP_DISCOVERY_INFO.copy()
result = await hass.config_entries.flow.async_init(
DOMAIN, context={CONF_SOURCE: SOURCE_SSDP}, data=discovery_info
)
assert result["type"] == RESULT_TYPE_FORM
assert result["step_id"] == "ssdp_confirm"
assert result["description_placeholders"] == {CONF_NAME: UPNP_FRIENDLY_NAME}
with patch(
"homeassistant.components.roku.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.roku.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result = await hass.config_entries.flow.async_configure(
flow_id=result["flow_id"], user_input={}
)
await hass.async_block_till_done()
assert result["type"] == RESULT_TYPE_CREATE_ENTRY
assert result["title"] == UPNP_FRIENDLY_NAME
assert result["data"]
assert result["data"][CONF_HOST] == HOST
assert result["data"][CONF_NAME] == UPNP_FRIENDLY_NAME
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
|
from marshmallow import fields, post_dump
from lemur.schemas import PluginInputSchema, PluginOutputSchema
from lemur.common.schema import LemurInputSchema, LemurOutputSchema
class SourceInputSchema(LemurInputSchema):
id = fields.Integer()
label = fields.String(required=True)
description = fields.String()
plugin = fields.Nested(PluginInputSchema)
active = fields.Boolean()
class SourceOutputSchema(LemurOutputSchema):
id = fields.Integer()
label = fields.String()
description = fields.String()
plugin = fields.Nested(PluginOutputSchema)
options = fields.List(fields.Dict())
    active = fields.Boolean()
@post_dump
def fill_object(self, data):
if data:
data["plugin"]["pluginOptions"] = data["options"]
return data
source_input_schema = SourceInputSchema()
sources_output_schema = SourceOutputSchema(many=True)
source_output_schema = SourceOutputSchema()
|
from kalliope.core.Models.settings.SettingsEntry import SettingsEntry
class Stt(SettingsEntry):
"""
This Class is representing a Speech To Text (STT) element with name and parameters
.. note:: must be defined in the settings.yml
"""
def __init__(self, name=None, parameters=None):
super(Stt, self).__init__(name=name)
self.parameters = parameters
def __str__(self):
return str(self.serialize())
def serialize(self):
return {
'name': self.name,
'parameters': self.parameters
}
def __eq__(self, other):
"""
This is used to compare 2 objects
:param other: the Stt to compare
:return: True if both stts are similar, False otherwise
"""
return self.__dict__ == other.__dict__
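# Illustrative usage sketch only (guarded so it never runs on import): build
# two Stt entries as they might come from settings.yml and exercise the
# serialize() and __eq__ methods defined above. Names and parameters are examples.
if __name__ == "__main__":
    stt_a = Stt(name="google", parameters={"language": "en-US"})
    stt_b = Stt(name="google", parameters={"language": "en-US"})
    stt_c = Stt(name="cmusphinx", parameters={})
    print(stt_a.serialize())   # {'name': 'google', 'parameters': {...}}
    print(stt_a == stt_b)      # True: identical name and parameters
    print(stt_a == stt_c)      # False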
|
import logging
import W800rf32 as w800
import voluptuous as vol
from homeassistant.const import (
CONF_DEVICE,
EVENT_HOMEASSISTANT_START,
EVENT_HOMEASSISTANT_STOP,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import dispatcher_send
DATA_W800RF32 = "data_w800rf32"
DOMAIN = "w800rf32"
W800RF32_DEVICE = "w800rf32_{}"
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = vol.Schema(
{DOMAIN: vol.Schema({vol.Required(CONF_DEVICE): cv.string})}, extra=vol.ALLOW_EXTRA
)
def setup(hass, config):
"""Set up the w800rf32 component."""
    # Declare the event handler
def handle_receive(event):
"""Handle received messages from w800rf32 gateway."""
# Log event
if not event.device:
return
_LOGGER.debug("Receive W800rf32 event in handle_receive")
# Get device_type from device_id in hass.data
device_id = event.device.lower()
signal = W800RF32_DEVICE.format(device_id)
dispatcher_send(hass, signal, event)
# device --> /dev/ttyUSB0
device = config[DOMAIN][CONF_DEVICE]
w800_object = w800.Connect(device, None)
def _start_w800rf32(event):
w800_object.event_callback = handle_receive
hass.bus.listen_once(EVENT_HOMEASSISTANT_START, _start_w800rf32)
def _shutdown_w800rf32(event):
"""Close connection with w800rf32."""
w800_object.close_connection()
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, _shutdown_w800rf32)
hass.data[DATA_W800RF32] = w800_object
return True
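# Illustrative only: the expected configuration can be sanity-checked against
# CONFIG_SCHEMA above. The device path is an example value matching the
# "# device --> /dev/ttyUSB0" note in setup().
if __name__ == "__main__":
    example_config = {DOMAIN: {CONF_DEVICE: "/dev/ttyUSB0"}}
    # Returns the validated dict, or raises vol.MultipleInvalid on bad input.
    print(CONFIG_SCHEMA(example_config))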
|
import mock
from docker_registry.lib import cache
from tests.base import TestCase
class TestCache(TestCase):
def setUp(self):
self.cache = mock.MagicMock(
host='localhost', port=1234, db=0, password='pass')
def tearDown(self):
cache.redis_conn = None
cache.cache_prefix = None
@mock.patch.object(cache, 'logger')
def test_enable_redis_cache(self, logger):
self.assertEqual(cache.redis_conn, None)
self.assertEqual(cache.cache_prefix, None)
cache.enable_redis_cache(None, None)
self.assertEqual(logger.warn.call_count, 1)
cache.enable_redis_cache(self.cache, None)
self.assertTrue(cache.redis_conn is not None)
self.assertEqual(type(cache.redis_conn), cache.redis.StrictRedis)
self.assertTrue(cache.cache_prefix is not None)
self.assertEqual(cache.cache_prefix, 'cache_path:/')
cache.enable_redis_cache(self.cache, 'test')
self.assertEqual(cache.cache_prefix, 'cache_path:test')
@mock.patch.object(cache, 'logger')
@mock.patch.object(cache.lru, 'init')
def test_enable_redis_lru(self, lru_init, logger):
cache.enable_redis_lru(None, None)
self.assertEqual(logger.warn.call_count, 1)
cache.enable_redis_lru(self.cache, None)
self.assertEqual(logger.info.call_count, 2)
lru_init.assert_called_once_with(
host=self.cache.host, port=self.cache.port, db=self.cache.db,
password=self.cache.password, path='/')
lru_init.reset_mock()
path = 'test'
cache.enable_redis_lru(self.cache, path)
lru_init.assert_called_once_with(
host=self.cache.host, port=self.cache.port, db=self.cache.db,
password=self.cache.password, path=path)
|
from homeassistant.components.image_processing import ImageProcessingEntity
async def async_setup_platform(
hass, config, async_add_entities_callback, discovery_info=None
):
"""Set up the test image_processing platform."""
async_add_entities_callback([TestImageProcessing("camera.demo_camera", "Test")])
class TestImageProcessing(ImageProcessingEntity):
"""Test image processing entity."""
def __init__(self, camera_entity, name):
"""Initialize test image processing."""
self._name = name
self._camera = camera_entity
self._count = 0
self._image = ""
@property
def should_poll(self):
"""Return True if entity has to be polled for state."""
return False
@property
def camera_entity(self):
"""Return camera entity id from process pictures."""
return self._camera
@property
def name(self):
"""Return the name of the entity."""
return self._name
@property
def state(self):
"""Return the state of the entity."""
return self._count
@property
def device_state_attributes(self):
"""Return device specific state attributes."""
return {"image": self._image}
def process_image(self, image):
"""Process image."""
self._image = image
self._count += 1
|
import os
from copy import copy
from nikola.plugin_categories import Task
from nikola import utils
def update_deps(post, lang, task):
"""Update file dependencies as they might have been updated during compilation.
This is done for example by the ReST page compiler, which writes its
dependencies into a .dep file. This file is read and incorporated when calling
post.fragment_deps(), and only available /after/ compiling the fragment.
"""
task.file_dep.update([p for p in post.fragment_deps(lang) if not p.startswith("####MAGIC####")])
class RenderPosts(Task):
"""Build HTML fragments from metadata and text."""
name = "render_posts"
def gen_tasks(self):
"""Build HTML fragments from metadata and text."""
self.site.scan_posts()
kw = {
"translations": self.site.config["TRANSLATIONS"],
"timeline": self.site.timeline,
"default_lang": self.site.config["DEFAULT_LANG"],
"show_untranslated_posts": self.site.config['SHOW_UNTRANSLATED_POSTS'],
"demote_headers": self.site.config['DEMOTE_HEADERS'],
}
self.tl_changed = False
yield self.group_task()
def tl_ch():
self.tl_changed = True
yield {
'basename': self.name,
'name': 'timeline_changes',
'actions': [tl_ch],
'uptodate': [utils.config_changed({1: kw['timeline']})],
}
for lang in kw["translations"]:
deps_dict = copy(kw)
deps_dict.pop('timeline')
for post in kw['timeline']:
if not post.is_translation_available(lang) and not self.site.config['SHOW_UNTRANSLATED_POSTS']:
continue
# Extra config dependencies picked from config
for p in post.fragment_deps(lang):
if p.startswith('####MAGIC####CONFIG:'):
k = p.split('####MAGIC####CONFIG:', 1)[-1]
deps_dict[k] = self.site.config.get(k)
dest = post.translated_base_path(lang)
file_dep = [p for p in post.fragment_deps(lang) if not p.startswith("####MAGIC####")]
extra_targets = post.compiler.get_extra_targets(post, lang, dest)
task = {
'basename': self.name,
'name': dest,
'file_dep': file_dep,
'targets': [dest] + extra_targets,
'actions': [(post.compile, (lang, )),
(update_deps, (post, lang, )),
],
'clean': True,
'uptodate': [
utils.config_changed(deps_dict, 'nikola.plugins.task.posts'),
lambda p=post, l=lang: self.dependence_on_timeline(p, l)
] + post.fragment_deps_uptodate(lang),
'task_dep': ['render_posts:timeline_changes']
}
# Apply filters specified in the metadata
ff = [x.strip() for x in post.meta('filters', lang).split(',')]
flist = []
for i, f in enumerate(ff):
if not f:
continue
_f = self.site.filters.get(f)
if _f is not None: # A registered filter
flist.append(_f)
else:
flist.append(f)
yield utils.apply_filters(task, {os.path.splitext(dest)[-1]: flist})
def dependence_on_timeline(self, post, lang):
"""Check if a post depends on the timeline."""
if "####MAGIC####TIMELINE" not in post.fragment_deps(lang):
return True # No dependency on timeline
elif self.tl_changed:
return False # Timeline changed
return True
|
import logging
from urllib.parse import ParseResult, urlparse
from requests.exceptions import HTTPError, Timeout
from sunwatcher.solarlog.solarlog import SolarLog
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_HOST, CONF_NAME
from homeassistant.core import HomeAssistant, callback
from homeassistant.util import slugify
from .const import DEFAULT_HOST, DEFAULT_NAME, DOMAIN
_LOGGER = logging.getLogger(__name__)
@callback
def solarlog_entries(hass: HomeAssistant):
"""Return the hosts already configured."""
return {
entry.data[CONF_HOST] for entry in hass.config_entries.async_entries(DOMAIN)
}
class SolarLogConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow for solarlog."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL
def __init__(self) -> None:
"""Initialize the config flow."""
self._errors = {}
def _host_in_configuration_exists(self, host) -> bool:
"""Return True if host exists in configuration."""
if host in solarlog_entries(self.hass):
return True
return False
async def _test_connection(self, host):
"""Check if we can connect to the Solar-Log device."""
try:
await self.hass.async_add_executor_job(SolarLog, host)
return True
except (OSError, HTTPError, Timeout):
self._errors[CONF_HOST] = "cannot_connect"
_LOGGER.error(
"Could not connect to Solar-Log device at %s, check host ip address",
host,
)
return False
async def async_step_user(self, user_input=None):
"""Step when user initializes a integration."""
self._errors = {}
if user_input is not None:
# set some defaults in case we need to return to the form
name = slugify(user_input.get(CONF_NAME, DEFAULT_NAME))
host_entry = user_input.get(CONF_HOST, DEFAULT_HOST)
url = urlparse(host_entry, "http")
netloc = url.netloc or url.path
path = url.path if url.netloc else ""
url = ParseResult("http", netloc, path, *url[3:])
host = url.geturl()
if self._host_in_configuration_exists(host):
self._errors[CONF_HOST] = "already_configured"
else:
if await self._test_connection(host):
return self.async_create_entry(title=name, data={CONF_HOST: host})
else:
user_input = {}
user_input[CONF_NAME] = DEFAULT_NAME
user_input[CONF_HOST] = DEFAULT_HOST
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(
CONF_NAME, default=user_input.get(CONF_NAME, DEFAULT_NAME)
): str,
vol.Required(
CONF_HOST, default=user_input.get(CONF_HOST, DEFAULT_HOST)
): str,
}
),
errors=self._errors,
)
async def async_step_import(self, user_input=None):
"""Import a config entry."""
host_entry = user_input.get(CONF_HOST, DEFAULT_HOST)
url = urlparse(host_entry, "http")
netloc = url.netloc or url.path
path = url.path if url.netloc else ""
url = ParseResult("http", netloc, path, *url[3:])
host = url.geturl()
if self._host_in_configuration_exists(host):
return self.async_abort(reason="already_configured")
return await self.async_step_user(user_input)
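# Illustrative only: the host normalization performed in async_step_user and
# async_step_import accepts entries with or without a scheme and rebuilds them
# as an http URL. A standalone sketch with example hosts:
if __name__ == "__main__":
    for entry in ("solar-log.local", "http://192.168.1.20:8080/api"):
        url = urlparse(entry, "http")
        netloc = url.netloc or url.path
        path = url.path if url.netloc else ""
        print(ParseResult("http", netloc, path, *url[3:]).geturl())
    # -> http://solar-log.local
    # -> http://192.168.1.20:8080/api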
|
import voluptuous as vol
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import Config, HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from .account import StarlineAccount
from .const import (
CONF_SCAN_INTERVAL,
DEFAULT_SCAN_INTERVAL,
DOMAIN,
PLATFORMS,
SERVICE_SET_SCAN_INTERVAL,
SERVICE_UPDATE_STATE,
)
async def async_setup(hass: HomeAssistant, config: Config) -> bool:
"""Set up configured StarLine."""
return True
async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Set up the StarLine device from a config entry."""
account = StarlineAccount(hass, config_entry)
await account.update()
if not account.api.available:
raise ConfigEntryNotReady
if DOMAIN not in hass.data:
hass.data[DOMAIN] = {}
hass.data[DOMAIN][config_entry.entry_id] = account
device_registry = await hass.helpers.device_registry.async_get_registry()
for device in account.api.devices.values():
device_registry.async_get_or_create(
config_entry_id=config_entry.entry_id, **account.device_info(device)
)
for domain in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, domain)
)
async def async_set_scan_interval(call):
"""Service for set scan interval."""
options = dict(config_entry.options)
options[CONF_SCAN_INTERVAL] = call.data[CONF_SCAN_INTERVAL]
hass.config_entries.async_update_entry(entry=config_entry, options=options)
hass.services.async_register(DOMAIN, SERVICE_UPDATE_STATE, account.update)
hass.services.async_register(
DOMAIN,
SERVICE_SET_SCAN_INTERVAL,
async_set_scan_interval,
schema=vol.Schema(
{
vol.Required(CONF_SCAN_INTERVAL): vol.All(
vol.Coerce(int), vol.Range(min=10)
)
}
),
)
config_entry.add_update_listener(async_options_updated)
await async_options_updated(hass, config_entry)
return True
async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Unload a config entry."""
for domain in PLATFORMS:
await hass.config_entries.async_forward_entry_unload(config_entry, domain)
account: StarlineAccount = hass.data[DOMAIN][config_entry.entry_id]
account.unload()
return True
async def async_options_updated(hass: HomeAssistant, config_entry: ConfigEntry) -> None:
"""Triggered by config entry options updates."""
account: StarlineAccount = hass.data[DOMAIN][config_entry.entry_id]
scan_interval = config_entry.options.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL)
account.set_update_interval(scan_interval)
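# Illustrative only: the service registered above can then be called with data
# such as {"scan_interval": 30} to change the polling interval at runtime
# (assuming CONF_SCAN_INTERVAL is the usual "scan_interval" key; the schema
# requires an integer of at least 10 seconds).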
|
from __future__ import absolute_import
import sys
import unittest
from .common_imports import HelperTestCase, etree
DOC_NAME = b'libxml2:xmlDoc'
DESTRUCTOR_NAME = b'destructor:xmlFreeDoc'
class ExternalDocumentTestCase(HelperTestCase):
def setUp(self):
try:
import ctypes
from ctypes import pythonapi
from ctypes.util import find_library
except ImportError:
raise unittest.SkipTest("ctypes support missing")
def wrap(func, restype, *argtypes):
func.restype = restype
func.argtypes = list(argtypes)
return func
self.get_capsule_name = wrap(pythonapi.PyCapsule_GetName,
ctypes.c_char_p, ctypes.py_object)
self.capsule_is_valid = wrap(pythonapi.PyCapsule_IsValid, ctypes.c_int,
ctypes.py_object, ctypes.c_char_p)
self.new_capsule = wrap(pythonapi.PyCapsule_New, ctypes.py_object,
ctypes.c_void_p, ctypes.c_char_p,
ctypes.c_void_p)
self.set_capsule_name = wrap(pythonapi.PyCapsule_SetName, ctypes.c_int,
ctypes.py_object, ctypes.c_char_p)
self.set_capsule_context = wrap(pythonapi.PyCapsule_SetContext,
ctypes.c_int, ctypes.py_object,
ctypes.c_char_p)
self.get_capsule_context = wrap(pythonapi.PyCapsule_GetContext,
ctypes.c_char_p, ctypes.py_object)
self.get_capsule_pointer = wrap(pythonapi.PyCapsule_GetPointer,
ctypes.c_void_p, ctypes.py_object,
ctypes.c_char_p)
self.set_capsule_pointer = wrap(pythonapi.PyCapsule_SetPointer,
ctypes.c_int, ctypes.py_object,
ctypes.c_void_p)
self.set_capsule_destructor = wrap(pythonapi.PyCapsule_SetDestructor,
ctypes.c_int, ctypes.py_object,
ctypes.c_void_p)
self.PyCapsule_Destructor = ctypes.CFUNCTYPE(None, ctypes.py_object)
libxml2 = ctypes.CDLL(find_library('xml2'))
self.create_doc = wrap(libxml2.xmlReadMemory, ctypes.c_void_p,
ctypes.c_char_p, ctypes.c_int, ctypes.c_char_p,
ctypes.c_char_p, ctypes.c_int)
self.free_doc = wrap(libxml2.xmlFreeDoc, None, ctypes.c_void_p)
def as_capsule(self, text, capsule_name=DOC_NAME):
if not isinstance(text, bytes):
text = text.encode('utf-8')
doc = self.create_doc(text, len(text), b'base.xml', b'utf-8', 0)
ans = self.new_capsule(doc, capsule_name, None)
self.set_capsule_context(ans, DESTRUCTOR_NAME)
return ans
def test_external_document_adoption(self):
xml = '<r a="1">t</r>'
self.assertRaises(TypeError, etree.adopt_external_document, None)
capsule = self.as_capsule(xml)
self.assertTrue(self.capsule_is_valid(capsule, DOC_NAME))
self.assertEqual(DOC_NAME, self.get_capsule_name(capsule))
# Create an lxml tree from the capsule (this is a move not a copy)
root = etree.adopt_external_document(capsule).getroot()
self.assertIsNone(self.get_capsule_name(capsule))
self.assertEqual(root.text, 't')
root.text = 'new text'
# Now reset the capsule so we can copy it
self.assertEqual(0, self.set_capsule_name(capsule, DOC_NAME))
self.assertEqual(0, self.set_capsule_context(capsule, b'invalid'))
# Create an lxml tree from the capsule (this is a copy not a move)
root2 = etree.adopt_external_document(capsule).getroot()
self.assertEqual(self.get_capsule_context(capsule), b'invalid')
# Check that the modification to the tree using the transferred
# document was successful
self.assertEqual(root.text, root2.text)
# Check that further modifications do not show up in the copy (they are
# disjoint)
root.text = 'other text'
self.assertNotEqual(root.text, root2.text)
# delete root and ensure root2 survives
del root
self.assertEqual(root2.text, 'new text')
def test_suite():
suite = unittest.TestSuite()
if sys.platform != 'win32':
suite.addTests([unittest.makeSuite(ExternalDocumentTestCase)])
return suite
if __name__ == '__main__':
print('to test use test.py %s' % __file__)
|
import os
import unittest
from perfkitbenchmarker.traces import mpstat
class MpstatTestCase(unittest.TestCase):
def setUp(self):
super(MpstatTestCase, self).setUp()
path = os.path.join(
os.path.dirname(__file__), '../data', 'mpstat_output.txt')
with open(path) as fp:
self.contents = fp.read()
def testMpstatParse(self):
metadata = {
'event': 'mpstat',
'sender': 'run',
}
samples = mpstat._MpstatResults(metadata, self.contents)
for sample in samples:
if sample.metric == 'mpstat_avg_intr':
self.assertEqual(4452.74, sample.value)
self.assertEqual('mpstat', sample.metadata['event'])
self.assertEqual('run', sample.metadata['sender'])
elif sample.metric == 'mpstat_avg_irq':
self.assertEqual(0.0, sample.value)
self.assertEqual('mpstat', sample.metadata['event'])
self.assertEqual('run', sample.metadata['sender'])
elif sample.metric == 'mpstat_avg_soft':
self.assertEqual(1.20, sample.value)
self.assertEqual('mpstat', sample.metadata['event'])
self.assertEqual('run', sample.metadata['sender'])
# spot test a couple of per cpu samples
elif (sample.metric == 'mpstat_intr' and
sample.metadata['mpstat_cpu_id'] == 2):
self.assertEqual(248.26, sample.value)
elif (sample.metric == 'mpstat_soft' and
sample.metadata['mpstat_cpu_id'] == 0):
self.assertEqual(11.21, sample.value)
if __name__ == '__main__':
unittest.main()
|
from datetime import timedelta
import logging
import requests
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_API_KEY,
CONF_BASE,
CONF_NAME,
CONF_QUOTE,
HTTP_OK,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
_RESOURCE = "https://openexchangerates.org/api/latest.json"
ATTRIBUTION = "Data provided by openexchangerates.org"
DEFAULT_BASE = "USD"
DEFAULT_NAME = "Exchange Rate Sensor"
MIN_TIME_BETWEEN_UPDATES = timedelta(hours=2)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_QUOTE): cv.string,
vol.Optional(CONF_BASE, default=DEFAULT_BASE): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Open Exchange Rates sensor."""
name = config.get(CONF_NAME)
api_key = config.get(CONF_API_KEY)
base = config.get(CONF_BASE)
quote = config.get(CONF_QUOTE)
parameters = {"base": base, "app_id": api_key}
rest = OpenexchangeratesData(_RESOURCE, parameters, quote)
response = requests.get(_RESOURCE, params=parameters, timeout=10)
if response.status_code != HTTP_OK:
_LOGGER.error("Check your OpenExchangeRates API key")
return False
rest.update()
add_entities([OpenexchangeratesSensor(rest, name, quote)], True)
class OpenexchangeratesSensor(Entity):
"""Representation of an Open Exchange Rates sensor."""
def __init__(self, rest, name, quote):
"""Initialize the sensor."""
self.rest = rest
self._name = name
self._quote = quote
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def device_state_attributes(self):
"""Return other attributes of the sensor."""
attr = self.rest.data
attr[ATTR_ATTRIBUTION] = ATTRIBUTION
return attr
def update(self):
"""Update current conditions."""
self.rest.update()
value = self.rest.data
self._state = round(value[str(self._quote)], 4)
class OpenexchangeratesData:
"""Get data from Openexchangerates.org."""
def __init__(self, resource, parameters, quote):
"""Initialize the data object."""
self._resource = resource
self._parameters = parameters
self._quote = quote
self.data = None
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Get the latest data from openexchangerates.org."""
try:
result = requests.get(self._resource, params=self._parameters, timeout=10)
self.data = result.json()["rates"]
except requests.exceptions.HTTPError:
_LOGGER.error("Check the Openexchangerates API key")
self.data = None
return False
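# Illustrative only: an example platform entry validated against the extended
# PLATFORM_SCHEMA above. The API key and quote currency are placeholders, and
# "platform" comes from the base sensor platform schema.
if __name__ == "__main__":
    example = {
        "platform": "openexchangerates",
        CONF_API_KEY: "YOUR_APP_ID",
        CONF_QUOTE: "EUR",
    }
    print(PLATFORM_SCHEMA(example))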
|
from django.db import migrations
def update_expired(apps, schema_editor):
Billing = apps.get_model("billing", "Billing")
Billing.objects.using(schema_editor.connection.alias).filter(state=2).update(
state=1
)
class Migration(migrations.Migration):
dependencies = [
("billing", "0003_auto_20201118_1217"),
]
operations = [
migrations.RunPython(update_expired, migrations.RunPython.noop, elidable=True),
]
|
import logging
import os
import socket
import unittest
import vcr
from vcr import config
from vcr.stubs import VCRHTTPSConnection
from pyVmomi import SoapAdapter
def tests_resource_path(local_path=''):
this_file = os.path.dirname(os.path.abspath(__file__))
return os.path.join(this_file, local_path)
# Fully qualified path to the fixtures directory underneath this module
fixtures_path = tests_resource_path('fixtures')
def monkey_patch_vcrpy():
# TODO (hartsock): This should be unnecessary. Remove after vcrpy updates.
vcr.stubs.VCRHTTPSConnection.is_verified = True
vcr.stubs.VCRFakeSocket = socket.socket
class VCRTestBase(unittest.TestCase):
my_vcr = config.VCR(
custom_patches=((SoapAdapter, '_HTTPSConnection', VCRHTTPSConnection),))
def setUp(self):
monkey_patch_vcrpy()
logging.basicConfig()
vcr_log = logging.getLogger('vcr')
vcr_log.setLevel(logging.WARNING)
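# Illustrative only: concrete test cases in this package typically subclass
# VCRTestBase and replay recorded HTTP traffic from fixtures_path, roughly as
# in the hypothetical sketch below (cassette name and test are assumptions):
#
#     class ExampleConnectionTests(VCRTestBase):
#         @VCRTestBase.my_vcr.use_cassette(
#             'example_connection.yaml',
#             cassette_library_dir=fixtures_path,
#             record_mode='none')
#         def test_example_connection(self):
#             ...  # pyVmomi calls here are answered from the cassette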
|
import copy
from plexapi.exceptions import BadRequest, NotFound
from requests.exceptions import ConnectionError, RequestException
from homeassistant.components.media_player import DOMAIN as MP_DOMAIN
from homeassistant.components.media_player.const import (
ATTR_MEDIA_CONTENT_ID,
ATTR_MEDIA_CONTENT_TYPE,
MEDIA_TYPE_EPISODE,
MEDIA_TYPE_MOVIE,
MEDIA_TYPE_MUSIC,
MEDIA_TYPE_PLAYLIST,
MEDIA_TYPE_VIDEO,
SERVICE_PLAY_MEDIA,
)
from homeassistant.components.plex.const import (
CONF_IGNORE_NEW_SHARED_USERS,
CONF_IGNORE_PLEX_WEB_CLIENTS,
CONF_MONITORED_USERS,
CONF_SERVER,
DOMAIN,
SERVERS,
)
from homeassistant.const import ATTR_ENTITY_ID
from .const import DEFAULT_DATA, DEFAULT_OPTIONS
from .helpers import trigger_plex_update
from .mock_classes import (
MockGDM,
MockPlexAccount,
MockPlexAlbum,
MockPlexArtist,
MockPlexLibrary,
MockPlexLibrarySection,
MockPlexMediaItem,
MockPlexSeason,
MockPlexServer,
MockPlexShow,
)
from tests.async_mock import patch
async def test_new_users_available(hass, entry, mock_websocket, setup_plex_server):
"""Test setting up when new users available on Plex server."""
MONITORED_USERS = {"Owner": {"enabled": True}}
OPTIONS_WITH_USERS = copy.deepcopy(DEFAULT_OPTIONS)
OPTIONS_WITH_USERS[MP_DOMAIN][CONF_MONITORED_USERS] = MONITORED_USERS
entry.options = OPTIONS_WITH_USERS
mock_plex_server = await setup_plex_server(config_entry=entry)
server_id = mock_plex_server.machineIdentifier
trigger_plex_update(mock_websocket)
await hass.async_block_till_done()
monitored_users = hass.data[DOMAIN][SERVERS][server_id].option_monitored_users
ignored_users = [x for x in monitored_users if not monitored_users[x]["enabled"]]
assert len(monitored_users) == 1
assert len(ignored_users) == 0
sensor = hass.states.get("sensor.plex_plex_server_1")
assert sensor.state == str(len(mock_plex_server.accounts))
async def test_new_ignored_users_available(
hass, caplog, entry, mock_websocket, setup_plex_server
):
"""Test setting up when new users available on Plex server but are ignored."""
MONITORED_USERS = {"Owner": {"enabled": True}}
OPTIONS_WITH_USERS = copy.deepcopy(DEFAULT_OPTIONS)
OPTIONS_WITH_USERS[MP_DOMAIN][CONF_MONITORED_USERS] = MONITORED_USERS
OPTIONS_WITH_USERS[MP_DOMAIN][CONF_IGNORE_NEW_SHARED_USERS] = True
entry.options = OPTIONS_WITH_USERS
mock_plex_server = await setup_plex_server(config_entry=entry)
server_id = mock_plex_server.machineIdentifier
trigger_plex_update(mock_websocket)
await hass.async_block_till_done()
monitored_users = hass.data[DOMAIN][SERVERS][server_id].option_monitored_users
ignored_users = [x for x in mock_plex_server.accounts if x not in monitored_users]
assert len(monitored_users) == 1
assert len(ignored_users) == 2
for ignored_user in ignored_users:
ignored_client = [
x.players[0]
for x in mock_plex_server.sessions()
if x.usernames[0] == ignored_user
][0]
assert (
f"Ignoring {ignored_client.product} client owned by '{ignored_user}'"
in caplog.text
)
sensor = hass.states.get("sensor.plex_plex_server_1")
assert sensor.state == str(len(mock_plex_server.accounts))
async def test_network_error_during_refresh(
hass, caplog, mock_plex_server, mock_websocket
):
"""Test network failures during refreshes."""
server_id = mock_plex_server.machineIdentifier
loaded_server = hass.data[DOMAIN][SERVERS][server_id]
trigger_plex_update(mock_websocket)
await hass.async_block_till_done()
sensor = hass.states.get("sensor.plex_plex_server_1")
assert sensor.state == str(len(mock_plex_server.accounts))
with patch.object(mock_plex_server, "clients", side_effect=RequestException):
await loaded_server._async_update_platforms()
await hass.async_block_till_done()
assert (
f"Could not connect to Plex server: {DEFAULT_DATA[CONF_SERVER]}" in caplog.text
)
async def test_gdm_client_failure(hass, mock_websocket, setup_plex_server):
"""Test connection failure to a GDM discovered client."""
mock_plex_server = await setup_plex_server(disable_gdm=False)
with patch(
"homeassistant.components.plex.server.PlexClient", side_effect=ConnectionError
):
trigger_plex_update(mock_websocket)
await hass.async_block_till_done()
sensor = hass.states.get("sensor.plex_plex_server_1")
assert sensor.state == str(len(mock_plex_server.accounts))
with patch.object(mock_plex_server, "clients", side_effect=RequestException):
trigger_plex_update(mock_websocket)
await hass.async_block_till_done()
async def test_mark_sessions_idle(hass, mock_plex_server, mock_websocket):
"""Test marking media_players as idle when sessions end."""
server_id = mock_plex_server.machineIdentifier
loaded_server = hass.data[DOMAIN][SERVERS][server_id]
trigger_plex_update(mock_websocket)
await hass.async_block_till_done()
sensor = hass.states.get("sensor.plex_plex_server_1")
assert sensor.state == str(len(mock_plex_server.accounts))
mock_plex_server.clear_clients()
mock_plex_server.clear_sessions()
await loaded_server._async_update_platforms()
await hass.async_block_till_done()
sensor = hass.states.get("sensor.plex_plex_server_1")
assert sensor.state == "0"
async def test_ignore_plex_web_client(hass, entry, mock_websocket):
"""Test option to ignore Plex Web clients."""
OPTIONS = copy.deepcopy(DEFAULT_OPTIONS)
OPTIONS[MP_DOMAIN][CONF_IGNORE_PLEX_WEB_CLIENTS] = True
entry.options = OPTIONS
mock_plex_server = MockPlexServer(config_entry=entry)
with patch("plexapi.server.PlexServer", return_value=mock_plex_server), patch(
"plexapi.myplex.MyPlexAccount", return_value=MockPlexAccount(players=0)
), patch("homeassistant.components.plex.GDM", return_value=MockGDM(disabled=True)):
entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
trigger_plex_update(mock_websocket)
await hass.async_block_till_done()
sensor = hass.states.get("sensor.plex_plex_server_1")
assert sensor.state == str(len(mock_plex_server.accounts))
media_players = hass.states.async_entity_ids("media_player")
assert len(media_players) == int(sensor.state) - 1
async def test_media_lookups(hass, mock_plex_server, mock_websocket):
"""Test media lookups to Plex server."""
server_id = mock_plex_server.machineIdentifier
loaded_server = hass.data[DOMAIN][SERVERS][server_id]
# Plex Key searches
trigger_plex_update(mock_websocket)
await hass.async_block_till_done()
media_player_id = hass.states.async_entity_ids("media_player")[0]
with patch("homeassistant.components.plex.PlexServer.create_playqueue"):
assert await hass.services.async_call(
MP_DOMAIN,
SERVICE_PLAY_MEDIA,
{
ATTR_ENTITY_ID: media_player_id,
ATTR_MEDIA_CONTENT_TYPE: DOMAIN,
ATTR_MEDIA_CONTENT_ID: 123,
},
True,
)
with patch.object(MockPlexServer, "fetchItem", side_effect=NotFound):
assert await hass.services.async_call(
MP_DOMAIN,
SERVICE_PLAY_MEDIA,
{
ATTR_ENTITY_ID: media_player_id,
ATTR_MEDIA_CONTENT_TYPE: DOMAIN,
ATTR_MEDIA_CONTENT_ID: 123,
},
True,
)
# TV show searches
with patch.object(MockPlexLibrary, "section", side_effect=NotFound):
assert (
loaded_server.lookup_media(
MEDIA_TYPE_EPISODE, library_name="Not a Library", show_name="TV Show"
)
is None
)
with patch.object(MockPlexLibrarySection, "get", side_effect=NotFound):
assert (
loaded_server.lookup_media(
MEDIA_TYPE_EPISODE, library_name="TV Shows", show_name="Not a TV Show"
)
is None
)
assert (
loaded_server.lookup_media(
MEDIA_TYPE_EPISODE, library_name="TV Shows", episode_name="An Episode"
)
is None
)
assert loaded_server.lookup_media(
MEDIA_TYPE_EPISODE, library_name="TV Shows", show_name="TV Show"
)
assert loaded_server.lookup_media(
MEDIA_TYPE_EPISODE,
library_name="TV Shows",
show_name="TV Show",
season_number=2,
)
assert loaded_server.lookup_media(
MEDIA_TYPE_EPISODE,
library_name="TV Shows",
show_name="TV Show",
season_number=2,
episode_number=3,
)
with patch.object(MockPlexShow, "season", side_effect=NotFound):
assert (
loaded_server.lookup_media(
MEDIA_TYPE_EPISODE,
library_name="TV Shows",
show_name="TV Show",
season_number=2,
)
is None
)
with patch.object(MockPlexSeason, "episode", side_effect=NotFound):
assert (
loaded_server.lookup_media(
MEDIA_TYPE_EPISODE,
library_name="TV Shows",
show_name="TV Show",
season_number=2,
episode_number=1,
)
is None
)
# Music searches
assert (
loaded_server.lookup_media(
MEDIA_TYPE_MUSIC, library_name="Music", album_name="Album"
)
is None
)
assert loaded_server.lookup_media(
MEDIA_TYPE_MUSIC, library_name="Music", artist_name="Artist"
)
assert loaded_server.lookup_media(
MEDIA_TYPE_MUSIC,
library_name="Music",
artist_name="Artist",
track_name="Track 3",
)
assert loaded_server.lookup_media(
MEDIA_TYPE_MUSIC,
library_name="Music",
artist_name="Artist",
album_name="Album",
)
with patch.object(MockPlexLibrarySection, "get", side_effect=NotFound):
assert (
loaded_server.lookup_media(
MEDIA_TYPE_MUSIC,
library_name="Music",
artist_name="Not an Artist",
album_name="Album",
)
is None
)
with patch.object(MockPlexArtist, "album", side_effect=NotFound):
assert (
loaded_server.lookup_media(
MEDIA_TYPE_MUSIC,
library_name="Music",
artist_name="Artist",
album_name="Not an Album",
)
is None
)
with patch.object(MockPlexAlbum, "track", side_effect=NotFound):
assert (
loaded_server.lookup_media(
MEDIA_TYPE_MUSIC,
library_name="Music",
artist_name="Artist",
album_name=" Album",
track_name="Not a Track",
)
is None
)
with patch.object(MockPlexArtist, "get", side_effect=NotFound):
assert (
loaded_server.lookup_media(
MEDIA_TYPE_MUSIC,
library_name="Music",
artist_name="Artist",
track_name="Not a Track",
)
is None
)
assert loaded_server.lookup_media(
MEDIA_TYPE_MUSIC,
library_name="Music",
artist_name="Artist",
album_name="Album",
track_number=3,
)
assert (
loaded_server.lookup_media(
MEDIA_TYPE_MUSIC,
library_name="Music",
artist_name="Artist",
album_name="Album",
track_number=30,
)
is None
)
assert loaded_server.lookup_media(
MEDIA_TYPE_MUSIC,
library_name="Music",
artist_name="Artist",
album_name="Album",
track_name="Track 3",
)
# Playlist searches
assert loaded_server.lookup_media(MEDIA_TYPE_PLAYLIST, playlist_name="A Playlist")
assert loaded_server.lookup_media(MEDIA_TYPE_PLAYLIST) is None
with patch.object(MockPlexServer, "playlist", side_effect=NotFound):
assert (
loaded_server.lookup_media(
MEDIA_TYPE_PLAYLIST, playlist_name="Not a Playlist"
)
is None
)
# Legacy Movie searches
assert loaded_server.lookup_media(MEDIA_TYPE_VIDEO, video_name="Movie") is None
assert loaded_server.lookup_media(MEDIA_TYPE_VIDEO, library_name="Movies") is None
assert loaded_server.lookup_media(
MEDIA_TYPE_VIDEO, library_name="Movies", video_name="Movie"
)
with patch.object(MockPlexLibrarySection, "get", side_effect=NotFound):
assert (
loaded_server.lookup_media(
MEDIA_TYPE_VIDEO, library_name="Movies", video_name="Not a Movie"
)
is None
)
# Movie searches
assert loaded_server.lookup_media(MEDIA_TYPE_MOVIE, title="Movie") is None
assert loaded_server.lookup_media(MEDIA_TYPE_MOVIE, library_name="Movies") is None
assert loaded_server.lookup_media(
MEDIA_TYPE_MOVIE, library_name="Movies", title="Movie"
)
with patch.object(MockPlexLibrarySection, "search", side_effect=BadRequest):
assert (
loaded_server.lookup_media(
MEDIA_TYPE_MOVIE, library_name="Movies", title="Not a Movie"
)
is None
)
with patch.object(MockPlexLibrarySection, "search", return_value=[]):
assert (
loaded_server.lookup_media(
MEDIA_TYPE_MOVIE, library_name="Movies", title="Not a Movie"
)
is None
)
similar_movies = []
for title in "Duplicate Movie", "Duplicate Movie 2":
similar_movies.append(MockPlexMediaItem(title))
with patch.object(
loaded_server.library.section("Movies"), "search", return_value=similar_movies
):
found_media = loaded_server.lookup_media(
MEDIA_TYPE_MOVIE, library_name="Movies", title="Duplicate Movie"
)
assert found_media.title == "Duplicate Movie"
duplicate_movies = []
for title in "Duplicate Movie - Original", "Duplicate Movie - Remake":
duplicate_movies.append(MockPlexMediaItem(title))
with patch.object(
loaded_server.library.section("Movies"), "search", return_value=duplicate_movies
):
assert (
loaded_server.lookup_media(
MEDIA_TYPE_MOVIE, library_name="Movies", title="Duplicate Movie"
)
) is None
|
import asyncio
from datetime import datetime, timedelta
import logging
from time import monotonic
from typing import Awaitable, Callable, Generic, List, Optional, TypeVar
import urllib.error
import aiohttp
import requests
from homeassistant.core import CALLBACK_TYPE, HassJob, HomeAssistant, callback
from homeassistant.helpers import entity, event
from homeassistant.util.dt import utcnow
from .debounce import Debouncer
REQUEST_REFRESH_DEFAULT_COOLDOWN = 10
REQUEST_REFRESH_DEFAULT_IMMEDIATE = True
T = TypeVar("T")
class UpdateFailed(Exception):
"""Raised when an update has failed."""
class DataUpdateCoordinator(Generic[T]):
"""Class to manage fetching data from single endpoint."""
def __init__(
self,
hass: HomeAssistant,
logger: logging.Logger,
*,
name: str,
update_interval: Optional[timedelta] = None,
update_method: Optional[Callable[[], Awaitable[T]]] = None,
request_refresh_debouncer: Optional[Debouncer] = None,
):
"""Initialize global data updater."""
self.hass = hass
self.logger = logger
self.name = name
self.update_method = update_method
self.update_interval = update_interval
self.data: Optional[T] = None
self._listeners: List[CALLBACK_TYPE] = []
self._job = HassJob(self._handle_refresh_interval)
self._unsub_refresh: Optional[CALLBACK_TYPE] = None
self._request_refresh_task: Optional[asyncio.TimerHandle] = None
self.last_update_success = True
if request_refresh_debouncer is None:
request_refresh_debouncer = Debouncer(
hass,
logger,
cooldown=REQUEST_REFRESH_DEFAULT_COOLDOWN,
immediate=REQUEST_REFRESH_DEFAULT_IMMEDIATE,
function=self.async_refresh,
)
else:
request_refresh_debouncer.function = self.async_refresh
self._debounced_refresh = request_refresh_debouncer
@callback
def async_add_listener(self, update_callback: CALLBACK_TYPE) -> Callable[[], None]:
"""Listen for data updates."""
schedule_refresh = not self._listeners
self._listeners.append(update_callback)
# This is the first listener, set up interval.
if schedule_refresh:
self._schedule_refresh()
@callback
def remove_listener() -> None:
"""Remove update listener."""
self.async_remove_listener(update_callback)
return remove_listener
@callback
def async_remove_listener(self, update_callback: CALLBACK_TYPE) -> None:
"""Remove data update."""
self._listeners.remove(update_callback)
if not self._listeners and self._unsub_refresh:
self._unsub_refresh()
self._unsub_refresh = None
@callback
def _schedule_refresh(self) -> None:
"""Schedule a refresh."""
if self.update_interval is None:
return
if self._unsub_refresh:
self._unsub_refresh()
self._unsub_refresh = None
# We _floor_ utcnow to create a schedule on a rounded second,
# minimizing the time between the point and the real activation.
# That way we obtain a constant update frequency,
# as long as the update process takes less than a second
self._unsub_refresh = event.async_track_point_in_utc_time(
self.hass,
self._job,
utcnow().replace(microsecond=0) + self.update_interval,
)
async def _handle_refresh_interval(self, _now: datetime) -> None:
"""Handle a refresh interval occurrence."""
self._unsub_refresh = None
await self.async_refresh()
async def async_request_refresh(self) -> None:
"""Request a refresh.
Refresh will wait a bit to see if it can batch them.
"""
await self._debounced_refresh.async_call()
async def _async_update_data(self) -> Optional[T]:
"""Fetch the latest data from the source."""
if self.update_method is None:
raise NotImplementedError("Update method not implemented")
return await self.update_method()
async def async_refresh(self) -> None:
"""Refresh data."""
if self._unsub_refresh:
self._unsub_refresh()
self._unsub_refresh = None
self._debounced_refresh.async_cancel()
try:
start = monotonic()
self.data = await self._async_update_data()
except (asyncio.TimeoutError, requests.exceptions.Timeout):
if self.last_update_success:
self.logger.error("Timeout fetching %s data", self.name)
self.last_update_success = False
except (aiohttp.ClientError, requests.exceptions.RequestException) as err:
if self.last_update_success:
self.logger.error("Error requesting %s data: %s", self.name, err)
self.last_update_success = False
except urllib.error.URLError as err:
if self.last_update_success:
if err.reason == "timed out":
self.logger.error("Timeout fetching %s data", self.name)
else:
self.logger.error("Error requesting %s data: %s", self.name, err)
self.last_update_success = False
except UpdateFailed as err:
if self.last_update_success:
self.logger.error("Error fetching %s data: %s", self.name, err)
self.last_update_success = False
except NotImplementedError as err:
raise err
except Exception as err: # pylint: disable=broad-except
self.last_update_success = False
self.logger.exception(
"Unexpected error fetching %s data: %s", self.name, err
)
else:
if not self.last_update_success:
self.last_update_success = True
self.logger.info("Fetching %s data recovered", self.name)
finally:
self.logger.debug(
"Finished fetching %s data in %.3f seconds",
self.name,
monotonic() - start,
)
if self._listeners:
self._schedule_refresh()
for update_callback in self._listeners:
update_callback()
class CoordinatorEntity(entity.Entity):
"""A class for entities using DataUpdateCoordinator."""
def __init__(self, coordinator: DataUpdateCoordinator) -> None:
"""Create the entity with a DataUpdateCoordinator."""
self.coordinator = coordinator
@property
def should_poll(self) -> bool:
"""No need to poll. Coordinator notifies entity of updates."""
return False
@property
def available(self) -> bool:
"""Return if entity is available."""
return self.coordinator.last_update_success
async def async_added_to_hass(self) -> None:
"""When entity is added to hass."""
await super().async_added_to_hass()
self.async_on_remove(
self.coordinator.async_add_listener(self._handle_coordinator_update)
)
@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
self.async_write_ha_state()
async def async_update(self) -> None:
"""Update the entity.
Only used by the generic entity update service.
"""
# Ignore manual update requests if the entity is disabled
if not self.enabled:
return
await self.coordinator.async_request_refresh()
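# Illustrative only: the usual wiring for these classes inside an integration
# is roughly the sketch below (names and the 30-second interval are
# assumptions, not part of this module):
#
#     coordinator = DataUpdateCoordinator(
#         hass,
#         _LOGGER,
#         name="my_api",
#         update_method=async_fetch_from_api,  # coroutine returning the data
#         update_interval=timedelta(seconds=30),
#     )
#     await coordinator.async_refresh()         # prime coordinator.data
#
#     class MySensor(CoordinatorEntity):
#         @property
#         def state(self):
#             # Entities read coordinator.data; _handle_coordinator_update()
#             # rewrites state whenever the coordinator notifies listeners.
#             return self.coordinator.data["value"]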
|
from pygal.graph.graph import Graph
from pygal.util import alter, decorate, ident, swap
class Bar(Graph):
"""Bar graph class"""
_series_margin = .06
_serie_margin = .06
def _bar(self, serie, parent, x, y, i, zero, secondary=False):
"""Internal bar drawing function"""
width = (self.view.x(1) - self.view.x(0)) / self._len
x, y = self.view((x, y))
series_margin = width * self._series_margin
x += series_margin
width -= 2 * series_margin
width /= self._order
if self.horizontal:
serie_index = self._order - serie.index - 1
else:
serie_index = serie.index
x += serie_index * width
serie_margin = width * self._serie_margin
x += serie_margin
width -= 2 * serie_margin
height = self.view.y(zero) - y
r = serie.rounded_bars * 1 if serie.rounded_bars else 0
alter(
self.svg.transposable_node(
parent,
'rect',
x=x,
y=y,
rx=r,
ry=r,
width=width,
height=height,
class_='rect reactive tooltip-trigger'
), serie.metadata.get(i)
)
return x, y, width, height
def _tooltip_and_print_values(
self, serie_node, serie, parent, i, val, metadata, x, y, width,
height
):
transpose = swap if self.horizontal else ident
x_center, y_center = transpose((x + width / 2, y + height / 2))
x_top, y_top = transpose((x + width, y + height))
x_bottom, y_bottom = transpose((x, y))
if self._dual:
v = serie.values[i][0]
else:
v = serie.values[i]
sign = -1 if v < self.zero else 1
self._tooltip_data(
parent, val, x_center, y_center, "centered", self._get_x_label(i)
)
if self.print_values_position == 'top':
if self.horizontal:
x = x_bottom + sign * self.style.value_font_size / 2
y = y_center
else:
x = x_center
y = y_bottom - sign * self.style.value_font_size / 2
elif self.print_values_position == 'bottom':
if self.horizontal:
x = x_top + sign * self.style.value_font_size / 2
y = y_center
else:
x = x_center
y = y_top - sign * self.style.value_font_size / 2
else:
x = x_center
y = y_center
self._static_value(serie_node, val, x, y, metadata, "middle")
def bar(self, serie, rescale=False):
"""Draw a bar graph for a serie"""
serie_node = self.svg.serie(serie)
bars = self.svg.node(serie_node['plot'], class_="bars")
if rescale and self.secondary_series:
points = self._rescale(serie.points)
else:
points = serie.points
for i, (x, y) in enumerate(points):
if None in (x, y) or (self.logarithmic and y <= 0):
continue
metadata = serie.metadata.get(i)
val = self._format(serie, i)
bar = decorate(
self.svg, self.svg.node(bars, class_='bar'), metadata
)
x_, y_, width, height = self._bar(
serie, bar, x, y, i, self.zero, secondary=rescale
)
self._confidence_interval(
serie_node['overlay'], x_ + width / 2, y_, serie.values[i],
metadata
)
self._tooltip_and_print_values(
serie_node, serie, bar, i, val, metadata, x_, y_, width, height
)
def _compute(self):
"""Compute y min and max and y scale and set labels"""
if self._min:
self._box.ymin = min(self._min, self.zero)
if self._max:
self._box.ymax = max(self._max, self.zero)
self._x_pos = [
x / self._len for x in range(self._len + 1)
] if self._len > 1 else [0, 1] # Center if only one value
self._points(self._x_pos)
self._x_pos = [(i + .5) / self._len for i in range(self._len)]
def _plot(self):
"""Draw bars for series and secondary series"""
for serie in self.series:
self.bar(serie)
for serie in self.secondary_series:
self.bar(serie, True)
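# Illustrative only: this class is normally used through the public pygal.Bar
# alias. A minimal rendering sketch with example values:
if __name__ == "__main__":
    chart = Bar()
    chart.title = "Example bar chart"
    chart.add("Series A", [1, 3, 5])
    chart.add("Series B", [2, 4, 6])
    svg = chart.render()  # the rendered SVG document (bytes by default)
    print(len(svg), "bytes of SVG")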
|
import numpy as np
from jax import config
import pytest
import tensornetwork
import tensornetwork.linalg.krylov
import tensornetwork.linalg.initialization
from tensornetwork.tests import testing_utils
# pylint: disable=no-member
config.update("jax_enable_x64", True)
sparse_backends = ["numpy", "jax"]
@pytest.mark.parametrize("sparse_backend", sparse_backends + ["pytorch", ])
@pytest.mark.parametrize("dtype", testing_utils.np_float_dtypes)
def test_matvec_cache(sparse_backend, dtype):
shape = (4, 4)
dtype = testing_utils.np_dtype_to_backend(sparse_backend, dtype)
A = tensornetwork.linalg.initialization.ones(shape,
backend=sparse_backend,
dtype=dtype)
B = -1.3 * tensornetwork.linalg.initialization.ones(shape,
backend=sparse_backend,
dtype=dtype)
def matvec(B, A):
return A @ B
def matvec_array(B, A):
return A.array @ B.array
matvec_cache = tensornetwork.linalg.krylov.MatvecCache()
mv = matvec_cache.retrieve(sparse_backend, matvec)
result = mv(B.array, A.array)
test = matvec_array(B, A)
assert np.all(np.isfinite(np.ravel(result)))
np.testing.assert_allclose(result, test)
def test_eigsh_lanczos_raises():
n = 2
shape = (n, n)
tensor = tensornetwork.linalg.initialization.ones(shape,
backend="jax",
dtype=np.float64)
def matvec(B):
return tensor @ B
with pytest.raises(ValueError):
_ = tensornetwork.linalg.krylov.eigsh_lanczos(matvec,
backend=None,
x0=None)
with pytest.raises(ValueError):
_ = tensornetwork.linalg.krylov.eigsh_lanczos(matvec,
backend="numpy",
x0=tensor)
with pytest.raises(TypeError):
_ = tensornetwork.linalg.krylov.eigsh_lanczos(matvec,
backend="numpy",
x0=tensor.array)
with pytest.raises(TypeError):
_ = tensornetwork.linalg.krylov.eigsh_lanczos(matvec,
backend="jax",
x0=tensor,
args=[tensor.array, ])
@pytest.mark.parametrize("sparse_backend", sparse_backends + ["pytorch", ])
@pytest.mark.parametrize("dtype", testing_utils.np_float_dtypes)
def test_eigsh_lanczos(sparse_backend, dtype):
"""
Compares linalg.krylov.eigsh_lanczos with backend.eigsh_lanczos.
"""
n = 2
shape = (n, n)
dtype = testing_utils.np_dtype_to_backend(sparse_backend, dtype)
A = tensornetwork.linalg.initialization.ones(shape,
backend=sparse_backend,
dtype=dtype)
x0 = tensornetwork.linalg.initialization.ones((n, 1), backend=sparse_backend,
dtype=dtype)
def matvec(B):
return A @ B
result = tensornetwork.linalg.krylov.eigsh_lanczos(matvec, backend=A.backend,
x0=x0, num_krylov_vecs=n-1)
def array_matvec(B):
return A.array @ B
rev, reV = result
test_result = A.backend.eigsh_lanczos(array_matvec, initial_state=x0.array,
num_krylov_vecs=n-1)
tev, teV = test_result
assert np.all(np.isfinite(np.ravel(np.array(rev))))
np.testing.assert_allclose(np.array(rev), np.array(tev))
for r, t in zip(reV, teV):
assert np.all(np.isfinite(np.ravel(r.array)))
np.testing.assert_allclose(r.array, t)
@pytest.mark.parametrize("sparse_backend", sparse_backends + ["pytorch", ])
@pytest.mark.parametrize("dtype", testing_utils.np_float_dtypes)
def test_eigsh_lanczos_with_args(sparse_backend, dtype):
"""
Compares linalg.krylov.eigsh_lanczos with backend.eigsh_lanczos.
"""
n = 2
shape = (n, n)
dtype = testing_utils.np_dtype_to_backend(sparse_backend, dtype)
A = tensornetwork.linalg.initialization.ones(shape,
backend=sparse_backend,
dtype=dtype)
x0 = tensornetwork.linalg.initialization.ones((n, 1), backend=sparse_backend,
dtype=dtype)
def matvec(B):
return A @ B
def matvec_args(B, A):
return A @ B
result = tensornetwork.linalg.krylov.eigsh_lanczos(matvec, backend=A.backend,
x0=x0, num_krylov_vecs=n-1)
test = tensornetwork.linalg.krylov.eigsh_lanczos(matvec_args,
backend=A.backend,
x0=x0, num_krylov_vecs=n-1,
args=[A, ])
rev, reV = result
tev, teV = test
assert np.all(np.isfinite(np.ravel(np.array(rev))))
np.testing.assert_allclose(np.array(rev), np.array(tev))
for r, t in zip(reV, teV):
assert np.all(np.isfinite(np.ravel(r.array)))
np.testing.assert_allclose(r.array, t.array)
def test_eigs_raises():
n = 2
shape = (n, n)
tensor = tensornetwork.linalg.initialization.ones(shape,
backend="jax",
dtype=np.float64)
def matvec(B):
return tensor @ B
with pytest.raises(ValueError):
_ = tensornetwork.linalg.krylov.eigs(matvec,
backend=None,
x0=None)
with pytest.raises(ValueError):
_ = tensornetwork.linalg.krylov.eigs(matvec,
backend="numpy",
x0=tensor)
with pytest.raises(TypeError):
_ = tensornetwork.linalg.krylov.eigs(matvec,
backend="numpy",
x0=tensor.array)
with pytest.raises(TypeError):
_ = tensornetwork.linalg.krylov.eigs(matvec,
backend="jax",
x0=tensor,
args=[tensor.array, ])
@pytest.mark.parametrize("sparse_backend", sparse_backends)
@pytest.mark.parametrize("dtype", testing_utils.np_float_dtypes)
def test_eigs(sparse_backend, dtype):
shape = (4, 4)
tensor = tensornetwork.linalg.initialization.ones(shape,
backend=sparse_backend,
dtype=dtype)
x0 = tensornetwork.linalg.initialization.ones(shape, backend=sparse_backend,
dtype=dtype)
def matvec(B):
return tensor @ B
def test_matvec(B):
return tensor.array @ B
result = tensornetwork.linalg.krylov.eigs(matvec, backend=sparse_backend,
x0=x0, num_krylov_vecs=3,
numeig=1)
rev, reV = result
test_result = tensor.backend.eigs(test_matvec, initial_state=x0.array,
num_krylov_vecs=3, numeig=1)
tev, _ = test_result
for r, t, R in zip(rev, tev, reV):
np.testing.assert_allclose(np.array(r), np.array(t))
testR = matvec(R) / r
np.testing.assert_allclose(testR.array, R.array, rtol=1E-5)
@pytest.mark.parametrize("sparse_backend", sparse_backends)
@pytest.mark.parametrize("dtype", testing_utils.np_float_dtypes)
def test_eigs_with_args(sparse_backend, dtype):
shape = (4, 4)
tensor = tensornetwork.linalg.initialization.ones(shape,
backend=sparse_backend,
dtype=dtype)
x0 = tensornetwork.linalg.initialization.ones(shape, backend=sparse_backend,
dtype=dtype)
def matvec(B):
return tensor @ B
def test_matvec(B, A):
return A @ B
result = tensornetwork.linalg.krylov.eigs(matvec, backend=sparse_backend,
x0=x0, num_krylov_vecs=3,
numeig=1)
rev, _ = result
test_result = tensornetwork.linalg.krylov.eigs(test_matvec, x0=x0,
num_krylov_vecs=3, numeig=1,
args=[tensor, ])
tev, teV = test_result
for r, t, R in zip(rev, tev, teV):
np.testing.assert_allclose(np.array(r), np.array(t))
testR = matvec(R) / r
np.testing.assert_allclose(testR.array, R.array, rtol=1E-5)
def test_gmres_raises():
n = 2
shape = (n, 1)
tensor = tensornetwork.linalg.initialization.ones(shape,
backend="jax",
dtype=np.float64)
tensornp = tensornetwork.linalg.initialization.ones(shape,
backend="numpy",
dtype=np.float64)
def matvec(B):
return tensor @ B
with pytest.raises(ValueError):
_ = tensornetwork.linalg.krylov.gmres(matvec, tensor, x0=tensornp)
with pytest.raises(ValueError):
_ = tensornetwork.linalg.krylov.gmres(matvec, tensornp, x0=tensor)
with pytest.raises(TypeError):
_ = tensornetwork.linalg.krylov.gmres(matvec, tensor.array)
with pytest.raises(TypeError):
_ = tensornetwork.linalg.krylov.gmres(matvec, tensor, x0=tensor.array)
with pytest.raises(TypeError):
_ = tensornetwork.linalg.krylov.gmres(matvec, tensor,
A_args=[tensor.array, ])
@pytest.mark.parametrize("sparse_backend", sparse_backends)
@pytest.mark.parametrize("dtype", testing_utils.np_float_dtypes)
def test_gmres(dtype, sparse_backend):
Adat = np.array(([[1, 1], [3, -4]]), dtype=dtype)
A = tensornetwork.tensor.Tensor(Adat, backend=sparse_backend)
bdat = np.array([3, 2], dtype=dtype).reshape((2, 1))
b = tensornetwork.tensor.Tensor(bdat, backend=sparse_backend)
x0dat = np.ones((2, 1), dtype=dtype)
x0 = tensornetwork.tensor.Tensor(x0dat, backend=sparse_backend)
n_kry = 2
def A_mv(y):
return A @ y
def A_mv_arr(y):
return A.array @ y
x, _ = A.backend.gmres(A_mv_arr, bdat, x0=x0dat, num_krylov_vectors=n_kry)
xT, _ = tensornetwork.linalg.krylov.gmres(A_mv, b, x0=x0,
num_krylov_vectors=n_kry)
np.testing.assert_allclose(x, xT.array)
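# Side note (added for clarity, not part of the original test): the 2x2 system above
# has the exact solution x = [2, 1], since 2 + 1 = 3 and 3*2 - 4*1 = 2. With
# num_krylov_vectors equal to the full dimension, GMRES should converge to that
# vector, so a stricter optional check (with a dtype-appropriate tolerance) could be:
#   np.testing.assert_allclose(xT.array.ravel(), [2.0, 1.0], atol=1e-5)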
@pytest.mark.parametrize("sparse_backend", sparse_backends)
@pytest.mark.parametrize("dtype", testing_utils.np_float_dtypes)
def test_gmres_with_args(dtype, sparse_backend):
Adat = np.array(([[1, 1], [3, -4]]), dtype=dtype)
A = tensornetwork.tensor.Tensor(Adat, backend=sparse_backend)
bdat = np.array([3, 2], dtype=dtype).reshape((2, 1))
b = tensornetwork.tensor.Tensor(bdat, backend=sparse_backend)
x0dat = np.ones((2, 1), dtype=dtype)
x0 = tensornetwork.tensor.Tensor(x0dat, backend=sparse_backend)
n_kry = 2
def A_mv(y):
return A @ y
def A_mv_test(y, A):
return A @ y
x, _ = tensornetwork.linalg.krylov.gmres(A_mv, b, x0=x0,
num_krylov_vectors=n_kry)
xT, _ = tensornetwork.linalg.krylov.gmres(A_mv_test, b, x0=x0,
num_krylov_vectors=n_kry,
A_args=[A, ])
np.testing.assert_allclose(x.array, xT.array)
|
from flask_script import Manager
from lemur.policies import service as policy_service
manager = Manager(usage="Handles all policy related tasks.")
@manager.option("-d", "--days", dest="days", help="Number of days before expiration.")
@manager.option("-n", "--name", dest="name", help="Policy name.")
def create(days, name):
"""
    Create a new certificate rotation policy.
"""
print("[+] Creating a new certificate rotation policy.")
policy_service.create(days=days, name=name)
print("[+] Successfully created a new certificate rotation policy")
|
import tensorflow as tf
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import clip_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import gen_image_ops
from tensorflow.python.ops import gen_nn_ops
from tensorflow.python.ops import string_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variables
# =========================================================================== #
# Modification of TensorFlow image routines.
# =========================================================================== #
def _assert(cond, ex_type, msg):
"""A polymorphic assert, works with tensors and boolean expressions.
If `cond` is not a tensor, behave like an ordinary assert statement, except
  that an empty list is returned. If `cond` is a tensor, return a list
containing a single TensorFlow assert op.
Args:
    cond: Something that evaluates to a boolean value. May be a tensor.
ex_type: The exception class to use.
msg: The error message.
Returns:
A list, containing at most one assert op.
"""
if _is_tensor(cond):
return [control_flow_ops.Assert(cond, [msg])]
else:
if not cond:
raise ex_type(msg)
else:
return []
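# Usage sketch (added for clarity, not part of the original module): with a plain
# Python bool this helper either raises or returns [], while with a tensor argument
# it returns a list holding a single Assert op that can be attached as a control
# dependency, e.g.:
#   deps = _assert(target_width > 0, ValueError, 'target_width must be > 0.')
#   image = control_flow_ops.with_dependencies(deps, image)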
def _is_tensor(x):
"""Returns `True` if `x` is a symbolic tensor-like object.
Args:
x: A python object to check.
Returns:
`True` if `x` is a `tf.Tensor` or `tf.Variable`, otherwise `False`.
"""
return isinstance(x, (ops.Tensor, variables.Variable))
def _ImageDimensions(image):
"""Returns the dimensions of an image tensor.
Args:
image: A 3-D Tensor of shape `[height, width, channels]`.
Returns:
A list of `[height, width, channels]` corresponding to the dimensions of the
input image. Dimensions that are statically known are python integers,
otherwise they are integer scalar tensors.
"""
if image.get_shape().is_fully_defined():
return image.get_shape().as_list()
else:
static_shape = image.get_shape().with_rank(3).as_list()
dynamic_shape = array_ops.unstack(array_ops.shape(image), 3)
return [s if s is not None else d
for s, d in zip(static_shape, dynamic_shape)]
def _Check3DImage(image, require_static=True):
"""Assert that we are working with properly shaped image.
Args:
image: 3-D Tensor of shape [height, width, channels]
require_static: If `True`, requires that all dimensions of `image` are
known and non-zero.
Raises:
ValueError: if `image.shape` is not a 3-vector.
Returns:
An empty list, if `image` has fully defined dimensions. Otherwise, a list
containing an assert op is returned.
"""
try:
image_shape = image.get_shape().with_rank(3)
except ValueError:
raise ValueError("'image' must be three-dimensional.")
if require_static and not image_shape.is_fully_defined():
raise ValueError("'image' must be fully defined.")
if any(x == 0 for x in image_shape):
raise ValueError("all dims of 'image.shape' must be > 0: %s" %
image_shape)
if not image_shape.is_fully_defined():
return [check_ops.assert_positive(array_ops.shape(image),
["all dims of 'image.shape' "
"must be > 0."])]
else:
return []
def fix_image_flip_shape(image, result):
"""Set the shape to 3 dimensional if we don't know anything else.
Args:
image: original image size
result: flipped or transformed image
Returns:
An image whose shape is at least None,None,None.
"""
image_shape = image.get_shape()
if image_shape == tensor_shape.unknown_shape():
result.set_shape([None, None, None])
else:
result.set_shape(image_shape)
return result
# =========================================================================== #
# Image + BBoxes methods: cropping, resizing, flipping, ...
# =========================================================================== #
def bboxes_crop_or_pad(bboxes,
height, width,
offset_y, offset_x,
target_height, target_width):
"""Adapt bounding boxes to crop or pad operations.
Coordinates are always supposed to be relative to the image.
Arguments:
bboxes: Tensor Nx4 with bboxes coordinates [y_min, x_min, y_max, x_max];
height, width: Original image dimension;
offset_y, offset_x: Offset to apply,
negative if cropping, positive if padding;
target_height, target_width: Target dimension after cropping / padding.
"""
with tf.name_scope('bboxes_crop_or_pad'):
# Rescale bounding boxes in pixels.
scale = tf.cast(tf.stack([height, width, height, width]), bboxes.dtype)
bboxes = bboxes * scale
# Add offset.
offset = tf.cast(tf.stack([offset_y, offset_x, offset_y, offset_x]), bboxes.dtype)
bboxes = bboxes + offset
# Rescale to target dimension.
scale = tf.cast(tf.stack([target_height, target_width,
target_height, target_width]), bboxes.dtype)
bboxes = bboxes / scale
return bboxes
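# Worked example of the transform above (illustrative, not in the original file):
# with height = width = 100, a relative box [0.2, 0.2, 0.6, 0.6] scales to pixels
# [20, 20, 60, 60]; applying a crop offset of -10 in both directions shifts it to
# [10, 10, 50, 50]; dividing by a target size of 80 x 80 yields the new relative
# coordinates [0.125, 0.125, 0.625, 0.625].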
def resize_image_bboxes_with_crop_or_pad(image, bboxes,
target_height, target_width):
"""Crops and/or pads an image to a target width and height.
Resizes an image to a target width and height by either centrally
cropping the image or padding it evenly with zeros.
If `width` or `height` is greater than the specified `target_width` or
`target_height` respectively, this op centrally crops along that dimension.
If `width` or `height` is smaller than the specified `target_width` or
`target_height` respectively, this op centrally pads with 0 along that
dimension.
Args:
image: 3-D tensor of shape `[height, width, channels]`
target_height: Target height.
target_width: Target width.
Raises:
ValueError: if `target_height` or `target_width` are zero or negative.
Returns:
Cropped and/or padded image of shape
`[target_height, target_width, channels]`
"""
with tf.name_scope('resize_with_crop_or_pad'):
image = ops.convert_to_tensor(image, name='image')
assert_ops = []
assert_ops += _Check3DImage(image, require_static=False)
assert_ops += _assert(target_width > 0, ValueError,
'target_width must be > 0.')
assert_ops += _assert(target_height > 0, ValueError,
'target_height must be > 0.')
image = control_flow_ops.with_dependencies(assert_ops, image)
# `crop_to_bounding_box` and `pad_to_bounding_box` have their own checks.
# Make sure our checks come first, so that error messages are clearer.
if _is_tensor(target_height):
target_height = control_flow_ops.with_dependencies(
assert_ops, target_height)
if _is_tensor(target_width):
target_width = control_flow_ops.with_dependencies(assert_ops, target_width)
def max_(x, y):
if _is_tensor(x) or _is_tensor(y):
return math_ops.maximum(x, y)
else:
return max(x, y)
def min_(x, y):
if _is_tensor(x) or _is_tensor(y):
return math_ops.minimum(x, y)
else:
return min(x, y)
def equal_(x, y):
if _is_tensor(x) or _is_tensor(y):
return math_ops.equal(x, y)
else:
return x == y
height, width, _ = _ImageDimensions(image)
width_diff = target_width - width
offset_crop_width = max_(-width_diff // 2, 0)
offset_pad_width = max_(width_diff // 2, 0)
height_diff = target_height - height
offset_crop_height = max_(-height_diff // 2, 0)
offset_pad_height = max_(height_diff // 2, 0)
# Maybe crop if needed.
height_crop = min_(target_height, height)
width_crop = min_(target_width, width)
cropped = tf.image.crop_to_bounding_box(image, offset_crop_height, offset_crop_width,
height_crop, width_crop)
bboxes = bboxes_crop_or_pad(bboxes,
height, width,
-offset_crop_height, -offset_crop_width,
height_crop, width_crop)
# Maybe pad if needed.
resized = tf.image.pad_to_bounding_box(cropped, offset_pad_height, offset_pad_width,
target_height, target_width)
bboxes = bboxes_crop_or_pad(bboxes,
height_crop, width_crop,
offset_pad_height, offset_pad_width,
target_height, target_width)
# In theory all the checks below are redundant.
if resized.get_shape().ndims is None:
raise ValueError('resized contains no shape.')
resized_height, resized_width, _ = _ImageDimensions(resized)
assert_ops = []
assert_ops += _assert(equal_(resized_height, target_height), ValueError,
'resized height is not correct.')
assert_ops += _assert(equal_(resized_width, target_width), ValueError,
'resized width is not correct.')
resized = control_flow_ops.with_dependencies(assert_ops, resized)
return resized, bboxes
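# Worked example (illustrative): resizing a 100 x 200 image to 150 x 150 centrally
# crops the width first (offset 25, keeping columns 25..174) and then centrally pads
# the height (offset 25); the bounding boxes are rescaled through both steps by the
# two bboxes_crop_or_pad calls above.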
def resize_image(image, size,
method=tf.image.ResizeMethod.BILINEAR,
align_corners=False):
"""Resize an image and bounding boxes.
"""
# Resize image.
with tf.name_scope('resize_image'):
height, width, channels = _ImageDimensions(image)
image = tf.expand_dims(image, 0)
image = tf.image.resize_images(image, size,
method, align_corners)
image = tf.reshape(image, tf.stack([size[0], size[1], channels]))
return image
def random_flip_left_right(image, bboxes, seed=None):
"""Random flip left-right of an image and its bounding boxes.
"""
def flip_bboxes(bboxes):
"""Flip bounding boxes coordinates.
"""
bboxes = tf.stack([bboxes[:, 0], 1 - bboxes[:, 3],
bboxes[:, 2], 1 - bboxes[:, 1]], axis=-1)
return bboxes
# Random flip. Tensorflow implementation.
with tf.name_scope('random_flip_left_right'):
image = ops.convert_to_tensor(image, name='image')
_Check3DImage(image, require_static=False)
uniform_random = random_ops.random_uniform([], 0, 1.0, seed=seed)
mirror_cond = math_ops.less(uniform_random, .5)
# Flip image.
result = control_flow_ops.cond(mirror_cond,
lambda: array_ops.reverse_v2(image, [1]),
lambda: image)
# Flip bboxes.
bboxes = control_flow_ops.cond(mirror_cond,
lambda: flip_bboxes(bboxes),
lambda: bboxes)
return fix_image_flip_shape(image, result), bboxes
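# Worked example of flip_bboxes above (illustrative): a box
# [y_min, x_min, y_max, x_max] = [0.1, 0.2, 0.5, 0.6] becomes
# [0.1, 1 - 0.6, 0.5, 1 - 0.2] = [0.1, 0.4, 0.5, 0.8], i.e. the y coordinates are
# untouched and the x interval is mirrored around 0.5.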
|
import mock
import pysensu_yelp
import pytest
from paasta_tools import check_flink_services_health
from paasta_tools import check_services_replication_tools
from paasta_tools.check_flink_services_health import check_under_registered_taskmanagers
from paasta_tools.utils import compose_job_id
check_flink_services_health.log = mock.Mock()
check_services_replication_tools.log = mock.Mock()
@pytest.fixture
def instance_config():
service = "fake_service"
instance = "fake_instance"
job_id = compose_job_id(service, instance)
mock_instance_config = mock.Mock(
service=service,
instance=instance,
cluster="fake_cluster",
soa_dir="fake_soa_dir",
job_id=job_id,
config_dict={},
)
mock_instance_config.get_replication_crit_percentage.return_value = 100
mock_instance_config.get_registrations.return_value = [job_id]
return mock_instance_config
@mock.patch(
"paasta_tools.flink_tools.get_flink_jobmanager_overview",
autospec=True,
return_value={"taskmanagers": 3},
)
def test_check_under_registered_taskmanagers_ok(mock_overview, instance_config):
under, output = check_under_registered_taskmanagers(
instance_config, expected_count=3, cr_name="fake--service-575c857546"
)
assert not under
assert (
"Service fake_service.fake_instance has 3 out of 3 expected instances of "
"taskmanager reported by dashboard!\n"
"(threshold: 100%)"
) in output
@mock.patch(
"paasta_tools.flink_tools.get_flink_jobmanager_overview",
autospec=True,
return_value={"taskmanagers": 2},
)
def test_check_under_registered_taskmanagers_under(mock_overview, instance_config):
under, output = check_under_registered_taskmanagers(
instance_config, expected_count=3, cr_name="fake--service-575c857546"
)
assert under
assert (
"Service fake_service.fake_instance has 2 out of 3 expected instances of "
"taskmanager reported by dashboard!\n"
"(threshold: 100%)"
) in output
assert (
"paasta status -s fake_service -i fake_instance -c fake_cluster -vv" in output
)
@mock.patch(
"paasta_tools.flink_tools.get_flink_jobmanager_overview",
autospec=True,
side_effect=ValueError("dummy exception"),
)
def test_check_under_registered_taskmanagers_error(mock_overview, instance_config):
under, output = check_under_registered_taskmanagers(
instance_config, expected_count=3, cr_name="fake--service-575c857546"
)
assert under
assert (
"Dashboard of service fake_service.fake_instance is not available!\n"
"(dummy exception)\n"
"What this alert"
) in output
assert (
"paasta status -s fake_service -i fake_instance -c fake_cluster -vv" in output
)
def test_check_flink_service_health_healthy(instance_config):
all_pods = []
with mock.patch(
"paasta_tools.check_flink_services_health.healthy_flink_containers_cnt",
autospec=True,
return_value=1,
), mock.patch(
"paasta_tools.check_flink_services_health.check_under_replication",
autospec=True,
return_value=(False, "OK"),
) as mock_check_under_replication, mock.patch(
"paasta_tools.check_flink_services_health.check_under_registered_taskmanagers",
autospec=True,
return_value=(False, "OK"),
) as mock_check_under_registered_taskmanagers, mock.patch(
"paasta_tools.check_flink_services_health.send_replication_event", autospec=True
) as mock_send_replication_event:
instance_config.config_dict["taskmanager"] = {"instances": 3}
check_flink_services_health.check_flink_service_health(
instance_config=instance_config,
all_tasks_or_pods=all_pods,
replication_checker=None,
)
expected = [
mock.call(
instance_config=instance_config,
expected_count=1,
num_available=1,
sub_component="supervisor",
),
mock.call(
instance_config=instance_config,
expected_count=1,
num_available=1,
sub_component="jobmanager",
),
mock.call(
instance_config=instance_config,
expected_count=3,
num_available=1,
sub_component="taskmanager",
),
]
mock_check_under_replication.assert_has_calls(expected)
mock_check_under_registered_taskmanagers.assert_called_once_with(
instance_config=instance_config, expected_count=3, cr_name=""
)
mock_send_replication_event.assert_called_once_with(
instance_config=instance_config,
status=pysensu_yelp.Status.OK,
output="OK\n########\nOK\n########\nOK\n########\nOK",
)
def test_check_flink_service_health_too_few_taskmanagers(instance_config):
def check_under_replication_side_effect(*args, **kwargs):
if kwargs["sub_component"] == "supervisor":
return False, "OK"
if kwargs["sub_component"] == "jobmanager":
return False, "OK"
if kwargs["sub_component"] == "taskmanager":
return True, "NOPE"
all_pods = []
with mock.patch(
"paasta_tools.check_flink_services_health.healthy_flink_containers_cnt",
autospec=True,
return_value=1,
), mock.patch(
"paasta_tools.check_flink_services_health.check_under_registered_taskmanagers",
autospec=True,
return_value=(True, "NOPE"),
) as mock_check_under_registered_taskmanagers, mock.patch(
"paasta_tools.check_flink_services_health.check_under_replication",
autospec=True,
side_effect=check_under_replication_side_effect,
) as mock_check_under_replication, mock.patch(
"paasta_tools.check_flink_services_health.send_replication_event", autospec=True
) as mock_send_replication_event:
instance_config.config_dict["taskmanager"] = {"instances": 3}
check_flink_services_health.check_flink_service_health(
instance_config=instance_config,
all_tasks_or_pods=all_pods,
replication_checker=None,
)
expected = [
mock.call(
instance_config=instance_config,
expected_count=1,
num_available=1,
sub_component="supervisor",
),
mock.call(
instance_config=instance_config,
expected_count=1,
num_available=1,
sub_component="jobmanager",
),
mock.call(
instance_config=instance_config,
expected_count=3,
num_available=1,
sub_component="taskmanager",
),
]
mock_check_under_replication.assert_has_calls(expected)
mock_check_under_registered_taskmanagers.assert_called_once_with(
instance_config=instance_config, expected_count=3, cr_name=""
)
mock_send_replication_event.assert_called_once_with(
instance_config=instance_config,
status=pysensu_yelp.Status.CRITICAL,
output="OK\n########\nOK\n########\nNOPE\n########\nNOPE",
)
def test_check_flink_service_health_under_registered_taskamanagers(instance_config):
all_pods = []
with mock.patch(
"paasta_tools.check_flink_services_health.healthy_flink_containers_cnt",
autospec=True,
return_value=1,
), mock.patch(
"paasta_tools.check_flink_services_health.check_under_replication",
autospec=True,
return_value=(False, "OK"),
) as mock_check_under_replication, mock.patch(
"paasta_tools.check_flink_services_health.check_under_registered_taskmanagers",
autospec=True,
return_value=(True, "NOPE"),
) as mock_check_under_registered_taskmanagers, mock.patch(
"paasta_tools.check_flink_services_health.send_replication_event", autospec=True
) as mock_send_replication_event:
instance_config.config_dict["taskmanager"] = {"instances": 3}
check_flink_services_health.check_flink_service_health(
instance_config=instance_config,
all_tasks_or_pods=all_pods,
replication_checker=None,
)
expected = [
mock.call(
instance_config=instance_config,
expected_count=1,
num_available=1,
sub_component="supervisor",
),
mock.call(
instance_config=instance_config,
expected_count=1,
num_available=1,
sub_component="jobmanager",
),
mock.call(
instance_config=instance_config,
expected_count=3,
num_available=1,
sub_component="taskmanager",
),
]
mock_check_under_replication.assert_has_calls(expected)
mock_check_under_registered_taskmanagers.assert_called_once_with(
instance_config=instance_config, expected_count=3, cr_name=""
)
mock_send_replication_event.assert_called_once_with(
instance_config=instance_config,
status=pysensu_yelp.Status.CRITICAL,
output="OK\n########\nOK\n########\nOK\n########\nNOPE",
)
|
from homeassistant.components import persistent_notification
from homeassistant.core import HomeAssistant
DOMAIN = "safe_mode"
async def async_setup(hass: HomeAssistant, config: dict):
"""Set up the Safe Mode component."""
persistent_notification.async_create(
hass,
"Home Assistant is running in safe mode. Check [the error log](/config/logs) to see what went wrong.",
"Safe Mode",
)
return True
|
import argparse
from collections import OrderedDict
from flask import Flask, render_template
from werkzeug.serving import run_simple
try:
from werkzeug.middleware.dispatcher import DispatcherMiddleware
except ImportError:
from werkzeug.wsgi import DispatcherMiddleware
from flasgger import __version__
from flasgger.utils import get_examples
app = Flask(__name__)
examples = OrderedDict({
'/{0}'.format(mod.__name__.split('.')[-1]): mod
for mod in sorted(get_examples(), key=lambda x: x.__name__)
})
@app.route('/')
def index():
return render_template('flasgger.html',
examples=examples,
version=__version__)
for example, mod in examples.items():
if hasattr(mod.app, 'swag'):
mod.app.swag.config['basePath'] = example
application = DispatcherMiddleware(
app,
{name: mod.app for name, mod in examples.items()}
)
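# Note on the dispatch above (added for clarity): DispatcherMiddleware routes a request
# whose path starts with one of the mount prefixes to that example's Flask app (moving
# the prefix into SCRIPT_NAME), and everything else to the index app; that is why each
# mounted app's swagger config gets its basePath set to its mount point.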
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Run App for WSGI")
parser.add_argument('-p', '--port', help='App Port')
parser.add_argument('-i', '--host', help='App Host')
parser.add_argument('-r', '--reloader', action='store_true',
help='Turn reloader on')
parser.add_argument('-d', '--debug', action='store_true',
help='Turn debug on')
args = parser.parse_args()
run_simple(
args.host or '0.0.0.0',
int(args.port) if args.port else 5000,
application,
use_reloader=args.reloader or False,
use_debugger=args.debug or False,
)
|
import numpy as np
from tensornetwork.block_sparse.index import Index
from tensornetwork.block_sparse.blocksparsetensor import _randn, _random
from tensornetwork.block_sparse.blocksparsetensor import BlockSparseTensor
from typing import Tuple, Type, Optional, Sequence
def ones(indices: Sequence[Index],
dtype: Optional[Type[np.number]] = None) -> BlockSparseTensor:
"""
Initialize a symmetric tensor with ones.
Args:
    indices: List of `Index` objects, one for each leg.
dtype: An optional numpy dtype. The dtype of the tensor
Returns:
BlockSparseTensor
"""
return BlockSparseTensor.ones(indices, dtype)
def zeros(indices: Sequence[Index],
dtype: Optional[Type[np.number]] = None) -> BlockSparseTensor:
"""
Initialize a symmetric tensor with zeros.
Args:
    indices: List of `Index` objects, one for each leg.
dtype: An optional numpy dtype. The dtype of the tensor
Returns:
BlockSparseTensor
"""
return BlockSparseTensor.zeros(indices, dtype)
def randn(indices: Sequence[Index],
dtype: Optional[Type[np.number]] = None) -> BlockSparseTensor:
"""
  Initialize a random symmetric tensor from a random normal distribution.
  Args:
    indices: List of `Index` objects, one for each leg.
dtype: An optional numpy dtype. The dtype of the tensor
Returns:
BlockSparseTensor
"""
return BlockSparseTensor.randn(indices, dtype)
def random(indices: Sequence[Index],
boundaries: Optional[Tuple[float, float]] = (0.0, 1.0),
dtype: Optional[Type[np.number]] = None) -> BlockSparseTensor:
"""
  Initialize a random symmetric tensor from a random uniform distribution.
  Args:
    indices: List of `Index` objects, one for each leg.
boundaries: Tuple of interval boundaries for the random uniform
distribution.
dtype: An optional numpy dtype. The dtype of the tensor
Returns:
BlockSparseTensor
"""
return BlockSparseTensor.random(indices, boundaries, dtype)
def empty_like(tensor: BlockSparseTensor) -> BlockSparseTensor:
"""
Initialize a symmetric tensor with an uninitialized np.ndarray.
The resulting tensor has the same shape and dtype as `tensor`.
Args:
tensor: A BlockSparseTensor.
Returns:
BlockSparseTensor
"""
return BlockSparseTensor(
np.empty(tensor.data.size, dtype=tensor.dtype),
charges=tensor._charges,
flows=tensor._flows,
order=tensor._order,
check_consistency=False)
def ones_like(tensor: BlockSparseTensor) -> BlockSparseTensor:
"""
Initialize a symmetric tensor with ones.
The resulting tensor has the same shape and dtype as `tensor`.
Args:
tensor: A BlockSparseTensor.
Returns:
BlockSparseTensor
"""
return BlockSparseTensor(
np.ones(tensor.data.size, dtype=tensor.dtype),
charges=tensor._charges,
flows=tensor._flows,
order=tensor._order,
check_consistency=False)
def zeros_like(tensor: BlockSparseTensor) -> BlockSparseTensor:
"""
Initialize a symmetric tensor with zeros.
The resulting tensor has the same shape and dtype as `tensor`.
Args:
tensor: A BlockSparseTensor.
Returns:
BlockSparseTensor
"""
return BlockSparseTensor(
np.zeros(tensor.data.size, dtype=tensor.dtype),
charges=tensor._charges,
flows=tensor._flows,
order=tensor._order,
check_consistency=False)
def randn_like(tensor: BlockSparseTensor) -> BlockSparseTensor:
"""
Initialize a symmetric tensor with random gaussian numbers.
The resulting tensor has the same shape and dtype as `tensor`.
Args:
tensor: A BlockSparseTensor.
Returns:
BlockSparseTensor
"""
return BlockSparseTensor(
_randn(tensor.data.size, dtype=tensor.dtype),
charges=tensor._charges,
flows=tensor._flows,
order=tensor._order,
check_consistency=False)
def random_like(
tensor: BlockSparseTensor, boundaries: Tuple = (0, 1)) -> BlockSparseTensor:
"""
Initialize a symmetric tensor with random uniform numbers.
The resulting tensor has the same shape and dtype as `tensor`.
Args:
    tensor: A BlockSparseTensor.
    boundaries: Tuple of interval boundaries for the random uniform
      distribution.
Returns:
BlockSparseTensor
"""
return BlockSparseTensor(
_random(tensor.data.size, dtype=tensor.dtype, boundaries=boundaries),
charges=tensor._charges,
flows=tensor._flows,
order=tensor._order,
check_consistency=False)
|
import logging
import string
from absl import flags
from perfkitbenchmarker import disk
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.providers.cloudstack import util
FLAGS = flags.FLAGS
class CloudStackDisk(disk.BaseDisk):
"""Object representing a Cloudstack Disk."""
def __init__(self, disk_spec, name, zone_id, project_id=None):
super(CloudStackDisk, self).__init__(disk_spec)
self.cs = util.CsClient(
FLAGS.CS_API_URL,
FLAGS.CS_API_KEY,
FLAGS.CS_API_SECRET
)
self.attached_vm_name = None
self.attached_vm_id = None
self.name = name
self.zone_id = zone_id
self.project_id = project_id
self.disk_offering_id = self._GetBestOfferingId(self.disk_size)
    assert self.disk_offering_id, "Unable to get disk offering of given size"
if disk_spec.disk_type:
logging.warn("Cloudstack does not support disk types")
@vm_util.Retry(max_retries=3)
def _Create(self):
"""Creates the disk."""
volume = self.cs.create_volume(self.name,
self.disk_offering_id,
self.zone_id,
self.project_id)
assert volume, "Unable to create volume"
self.volume_id = volume['id']
self.disk_type = volume['type']
self.actual_disk_size = int(volume['size']) / (2 ** 30) # In GB
def _Delete(self):
"""Deletes the disk."""
vol = self.cs.get_volume(self.name, self.project_id)
if vol:
self.cs.delete_volume(self.volume_id)
def _Exists(self):
"""Returns true if the disk exists."""
vol = self.cs.get_volume(self.name, self.project_id)
if vol:
return True
return False
@vm_util.Retry(max_retries=3)
def Attach(self, vm):
"""Attaches the disk to a VM.
Args:
vm: The CloudStackVirtualMachine instance to which
the disk will be attached.
"""
res = self.cs.attach_volume(self.volume_id, vm.id)
assert res, "Unable to attach volume"
self.device_id = res['deviceid']
self.device_path = "/dev/xvd" + \
str(string.ascii_lowercase[self.device_id])
def Detach(self):
"""Detaches the disk from a VM."""
self.cs.detach_volume(self.volume_id)
def _GetBestOfferingId(self, disk_size):
""" Given a disk_size (in GB), try to find a disk
    offering that is at least as big as the requested
one.
"""
disk_offerings = self.cs.list_disk_offerings()
sorted_do = sorted(disk_offerings, key=lambda x: x['disksize'])
for do in sorted_do:
if int(do['disksize']) >= disk_size:
return do['id']
return None
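  # Example (hypothetical offerings, for illustration only): given offerings whose
  # 'disksize' values are [10, 50, 100] GB and a requested disk_size of 20 GB,
  # _GetBestOfferingId returns the id of the 50 GB offering, i.e. the smallest
  # offering that is at least as large as the request.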
|
from datetime import datetime
from homeassistant.const import EVENT_HOMEASSISTANT_START
from homeassistant.core import CoreState, State
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.restore_state import (
DATA_RESTORE_STATE_TASK,
STORAGE_KEY,
RestoreEntity,
RestoreStateData,
StoredState,
)
from homeassistant.util import dt as dt_util
from tests.async_mock import patch
async def test_caching_data(hass):
"""Test that we cache data."""
now = dt_util.utcnow()
stored_states = [
StoredState(State("input_boolean.b0", "on"), now),
StoredState(State("input_boolean.b1", "on"), now),
StoredState(State("input_boolean.b2", "on"), now),
]
data = await RestoreStateData.async_get_instance(hass)
await hass.async_block_till_done()
await data.store.async_save([state.as_dict() for state in stored_states])
# Emulate a fresh load
hass.data[DATA_RESTORE_STATE_TASK] = None
entity = RestoreEntity()
entity.hass = hass
entity.entity_id = "input_boolean.b1"
# Mock that only b1 is present this run
with patch(
"homeassistant.helpers.restore_state.Store.async_save"
) as mock_write_data:
state = await entity.async_get_last_state()
await hass.async_block_till_done()
assert state is not None
assert state.entity_id == "input_boolean.b1"
assert state.state == "on"
assert mock_write_data.called
async def test_hass_starting(hass):
"""Test that we cache data."""
hass.state = CoreState.starting
now = dt_util.utcnow()
stored_states = [
StoredState(State("input_boolean.b0", "on"), now),
StoredState(State("input_boolean.b1", "on"), now),
StoredState(State("input_boolean.b2", "on"), now),
]
data = await RestoreStateData.async_get_instance(hass)
await hass.async_block_till_done()
await data.store.async_save([state.as_dict() for state in stored_states])
# Emulate a fresh load
hass.data[DATA_RESTORE_STATE_TASK] = None
entity = RestoreEntity()
entity.hass = hass
entity.entity_id = "input_boolean.b1"
# Mock that only b1 is present this run
states = [State("input_boolean.b1", "on")]
with patch(
"homeassistant.helpers.restore_state.Store.async_save"
) as mock_write_data, patch.object(hass.states, "async_all", return_value=states):
state = await entity.async_get_last_state()
await hass.async_block_till_done()
assert state is not None
assert state.entity_id == "input_boolean.b1"
assert state.state == "on"
# Assert that no data was written yet, since hass is still starting.
assert not mock_write_data.called
# Finish hass startup
with patch(
"homeassistant.helpers.restore_state.Store.async_save"
) as mock_write_data:
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
# Assert that this session states were written
assert mock_write_data.called
async def test_dump_data(hass):
"""Test that we cache data."""
states = [
State("input_boolean.b0", "on"),
State("input_boolean.b1", "on"),
State("input_boolean.b2", "on"),
State("input_boolean.b5", "unavailable", {"restored": True}),
]
entity = Entity()
entity.hass = hass
entity.entity_id = "input_boolean.b0"
await entity.async_internal_added_to_hass()
entity = RestoreEntity()
entity.hass = hass
entity.entity_id = "input_boolean.b1"
await entity.async_internal_added_to_hass()
data = await RestoreStateData.async_get_instance(hass)
now = dt_util.utcnow()
data.last_states = {
"input_boolean.b0": StoredState(State("input_boolean.b0", "off"), now),
"input_boolean.b1": StoredState(State("input_boolean.b1", "off"), now),
"input_boolean.b2": StoredState(State("input_boolean.b2", "off"), now),
"input_boolean.b3": StoredState(State("input_boolean.b3", "off"), now),
"input_boolean.b4": StoredState(
State("input_boolean.b4", "off"),
datetime(1985, 10, 26, 1, 22, tzinfo=dt_util.UTC),
),
"input_boolean.b5": StoredState(State("input_boolean.b5", "off"), now),
}
with patch(
"homeassistant.helpers.restore_state.Store.async_save"
) as mock_write_data, patch.object(hass.states, "async_all", return_value=states):
await data.async_dump_states()
assert mock_write_data.called
args = mock_write_data.mock_calls[0][1]
written_states = args[0]
# b0 should not be written, since it didn't extend RestoreEntity
# b1 should be written, since it is present in the current run
# b2 should not be written, since it is not registered with the helper
# b3 should be written, since it is still not expired
# b4 should not be written, since it is now expired
# b5 should be written, since current state is restored by entity registry
assert len(written_states) == 3
assert written_states[0]["state"]["entity_id"] == "input_boolean.b1"
assert written_states[0]["state"]["state"] == "on"
assert written_states[1]["state"]["entity_id"] == "input_boolean.b3"
assert written_states[1]["state"]["state"] == "off"
assert written_states[2]["state"]["entity_id"] == "input_boolean.b5"
assert written_states[2]["state"]["state"] == "off"
# Test that removed entities are not persisted
await entity.async_remove()
with patch(
"homeassistant.helpers.restore_state.Store.async_save"
) as mock_write_data, patch.object(hass.states, "async_all", return_value=states):
await data.async_dump_states()
assert mock_write_data.called
args = mock_write_data.mock_calls[0][1]
written_states = args[0]
assert len(written_states) == 2
assert written_states[0]["state"]["entity_id"] == "input_boolean.b3"
assert written_states[0]["state"]["state"] == "off"
assert written_states[1]["state"]["entity_id"] == "input_boolean.b5"
assert written_states[1]["state"]["state"] == "off"
async def test_dump_error(hass):
"""Test that we cache data."""
states = [
State("input_boolean.b0", "on"),
State("input_boolean.b1", "on"),
State("input_boolean.b2", "on"),
]
entity = Entity()
entity.hass = hass
entity.entity_id = "input_boolean.b0"
await entity.async_internal_added_to_hass()
entity = RestoreEntity()
entity.hass = hass
entity.entity_id = "input_boolean.b1"
await entity.async_internal_added_to_hass()
data = await RestoreStateData.async_get_instance(hass)
with patch(
"homeassistant.helpers.restore_state.Store.async_save",
side_effect=HomeAssistantError,
) as mock_write_data, patch.object(hass.states, "async_all", return_value=states):
await data.async_dump_states()
assert mock_write_data.called
async def test_load_error(hass):
"""Test that we cache data."""
entity = RestoreEntity()
entity.hass = hass
entity.entity_id = "input_boolean.b1"
with patch(
"homeassistant.helpers.storage.Store.async_load",
side_effect=HomeAssistantError,
):
state = await entity.async_get_last_state()
assert state is None
async def test_state_saved_on_remove(hass):
"""Test that we save entity state on removal."""
entity = RestoreEntity()
entity.hass = hass
entity.entity_id = "input_boolean.b0"
await entity.async_internal_added_to_hass()
now = dt_util.utcnow()
hass.states.async_set(
"input_boolean.b0", "on", {"complicated": {"value": {1, 2, now}}}
)
data = await RestoreStateData.async_get_instance(hass)
# No last states should currently be saved
assert not data.last_states
await entity.async_remove()
# We should store the input boolean state when it is removed
state = data.last_states["input_boolean.b0"].state
assert state.state == "on"
assert isinstance(state.attributes["complicated"]["value"], list)
assert set(state.attributes["complicated"]["value"]) == {1, 2, now.isoformat()}
async def test_restoring_invalid_entity_id(hass, hass_storage):
"""Test restoring invalid entity IDs."""
entity = RestoreEntity()
entity.hass = hass
entity.entity_id = "test.invalid__entity_id"
now = dt_util.utcnow().isoformat()
hass_storage[STORAGE_KEY] = {
"version": 1,
"key": STORAGE_KEY,
"data": [
{
"state": {
"entity_id": "test.invalid__entity_id",
"state": "off",
"attributes": {},
"last_changed": now,
"last_updated": now,
"context": {
"id": "3c2243ff5f30447eb12e7348cfd5b8ff",
"user_id": None,
},
},
"last_seen": dt_util.utcnow().isoformat(),
}
],
}
state = await entity.async_get_last_state()
assert state is None
|
import argparse
import sys
import time
from time import sleep
from typing import Optional
from typing import Sequence
from typing import Tuple
from paasta_tools import mesos_tools
from paasta_tools.frameworks.native_scheduler import create_driver
from paasta_tools.frameworks.native_scheduler import get_paasta_native_jobs_for_cluster
from paasta_tools.frameworks.native_scheduler import load_paasta_native_job_config
from paasta_tools.frameworks.native_scheduler import NativeScheduler
from paasta_tools.long_running_service_tools import load_service_namespace_config
from paasta_tools.long_running_service_tools import ServiceNamespaceConfig
from paasta_tools.utils import compose_job_id
from paasta_tools.utils import decompose_job_id
from paasta_tools.utils import DEFAULT_SOA_DIR
from paasta_tools.utils import load_system_paasta_config
from paasta_tools.utils import PaastaNotConfiguredError
def parse_args(argv):
parser = argparse.ArgumentParser(description="Runs native paasta mesos scheduler.")
parser.add_argument(
"-d", "--soa-dir", dest="soa_dir", metavar="SOA_DIR", default=DEFAULT_SOA_DIR
)
parser.add_argument(
"--stay-alive-seconds", dest="stay_alive_seconds", type=int, default=300
)
parser.add_argument(
"--periodic-interval", dest="periodic_interval", type=int, default=30
)
parser.add_argument(
"--staging-timeout", dest="staging_timeout", type=float, default=60
)
return parser.parse_args(argv)
def main(argv):
args = parse_args(argv)
system_paasta_config = load_system_paasta_config()
cluster = system_paasta_config.get_cluster()
drivers = []
schedulers = []
for service, instance in get_paasta_native_jobs_for_cluster(
cluster=cluster, soa_dir=args.soa_dir
):
scheduler = NativeScheduler(
service_name=service,
instance_name=instance,
cluster=cluster,
staging_timeout=args.staging_timeout,
system_paasta_config=system_paasta_config,
soa_dir=args.soa_dir,
)
schedulers.append(scheduler)
driver = create_driver(
framework_name="paasta_native %s" % compose_job_id(service, instance),
scheduler=scheduler,
system_paasta_config=system_paasta_config,
)
driver.start()
drivers.append(driver)
end_time = time.time() + args.stay_alive_seconds
while time.time() < end_time:
sleep(args.periodic_interval)
for scheduler, driver in zip(schedulers, drivers):
scheduler.periodic(driver)
return schedulers
def get_app_id_and_task_uuid_from_executor_id(executor_id):
"""Parse the paasta_native executor ID and return the (app id, task uuid)"""
return executor_id.rsplit(".", 1)
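# Illustration of the split above (executor id layout assumed, not verified here): an id
# such as "service.instance.gitsha.configsha.taskuuid" is split on the last dot only,
# giving app_id "service.instance.gitsha.configsha" and task uuid "taskuuid", which is
# the shape decompose_job_id in the next function expects.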
def parse_service_instance_from_executor_id(task_id):
app_id, task_uuid = get_app_id_and_task_uuid_from_executor_id(task_id)
(srv_name, srv_instance, _, __) = decompose_job_id(app_id)
return srv_name, srv_instance
def paasta_native_services_running_here(hostname=None, framework_id=None):
"""See what paasta_native services are being run by a mesos-slave on this host.
:returns: A list of triples of (service, instance, port)
:param hostname: query the mesos slave on this hostname.
:param framework_id: If specified, return info only for tasks belonging to this framework id.
"""
def framework_filter(fw):
return fw["name"].startswith("paasta_native ") and (
framework_id is None or fw["id"] == framework_id
)
return mesos_tools.mesos_services_running_here(
framework_filter=framework_filter,
parse_service_instance_from_executor_id=parse_service_instance_from_executor_id,
hostname=hostname,
)
def get_paasta_native_services_running_here_for_nerve(
cluster: Optional[str], soa_dir: str, hostname: Optional[str] = None
) -> Sequence[Tuple[str, ServiceNamespaceConfig]]:
if not cluster:
try:
system_paasta_config = load_system_paasta_config()
cluster = system_paasta_config.get_cluster()
        # If there is no cluster configured, or there isn't a Paasta
        # configuration file at all, then no native services can be running
        # here, so we catch the exception and return [].
        except PaastaNotConfiguredError:
return []
if not system_paasta_config.get_register_native_services():
return []
# When a cluster is defined in mesos, let's iterate through paasta_native services
paasta_native_services = paasta_native_services_running_here(hostname=hostname)
nerve_list = []
for name, instance, port in paasta_native_services:
try:
job_config = load_paasta_native_job_config(
service=name,
instance=instance,
cluster=cluster,
load_deployments=False,
soa_dir=soa_dir,
)
for registration in job_config.get_registrations():
reg_service, reg_namespace, _, __ = decompose_job_id(registration)
nerve_dict = load_service_namespace_config(
service=reg_service, namespace=reg_namespace, soa_dir=soa_dir
)
if not nerve_dict.is_in_smartstack():
continue
nerve_dict["port"] = port
nerve_list.append((registration, nerve_dict))
except KeyError:
continue # SOA configs got deleted for this app, it'll get cleaned up
return nerve_list
if __name__ == "__main__":
main(sys.argv[1:])
|
from abc import ABC
import logging
from typing import Any, Dict, Optional
from pymodbus.exceptions import ConnectionException, ModbusException
from pymodbus.pdu import ExceptionResponse
import voluptuous as vol
from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchEntity
from homeassistant.const import (
CONF_COMMAND_OFF,
CONF_COMMAND_ON,
CONF_NAME,
CONF_SLAVE,
STATE_ON,
)
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.entity import ToggleEntity
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from . import ModbusHub
from .const import (
CALL_TYPE_COIL,
CALL_TYPE_REGISTER_HOLDING,
CALL_TYPE_REGISTER_INPUT,
CONF_COILS,
CONF_HUB,
CONF_REGISTER,
CONF_REGISTER_TYPE,
CONF_REGISTERS,
CONF_STATE_OFF,
CONF_STATE_ON,
CONF_VERIFY_REGISTER,
CONF_VERIFY_STATE,
DEFAULT_HUB,
MODBUS_DOMAIN,
)
_LOGGER = logging.getLogger(__name__)
REGISTERS_SCHEMA = vol.Schema(
{
vol.Required(CONF_COMMAND_OFF): cv.positive_int,
vol.Required(CONF_COMMAND_ON): cv.positive_int,
vol.Required(CONF_NAME): cv.string,
vol.Required(CONF_REGISTER): cv.positive_int,
vol.Optional(CONF_HUB, default=DEFAULT_HUB): cv.string,
vol.Optional(CONF_REGISTER_TYPE, default=CALL_TYPE_REGISTER_HOLDING): vol.In(
[CALL_TYPE_REGISTER_HOLDING, CALL_TYPE_REGISTER_INPUT]
),
vol.Optional(CONF_SLAVE): cv.positive_int,
vol.Optional(CONF_STATE_OFF): cv.positive_int,
vol.Optional(CONF_STATE_ON): cv.positive_int,
vol.Optional(CONF_VERIFY_REGISTER): cv.positive_int,
vol.Optional(CONF_VERIFY_STATE, default=True): cv.boolean,
}
)
COILS_SCHEMA = vol.Schema(
{
vol.Required(CALL_TYPE_COIL): cv.positive_int,
vol.Required(CONF_NAME): cv.string,
vol.Required(CONF_SLAVE): cv.positive_int,
vol.Optional(CONF_HUB, default=DEFAULT_HUB): cv.string,
}
)
PLATFORM_SCHEMA = vol.All(
cv.has_at_least_one_key(CONF_COILS, CONF_REGISTERS),
PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_COILS): [COILS_SCHEMA],
vol.Optional(CONF_REGISTERS): [REGISTERS_SCHEMA],
}
),
)
async def async_setup_platform(
hass: HomeAssistantType, config: ConfigType, async_add_entities, discovery_info=None
):
"""Read configuration and create Modbus switches."""
switches = []
if CONF_COILS in config:
for coil in config[CONF_COILS]:
hub: ModbusHub = hass.data[MODBUS_DOMAIN][coil[CONF_HUB]]
switches.append(ModbusCoilSwitch(hub, coil))
if CONF_REGISTERS in config:
for register in config[CONF_REGISTERS]:
hub: ModbusHub = hass.data[MODBUS_DOMAIN][register[CONF_HUB]]
switches.append(ModbusRegisterSwitch(hub, register))
async_add_entities(switches)
class ModbusBaseSwitch(ToggleEntity, RestoreEntity, ABC):
"""Base class representing a Modbus switch."""
def __init__(self, hub: ModbusHub, config: Dict[str, Any]):
"""Initialize the switch."""
self._hub: ModbusHub = hub
self._name = config[CONF_NAME]
self._slave = config.get(CONF_SLAVE)
self._is_on = None
self._available = True
async def async_added_to_hass(self):
"""Handle entity which will be added."""
state = await self.async_get_last_state()
if not state:
return
self._is_on = state.state == STATE_ON
@property
def is_on(self):
"""Return true if switch is on."""
return self._is_on
@property
def name(self):
"""Return the name of the switch."""
return self._name
@property
def available(self) -> bool:
"""Return True if entity is available."""
return self._available
class ModbusCoilSwitch(ModbusBaseSwitch, SwitchEntity):
"""Representation of a Modbus coil switch."""
def __init__(self, hub: ModbusHub, config: Dict[str, Any]):
"""Initialize the coil switch."""
super().__init__(hub, config)
self._coil = config[CALL_TYPE_COIL]
def turn_on(self, **kwargs):
"""Set switch on."""
self._write_coil(self._coil, True)
self._is_on = True
def turn_off(self, **kwargs):
"""Set switch off."""
self._write_coil(self._coil, False)
self._is_on = False
def update(self):
"""Update the state of the switch."""
self._is_on = self._read_coil(self._coil)
def _read_coil(self, coil) -> bool:
"""Read coil using the Modbus hub slave."""
try:
result = self._hub.read_coils(self._slave, coil, 1)
except ConnectionException:
self._available = False
return False
if isinstance(result, (ModbusException, ExceptionResponse)):
self._available = False
return False
self._available = True
        # result.bits[0] holds the requested coil: the switch is on if that
        # lowest bit is 1. The other bits are not considered.
return bool(result.bits[0] & 1)
def _write_coil(self, coil, value):
"""Write coil using the Modbus hub slave."""
try:
self._hub.write_coil(self._slave, coil, value)
except ConnectionException:
self._available = False
return
self._available = True
class ModbusRegisterSwitch(ModbusBaseSwitch, SwitchEntity):
"""Representation of a Modbus register switch."""
def __init__(self, hub: ModbusHub, config: Dict[str, Any]):
"""Initialize the register switch."""
super().__init__(hub, config)
self._register = config[CONF_REGISTER]
self._command_on = config[CONF_COMMAND_ON]
self._command_off = config[CONF_COMMAND_OFF]
self._state_on = config.get(CONF_STATE_ON, self._command_on)
self._state_off = config.get(CONF_STATE_OFF, self._command_off)
self._verify_state = config[CONF_VERIFY_STATE]
self._verify_register = config.get(CONF_VERIFY_REGISTER, self._register)
self._register_type = config[CONF_REGISTER_TYPE]
self._available = True
self._is_on = None
def turn_on(self, **kwargs):
"""Set switch on."""
# Only holding register is writable
if self._register_type == CALL_TYPE_REGISTER_HOLDING:
self._write_register(self._command_on)
if not self._verify_state:
self._is_on = True
def turn_off(self, **kwargs):
"""Set switch off."""
# Only holding register is writable
if self._register_type == CALL_TYPE_REGISTER_HOLDING:
self._write_register(self._command_off)
if not self._verify_state:
self._is_on = False
@property
def available(self) -> bool:
"""Return True if entity is available."""
return self._available
def update(self):
"""Update the state of the switch."""
if not self._verify_state:
return
value = self._read_register()
if value == self._state_on:
self._is_on = True
elif value == self._state_off:
self._is_on = False
elif value is not None:
_LOGGER.error(
"Unexpected response from hub %s, slave %s register %s, got 0x%2x",
self._hub.name,
self._slave,
self._register,
value,
)
def _read_register(self) -> Optional[int]:
try:
if self._register_type == CALL_TYPE_REGISTER_INPUT:
result = self._hub.read_input_registers(
self._slave, self._verify_register, 1
)
else:
result = self._hub.read_holding_registers(
self._slave, self._verify_register, 1
)
except ConnectionException:
self._available = False
return
if isinstance(result, (ModbusException, ExceptionResponse)):
self._available = False
return
self._available = True
return int(result.registers[0])
def _write_register(self, value):
"""Write holding register using the Modbus hub slave."""
try:
self._hub.write_register(self._slave, self._register, value)
except ConnectionException:
self._available = False
return
self._available = True
|
import ast
import os
import sys
# =================== check if run inside pythonista ===================
IN_PYTHONISTA = sys.executable.find('Pythonista') >= 0
if IN_PYTHONISTA:
print("It appears that you are running this file using the pythonista app.")
print("The setup.py file is intended for the installation on a PC.")
print("Please choose one of the following options:")
print("[1] run pythonista-specific installer")
print("[2] continue with setup")
print("[3] abort")
try:
v = int(input(">"))
except Exception:
v = None
if v == 1:
# pythonista install
cmd = "import requests as r; exec(r.get('https://bit.ly/get-stash').text)"
print('Executing: "' + cmd + '"')
exec(cmd)
sys.exit(0)
elif v == 2:
# continue
pass
else:
# exit
if v != 3:
print("Unknown input!")
print("Aborting...")
sys.exit(1)
# =================== SETUP ===================
from distutils.core import setup
from setuptools import find_packages
TEST_REQUIREMENTS = [
"pyparsing==2.0.1",
"pytest>=3.6.0",
"flake8>=3.5.0",
"pycrypto==2.6",
"requests==2.9.1",
]
PARENT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
STASH_DIR = os.path.dirname(os.path.abspath(__file__))
CORE_PATH = os.path.join(STASH_DIR, "core.py")
TO_IGNORE = [os.path.abspath(os.path.join(STASH_DIR, p)) for p in ("build", "dist")]
def get_package_data_files(directory, exclude=[]):
"""
    Find data files recursively.
Original version from: https://stackoverflow.com/questions/27664504/how-to-add-package-data-recursively-in-python-setup-py
:param directory: directory to search recursively
:type directory: str
:param exclude: list of absolute paths to ignore
:type exclude: list of str
:return: package data files to include
:rtype: list of str
"""
paths = []
for (path, directories, filenames) in os.walk(directory):
for filename in filenames:
fp = os.path.abspath(os.path.join('..', path, filename))
skip = False
for v in exclude:
if fp.startswith(v):
skip = True
if not skip:
paths.append(fp)
return paths
def get_stash_version(corepath):
"""
Find and return the current StaSh version.
:param corepath: path to the 'core.py' file in the StaSh root directory
:type corepath: str
:return: the StaSh version defined in the corepath
:rtype: str
"""
with open(corepath, "r") as fin:
for line in fin:
if line.startswith("__version__"):
version = ast.literal_eval(line.split("=")[1].strip())
return version
    raise Exception("Could not find StaSh version in file '{f}'".format(f=corepath))
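# For illustration (hypothetical file content): if core.py contains a line such as
#   __version__ = "0.8.0"
# then get_stash_version() literal-evaluates the right-hand side and returns "0.8.0".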
# before we start with the setup, we must be inside the stash root path.
os.chdir(STASH_DIR)
print(STASH_DIR)
setup(
name="StaSh",
version=get_stash_version(CORE_PATH),
description="StaSh for PC",
author="https://github.com/ywangd and various contributors",
url="https://github.com/ywangd/stash/",
packages=[
"stash",
"stash.system",
"stash.system.shui",
"stash.lib",
],
package_dir={
"": STASH_DIR,
"stash": STASH_DIR,
},
package_data={
"": get_package_data_files(STASH_DIR, exclude=TO_IGNORE),
},
scripts=[os.path.join(STASH_DIR, "launch_stash.py")],
zip_safe=False,
install_requires=[
"six", # required by StaSh
"pyperclip", # required by libdist for copy/paste on PC
"requests",
"pyte",
],
extras_require={
"testing": TEST_REQUIREMENTS,
},
)
|
VERSION = '7.1.0'
long_description = '''\
ReText is a simple text editor that supports Markdown and reStructuredText
markup languages. It is written in Python using PyQt libraries.
It supports live preview, tabs, math formulas, export to various formats
including PDF and HTML.
For more details, please go to the `home page`_ or to the `wiki`_.
.. _`home page`: https://github.com/retext-project/retext
.. _`wiki`: https://github.com/retext-project/retext/wiki'''
import os
import sys
from os.path import join, basename
from distutils import log
from distutils.command.build import build
from setuptools import setup, Command
from setuptools.command.sdist import sdist
from setuptools.command.install import install
from subprocess import check_call
from glob import glob, iglob
if sys.version_info[0] < 3:
sys.exit('Error: Python 3.x is required.')
def bundle_icons():
import urllib.request
import tarfile
from io import BytesIO
icons_tgz = 'https://github.com/retext-project/retext/archive/icons.tar.gz'
response = urllib.request.urlopen(icons_tgz)
tario = BytesIO(response.read())
tar = tarfile.open(fileobj=tario, mode='r')
for member in tar:
if member.isfile():
member.path = basename(member.path)
log.info('bundling icons/%s', member.path)
tar.extract(member, 'icons')
tar.close()
class retext_build_translations(Command):
description = 'Build .qm files from .ts files using lrelease'
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
environment = dict(os.environ, QT_SELECT='5')
for ts_file in glob(join('locale', '*.ts')):
try:
check_call(('lrelease', ts_file), env=environment)
except Exception as e:
log.warn('Failed to build translations: %s', e)
break
class retext_build(build):
sub_commands = build.sub_commands + [('build_translations', None)]
class retext_sdist(sdist):
def run(self):
self.run_command('build_translations')
bundle_icons()
sdist.run(self)
class retext_install(install):
def change_roots(self, *names):
self.orig_install_scripts = self.install_scripts
self.orig_install_data = self.install_data
install.change_roots(self, *names)
def run(self):
install.run(self)
if self.root is None:
self.orig_install_scripts = self.install_scripts
self.orig_install_data = self.install_data
elif self.root.endswith("/wheel"):
raise RuntimeError("Building wheels is disabled, because it breaks .desktop"
" files. See issues #452 and #497 for details.\n"
"If you are using pip, please ignore this error,"
" installation should still succeed.")
retext = join(self.orig_install_scripts, 'retext')
# Fix Exec and Icon fields in the desktop file
desktop_file_path = join(self.install_data, 'share', 'applications',
'me.mitya57.ReText.desktop')
icon_path = join(self.orig_install_data, 'share', 'retext', 'icons', 'retext.svg')
with open(desktop_file_path, encoding="utf-8") as desktop_file:
desktop_contents = desktop_file.read()
log.info('fixing Exec line in %s', desktop_file_path)
desktop_contents = desktop_contents.replace('Exec=retext', 'Exec=%s' % retext)
if self.orig_install_data != '/usr':
log.info('fixing Icon line in %s', desktop_file_path)
desktop_contents = desktop_contents.replace('Icon=retext', 'Icon=%s' % icon_path)
with open(desktop_file_path, 'w', encoding="utf-8") as desktop_file:
desktop_file.write(desktop_contents)
classifiers = [
'Development Status :: 5 - Production/Stable',
'Environment :: X11 Applications :: Qt',
'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)',
'Programming Language :: Python :: 3 :: Only',
'Topic :: Text Editors',
'Topic :: Text Processing :: Markup'
]
setup(name='ReText',
version=VERSION,
description='Simple editor for Markdown and reStructuredText',
long_description=long_description,
author='Dmitry Shachnev',
author_email='[email protected]',
url='https://github.com/retext-project/retext',
packages=['ReText'],
entry_points={
'gui_scripts': ['retext = ReText.__main__:main'],
},
data_files=[
('share/applications', ['data/me.mitya57.ReText.desktop']),
('share/icons/hicolor/scalable/apps', ['icons/retext.svg']),
('share/metainfo', ['data/me.mitya57.ReText.appdata.xml']),
('share/retext/icons', iglob('icons/*')),
('share/retext/locale', iglob('locale/*.qm'))
],
python_requires='>=3.6',
requires=['docutils', 'Markdown', 'Markups(>=2.0)', 'pyenchant', 'Pygments', 'PyQt5'],
install_requires=[
'docutils',
'Markdown>=3.0',
'Markups>=2.0',
'Pygments',
'chardet>=2.3',
'PyQt5',
],
extras_require={
'spellcheck': ['pyenchant'],
},
cmdclass={
'build_translations': retext_build_translations,
'build': retext_build,
'sdist': retext_sdist,
'install': retext_install,
},
test_suite='tests',
classifiers=classifiers,
license='GPL 2+'
)
|
from distutils.version import LooseVersion
import numpy as np
import pandas as pd
import pytest
import xarray as xr
from xarray.core import formatting_html as fh
@pytest.fixture
def dataarray():
return xr.DataArray(np.random.RandomState(0).randn(4, 6))
@pytest.fixture
def dask_dataarray(dataarray):
pytest.importorskip("dask")
return dataarray.chunk()
@pytest.fixture
def multiindex():
mindex = pd.MultiIndex.from_product(
[["a", "b"], [1, 2]], names=("level_1", "level_2")
)
return xr.Dataset({}, {"x": mindex})
@pytest.fixture
def dataset():
times = pd.date_range("2000-01-01", "2001-12-31", name="time")
annual_cycle = np.sin(2 * np.pi * (times.dayofyear.values / 365.25 - 0.28))
base = 10 + 15 * annual_cycle.reshape(-1, 1)
tmin_values = base + 3 * np.random.randn(annual_cycle.size, 3)
tmax_values = base + 10 + 3 * np.random.randn(annual_cycle.size, 3)
return xr.Dataset(
{
"tmin": (("time", "location"), tmin_values),
"tmax": (("time", "location"), tmax_values),
},
{"time": times, "location": ["<IA>", "IN", "IL"]},
attrs={"description": "Test data."},
)
def test_short_data_repr_html(dataarray):
data_repr = fh.short_data_repr_html(dataarray)
assert data_repr.startswith("<pre>array")
def test_short_data_repr_html_non_str_keys(dataset):
ds = dataset.assign({2: lambda x: x["tmin"]})
fh.dataset_repr(ds)
def test_short_data_repr_html_dask(dask_dataarray):
import dask
if LooseVersion(dask.__version__) < "2.0.0":
assert not hasattr(dask_dataarray.data, "_repr_html_")
data_repr = fh.short_data_repr_html(dask_dataarray)
assert (
data_repr
== "dask.array<xarray-<this-array>, shape=(4, 6), dtype=float64, chunksize=(4, 6)>"
)
else:
assert hasattr(dask_dataarray.data, "_repr_html_")
data_repr = fh.short_data_repr_html(dask_dataarray)
assert data_repr == dask_dataarray.data._repr_html_()
def test_format_dims_no_dims():
dims, coord_names = {}, []
formatted = fh.format_dims(dims, coord_names)
assert formatted == ""
def test_format_dims_unsafe_dim_name():
dims, coord_names = {"<x>": 3, "y": 2}, []
formatted = fh.format_dims(dims, coord_names)
assert "<x>" in formatted
def test_format_dims_non_index():
dims, coord_names = {"x": 3, "y": 2}, ["time"]
formatted = fh.format_dims(dims, coord_names)
assert "class='xr-has-index'" not in formatted
def test_format_dims_index():
dims, coord_names = {"x": 3, "y": 2}, ["x"]
formatted = fh.format_dims(dims, coord_names)
assert "class='xr-has-index'" in formatted
def test_summarize_attrs_with_unsafe_attr_name_and_value():
attrs = {"<x>": 3, "y": "<pd.DataFrame>"}
formatted = fh.summarize_attrs(attrs)
assert "<dt><span><x> :</span></dt>" in formatted
assert "<dt><span>y :</span></dt>" in formatted
assert "<dd>3</dd>" in formatted
assert "<dd><pd.DataFrame></dd>" in formatted
def test_repr_of_dataarray(dataarray):
formatted = fh.array_repr(dataarray)
assert "dim_0" in formatted
# has an expanded data section
assert formatted.count("class='xr-array-in' type='checkbox' checked>") == 1
    # coords and attrs don't have any items so they'll be disabled and collapsed
assert (
formatted.count("class='xr-section-summary-in' type='checkbox' disabled >") == 2
)
def test_summary_of_multiindex_coord(multiindex):
idx = multiindex.x.variable.to_index_variable()
formatted = fh._summarize_coord_multiindex("foo", idx)
assert "(level_1, level_2)" in formatted
assert "MultiIndex" in formatted
assert "<span class='xr-has-index'>foo</span>" in formatted
def test_repr_of_multiindex(multiindex):
formatted = fh.dataset_repr(multiindex)
assert "(x)" in formatted
def test_repr_of_dataset(dataset):
formatted = fh.dataset_repr(dataset)
# coords, attrs, and data_vars are expanded
assert (
formatted.count("class='xr-section-summary-in' type='checkbox' checked>") == 3
)
assert "<U4" in formatted or ">U4" in formatted
assert "<IA>" in formatted
def test_repr_text_fallback(dataset):
formatted = fh.dataset_repr(dataset)
# Just test that the "pre" block used for fallback to plain text is present.
assert "<pre class='xr-text-repr-fallback'>" in formatted
def test_variable_repr_html():
v = xr.Variable(["time", "x"], [[1, 2, 3], [4, 5, 6]], {"foo": "bar"})
assert hasattr(v, "_repr_html_")
with xr.set_options(display_style="html"):
html = v._repr_html_().strip()
    # We don't check for exact string identity because the html output
    # is long and likely to change over time.
# Just test that something reasonable was produced.
assert html.startswith("<div") and html.endswith("</div>")
assert "xarray.Variable" in html
|
import os.path as op
import pytest
import numpy as np
from numpy.testing import assert_allclose, assert_array_less
from mne.datasets.testing import data_path
from mne.io import read_raw_nirx
from mne.preprocessing.nirs import optical_density, scalp_coupling_index,\
beer_lambert_law
from mne.datasets import testing
fname_nirx_15_0 = op.join(data_path(download=False),
'NIRx', 'nirscout', 'nirx_15_0_recording')
fname_nirx_15_2 = op.join(data_path(download=False),
'NIRx', 'nirscout', 'nirx_15_2_recording')
fname_nirx_15_2_short = op.join(data_path(download=False),
'NIRx', 'nirscout',
'nirx_15_2_recording_w_short')
@testing.requires_testing_data
@pytest.mark.parametrize('fname', ([fname_nirx_15_2_short, fname_nirx_15_2,
fname_nirx_15_0]))
@pytest.mark.parametrize('fmt', ('nirx', 'fif'))
def test_scalp_coupling_index(fname, fmt, tmpdir):
"""Test converting NIRX files."""
assert fmt in ('nirx', 'fif')
raw = read_raw_nirx(fname).load_data()
with pytest.raises(RuntimeError, match='Scalp'):
scalp_coupling_index(raw)
raw = optical_density(raw)
sci = scalp_coupling_index(raw)
# All values should be between -1 and +1
assert_array_less(sci, 1.0)
assert_array_less(sci * -1.0, 1.0)
# Fill in some data with known correlation values
rng = np.random.RandomState(0)
new_data = rng.rand(raw._data[0].shape[0])
# Set first two channels to perfect correlation
raw._data[0] = new_data
raw._data[1] = new_data
# Set next two channels to perfect correlation
raw._data[2] = new_data
raw._data[3] = new_data * 0.3 # check scale invariance
# Set next two channels to anti correlation
raw._data[4] = new_data
raw._data[5] = new_data * -1.0
# Set next two channels to be uncorrelated
raw._data[6] = new_data
raw._data[7] = rng.rand(raw._data[0].shape[0])
# Check values
sci = scalp_coupling_index(raw)
assert_allclose(sci[0:6], [1, 1, 1, 1, -1, -1], atol=0.01)
assert np.abs(sci[6]) < 0.5
assert np.abs(sci[7]) < 0.5
# Ensure function errors if wrong type is passed in
raw = beer_lambert_law(raw)
with pytest.raises(RuntimeError, match='Scalp'):
scalp_coupling_index(raw)
|
from copy import deepcopy
import numpy as np
from scipy import linalg
from ..cov import Covariance, make_ad_hoc_cov
from ..forward.forward import is_fixed_orient, _restrict_forward_to_src_sel
from ..io.proj import make_projector, Projection
from ..minimum_norm.inverse import _get_vertno, _prepare_forward
from ..source_space import label_src_vertno_sel
from ..utils import (verbose, check_fname, _reg_pinv, _check_option, logger,
_pl, _check_src_normal, check_version, _sym_mat_pow, warn)
from ..time_frequency.csd import CrossSpectralDensity
from ..externals.h5io import read_hdf5, write_hdf5
def _check_proj_match(proj, filters):
"""Check whether SSP projections in data and spatial filter match."""
proj_data, _, _ = make_projector(proj, filters['ch_names'])
if not np.allclose(proj_data, filters['proj'],
atol=np.finfo(float).eps, rtol=1e-13):
raise ValueError('The SSP projections present in the data '
'do not match the projections used when '
'calculating the spatial filter.')
def _check_src_type(filters):
"""Check whether src_type is in filters and set custom warning."""
if 'src_type' not in filters:
filters['src_type'] = None
warn_text = ('The spatial filter does not contain src_type and a robust '
'guess of src_type is not possible without src. Consider '
'recomputing the filter.')
return filters, warn_text
def _prepare_beamformer_input(info, forward, label=None, pick_ori=None,
noise_cov=None, rank=None, pca=False, loose=None,
combine_xyz='fro', exp=None, limit=None,
allow_fixed_depth=True, limit_depth_chs=False):
"""Input preparation common for LCMV, DICS, and RAP-MUSIC."""
_check_option('pick_ori', pick_ori,
('normal', 'max-power', 'vector', None))
# Restrict forward solution to selected vertices
if label is not None:
_, src_sel = label_src_vertno_sel(label, forward['src'])
forward = _restrict_forward_to_src_sel(forward, src_sel)
if loose is None:
loose = 0. if is_fixed_orient(forward) else 1.
if noise_cov is None:
noise_cov = make_ad_hoc_cov(info, std=1.)
forward, info_picked, gain, _, orient_prior, _, trace_GRGT, noise_cov, \
whitener = _prepare_forward(
forward, info, noise_cov, 'auto', loose, rank=rank, pca=pca,
use_cps=True, exp=exp, limit_depth_chs=limit_depth_chs,
combine_xyz=combine_xyz, limit=limit,
allow_fixed_depth=allow_fixed_depth)
is_free_ori = not is_fixed_orient(forward) # could have been changed
nn = forward['source_nn']
if is_free_ori: # take Z coordinate
nn = nn[2::3]
nn = nn.copy()
vertno = _get_vertno(forward['src'])
if forward['surf_ori']:
nn[...] = [0, 0, 1] # align to local +Z coordinate
if pick_ori is not None and not is_free_ori:
raise ValueError(
'Normal or max-power orientation (got %r) can only be picked when '
'a forward operator with free orientation is used.' % (pick_ori,))
if pick_ori == 'normal' and not forward['surf_ori']:
raise ValueError('Normal orientation can only be picked when a '
'forward operator oriented in surface coordinates is '
'used.')
_check_src_normal(pick_ori, forward['src'])
del forward, info
# Undo the scaling that MNE prefers
scale = np.sqrt((noise_cov['eig'] > 0).sum() / trace_GRGT)
gain /= scale
if orient_prior is not None:
orient_std = np.sqrt(orient_prior)
else:
orient_std = np.ones(gain.shape[1])
# Get the projector
proj, _, _ = make_projector(
info_picked['projs'], info_picked['ch_names'])
return (is_free_ori, info_picked, proj, vertno, gain, whitener, nn,
orient_std)
def _reduce_leadfield_rank(G):
"""Reduce the rank of the leadfield."""
# decompose lead field
u, s, v = np.linalg.svd(G, full_matrices=False)
# backproject, omitting one direction (equivalent to setting the smallest
# singular value to zero)
G = np.matmul(u[:, :, :-1], s[:, :-1, np.newaxis] * v[:, :-1, :])
return G
def _sym_inv_sm(x, reduce_rank, inversion, sk):
"""Symmetric inversion with single- or matrix-style inversion."""
if x.shape[1:] == (1, 1):
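        # Scalar (single-orientation) case: plain reciprocal, with
        # non-finite results from zero denominators replaced by 1.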
with np.errstate(divide='ignore', invalid='ignore'):
x_inv = 1. / x
x_inv[~np.isfinite(x_inv)] = 1.
else:
assert x.shape[1:] == (3, 3)
if inversion == 'matrix':
x_inv = _sym_mat_pow(x, -1, reduce_rank=reduce_rank)
# Reapply source covariance after inversion
x_inv *= sk[:, :, np.newaxis]
x_inv *= sk[:, np.newaxis, :]
else:
# Invert for each dipole separately using plain division
diags = np.diagonal(x, axis1=1, axis2=2)
assert not reduce_rank # guaranteed earlier
with np.errstate(divide='ignore'):
diags = 1. / diags
# set the diagonal of each 3x3
x_inv = np.zeros_like(x)
for k in range(x.shape[0]):
this = diags[k]
# Reapply source covariance after inversion
this *= (sk[k] * sk[k])
x_inv[k].flat[::4] = this
return x_inv
def _compute_beamformer(G, Cm, reg, n_orient, weight_norm, pick_ori,
reduce_rank, rank, inversion, nn, orient_std,
whitener):
"""Compute a spatial beamformer filter (LCMV or DICS).
For more detailed information on the parameters, see the docstrings of
`make_lcmv` and `make_dics`.
Parameters
----------
    G : ndarray, shape (n_channels, n_dipoles)
The leadfield.
Cm : ndarray, shape (n_channels, n_channels)
The data covariance matrix.
reg : float
Regularization parameter.
n_orient : int
        Number of dipole orientations defined at each source point.
    weight_norm : None | 'unit-noise-gain' | 'unit-noise-gain-invariant' | 'nai'
The weight normalization scheme to use.
pick_ori : None | 'normal' | 'max-power'
The source orientation to compute the beamformer in.
reduce_rank : bool
Whether to reduce the rank by one during computation of the filter.
rank : dict | None | 'full' | 'info'
See compute_rank.
inversion : 'matrix' | 'single'
The inversion scheme to compute the weights.
nn : ndarray, shape (n_dipoles, 3)
The source normals.
orient_std : ndarray, shape (n_dipoles,)
The std of the orientation prior used in weighting the lead fields.
whitener : ndarray, shape (n_channels, n_channels)
The whitener.
Returns
-------
W : ndarray, shape (n_dipoles, n_channels)
The beamformer filter weights.
"""
_check_option('weight_norm', weight_norm,
['unit-noise-gain-invariant', 'unit-noise-gain',
'nai', None])
# Whiten the data covariance
Cm = whitener @ Cm @ whitener.T.conj()
# Restore to properly Hermitian as large whitening coefs can have bad
# rounding error
Cm[:] = (Cm + Cm.T.conj()) / 2.
assert Cm.shape == (G.shape[0],) * 2
s, _ = np.linalg.eigh(Cm)
if not (s >= -s.max() * 1e-7).all():
# This shouldn't ever happen, but just in case
warn('data covariance does not appear to be positive semidefinite, '
'results will likely be incorrect')
# Tikhonov regularization using reg parameter to control for
# trade-off between spatial resolution and noise sensitivity
# eq. 25 in Gross and Ioannides, 1999 Phys. Med. Biol. 44 2081
Cm_inv, loading_factor, rank = _reg_pinv(Cm, reg, rank)
assert orient_std.shape == (G.shape[1],)
n_sources = G.shape[1] // n_orient
assert nn.shape == (n_sources, 3)
logger.info('Computing beamformer filters for %d source%s'
% (n_sources, _pl(n_sources)))
n_channels = G.shape[0]
assert n_orient in (3, 1)
Gk = np.reshape(G.T, (n_sources, n_orient, n_channels)).transpose(0, 2, 1)
assert Gk.shape == (n_sources, n_channels, n_orient)
sk = np.reshape(orient_std, (n_sources, n_orient))
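    # sk holds the per-source orientation-prior std; _sym_inv_sm re-applies it
    # to the denominator after inversion (see the calls below).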
del G, orient_std
pinv_kwargs = dict()
if check_version('numpy', '1.17'):
pinv_kwargs['hermitian'] = True
_check_option('reduce_rank', reduce_rank, (True, False))
# inversion of the denominator
_check_option('inversion', inversion, ('matrix', 'single'))
if inversion == 'single' and n_orient > 1 and pick_ori == 'vector' and \
weight_norm == 'unit-noise-gain-invariant':
raise ValueError(
'Cannot use pick_ori="vector" with inversion="single" and '
'weight_norm="unit-noise-gain-invariant"')
if reduce_rank and inversion == 'single':
raise ValueError('reduce_rank cannot be used with inversion="single"; '
'consider using inversion="matrix" if you have a '
'rank-deficient forward model (i.e., from a sphere '
'model with MEG channels), otherwise consider using '
'reduce_rank=False')
if n_orient > 1:
_, Gk_s, _ = np.linalg.svd(Gk, full_matrices=False)
assert Gk_s.shape == (n_sources, n_orient)
if not reduce_rank and (Gk_s[:, 0] > 1e6 * Gk_s[:, 2]).any():
raise ValueError(
'Singular matrix detected when estimating spatial filters. '
'Consider reducing the rank of the forward operator by using '
'reduce_rank=True.')
del Gk_s
#
# 1. Reduce rank of the lead field
#
if reduce_rank:
Gk = _reduce_leadfield_rank(Gk)
def _compute_bf_terms(Gk, Cm_inv):
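        # Per-source unit-gain terms: numerator G^H @ Cm^-1 and
        # denominator G^H @ Cm^-1 @ G.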
bf_numer = np.matmul(Gk.swapaxes(-2, -1).conj(), Cm_inv)
bf_denom = np.matmul(bf_numer, Gk)
return bf_numer, bf_denom
#
# 2. Reorient lead field in direction of max power or normal
#
if pick_ori == 'max-power':
assert n_orient == 3
_, bf_denom = _compute_bf_terms(Gk, Cm_inv)
if weight_norm is None:
ori_numer = np.eye(n_orient)[np.newaxis]
ori_denom = bf_denom
else:
# compute power, cf Sekihara & Nagarajan 2008, eq. 4.47
ori_numer = bf_denom
# Cm_inv should be Hermitian so no need for .T.conj()
ori_denom = np.matmul(
np.matmul(Gk.swapaxes(-2, -1).conj(), Cm_inv @ Cm_inv), Gk)
ori_denom_inv = _sym_inv_sm(ori_denom, reduce_rank, inversion, sk)
ori_pick = np.matmul(ori_denom_inv, ori_numer)
assert ori_pick.shape == (n_sources, n_orient, n_orient)
# pick eigenvector that corresponds to maximum eigenvalue:
eig_vals, eig_vecs = np.linalg.eig(ori_pick.real) # not Hermitian!
# sort eigenvectors by eigenvalues for picking:
order = np.argsort(np.abs(eig_vals), axis=-1)
# eig_vals = np.take_along_axis(eig_vals, order, axis=-1)
max_power_ori = eig_vecs[np.arange(len(eig_vecs)), :, order[:, -1]]
assert max_power_ori.shape == (n_sources, n_orient)
# set the (otherwise arbitrary) sign to match the normal
signs = np.sign(np.sum(max_power_ori * nn, axis=1, keepdims=True))
signs[signs == 0] = 1.
max_power_ori *= signs
# Compute the lead field for the optimal orientation,
# and adjust numer/denom
Gk = np.matmul(Gk, max_power_ori[..., np.newaxis])
n_orient = 1
else:
max_power_ori = None
if pick_ori == 'normal':
Gk = Gk[..., 2:3]
n_orient = 1
#
# 3. Compute numerator and denominator of beamformer formula (unit-gain)
#
bf_numer, bf_denom = _compute_bf_terms(Gk, Cm_inv)
assert bf_denom.shape == (n_sources,) + (n_orient,) * 2
assert bf_numer.shape == (n_sources, n_orient, n_channels)
del Gk # lead field has been adjusted and should not be used anymore
#
# 4. Invert the denominator
#
# Here W is W_ug, i.e.:
# G.T @ Cm_inv / (G.T @ Cm_inv @ G)
bf_denom_inv = _sym_inv_sm(bf_denom, reduce_rank, inversion, sk)
assert bf_denom_inv.shape == (n_sources, n_orient, n_orient)
W = np.matmul(bf_denom_inv, bf_numer)
assert W.shape == (n_sources, n_orient, n_channels)
del bf_denom_inv, sk
#
# 5. Re-scale filter weights according to the selected weight_norm
#
# Weight normalization is done by computing, for each source::
#
# W_ung = W_ug / sqrt(W_ug @ W_ug.T)
#
# with W_ung referring to the unit-noise-gain (weight normalized) filter
# and W_ug referring to the above-calculated unit-gain filter stored in W.
if weight_norm is not None:
# Three different ways to calculate the normalization factors here.
# Only matters when in vector mode, as otherwise n_orient == 1 and
        # they are all equivalent.
#
# In MNE < 0.21, we just used the Frobenius matrix norm:
#
# noise_norm = np.linalg.norm(W, axis=(1, 2), keepdims=True)
# assert noise_norm.shape == (n_sources, 1, 1)
# W /= noise_norm
#
# Sekihara 2008 says to use sqrt(diag(W_ug @ W_ug.T)), which is not
# rotation invariant:
if weight_norm in ('unit-noise-gain', 'nai'):
noise_norm = np.matmul(W, W.swapaxes(-2, -1).conj()).real
noise_norm = np.reshape( # np.diag operation over last two axes
noise_norm, (n_sources, -1, 1))[:, ::n_orient + 1]
np.sqrt(noise_norm, out=noise_norm)
noise_norm[noise_norm == 0] = np.inf
assert noise_norm.shape == (n_sources, n_orient, 1)
W /= noise_norm
else:
assert weight_norm == 'unit-noise-gain-invariant'
# Here we use sqrtm. The shortcut:
#
# use = W
#
# ... does not match the direct route (it is rotated!), so we'll
# use the direct one to match FieldTrip:
use = bf_numer
inner = np.matmul(use, use.swapaxes(-2, -1).conj())
W = np.matmul(_sym_mat_pow(inner, -0.5), use)
noise_norm = 1.
if weight_norm == 'nai':
# Estimate noise level based on covariance matrix, taking the
# first eigenvalue that falls outside the signal subspace or the
# loading factor used during regularization, whichever is largest.
if rank > len(Cm):
# Covariance matrix is full rank, no noise subspace!
# Use the loading factor as noise ceiling.
if loading_factor == 0:
raise RuntimeError(
'Cannot compute noise subspace with a full-rank '
'covariance matrix and no regularization. Try '
'manually specifying the rank of the covariance '
'matrix or using regularization.')
noise = loading_factor
else:
noise, _ = linalg.eigh(Cm)
noise = noise[-rank]
noise = max(noise, loading_factor)
W /= np.sqrt(noise)
W = W.reshape(n_sources * n_orient, n_channels)
logger.info('Filter computation complete')
return W, max_power_ori
# TODO: Eventually we can @jit() this to make it faster
def _compute_power(Cm, W, n_orient):
"""Use beamformer filters to compute source power.
Parameters
----------
Cm : ndarray, shape (n_channels, n_channels)
Data covariance matrix or CSD matrix.
    W : ndarray, shape (n_sources * n_orient, n_channels)
        Beamformer weights.
    n_orient : int
        Number of dipole orientations defined at each source point.
Returns
-------
power : ndarray, shape (nvertices,)
Source power.
"""
n_sources = W.shape[0] // n_orient
source_power = np.zeros(n_sources)
for k in range(n_sources):
Wk = W[n_orient * k: n_orient * k + n_orient]
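        # Source power is the real part of the trace of the
        # (n_orient x n_orient) matrix Wk @ Cm @ Wk^H for this source.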
source_power[k] = np.trace(Wk @ Cm @ Wk.conj().T).real
return source_power
class Beamformer(dict):
"""A computed beamformer.
Notes
-----
.. versionadded:: 0.17
"""
def copy(self):
"""Copy the beamformer.
Returns
-------
beamformer : instance of Beamformer
A deep copy of the beamformer.
"""
return deepcopy(self)
def __repr__(self): # noqa: D105
n_verts = sum(len(v) for v in self['vertices'])
n_channels = len(self['ch_names'])
if self['subject'] is None:
subject = 'unknown'
else:
subject = '"%s"' % (self['subject'],)
out = ('<Beamformer | %s, subject %s, %s vert, %s ch'
% (self['kind'], subject, n_verts, n_channels))
if self['pick_ori'] is not None:
out += ', %s ori' % (self['pick_ori'],)
if self['weight_norm'] is not None:
out += ', %s norm' % (self['weight_norm'],)
if self.get('inversion') is not None:
out += ', %s inversion' % (self['inversion'],)
if 'rank' in self:
out += ', rank %s' % (self['rank'],)
out += '>'
return out
@verbose
def save(self, fname, overwrite=False, verbose=None):
"""Save the beamformer filter.
Parameters
----------
fname : str
The filename to use to write the HDF5 data.
Should end in ``'-lcmv.h5'`` or ``'-dics.h5'``.
overwrite : bool
If True, overwrite the file (if it exists).
%(verbose)s
"""
ending = '-%s.h5' % (self['kind'].lower(),)
check_fname(fname, self['kind'], (ending,))
csd_orig = None
try:
if 'csd' in self:
csd_orig = self['csd']
self['csd'] = self['csd'].__getstate__()
write_hdf5(fname, self, overwrite=overwrite, title='mnepython')
finally:
if csd_orig is not None:
self['csd'] = csd_orig
def read_beamformer(fname):
"""Read a beamformer filter.
Parameters
----------
fname : str
The filename of the HDF5 file.
Returns
-------
filter : instance of Beamformer
The beamformer filter.
"""
beamformer = read_hdf5(fname, title='mnepython')
if 'csd' in beamformer:
beamformer['csd'] = CrossSpectralDensity(**beamformer['csd'])
# h5io seems to cast `bool` to `int` on round-trip, probably a bug
# we should fix at some point (if possible -- could be HDF5 limitation)
for key in ('normalize_fwd', 'is_free_ori', 'is_ssp'):
if key in beamformer:
beamformer[key] = bool(beamformer[key])
for key in ('data_cov', 'noise_cov'):
if beamformer.get(key) is not None:
for pi, p in enumerate(beamformer[key]['projs']):
p = Projection(**p)
p['active'] = bool(p['active'])
beamformer[key]['projs'][pi] = p
beamformer[key] = Covariance(
*[beamformer[key].get(arg)
for arg in ('data', 'names', 'bads', 'projs', 'nfree', 'eig',
'eigvec', 'method', 'loglik')])
return Beamformer(beamformer)
def _proj_whiten_data(M, proj, filters):
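    """Apply the filter's SSP projector (when no whitener is present) and whitener to the data."""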
if filters.get('is_ssp', True):
# check whether data and filter projs match
_check_proj_match(proj, filters)
if filters['whitener'] is None:
M = np.dot(filters['proj'], M)
if filters['whitener'] is not None:
M = np.dot(filters['whitener'], M)
return M
|
from flask import current_app
from lemur import database
from lemur.certificates.models import Certificate
from lemur.common.utils import truthiness
from lemur.notifications.models import Notification
def create_default_expiration_notifications(name, recipients, intervals=None):
"""
    Will create standard 30, 15 and 2 day notifications for a given owner unless an alternate set of
intervals is supplied. If standard notifications already exist these will be returned instead of
new notifications.
:param name:
    :param recipients:
    :param intervals:
:return:
"""
if not recipients:
return []
options = [
{
"name": "unit",
"type": "select",
"required": True,
"validation": "",
"available": ["days", "weeks", "months"],
"helpMessage": "Interval unit",
"value": "days",
},
{
"name": "recipients",
"type": "str",
"required": True,
"validation": r"^([\w+-.%]+@[\w-.]+\.[A-Za-z]{2,4},?)+$",
"helpMessage": "Comma delimited list of email addresses",
"value": ",".join(recipients),
},
]
if intervals is None:
intervals = current_app.config.get(
"LEMUR_DEFAULT_EXPIRATION_NOTIFICATION_INTERVALS", [30, 15, 2]
)
notifications = []
for i in intervals:
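        # Reuse an existing "<name>_<interval>_DAY" notification if present, otherwise create one.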
n = get_by_label("{name}_{interval}_DAY".format(name=name, interval=i))
if not n:
inter = [
{
"name": "interval",
"type": "int",
"required": True,
"validation": r"^\d+$",
"helpMessage": "Number of days to be alert before expiration.",
"value": i,
}
]
inter.extend(options)
n = create(
label="{name}_{interval}_DAY".format(name=name, interval=i),
plugin_name=current_app.config.get(
"LEMUR_DEFAULT_NOTIFICATION_PLUGIN", "email-notification"
),
options=list(inter),
description="Default {interval} day expiration notification".format(
interval=i
),
certificates=[],
)
notifications.append(n)
return notifications
def create(label, plugin_name, options, description, certificates):
"""
Creates a new notification.
:param label: Notification label
:param plugin_name:
:param options:
:param description:
:param certificates:
:rtype : Notification
:return:
"""
notification = Notification(
label=label, options=options, plugin_name=plugin_name, description=description
)
notification.certificates = certificates
return database.create(notification)
def update(notification_id, label, plugin_name, options, description, active, certificates):
"""
Updates an existing notification.
:param notification_id:
:param label: Notification label
:param plugin_name:
:param options:
:param description:
:param active:
:param certificates:
:rtype : Notification
:return:
"""
notification = get(notification_id)
notification.label = label
notification.plugin_name = plugin_name
notification.options = options
notification.description = description
notification.active = active
notification.certificates = certificates
return database.update(notification)
def delete(notification_id):
"""
    Deletes a notification.
:param notification_id: Lemur assigned ID
"""
database.delete(get(notification_id))
def get(notification_id):
"""
    Retrieves a notification by its Lemur assigned ID.
:param notification_id: Lemur assigned ID
:rtype : Notification
:return:
"""
return database.get(Notification, notification_id)
def get_by_label(label):
"""
Retrieves a notification by its label
:param label:
:return:
"""
return database.get(Notification, label, field="label")
def get_all():
"""
    Retrieves all notifications currently known by Lemur.
:return:
"""
query = database.session_query(Notification)
return database.find_all(query, Notification, {}).all()
def render(args):
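    """Filter notifications by certificate, the 'active' flag or other filter terms, then sort and paginate the results."""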
filt = args.pop("filter")
certificate_id = args.pop("certificate_id", None)
if certificate_id:
query = database.session_query(Notification).join(
Certificate, Notification.certificate
)
query = query.filter(Certificate.id == certificate_id)
else:
query = database.session_query(Notification)
if filt:
terms = filt.split(";")
if terms[0] == "active":
query = query.filter(Notification.active == truthiness(terms[1]))
else:
query = database.filter(query, Notification, terms)
return database.sort_and_page(query, Notification, args)
|
import unittest
import numpy as np
from chainer import testing
from chainercv.transforms import random_rotate
class TestRandomRotate(unittest.TestCase):
def test_random_rotate(self):
img = np.random.uniform(size=(3, 24, 24))
out, param = random_rotate(img, return_param=True)
k = param['k']
expected = np.transpose(img, axes=(1, 2, 0))
expected = np.rot90(expected, k)
expected = np.transpose(expected, axes=(2, 0, 1))
np.testing.assert_equal(out, expected)
testing.run_module(__name__, __file__)
|
from django.utils import timezone
from rest_framework import serializers
from shop.conf import app_settings
from shop.models.cart import CartModel
from shop.models.order import OrderModel
from shop.modifiers.pool import cart_modifiers_pool
from shop.rest.money import MoneyField
class OrderListSerializer(serializers.ModelSerializer):
number = serializers.CharField(
source='get_number',
read_only=True,
)
url = serializers.URLField(
source='get_absolute_url',
read_only=True,
)
status = serializers.CharField(
source='status_name',
read_only=True,
)
subtotal = MoneyField()
total = MoneyField()
class Meta:
model = OrderModel
fields = ['number', 'url', 'created_at', 'updated_at', 'subtotal', 'total', 'status',
'shipping_address_text', 'billing_address_text'] # TODO: these fields are not part of the base model
read_only_fields = ['shipping_address_text', 'billing_address_text']
class OrderDetailSerializer(OrderListSerializer):
items = app_settings.ORDER_ITEM_SERIALIZER(
many=True,
read_only=True,
)
extra = serializers.DictField(read_only=True)
amount_paid = MoneyField(read_only=True)
outstanding_amount = MoneyField(read_only=True)
cancelable = serializers.BooleanField(read_only=True)
is_partially_paid = serializers.SerializerMethodField(
method_name='get_partially_paid',
help_text="Returns true, if order has been partially paid",
)
annotation = serializers.CharField(
write_only=True,
required=False,
)
reorder = serializers.BooleanField(
write_only=True,
default=False,
)
cancel = serializers.BooleanField(
write_only=True,
default=False,
)
active_payment_method = serializers.SerializerMethodField()
active_shipping_method = serializers.SerializerMethodField()
class Meta:
model = OrderModel
exclude = ['id', 'customer', 'stored_request', '_subtotal', '_total']
read_only_fields = ['shipping_address_text', 'billing_address_text'] # TODO: not part of OrderBase
def get_partially_paid(self, order):
return order.amount_paid > 0
def get_active_payment_method(self, order):
modifier = cart_modifiers_pool.get_active_payment_modifier(order.extra.get('payment_modifier'))
value, label = modifier.get_choice() if modifier else (None, "")
return {'value': value, 'label': label}
def get_active_shipping_method(self, order):
modifier = cart_modifiers_pool.get_active_shipping_modifier(order.extra.get('shipping_modifier'))
value, label = modifier.get_choice() if modifier else (None, "")
return {'value': value, 'label': label}
def update(self, order, validated_data):
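        """Append an annotation to the order and handle the optional re-add-to-cart and cancel actions."""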
order.extra.setdefault('addendum', [])
if validated_data.get('annotation'):
timestamp = timezone.now().isoformat()
order.extra['addendum'].append((timestamp, validated_data['annotation']))
order.save()
if validated_data['reorder'] is True:
cart = CartModel.objects.get_from_request(self.context['request'])
order.readd_to_cart(cart)
if validated_data['cancel'] is True and order.cancelable():
order.cancel_order()
order.save(with_notification=True)
return order
|
from pysqueezebox import Server
from homeassistant import config_entries
from homeassistant.components.squeezebox.const import DOMAIN
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_PORT,
CONF_USERNAME,
HTTP_UNAUTHORIZED,
)
from homeassistant.data_entry_flow import (
RESULT_TYPE_ABORT,
RESULT_TYPE_CREATE_ENTRY,
RESULT_TYPE_FORM,
)
from tests.async_mock import patch
from tests.common import MockConfigEntry
HOST = "1.1.1.1"
HOST2 = "2.2.2.2"
PORT = 9000
UUID = "test-uuid"
UNKNOWN_ERROR = "1234"
async def mock_discover(_discovery_callback):
"""Mock discovering a Logitech Media Server."""
_discovery_callback(Server(None, HOST, PORT, uuid=UUID))
async def mock_failed_discover(_discovery_callback):
"""Mock unsuccessful discovery by doing nothing."""
async def patch_async_query_unauthorized(self, *args):
"""Mock an unauthorized query."""
self.http_status = HTTP_UNAUTHORIZED
return False
async def test_user_form(hass):
"""Test user-initiated flow, including discovery and the edit step."""
with patch("pysqueezebox.Server.async_query", return_value={"uuid": UUID},), patch(
"homeassistant.components.squeezebox.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.squeezebox.async_setup_entry",
return_value=True,
) as mock_setup_entry, patch(
"homeassistant.components.squeezebox.config_flow.async_discover", mock_discover
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == RESULT_TYPE_FORM
assert result["step_id"] == "edit"
assert CONF_HOST in result["data_schema"].schema
for key in result["data_schema"].schema:
if key == CONF_HOST:
assert key.description == {"suggested_value": HOST}
# test the edit step
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_HOST: HOST, CONF_PORT: PORT, CONF_USERNAME: "", CONF_PASSWORD: ""},
)
assert result["type"] == RESULT_TYPE_CREATE_ENTRY
assert result["title"] == HOST
assert result["data"] == {
CONF_HOST: HOST,
CONF_PORT: PORT,
CONF_USERNAME: "",
CONF_PASSWORD: "",
}
await hass.async_block_till_done()
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_user_form_timeout(hass):
"""Test we handle server search timeout."""
with patch(
"homeassistant.components.squeezebox.config_flow.async_discover",
mock_failed_discover,
), patch("homeassistant.components.squeezebox.config_flow.TIMEOUT", 0.1):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == RESULT_TYPE_FORM
assert result["errors"] == {"base": "no_server_found"}
# simulate manual input of host
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_HOST: HOST2}
)
assert result2["type"] == RESULT_TYPE_FORM
assert result2["step_id"] == "edit"
assert CONF_HOST in result2["data_schema"].schema
for key in result2["data_schema"].schema:
if key == CONF_HOST:
assert key.description == {"suggested_value": HOST2}
async def test_user_form_duplicate(hass):
"""Test duplicate discovered servers are skipped."""
with patch(
"homeassistant.components.squeezebox.config_flow.async_discover",
mock_discover,
), patch("homeassistant.components.squeezebox.config_flow.TIMEOUT", 0.1), patch(
"homeassistant.components.squeezebox.async_setup", return_value=True
), patch(
"homeassistant.components.squeezebox.async_setup_entry",
return_value=True,
):
entry = MockConfigEntry(domain=DOMAIN, unique_id=UUID)
await hass.config_entries.async_add(entry)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == RESULT_TYPE_FORM
assert result["errors"] == {"base": "no_server_found"}
async def test_form_invalid_auth(hass):
"""Test we handle invalid auth."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "edit"}
)
async def patch_async_query(self, *args):
self.http_status = HTTP_UNAUTHORIZED
return False
with patch("pysqueezebox.Server.async_query", new=patch_async_query):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_HOST: HOST,
CONF_PORT: PORT,
CONF_USERNAME: "test-username",
CONF_PASSWORD: "test-password",
},
)
assert result["type"] == RESULT_TYPE_FORM
assert result["errors"] == {"base": "invalid_auth"}
async def test_form_cannot_connect(hass):
"""Test we handle cannot connect error."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "edit"}
)
with patch(
"pysqueezebox.Server.async_query",
return_value=False,
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_HOST: HOST,
CONF_PORT: PORT,
CONF_USERNAME: "test-username",
CONF_PASSWORD: "test-password",
},
)
assert result["type"] == RESULT_TYPE_FORM
assert result["errors"] == {"base": "cannot_connect"}
async def test_discovery(hass):
"""Test handling of discovered server."""
with patch(
"pysqueezebox.Server.async_query",
return_value={"uuid": UUID},
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_DISCOVERY},
data={CONF_HOST: HOST, CONF_PORT: PORT, "uuid": UUID},
)
assert result["type"] == RESULT_TYPE_FORM
assert result["step_id"] == "edit"
async def test_discovery_no_uuid(hass):
"""Test handling of discovered server with unavailable uuid."""
with patch("pysqueezebox.Server.async_query", new=patch_async_query_unauthorized):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_DISCOVERY},
data={CONF_HOST: HOST, CONF_PORT: PORT},
)
assert result["type"] == RESULT_TYPE_FORM
assert result["step_id"] == "edit"
async def test_import(hass):
"""Test handling of configuration imported."""
with patch("pysqueezebox.Server.async_query", return_value={"uuid": UUID},), patch(
"homeassistant.components.squeezebox.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.squeezebox.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={CONF_HOST: HOST, CONF_PORT: PORT},
)
assert result["type"] == RESULT_TYPE_CREATE_ENTRY
await hass.async_block_till_done()
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_import_bad_host(hass):
"""Test handling of configuration imported with bad host."""
with patch("pysqueezebox.Server.async_query", return_value=False):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={CONF_HOST: HOST, CONF_PORT: PORT},
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "cannot_connect"
async def test_import_bad_auth(hass):
"""Test handling of configuration import with bad authentication."""
with patch("pysqueezebox.Server.async_query", new=patch_async_query_unauthorized):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={
CONF_HOST: HOST,
CONF_PORT: PORT,
CONF_USERNAME: "test",
CONF_PASSWORD: "bad",
},
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "invalid_auth"
async def test_import_existing(hass):
"""Test handling of configuration import of existing server."""
with patch(
"homeassistant.components.squeezebox.async_setup", return_value=True
), patch(
"homeassistant.components.squeezebox.async_setup_entry",
return_value=True,
), patch(
"pysqueezebox.Server.async_query",
return_value={"ip": HOST, "uuid": UUID},
):
entry = MockConfigEntry(domain=DOMAIN, unique_id=UUID)
await hass.config_entries.async_add(entry)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={CONF_HOST: HOST, CONF_PORT: PORT},
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
|
import pytest
from shop.forms.checkout import ShippingAddressForm
from shop.views.address import AddressEditView
from shop.views.checkout import CheckoutViewSet
@pytest.mark.django_db
def test_customer_form(registered_customer, api_rf, empty_cart):
data = {
'customer': {
'salutation': "mr",
'first_name': "John",
'last_name': "Doe",
'email': "[email protected]",
'plugin_id': "1",
'plugin_order': "1",
},
}
request = api_rf.put('/shop/api/checkout/upload', data, format='json')
request.customer = registered_customer
response = CheckoutViewSet.as_view({'put': 'upload'})(request)
assert response.status_code == 200
assert registered_customer.salutation == data['customer']['salutation']
assert registered_customer.first_name == data['customer']['first_name']
assert registered_customer.last_name == data['customer']['last_name']
assert registered_customer.email == data['customer']['email']
@pytest.fixture
def address_data():
return {
'name': "John Doe",
'address1': "31 Orwell Road",
'zip_code': "L41RG",
'city': "Liverpool",
'country': "GB",
'plugin_id': "1",
'plugin_order': "1",
}
@pytest.mark.django_db
def test_new_shipping_address(registered_customer, api_rf, empty_cart):
"""
    Check that clicking on the "Add new address" button returns an empty address form.
"""
request = api_rf.get('/shop/api/shipping_address/add')
request.customer = registered_customer
request.user = registered_customer.user
response = AddressEditView.as_view(form_class=ShippingAddressForm)(request, priority='add')
assert response.status_code == 200
assert response.data['shipping_address_form']['name'] is None
assert response.data['shipping_address_form']['address1'] is None
assert response.data['shipping_address_form']['zip_code'] is None
assert response.data['shipping_address_form']['city'] is None
assert response.data['shipping_address_form']['country'] is None
@pytest.mark.django_db
def test_add_shipping_address(registered_customer, api_rf, empty_cart, address_data):
data = dict(shipping_address=address_data, active_priority='add')
request = api_rf.put('/shop/api/checkout/upload', data, format='json')
request.customer = registered_customer
request.user = registered_customer.user
assert registered_customer.shippingaddress_set.count() == 0
assert registered_customer.billingaddress_set.count() == 0
response = CheckoutViewSet.as_view({'put': 'upload'})(request)
assert response.status_code == 200
assert response.data['shipping_address_form']['name'] == address_data['name']
label = "1. John Doe – 31 Orwell Road – L41RG Liverpool – United Kingdom"
assert response.data['shipping_address_form']['siblings_summary'][0]['label'] == label
registered_customer.refresh_from_db()
assert registered_customer.billingaddress_set.count() == 0
shipping_address = registered_customer.shippingaddress_set.first()
assert shipping_address
assert shipping_address.name == address_data['name']
assert shipping_address.address1 == address_data['address1']
assert shipping_address.zip_code == address_data['zip_code']
assert shipping_address.city == address_data['city']
assert shipping_address.country == address_data['country']
@pytest.mark.django_db
def test_delete_shipping_address(registered_customer, api_rf, empty_cart, shipping_address_factory):
assert registered_customer.shippingaddress_set.count() == 0
registered_customer.shippingaddress_set.add(shipping_address_factory.create(customer=registered_customer))
registered_customer.shippingaddress_set.add(shipping_address_factory.create(customer=registered_customer))
assert registered_customer.shippingaddress_set.count() == 2
first_priority = registered_customer.shippingaddress_set.first().priority
last_priority = registered_customer.shippingaddress_set.last().priority
assert first_priority != last_priority
request = api_rf.delete('/shop/api/shipping_address/1')
request.customer = registered_customer
request.user = registered_customer.user
response = AddressEditView.as_view(form_class=ShippingAddressForm)(request, priority=first_priority)
assert response.status_code == 200
assert registered_customer.shippingaddress_set.count() == 1
assert registered_customer.shippingaddress_set.first().priority == last_priority
@pytest.mark.django_db
def test_delete_last_shipping_address(registered_customer, api_rf, empty_cart, shipping_address_factory):
registered_customer.shippingaddress_set.add(shipping_address_factory.create(customer=registered_customer))
assert registered_customer.shippingaddress_set.count() == 1
request = api_rf.delete('/shop/api/shipping_address/1')
request.customer = registered_customer
request.user = registered_customer.user
priority = registered_customer.shippingaddress_set.first().priority
response = AddressEditView.as_view(form_class=ShippingAddressForm)(request, priority=priority)
assert response.status_code == 410
assert registered_customer.shippingaddress_set.count() == 0
@pytest.mark.django_db
def test_change_shipping_address(registered_customer, api_rf, empty_cart, address_data):
    data = dict(shipping_address=address_data, active_priority=1)
request = api_rf.put('/shop/api/checkout/upload', data, format='json')
request.customer = registered_customer
response = CheckoutViewSet.as_view({'put': 'upload'})(request)
assert response.status_code == 200
shipping_address = registered_customer.shippingaddress_set.first()
assert shipping_address.id == registered_customer.cart.shipping_address.id
assert shipping_address.name == address_data['name']
assert shipping_address.address1 == address_data['address1']
assert shipping_address.zip_code == address_data['zip_code']
assert shipping_address.city == address_data['city']
assert shipping_address.country == address_data['country']
assert registered_customer.billingaddress_set.first() is None
@pytest.mark.django_db
def test_select_shipping_address(registered_customer, api_rf, empty_cart, shipping_address_factory):
assert registered_customer.shippingaddress_set.count() == 0
address1 = shipping_address_factory.create(customer=registered_customer)
registered_customer.shippingaddress_set.add(address1)
address2 = shipping_address_factory.create(customer=registered_customer)
registered_customer.shippingaddress_set.add(address2)
assert registered_customer.shippingaddress_set.count() == 2
first_priority = registered_customer.shippingaddress_set.first().priority
last_priority = registered_customer.shippingaddress_set.last().priority
assert first_priority != last_priority
request = api_rf.get('/shop/api/shipping_address/0')
request.customer = registered_customer
request.user = registered_customer.user
response = AddressEditView.as_view(form_class=ShippingAddressForm)(request, priority=first_priority)
assert response.status_code == 200
assert response.data['shipping_address_form']['name'] == address1.name
assert response.data['shipping_address_form']['address1'] == address1.address1
assert response.data['shipping_address_form']['zip_code'] == address1.zip_code
assert response.data['shipping_address_form']['city'] == address1.city
assert response.data['shipping_address_form']['country'] == address1.country
data = dict(shipping_address=response.data['shipping_address_form'])
data['shipping_address']['plugin_order'] = 1
request = api_rf.put('/shop/api/shipping_address/0', data, format='json')
request.customer = registered_customer
request.user = registered_customer.user
response = AddressEditView.as_view(form_class=ShippingAddressForm)(request)
assert response.status_code == 200
@pytest.mark.django_db
def test_use_shipping_address_for_billing(registered_customer, api_rf, empty_cart, address_data):
data = {
'shipping_address': dict(address_data, plugin_order=1, active_priority='add'),
'billing_address': {
'active_priority': 'add',
'use_primary_address': True,
'plugin_order': 2,
},
}
request = api_rf.put('/shop/api/checkout/upload', data, format='json')
request.customer = registered_customer
response = CheckoutViewSet.as_view({'put': 'upload'})(request)
assert response.status_code == 200
shipping_address = registered_customer.shippingaddress_set.first()
assert shipping_address is not None
billing_address = registered_customer.billingaddress_set.first()
assert billing_address is None
request = api_rf.get('/shop/api/checkout/digest')
request.customer = registered_customer
response = CheckoutViewSet.as_view({'get': 'digest'})(request)
assert response.status_code == 200
assert response.data['checkout_digest']['billing_address_tag'] == "Use shipping address for billing"
|
import os
import sys
import logging
import argparse
import threading
import time
from queue import Queue
import Pyro4
from gensim import utils
logger = logging.getLogger(__name__)
# How many jobs (=chunks of N documents) to keep "pre-fetched" in a queue?
# A small number is usually enough, unless iteration over the corpus is very very
# slow (slower than the actual computation of LSI), in which case you can override
# this value from the command line, e.g. run "python ./lsi_dispatcher.py 100".
MAX_JOBS_QUEUE = 10
# timeout for the Queue object put/get blocking methods.
# it should really be infinity, but then keyboard interrupts don't work.
# so this is really just a hack, see http://bugs.python.org/issue1360
HUGE_TIMEOUT = 365 * 24 * 60 * 60 # one year
class Dispatcher:
"""Dispatcher object that communicates and coordinates individual workers.
Warnings
--------
There should never be more than one dispatcher running at any one time.
"""
def __init__(self, maxsize=0):
"""Partly initialize the dispatcher.
A full initialization (including initialization of the workers) requires a call to
:meth:`~gensim.models.lsi_dispatcher.Dispatcher.initialize`
Parameters
----------
maxsize : int, optional
Maximum number of jobs to be kept pre-fetched in the queue.
"""
self.maxsize = maxsize
self.workers = {}
self.callback = None # a pyro proxy to this object (unknown at init time, but will be set later)
@Pyro4.expose
def initialize(self, **model_params):
"""Fully initialize the dispatcher and all its workers.
Parameters
----------
**model_params
Keyword parameters used to initialize individual workers
(gets handed all the way down to :meth:`gensim.models.lsi_worker.Worker.initialize`).
See :class:`~gensim.models.lsimodel.LsiModel`.
Raises
------
RuntimeError
            When no workers are found (the :mod:`gensim.models.lsi_worker` script must be run beforehand).
"""
self.jobs = Queue(maxsize=self.maxsize)
self.lock_update = threading.Lock()
self._jobsdone = 0
self._jobsreceived = 0
# locate all available workers and store their proxies, for subsequent RMI calls
self.workers = {}
with utils.getNS() as ns:
self.callback = Pyro4.Proxy('PYRONAME:gensim.lsi_dispatcher') # = self
for name, uri in ns.list(prefix='gensim.lsi_worker').items():
try:
worker = Pyro4.Proxy(uri)
workerid = len(self.workers)
# make time consuming methods work asynchronously
logger.info("registering worker #%i from %s", workerid, uri)
worker.initialize(workerid, dispatcher=self.callback, **model_params)
self.workers[workerid] = worker
except Pyro4.errors.PyroError:
logger.exception("unresponsive worker at %s, deleting it from the name server", uri)
ns.remove(name)
if not self.workers:
raise RuntimeError('no workers found; run some lsi_worker scripts on your machines first!')
@Pyro4.expose
def getworkers(self):
"""Get pyro URIs of all registered workers.
Returns
-------
list of URIs
The pyro URIs for each worker.
"""
return [worker._pyroUri for worker in self.workers.values()]
@Pyro4.expose
def getjob(self, worker_id):
"""Atomically pop a job from the queue.
Parameters
----------
worker_id : int
The worker that requested the job.
Returns
-------
iterable of iterable of (int, float)
The corpus in BoW format.
"""
logger.info("worker #%i requesting a new job", worker_id)
job = self.jobs.get(block=True, timeout=1)
logger.info("worker #%i got a new job (%i left)", worker_id, self.jobs.qsize())
return job
@Pyro4.expose
def putjob(self, job):
"""Atomically add a job to the queue.
Parameters
----------
job : iterable of list of (int, float)
The corpus in BoW format.
"""
self._jobsreceived += 1
self.jobs.put(job, block=True, timeout=HUGE_TIMEOUT)
logger.info("added a new job (len(queue)=%i items)", self.jobs.qsize())
@Pyro4.expose
def getstate(self):
"""Merge projections from across all workers and get the final projection.
Returns
-------
:class:`~gensim.models.lsimodel.Projection`
The current projection of the total model.
"""
logger.info("end of input, assigning all remaining jobs")
logger.debug("jobs done: %s, jobs received: %s", self._jobsdone, self._jobsreceived)
while self._jobsdone < self._jobsreceived:
time.sleep(0.5) # check every half a second
# TODO: merge in parallel, so that we're done in `log_2(workers)` merges,
# and not `workers - 1` merges!
# but merging only takes place once, after all input data has been processed,
# so the overall effect would be small... compared to the amount of coding :-)
logger.info("merging states from %i workers", len(self.workers))
workers = list(self.workers.items())
result = workers[0][1].getstate()
for workerid, worker in workers[1:]:
logger.info("pulling state from worker %s", workerid)
result.merge(worker.getstate())
logger.info("sending out merged projection")
return result
@Pyro4.expose
def reset(self):
"""Re-initialize all workers for a new decomposition."""
for workerid, worker in self.workers.items():
logger.info("resetting worker %s", workerid)
worker.reset()
worker.requestjob()
self._jobsdone = 0
self._jobsreceived = 0
@Pyro4.expose
@Pyro4.oneway
@utils.synchronous('lock_update')
def jobdone(self, workerid):
"""A worker has finished its job. Log this event and then asynchronously transfer control back to the worker.
Callback used by workers to notify when their job is done.
        The job done event is logged and then control is asynchronously transferred back to the worker
(who can then request another job). In this way, control flow basically oscillates between
:meth:`gensim.models.lsi_dispatcher.Dispatcher.jobdone` and :meth:`gensim.models.lsi_worker.Worker.requestjob`.
Parameters
----------
workerid : int
The ID of the worker that finished the job (used for logging).
"""
self._jobsdone += 1
logger.info("worker #%s finished job #%i", workerid, self._jobsdone)
worker = self.workers[workerid]
worker.requestjob() # tell the worker to ask for another job, asynchronously (one-way)
def jobsdone(self):
"""Wrap :attr:`~gensim.models.lsi_dispatcher.Dispatcher._jobsdone`, needed for remote access through proxies.
Returns
-------
int
Number of jobs already completed.
"""
return self._jobsdone
@Pyro4.oneway
def exit(self):
"""Terminate all registered workers and then the dispatcher."""
for workerid, worker in self.workers.items():
logger.info("terminating worker %s", workerid)
worker.exit()
logger.info("terminating dispatcher")
        os._exit(0)  # exit the whole process (not just this thread, as sys.exit() would)
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s - %(levelname)s - %(message)s', level=logging.INFO)
parser = argparse.ArgumentParser(description=__doc__[:-135], formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument(
        'maxsize', type=int, nargs='?', help='Maximum number of jobs to be kept pre-fetched in the queue.', default=MAX_JOBS_QUEUE
)
args = parser.parse_args()
logger.info("running %s", " ".join(sys.argv))
utils.pyro_daemon('gensim.lsi_dispatcher', Dispatcher(maxsize=args.maxsize))
logger.info("finished running %s", parser.prog)
|
from asyncio import Future
from homeassistant.components.group.reproduce_state import async_reproduce_states
from homeassistant.core import Context, State
from tests.async_mock import patch
async def test_reproduce_group(hass):
"""Test reproduce_state with group."""
context = Context()
def clone_state(state, entity_id):
"""Return a cloned state with different entity_id."""
return State(
entity_id,
state.state,
state.attributes,
last_changed=state.last_changed,
last_updated=state.last_updated,
context=state.context,
)
with patch(
"homeassistant.components.group.reproduce_state.async_reproduce_state"
) as fun:
fun.return_value = Future()
fun.return_value.set_result(None)
hass.states.async_set(
"group.test",
"off",
{"entity_id": ["light.test1", "light.test2", "switch.test1"]},
)
hass.states.async_set("light.test1", "off")
hass.states.async_set("light.test2", "off")
hass.states.async_set("switch.test1", "off")
state = State("group.test", "on")
await async_reproduce_states(hass, [state], context=context)
fun.assert_called_once_with(
hass,
[
clone_state(state, "light.test1"),
clone_state(state, "light.test2"),
clone_state(state, "switch.test1"),
],
context=context,
reproduce_options=None,
)
|
import copy
import json
import pytest
from homeassistant.components import switch
from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON
import homeassistant.core as ha
from homeassistant.setup import async_setup_component
from .test_common import (
help_test_availability_when_connection_lost,
help_test_availability_without_topic,
help_test_custom_availability_payload,
help_test_default_availability_payload,
help_test_discovery_broken,
help_test_discovery_removal,
help_test_discovery_update,
help_test_discovery_update_attr,
help_test_discovery_update_unchanged,
help_test_entity_debug_info_message,
help_test_entity_device_info_remove,
help_test_entity_device_info_update,
help_test_entity_device_info_with_connection,
help_test_entity_device_info_with_identifier,
help_test_entity_id_update_discovery_update,
help_test_entity_id_update_subscriptions,
help_test_setting_attribute_via_mqtt_json_message,
help_test_setting_attribute_with_template,
help_test_unique_id,
help_test_update_with_json_attrs_bad_JSON,
help_test_update_with_json_attrs_not_dict,
)
from tests.async_mock import patch
from tests.common import async_fire_mqtt_message
from tests.components.switch import common
DEFAULT_CONFIG = {
switch.DOMAIN: {"platform": "mqtt", "name": "test", "command_topic": "test-topic"}
}
async def test_controlling_state_via_topic(hass, mqtt_mock):
"""Test the controlling state via topic."""
assert await async_setup_component(
hass,
switch.DOMAIN,
{
switch.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"payload_on": 1,
"payload_off": 0,
}
},
)
await hass.async_block_till_done()
state = hass.states.get("switch.test")
assert state.state == STATE_OFF
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "state-topic", "1")
state = hass.states.get("switch.test")
assert state.state == STATE_ON
async_fire_mqtt_message(hass, "state-topic", "0")
state = hass.states.get("switch.test")
assert state.state == STATE_OFF
async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock):
"""Test the sending MQTT commands in optimistic mode."""
fake_state = ha.State("switch.test", "on")
with patch(
"homeassistant.helpers.restore_state.RestoreEntity.async_get_last_state",
return_value=fake_state,
):
assert await async_setup_component(
hass,
switch.DOMAIN,
{
switch.DOMAIN: {
"platform": "mqtt",
"name": "test",
"command_topic": "command-topic",
"payload_on": "beer on",
"payload_off": "beer off",
"qos": "2",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("switch.test")
assert state.state == STATE_ON
assert state.attributes.get(ATTR_ASSUMED_STATE)
await common.async_turn_on(hass, "switch.test")
mqtt_mock.async_publish.assert_called_once_with(
"command-topic", "beer on", 2, False
)
mqtt_mock.async_publish.reset_mock()
state = hass.states.get("switch.test")
assert state.state == STATE_ON
await common.async_turn_off(hass, "switch.test")
mqtt_mock.async_publish.assert_called_once_with(
"command-topic", "beer off", 2, False
)
state = hass.states.get("switch.test")
assert state.state == STATE_OFF
async def test_controlling_state_via_topic_and_json_message(hass, mqtt_mock):
"""Test the controlling state via topic and JSON message."""
assert await async_setup_component(
hass,
switch.DOMAIN,
{
switch.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"payload_on": "beer on",
"payload_off": "beer off",
"value_template": "{{ value_json.val }}",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("switch.test")
assert state.state == STATE_OFF
async_fire_mqtt_message(hass, "state-topic", '{"val":"beer on"}')
state = hass.states.get("switch.test")
assert state.state == STATE_ON
async_fire_mqtt_message(hass, "state-topic", '{"val":"beer off"}')
state = hass.states.get("switch.test")
assert state.state == STATE_OFF
async def test_availability_when_connection_lost(hass, mqtt_mock):
"""Test availability after MQTT disconnection."""
await help_test_availability_when_connection_lost(
hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG
)
async def test_availability_without_topic(hass, mqtt_mock):
"""Test availability without defined availability topic."""
await help_test_availability_without_topic(
hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG
)
async def test_default_availability_payload(hass, mqtt_mock):
"""Test availability by default payload with defined topic."""
config = {
switch.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"payload_on": 1,
"payload_off": 0,
}
}
await help_test_default_availability_payload(
hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1"
)
async def test_custom_availability_payload(hass, mqtt_mock):
"""Test availability by custom payload with defined topic."""
config = {
switch.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"payload_on": 1,
"payload_off": 0,
}
}
await help_test_custom_availability_payload(
hass, mqtt_mock, switch.DOMAIN, config, True, "state-topic", "1"
)
async def test_custom_state_payload(hass, mqtt_mock):
"""Test the state payload."""
assert await async_setup_component(
hass,
switch.DOMAIN,
{
switch.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"payload_on": 1,
"payload_off": 0,
"state_on": "HIGH",
"state_off": "LOW",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("switch.test")
assert state.state == STATE_OFF
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "state-topic", "HIGH")
state = hass.states.get("switch.test")
assert state.state == STATE_ON
async_fire_mqtt_message(hass, "state-topic", "LOW")
state = hass.states.get("switch.test")
assert state.state == STATE_OFF
async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock):
"""Test the setting of attribute via MQTT with JSON payload."""
await help_test_setting_attribute_via_mqtt_json_message(
hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG
)
async def test_setting_attribute_with_template(hass, mqtt_mock):
"""Test the setting of attribute via MQTT with JSON payload."""
await help_test_setting_attribute_with_template(
hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG
)
async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog):
"""Test attributes get extracted from a JSON result."""
await help_test_update_with_json_attrs_not_dict(
hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG
)
async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog):
"""Test attributes get extracted from a JSON result."""
await help_test_update_with_json_attrs_bad_JSON(
hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG
)
async def test_discovery_update_attr(hass, mqtt_mock, caplog):
"""Test update of discovered MQTTAttributes."""
await help_test_discovery_update_attr(
hass, mqtt_mock, caplog, switch.DOMAIN, DEFAULT_CONFIG
)
async def test_unique_id(hass, mqtt_mock):
"""Test unique id option only creates one switch per unique_id."""
config = {
switch.DOMAIN: [
{
"platform": "mqtt",
"name": "Test 1",
"state_topic": "test-topic",
"command_topic": "command-topic",
"unique_id": "TOTALLY_UNIQUE",
},
{
"platform": "mqtt",
"name": "Test 2",
"state_topic": "test-topic",
"command_topic": "command-topic",
"unique_id": "TOTALLY_UNIQUE",
},
]
}
await help_test_unique_id(hass, mqtt_mock, switch.DOMAIN, config)
async def test_discovery_removal_switch(hass, mqtt_mock, caplog):
"""Test removal of discovered switch."""
data = (
'{ "name": "test",'
' "state_topic": "test_topic",'
' "command_topic": "test_topic" }'
)
await help_test_discovery_removal(hass, mqtt_mock, caplog, switch.DOMAIN, data)
async def test_discovery_update_switch_topic_template(hass, mqtt_mock, caplog):
"""Test update of discovered switch."""
config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN])
config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN])
config1["name"] = "Beer"
config2["name"] = "Milk"
config1["state_topic"] = "switch/state1"
config2["state_topic"] = "switch/state2"
config1["value_template"] = "{{ value_json.state1.state }}"
config2["value_template"] = "{{ value_json.state2.state }}"
state_data1 = [
([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None),
]
state_data2 = [
([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None),
([("switch/state2", '{"state2":{"state":"ON"}}')], "on", None),
([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None),
([("switch/state1", '{"state2":{"state":"OFF"}}')], "on", None),
([("switch/state2", '{"state1":{"state":"OFF"}}')], "on", None),
([("switch/state2", '{"state2":{"state":"OFF"}}')], "off", None),
]
data1 = json.dumps(config1)
data2 = json.dumps(config2)
await help_test_discovery_update(
hass,
mqtt_mock,
caplog,
switch.DOMAIN,
data1,
data2,
state_data1=state_data1,
state_data2=state_data2,
)
async def test_discovery_update_switch_template(hass, mqtt_mock, caplog):
"""Test update of discovered switch."""
config1 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN])
config2 = copy.deepcopy(DEFAULT_CONFIG[switch.DOMAIN])
config1["name"] = "Beer"
config2["name"] = "Milk"
config1["state_topic"] = "switch/state1"
config2["state_topic"] = "switch/state1"
config1["value_template"] = "{{ value_json.state1.state }}"
config2["value_template"] = "{{ value_json.state2.state }}"
state_data1 = [
([("switch/state1", '{"state1":{"state":"ON"}}')], "on", None),
]
state_data2 = [
([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None),
([("switch/state1", '{"state2":{"state":"ON"}}')], "on", None),
([("switch/state1", '{"state1":{"state":"OFF"}}')], "on", None),
([("switch/state1", '{"state2":{"state":"OFF"}}')], "off", None),
]
data1 = json.dumps(config1)
data2 = json.dumps(config2)
await help_test_discovery_update(
hass,
mqtt_mock,
caplog,
switch.DOMAIN,
data1,
data2,
state_data1=state_data1,
state_data2=state_data2,
)
async def test_discovery_update_unchanged_switch(hass, mqtt_mock, caplog):
"""Test update of discovered switch."""
data1 = (
'{ "name": "Beer",'
' "state_topic": "test_topic",'
' "command_topic": "test_topic" }'
)
with patch(
"homeassistant.components.mqtt.switch.MqttSwitch.discovery_update"
) as discovery_update:
await help_test_discovery_update_unchanged(
hass, mqtt_mock, caplog, switch.DOMAIN, data1, discovery_update
)
@pytest.mark.no_fail_on_log_exception
async def test_discovery_broken(hass, mqtt_mock, caplog):
"""Test handling of bad discovery message."""
data1 = '{ "name": "Beer" }'
data2 = (
'{ "name": "Milk",'
' "state_topic": "test_topic",'
' "command_topic": "test_topic" }'
)
await help_test_discovery_broken(
hass, mqtt_mock, caplog, switch.DOMAIN, data1, data2
)
async def test_entity_device_info_with_connection(hass, mqtt_mock):
"""Test MQTT switch device registry integration."""
await help_test_entity_device_info_with_connection(
hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG
)
async def test_entity_device_info_with_identifier(hass, mqtt_mock):
"""Test MQTT switch device registry integration."""
await help_test_entity_device_info_with_identifier(
hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG
)
async def test_entity_device_info_update(hass, mqtt_mock):
"""Test device registry update."""
await help_test_entity_device_info_update(
hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG
)
async def test_entity_device_info_remove(hass, mqtt_mock):
"""Test device registry remove."""
await help_test_entity_device_info_remove(
hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG
)
async def test_entity_id_update_subscriptions(hass, mqtt_mock):
"""Test MQTT subscriptions are managed when entity_id is updated."""
await help_test_entity_id_update_subscriptions(
hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG
)
async def test_entity_id_update_discovery_update(hass, mqtt_mock):
"""Test MQTT discovery update when entity_id is updated."""
await help_test_entity_id_update_discovery_update(
hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG
)
async def test_entity_debug_info_message(hass, mqtt_mock):
"""Test MQTT debug info."""
await help_test_entity_debug_info_message(
hass, mqtt_mock, switch.DOMAIN, DEFAULT_CONFIG
)
|
from collections import defaultdict
from typing import Callable
from typing import Dict
_autoscaling_components: Dict[str, Dict[str, Callable]] = defaultdict(dict)
def register_autoscaling_component(name, method_type):
def outer(autoscaling_method):
_autoscaling_components[method_type][name] = autoscaling_method
return autoscaling_method
return outer
def get_autoscaling_component(name, method_type):
return _autoscaling_components[method_type][name]
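# Illustrative usage sketch (the component name and method type below are
# invented for demonstration; real callers register their own callables):
@register_autoscaling_component("demo", "decision_policy")
def demo_decision_policy(*args, **kwargs):
    """A stand-in decision policy used only to show how registration works."""
    return 0


assert get_autoscaling_component("demo", "decision_policy") is demo_decision_policy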
|
import urwid
class Pudding(urwid.Widget):
_sizing = frozenset(['flow'])
def rows(self, size, focus=False):
return 1
def render(self, size, focus=False):
(maxcol,) = size
        num_pudding = maxcol // len("Pudding")
        return urwid.TextCanvas([b"Pudding" * num_pudding], maxcol=maxcol)
class BoxPudding(urwid.Widget):
_sizing = frozenset(['box'])
def render(self, size, focus=False):
(maxcol, maxrow) = size
        num_pudding = maxcol // len("Pudding")
        return urwid.TextCanvas([b"Pudding" * num_pudding] * maxrow,
                                maxcol=maxcol)
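# Minimal usage sketch (an addition for illustration, not part of the original
# example): wrap the flow widget in a Filler and run it under a MainLoop.
if __name__ == "__main__":
    fill = urwid.Filler(Pudding(), "top")
    loop = urwid.MainLoop(fill)
    loop.run()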
|
import logging
import secrets
from typing import Optional
from toonapi import Status, Toon, ToonError
from homeassistant.components.webhook import (
async_register as webhook_register,
async_unregister as webhook_unregister,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_WEBHOOK_ID, EVENT_HOMEASSISTANT_STOP
from homeassistant.core import Event, HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.config_entry_oauth2_flow import OAuth2Session
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import CONF_CLOUDHOOK_URL, DEFAULT_SCAN_INTERVAL, DOMAIN
_LOGGER = logging.getLogger(__name__)
class ToonDataUpdateCoordinator(DataUpdateCoordinator[Status]):
"""Class to manage fetching Toon data from single endpoint."""
def __init__(
self, hass: HomeAssistant, *, entry: ConfigEntry, session: OAuth2Session
):
"""Initialize global Toon data updater."""
self.session = session
self.entry = entry
async def async_token_refresh() -> str:
await session.async_ensure_token_valid()
return session.token["access_token"]
self.toon = Toon(
token=session.token["access_token"],
session=async_get_clientsession(hass),
token_refresh_method=async_token_refresh,
)
super().__init__(
hass, _LOGGER, name=DOMAIN, update_interval=DEFAULT_SCAN_INTERVAL
)
def update_listeners(self) -> None:
"""Call update on all listeners."""
for update_callback in self._listeners:
update_callback()
async def register_webhook(self, event: Optional[Event] = None) -> None:
"""Register a webhook with Toon to get live updates."""
if CONF_WEBHOOK_ID not in self.entry.data:
data = {**self.entry.data, CONF_WEBHOOK_ID: secrets.token_hex()}
self.hass.config_entries.async_update_entry(self.entry, data=data)
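        # Prefer a Nabu Casa cloudhook when a cloud subscription is active so
        # Toon can reach Home Assistant from outside the local network;
        # otherwise fall back to a locally generated webhook URL.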
if self.hass.components.cloud.async_active_subscription():
if CONF_CLOUDHOOK_URL not in self.entry.data:
webhook_url = await self.hass.components.cloud.async_create_cloudhook(
self.entry.data[CONF_WEBHOOK_ID]
)
data = {**self.entry.data, CONF_CLOUDHOOK_URL: webhook_url}
self.hass.config_entries.async_update_entry(self.entry, data=data)
else:
webhook_url = self.entry.data[CONF_CLOUDHOOK_URL]
else:
webhook_url = self.hass.components.webhook.async_generate_url(
self.entry.data[CONF_WEBHOOK_ID]
)
# Ensure the webhook is not registered already
webhook_unregister(self.hass, self.entry.data[CONF_WEBHOOK_ID])
webhook_register(
self.hass,
DOMAIN,
"Toon",
self.entry.data[CONF_WEBHOOK_ID],
self.handle_webhook,
)
try:
await self.toon.subscribe_webhook(
application_id=self.entry.entry_id, url=webhook_url
)
_LOGGER.info("Registered Toon webhook: %s", webhook_url)
except ToonError as err:
_LOGGER.error("Error during webhook registration - %s", err)
self.hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_STOP, self.unregister_webhook
)
async def handle_webhook(
self, hass: HomeAssistant, webhook_id: str, request
) -> None:
"""Handle webhook callback."""
try:
data = await request.json()
except ValueError:
return
_LOGGER.debug("Got webhook data: %s", data)
# Webhook expired notification, re-register
if data.get("code") == 510:
await self.register_webhook()
return
if (
"updateDataSet" not in data
or "commonName" not in data
or self.data.agreement.display_common_name != data["commonName"]
):
_LOGGER.warning("Received invalid data from Toon webhook - %s", data)
return
try:
await self.toon.update(data["updateDataSet"])
self.update_listeners()
except ToonError as err:
_LOGGER.error("Could not process data received from Toon webhook - %s", err)
async def unregister_webhook(self, event: Optional[Event] = None) -> None:
"""Remove / Unregister webhook for toon."""
_LOGGER.debug(
"Unregistering Toon webhook (%s)", self.entry.data[CONF_WEBHOOK_ID]
)
try:
await self.toon.unsubscribe_webhook(self.entry.entry_id)
except ToonError as err:
_LOGGER.error("Failed unregistering Toon webhook - %s", err)
webhook_unregister(self.hass, self.entry.data[CONF_WEBHOOK_ID])
async def _async_update_data(self) -> Status:
"""Fetch data from Toon."""
try:
return await self.toon.update()
except ToonError as error:
raise UpdateFailed(f"Invalid response from API: {error}") from error
|
from homeassistant.components.binary_sensor import BinarySensorEntity
from homeassistant.const import CONF_NAME
from . import PiHoleEntity
from .const import DATA_KEY_API, DATA_KEY_COORDINATOR, DOMAIN as PIHOLE_DOMAIN
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up the Pi-hole binary sensor."""
name = entry.data[CONF_NAME]
hole_data = hass.data[PIHOLE_DOMAIN][entry.entry_id]
binary_sensors = [
PiHoleBinarySensor(
hole_data[DATA_KEY_API],
hole_data[DATA_KEY_COORDINATOR],
name,
entry.entry_id,
)
]
async_add_entities(binary_sensors, True)
class PiHoleBinarySensor(PiHoleEntity, BinarySensorEntity):
"""Representation of a Pi-hole binary sensor."""
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unique_id(self):
"""Return the unique id of the sensor."""
return f"{self._server_unique_id}/Status"
@property
def is_on(self):
"""Return if the service is on."""
return self.api.data.get("status") == "enabled"
|
from plumbum.commands.processes import CommandNotFound
from plumbum.commands.processes import ProcessExecutionError
from plumbum.commands.processes import ProcessTimedOut
class PopenAddons(object):
"""This adds a verify to popen objects to that the correct command is attributed when
an error is thrown."""
def verify(self, retcode, timeout, stdout, stderr):
"""This verifies that the correct command is attributed."""
if getattr(self, "_timed_out", False):
raise ProcessTimedOut(
"Process did not terminate within %s seconds" % (timeout, ),
getattr(self, "argv", None))
if retcode is not None:
if hasattr(retcode, "__contains__"):
if self.returncode not in retcode:
raise ProcessExecutionError(
getattr(self, "argv", None), self.returncode, stdout,
stderr)
elif self.returncode != retcode:
raise ProcessExecutionError(
getattr(self, "argv", None), self.returncode, stdout,
stderr)
class BaseMachine(object):
"""This is a base class for other machines. It contains common code to
all machines in Plumbum."""
def get(self, cmd, *othercommands):
"""This works a little like the ``.get`` method with dict's, only
it supports an unlimited number of arguments, since later arguments
are tried as commands and could also fail. It
will try to call the first command, and if that is not found,
it will call the next, etc. Will raise if no file named for the
executable if a path is given, unlike ``[]`` access.
Usage::
best_zip = local.get('pigz','gzip')
"""
try:
command = self[cmd]
if not command.executable.exists():
raise CommandNotFound(cmd, command.executable)
else:
return command
except CommandNotFound:
if othercommands:
return self.get(othercommands[0], *othercommands[1:])
else:
raise
def __contains__(self, cmd):
"""Tests for the existance of the command, e.g., ``"ls" in plumbum.local``.
``cmd`` can be anything acceptable by ``__getitem__``.
"""
try:
self[cmd]
except CommandNotFound:
return False
else:
return True
@property
def encoding(self):
'This is a wrapper for custom_encoding'
return self.custom_encoding
@encoding.setter
def encoding(self, value):
self.custom_encoding = value
def daemonic_popen(self,
command,
cwd="/",
stdout=None,
stderr=None,
append=True):
raise NotImplementedError("This is not implemented on this machine!")
class Cmd(object):
def __init__(self, machine):
self._machine = machine
def __getattr__(self, name):
try:
return self._machine[name]
except CommandNotFound:
raise AttributeError(name)
@property
def cmd(self):
return self.Cmd(self)
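# Illustrative usage sketch (assumes plumbum's ``local`` machine, which derives
# from BaseMachine, is importable in the caller's environment):
#
#     from plumbum import local
#
#     if "gzip" in local:                        # BaseMachine.__contains__
#         best_zip = local.get("pigz", "gzip")   # falls back when pigz is absent
#         print(best_zip("--version"))
#     print(local.cmd.ls())                      # attribute-style lookup via Cmd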
|
from typing import List
import voluptuous as vol
from homeassistant.const import CONF_DEVICE_ID, CONF_DOMAIN, CONF_TYPE
from homeassistant.core import Context, HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.typing import ConfigType, TemplateVarsType
from . import DOMAIN
from .api import SERVICE_WARNING_DEVICE_SQUAWK, SERVICE_WARNING_DEVICE_WARN
from .core.const import CHANNEL_IAS_WD
from .core.helpers import async_get_zha_device
ACTION_SQUAWK = "squawk"
ACTION_WARN = "warn"
ATTR_DATA = "data"
ATTR_IEEE = "ieee"
CONF_ZHA_ACTION_TYPE = "zha_action_type"
ZHA_ACTION_TYPE_SERVICE_CALL = "service_call"
ACTION_SCHEMA = cv.DEVICE_ACTION_BASE_SCHEMA.extend(
{vol.Required(CONF_DOMAIN): DOMAIN, vol.Required(CONF_TYPE): str}
)
DEVICE_ACTIONS = {
CHANNEL_IAS_WD: [
{CONF_TYPE: ACTION_SQUAWK, CONF_DOMAIN: DOMAIN},
{CONF_TYPE: ACTION_WARN, CONF_DOMAIN: DOMAIN},
]
}
DEVICE_ACTION_TYPES = {
ACTION_SQUAWK: ZHA_ACTION_TYPE_SERVICE_CALL,
ACTION_WARN: ZHA_ACTION_TYPE_SERVICE_CALL,
}
SERVICE_NAMES = {
ACTION_SQUAWK: SERVICE_WARNING_DEVICE_SQUAWK,
ACTION_WARN: SERVICE_WARNING_DEVICE_WARN,
}
async def async_call_action_from_config(
hass: HomeAssistant,
config: ConfigType,
variables: TemplateVarsType,
context: Context,
) -> None:
"""Perform an action based on configuration."""
await ZHA_ACTION_TYPES[DEVICE_ACTION_TYPES[config[CONF_TYPE]]](
hass, config, variables, context
)
async def async_get_actions(hass: HomeAssistant, device_id: str) -> List[dict]:
"""List device actions."""
try:
zha_device = await async_get_zha_device(hass, device_id)
except (KeyError, AttributeError):
return []
cluster_channels = [
ch.name
for pool in zha_device.channels.pools
for ch in pool.claimed_channels.values()
]
actions = [
action
for channel in DEVICE_ACTIONS
for action in DEVICE_ACTIONS[channel]
if channel in cluster_channels
]
for action in actions:
action[CONF_DEVICE_ID] = device_id
return actions
async def _execute_service_based_action(
hass: HomeAssistant,
config: ACTION_SCHEMA,
variables: TemplateVarsType,
context: Context,
) -> None:
action_type = config[CONF_TYPE]
service_name = SERVICE_NAMES[action_type]
try:
zha_device = await async_get_zha_device(hass, config[CONF_DEVICE_ID])
except (KeyError, AttributeError):
return
service_data = {ATTR_IEEE: str(zha_device.ieee)}
await hass.services.async_call(
DOMAIN, service_name, service_data, blocking=True, context=context
)
ZHA_ACTION_TYPES = {ZHA_ACTION_TYPE_SERVICE_CALL: _execute_service_based_action}
|
import pickle
import random
from collections import deque
from copy import copy as shallow_copy
import pytest
from markupsafe import Markup
from jinja2.utils import consume
from jinja2.utils import generate_lorem_ipsum
from jinja2.utils import LRUCache
from jinja2.utils import missing
from jinja2.utils import object_type_repr
from jinja2.utils import select_autoescape
from jinja2.utils import urlize
class TestLRUCache:
def test_simple(self):
d = LRUCache(3)
d["a"] = 1
d["b"] = 2
d["c"] = 3
d["a"]
d["d"] = 4
assert len(d) == 3
assert "a" in d and "c" in d and "d" in d and "b" not in d
def test_itervalues(self):
cache = LRUCache(3)
cache["b"] = 1
cache["a"] = 2
values = [v for v in cache.values()]
assert len(values) == 2
assert 1 in values
assert 2 in values
def test_itervalues_empty(self):
cache = LRUCache(2)
values = [v for v in cache.values()]
assert len(values) == 0
def test_pickleable(self):
cache = LRUCache(2)
cache["foo"] = 42
cache["bar"] = 23
cache["foo"]
for protocol in range(3):
copy = pickle.loads(pickle.dumps(cache, protocol))
assert copy.capacity == cache.capacity
assert copy._mapping == cache._mapping
assert copy._queue == cache._queue
@pytest.mark.parametrize("copy_func", [LRUCache.copy, shallow_copy])
def test_copy(self, copy_func):
cache = LRUCache(2)
cache["a"] = 1
cache["b"] = 2
copy = copy_func(cache)
assert copy._queue == cache._queue
copy["c"] = 3
assert copy._queue != cache._queue
assert "a" not in copy and "b" in copy and "c" in copy
def test_clear(self):
d = LRUCache(3)
d["a"] = 1
d["b"] = 2
d["c"] = 3
d.clear()
assert d.__getstate__() == {"capacity": 3, "_mapping": {}, "_queue": deque([])}
def test_repr(self):
d = LRUCache(3)
d["a"] = 1
d["b"] = 2
d["c"] = 3
# Sort the strings - mapping is unordered
assert sorted(repr(d)) == sorted("<LRUCache {'a': 1, 'b': 2, 'c': 3}>")
def test_items(self):
"""Test various items, keys, values and iterators of LRUCache."""
d = LRUCache(3)
d["a"] = 1
d["b"] = 2
d["c"] = 3
assert d.items() == [("c", 3), ("b", 2), ("a", 1)]
assert d.keys() == ["c", "b", "a"]
assert d.values() == [3, 2, 1]
assert list(reversed(d)) == ["a", "b", "c"]
# Change the cache a little
d["b"]
d["a"] = 4
assert d.items() == [("a", 4), ("b", 2), ("c", 3)]
assert d.keys() == ["a", "b", "c"]
assert d.values() == [4, 2, 3]
assert list(reversed(d)) == ["c", "b", "a"]
def test_setdefault(self):
d = LRUCache(3)
assert len(d) == 0
assert d.setdefault("a") is None
assert d.setdefault("a", 1) is None
assert len(d) == 1
assert d.setdefault("b", 2) == 2
assert len(d) == 2
class TestHelpers:
def test_object_type_repr(self):
class X:
pass
assert object_type_repr(42) == "int object"
assert object_type_repr([]) == "list object"
assert object_type_repr(X()) == "test_utils.X object"
assert object_type_repr(None) == "None"
assert object_type_repr(Ellipsis) == "Ellipsis"
def test_autoescape_select(self):
func = select_autoescape(
enabled_extensions=("html", ".htm"),
disabled_extensions=("txt",),
default_for_string="STRING",
default="NONE",
)
assert func(None) == "STRING"
assert func("unknown.foo") == "NONE"
assert func("foo.html")
assert func("foo.htm")
assert not func("foo.txt")
assert func("FOO.HTML")
assert not func("FOO.TXT")
class TestEscapeUrlizeTarget:
def test_escape_urlize_target(self):
url = "http://example.org"
target = "<script>"
assert urlize(url, target=target) == (
'<a href="http://example.org"'
' target="<script>">'
"http://example.org</a>"
)
class TestLoremIpsum:
def test_lorem_ipsum_markup(self):
"""Test that output of lorem_ipsum is Markup by default."""
assert isinstance(generate_lorem_ipsum(), Markup)
def test_lorem_ipsum_html(self):
"""Test that output of lorem_ipsum is a string_type when not html."""
assert isinstance(generate_lorem_ipsum(html=False), str)
def test_lorem_ipsum_n(self):
"""Test that the n (number of lines) works as expected."""
assert generate_lorem_ipsum(n=0, html=False) == ""
for n in range(1, 50):
assert generate_lorem_ipsum(n=n, html=False).count("\n") == (n - 1) * 2
def test_lorem_ipsum_min(self):
"""Test that at least min words are in the output of each line"""
for _ in range(5):
m = random.randrange(20, 99)
for _ in range(10):
assert generate_lorem_ipsum(n=1, min=m, html=False).count(" ") >= m - 1
def test_lorem_ipsum_max(self):
"""Test that at least max words are in the output of each line"""
for _ in range(5):
m = random.randrange(21, 100)
for _ in range(10):
assert generate_lorem_ipsum(n=1, max=m, html=False).count(" ") < m - 1
def test_missing():
"""Test the repr of missing."""
assert repr(missing) == "missing"
def test_consume():
"""Test that consume consumes an iterator."""
x = iter([1, 2, 3, 4, 5])
consume(x)
with pytest.raises(StopIteration):
next(x)
|
from httplib2 import HTTPConnectionWithTimeout, HTTPSConnectionWithTimeout
from ..stubs import VCRHTTPConnection, VCRHTTPSConnection
class VCRHTTPConnectionWithTimeout(VCRHTTPConnection, HTTPConnectionWithTimeout):
_baseclass = HTTPConnectionWithTimeout
def __init__(self, *args, **kwargs):
"""I overrode the init because I need to clean kwargs before calling
HTTPConnection.__init__."""
# Delete the keyword arguments that HTTPConnection would not recognize
safe_keys = {"host", "port", "strict", "timeout", "source_address"}
unknown_keys = set(kwargs.keys()) - safe_keys
safe_kwargs = kwargs.copy()
for kw in unknown_keys:
del safe_kwargs[kw]
self.proxy_info = kwargs.pop("proxy_info", None)
VCRHTTPConnection.__init__(self, *args, **safe_kwargs)
self.sock = self.real_connection.sock
class VCRHTTPSConnectionWithTimeout(VCRHTTPSConnection, HTTPSConnectionWithTimeout):
_baseclass = HTTPSConnectionWithTimeout
def __init__(self, *args, **kwargs):
# Delete the keyword arguments that HTTPSConnection would not recognize
safe_keys = {
"host",
"port",
"key_file",
"cert_file",
"strict",
"timeout",
"source_address",
"ca_certs",
"disable_ssl_certificate_validation",
}
unknown_keys = set(kwargs.keys()) - safe_keys
safe_kwargs = kwargs.copy()
for kw in unknown_keys:
del safe_kwargs[kw]
self.proxy_info = kwargs.pop("proxy_info", None)
if "ca_certs" not in kwargs or kwargs["ca_certs"] is None:
try:
import httplib2
self.ca_certs = httplib2.CA_CERTS
except ImportError:
self.ca_certs = None
else:
self.ca_certs = kwargs["ca_certs"]
self.disable_ssl_certificate_validation = kwargs.pop("disable_ssl_certificate_validation", None)
VCRHTTPSConnection.__init__(self, *args, **safe_kwargs)
self.sock = self.real_connection.sock
|
import logging
import aiohttp
from homeassistant.components.light import SUPPORT_BRIGHTNESS, LightEntity
from homeassistant.const import CONF_HOST
from homeassistant.exceptions import PlatformNotReady
from . import DATA_SISYPHUS
_LOGGER = logging.getLogger(__name__)
SUPPORTED_FEATURES = SUPPORT_BRIGHTNESS
async def async_setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up a single Sisyphus table."""
host = discovery_info[CONF_HOST]
try:
table_holder = hass.data[DATA_SISYPHUS][host]
table = await table_holder.get_table()
except aiohttp.ClientError as err:
raise PlatformNotReady() from err
add_entities([SisyphusLight(table_holder.name, table)], update_before_add=True)
class SisyphusLight(LightEntity):
"""Representation of a Sisyphus table as a light."""
def __init__(self, name, table):
"""Initialize the Sisyphus table."""
self._name = name
self._table = table
async def async_added_to_hass(self):
"""Add listeners after this object has been initialized."""
self._table.add_listener(self.async_write_ha_state)
@property
def available(self):
"""Return true if the table is responding to heartbeats."""
return self._table.is_connected
@property
def unique_id(self):
"""Return the UUID of the table."""
return self._table.id
@property
def name(self):
"""Return the ame of the table."""
return self._name
@property
def is_on(self):
"""Return True if the table is on."""
return not self._table.is_sleeping
@property
def brightness(self):
"""Return the current brightness of the table's ring light."""
return self._table.brightness * 255
@property
def supported_features(self):
"""Return the features supported by the table; i.e. brightness."""
return SUPPORTED_FEATURES
async def async_turn_off(self, **kwargs):
"""Put the table to sleep."""
await self._table.sleep()
_LOGGER.debug("Sisyphus table %s: sleep")
async def async_turn_on(self, **kwargs):
"""Wake up the table if necessary, optionally changes brightness."""
if not self.is_on:
await self._table.wakeup()
_LOGGER.debug("Sisyphus table %s: wakeup")
if "brightness" in kwargs:
await self._table.set_brightness(kwargs["brightness"] / 255.0)
|
import dbm
import json
import subprocess
import sys
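# Helper script: rebuild a Nikola site twice from a clean state and dump the
# doit task database after each build, so the two dumps can be compared (for
# example, to spot tasks that unexpectedly rerun on an unchanged site).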
def dbm_iter(db):
# try dictionary interface - ok in python2 and dumbdb
try:
return db.items()
except Exception:
# try firstkey/nextkey - ok for py3 dbm.gnu
def iter_gdbm(db):
k = db.firstkey()
while k is not None:
yield k, db[k]
k = db.nextkey(k)
return iter_gdbm(db)
def dumpdb():
with dbm.open('.doit.db') as data:
return {key: json.loads(value_str.decode('utf-8'))
for key, value_str in dbm_iter(data)}
print_ = print
def print(*args, **kwargs):
print_(*args, file=sys.stdout)
sys.stdout.flush()
print("==> Removing stuff...")
subprocess.call(['rm', '-rf', '.doit.db', 'output', 'cache', 'cc_debug.sqlite3'])
print("==> Running first build...")
subprocess.call(['nikola', 'build'])
print("==> Fetching database...")
first = dumpdb()
print("==> Running second build...")
subprocess.call(['nikola', 'build'])
print("==> Fetching database...")
second = dumpdb()
print("==> Saving dumps...")
with open('first_dump.py', 'w', encoding='utf-8') as fh:
fh.write(repr(first))
with open('second_dump.py', 'w', encoding='utf-8') as fh:
fh.write(repr(second))
|
import numpy as np
from qstrader.portcon.order_sizer.order_sizer import OrderSizer
class LongShortLeveragedOrderSizer(OrderSizer):
"""
Creates a target portfolio of quantities for each Asset
using its provided weight and total equity available in the
Broker portfolio, leveraging up if necessary via the supplied
gross leverage.
Parameters
----------
broker : `Broker`
The derived Broker instance to obtain portfolio equity from.
broker_portfolio_id : `str`
The specific portfolio at the Broker to obtain equity from.
data_handler : `DataHandler`
To obtain latest asset prices from.
gross_leverage : `float`, optional
The amount of percentage leverage to use when sizing orders.
"""
def __init__(
self,
broker,
broker_portfolio_id,
data_handler,
gross_leverage=1.0
):
self.broker = broker
self.broker_portfolio_id = broker_portfolio_id
self.data_handler = data_handler
self.gross_leverage = self._check_set_gross_leverage(
gross_leverage
)
def _check_set_gross_leverage(self, gross_leverage):
"""
Checks and sets the gross leverage percentage value.
Parameters
----------
gross_leverage : `float`
The amount of percentage leverage to use when sizing orders.
This assumes no restriction on margin.
Returns
-------
`float`
The gross leverage percentage value.
"""
if (
gross_leverage <= 0.0
):
raise ValueError(
'Gross leverage "%s" provided to long-short levered '
                'order sizer is non-positive.' % gross_leverage
)
else:
return gross_leverage
def _obtain_broker_portfolio_total_equity(self):
"""
Obtain the Broker portfolio total equity.
Returns
-------
`float`
The Broker portfolio total equity.
"""
return self.broker.get_portfolio_total_equity(self.broker_portfolio_id)
def _normalise_weights(self, weights):
"""
        Rescale the provided weights so that their gross exposure (the sum
        of their absolute values) equals the configured gross leverage.
Parameters
----------
weights : `dict{Asset: float}`
The un-normalised weight vector.
Returns
-------
`dict{Asset: float}`
The scaled weight vector.
"""
gross_exposure = sum(np.abs(weight) for weight in weights.values())
# If the weights are very close or equal to zero then rescaling
# is not possible, so simply return weights unscaled
if np.isclose(gross_exposure, 0.0):
return weights
gross_ratio = self.gross_leverage / gross_exposure
return {
asset: (weight * gross_ratio)
for asset, weight in weights.items()
}
def __call__(self, dt, weights):
"""
Creates a long short leveraged target portfolio from the
provided target weights at a particular timestamp.
Parameters
----------
dt : `pd.Timestamp`
The current date-time timestamp.
weights : `dict{Asset: float}`
The (potentially unnormalised) target weights.
Returns
-------
`dict{Asset: dict}`
The long short target portfolio dictionary with quantities.
"""
total_equity = self._obtain_broker_portfolio_total_equity()
# Pre-cost dollar weight
N = len(weights)
if N == 0:
# No forecasts so portfolio remains in cash
# or is fully liquidated
return {}
# Scale weights to take into account gross exposure and leverage
normalised_weights = self._normalise_weights(weights)
target_portfolio = {}
for asset, weight in sorted(normalised_weights.items()):
pre_cost_dollar_weight = total_equity * weight
# Estimate broker fees for this asset
est_quantity = 0 # TODO: Needs to be added for IB
est_costs = self.broker.fee_model.calc_total_cost(
asset, est_quantity, pre_cost_dollar_weight, broker=self.broker
)
# Calculate integral target asset quantity assuming broker costs
after_cost_dollar_weight = pre_cost_dollar_weight - est_costs
asset_price = self.data_handler.get_asset_latest_ask_price(
dt, asset
)
if np.isnan(asset_price):
raise ValueError(
'Asset price for "%s" at timestamp "%s" is Not-a-Number (NaN). '
'This can occur if the chosen backtest start date is earlier '
'than the first available price for a particular asset. Try '
'modifying the backtest start date and re-running.' % (asset, dt)
)
# Truncate the after cost dollar weight
# to nearest integer
truncated_after_cost_dollar_weight = (
np.floor(after_cost_dollar_weight)
if after_cost_dollar_weight >= 0.0
else np.ceil(after_cost_dollar_weight)
)
asset_quantity = int(
truncated_after_cost_dollar_weight / asset_price
)
# Add to the target portfolio
target_portfolio[asset] = {"quantity": asset_quantity}
return target_portfolio
|
import argparse
from sandman2 import get_app
def main():
"""Main entry point for script."""
parser = argparse.ArgumentParser(
description='Auto-generate a RESTful API service '
'from an existing database.'
)
parser.add_argument(
'URI',
help='Database URI in the format '
'postgresql+psycopg2://user:password@host/database')
parser.add_argument(
'-d',
'--debug',
help='Turn on debug logging',
action='store_true',
default=False)
parser.add_argument(
'-p',
'--port',
help='Port for service to listen on',
default=5000)
parser.add_argument(
'-l',
'--local-only',
help='Only provide service on localhost (will not be accessible'
' from other machines)',
action='store_true',
default=False)
parser.add_argument(
'-r',
'--read-only',
help='Make all database resources read-only (i.e. only the HTTP GET method is supported)',
action='store_true',
default=False)
parser.add_argument(
'-s',
'--schema',
help='Use this named schema instead of default',
default=None)
    parser.add_argument(
        '-e',
        '--enable-cors',
        help='Enable Cross Origin Resource Sharing (CORS)',
        action='store_true',
        default=False)
args = parser.parse_args()
app = get_app(args.URI, read_only=args.read_only, schema=args.schema)
if args.enable_cors:
from flask_cors import CORS
CORS(app)
if args.debug:
app.config['DEBUG'] = True
if args.local_only:
host = '127.0.0.1'
else:
host = '0.0.0.0'
app.config['SECRET_KEY'] = '42'
app.run(host=host, port=int(args.port))
if __name__ == '__main__':
main()
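# Illustrative invocation (assumes this module is exposed as a console script,
# e.g. ``sandman2ctl`` as in the sandman2 distribution, and that the database
# URI is valid):
#   sandman2ctl --read-only --port 8080 sqlite+pysqlite:///path/to/app.db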
|
from aiohttp import ClientSession
from google.oauth2.credentials import Credentials
from google_nest_sdm.auth import AbstractAuth
from homeassistant.helpers import config_entry_oauth2_flow
# See https://developers.google.com/nest/device-access/registration
class AsyncConfigEntryAuth(AbstractAuth):
"""Provide Google Nest Device Access authentication tied to an OAuth2 based config entry."""
def __init__(
self,
websession: ClientSession,
oauth_session: config_entry_oauth2_flow.OAuth2Session,
api_url: str,
):
"""Initialize Google Nest Device Access auth."""
super().__init__(websession, api_url)
self._oauth_session = oauth_session
async def async_get_access_token(self):
"""Return a valid access token."""
if not self._oauth_session.valid_token:
await self._oauth_session.async_ensure_token_valid()
return self._oauth_session.token["access_token"]
async def async_get_creds(self):
"""Return a minimal OAuth credential."""
token = await self.async_get_access_token()
return Credentials(token=token)
|
import numpy as np
import pytest
from tensornetwork.block_sparse.charge import (U1Charge, charge_equal,
BaseCharge)
from tensornetwork.block_sparse.index import Index
from tensornetwork.block_sparse.blocksparsetensor import (BlockSparseTensor,
tensordot,
outerproduct)
from tensornetwork import ncon
np_dtypes = [np.float64, np.complex128]
np_tensordot_dtypes = [np.float64, np.complex128]
def get_contractable_tensors(R1, R2, cont, dtype, num_charges, DsA, Dscomm,
DsB):
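  # Build two random BlockSparseTensors A and B that share `cont` legs with
  # matching charges and opposite flows, so they can be contracted with
  # `tensordot`. Returns the tensors plus the positions of the shared legs.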
assert R1 >= cont
assert R2 >= cont
chargesA = [
BaseCharge(
np.random.randint(-5, 5, (DsA[n], num_charges)),
charge_types=[U1Charge] * num_charges) for n in range(R1 - cont)
]
commoncharges = [
BaseCharge(
np.random.randint(-5, 5, (Dscomm[n], num_charges)),
charge_types=[U1Charge] * num_charges) for n in range(cont)
]
chargesB = [
BaseCharge(
np.random.randint(-5, 5, (DsB[n], num_charges)),
charge_types=[U1Charge] * num_charges) for n in range(R2 - cont)
]
  # contracted indices
indsA = np.random.choice(np.arange(R1), cont, replace=False)
indsB = np.random.choice(np.arange(R2), cont, replace=False)
  flowsA = np.full(R1, False, dtype=bool)
  flowsB = np.full(R2, False, dtype=bool)
flowsB[indsB] = True
indicesA = [None for _ in range(R1)]
indicesB = [None for _ in range(R2)]
for n, ia in enumerate(indsA):
indicesA[ia] = Index(commoncharges[n], flowsA[ia])
indicesB[indsB[n]] = Index(commoncharges[n], flowsB[indsB[n]])
compA = list(set(np.arange(R1)) - set(indsA))
compB = list(set(np.arange(R2)) - set(indsB))
for n, ca in enumerate(compA):
indicesA[ca] = Index(chargesA[n], flowsA[ca])
for n, cb in enumerate(compB):
indicesB[cb] = Index(chargesB[n], flowsB[cb])
indices_final = []
for n in sorted(compA):
indices_final.append(indicesA[n])
for n in sorted(compB):
indices_final.append(indicesB[n])
A = BlockSparseTensor.random(indices=indicesA, dtype=dtype)
B = BlockSparseTensor.random(indices=indicesB, dtype=dtype)
return A, B, indsA, indsB
@pytest.mark.parametrize('dtype', np_dtypes)
@pytest.mark.parametrize('num_legs', [1, 2, 3, 4])
@pytest.mark.parametrize('num_charges', [1, 2])
def test_outerproduct(dtype, num_legs, num_charges):
np.random.seed(10)
Ds1 = np.arange(2, 2 + num_legs)
Ds2 = np.arange(2 + num_legs, 2 + 2 * num_legs)
is1 = [
Index(
BaseCharge(
np.random.randint(-5, 6, (Ds1[n], num_charges)),
charge_types=[U1Charge] * num_charges), False)
for n in range(num_legs)
]
is2 = [
Index(
BaseCharge(
np.random.randint(-5, 6, (Ds2[n], num_charges)),
charge_types=[U1Charge] * num_charges), False)
for n in range(num_legs)
]
a = BlockSparseTensor.random(is1, dtype=dtype)
b = BlockSparseTensor.random(is2, dtype=dtype)
abdense = ncon([a.todense(), b.todense()], [
-np.arange(1, num_legs + 1, dtype=np.int16),
-num_legs - np.arange(1, num_legs + 1, dtype=np.int16)
])
ab = outerproduct(a, b)
np.testing.assert_allclose(ab.todense(), abdense)
@pytest.mark.parametrize('dtype', np_dtypes)
@pytest.mark.parametrize('num_legs', [2, 3])
@pytest.mark.parametrize('num_charges', [1, 2, 3])
def test_outerproduct_transpose(dtype, num_legs, num_charges):
np.random.seed(10)
Ds1 = np.arange(2, 2 + num_legs)
Ds2 = np.arange(2 + num_legs, 2 + 2 * num_legs)
is1 = [
Index(
BaseCharge(
np.random.randint(-5, 6, (Ds1[n], num_charges)),
charge_types=[U1Charge] * num_charges), False)
for n in range(num_legs)
]
is2 = [
Index(
BaseCharge(
np.random.randint(-5, 6, (Ds2[n], num_charges)),
charge_types=[U1Charge] * num_charges), False)
for n in range(num_legs)
]
o1 = np.arange(num_legs)
o2 = np.arange(num_legs)
np.random.shuffle(o1)
np.random.shuffle(o2)
a = BlockSparseTensor.random(is1, dtype=dtype).transpose(o1)
b = BlockSparseTensor.random(is2, dtype=dtype).transpose(o2)
abdense = ncon([a.todense(), b.todense()], [
-np.arange(1, num_legs + 1, dtype=np.int16),
-num_legs - np.arange(1, num_legs + 1, dtype=np.int16)
])
ab = outerproduct(a, b)
np.testing.assert_allclose(ab.todense(), abdense)
@pytest.mark.parametrize('dtype', np_dtypes)
@pytest.mark.parametrize('num_legs', [2, 3])
@pytest.mark.parametrize('num_charges', [1, 2])
def test_outerproduct_transpose_reshape(dtype, num_legs, num_charges):
np.random.seed(10)
Ds1 = np.arange(2, 2 + num_legs)
Ds2 = np.arange(2 + num_legs, 2 + 2 * num_legs)
is1 = [
Index(
BaseCharge(
np.random.randint(-5, 6, (Ds1[n], num_charges)),
charge_types=[U1Charge] * num_charges), False)
for n in range(num_legs)
]
is2 = [
Index(
BaseCharge(
np.random.randint(-5, 6, (Ds2[n], num_charges)),
charge_types=[U1Charge] * num_charges), False)
for n in range(num_legs)
]
o1 = np.arange(num_legs)
o2 = np.arange(num_legs)
np.random.shuffle(o1)
np.random.shuffle(o2)
a = BlockSparseTensor.random(is1, dtype=dtype).transpose(o1)
b = BlockSparseTensor.random(is2, dtype=dtype).transpose(o2)
a = a.reshape([np.prod(a.shape)])
b = b.reshape([np.prod(b.shape)])
abdense = ncon([a.todense(), b.todense()], [[-1], [-2]])
ab = outerproduct(a, b)
assert ab.ndim == 2
np.testing.assert_allclose(ab.todense(), abdense)
@pytest.mark.parametrize("dtype", np_tensordot_dtypes)
@pytest.mark.parametrize("R1, R2, cont", [(4, 4, 2), (4, 3, 3), (3, 4, 3)])
@pytest.mark.parametrize('num_charges', [1, 2, 3, 4])
def test_tensordot(R1, R2, cont, dtype, num_charges):
np.random.seed(10)
DsA = np.random.randint(5, 10, R1 - cont)
Dscomm = np.random.randint(5, 10, cont)
DsB = np.random.randint(5, 10, R2 - cont)
A, B, indsA, indsB = get_contractable_tensors(R1, R2, cont, dtype,
num_charges, DsA, Dscomm, DsB)
res = tensordot(A, B, (indsA, indsB))
dense_res = np.tensordot(A.todense(), B.todense(), (indsA, indsB))
np.testing.assert_allclose(dense_res, res.todense())
free_inds_A = np.sort(list(set(np.arange(len(A.shape))) - set(indsA)))
free_inds_B = np.sort(list(set(np.arange(len(B.shape))) - set(indsB)))
for n, fiA in enumerate(free_inds_A):
assert charge_equal(res.charges[n][0], A.charges[fiA][0])
for n in range(len(free_inds_A), len(free_inds_A) + len(free_inds_B)):
assert charge_equal(res.charges[n][0],
B.charges[free_inds_B[n - len(free_inds_A)]][0])
def test_tensordot_single_arg():
R = 3
dtype = np.float64
np.random.seed(10)
Ds = [10, 10, 10]
inds = [
Index(U1Charge.random(dimension=Ds[n], minval=-5, maxval=5), False)
for n in range(R)
]
A = BlockSparseTensor.random(inds, dtype=dtype)
res = tensordot(A, A.conj(), ([0]))
dense_res = np.tensordot(A.todense(), A.conj().todense(), ([0], [0]))
np.testing.assert_allclose(dense_res, res.todense())
@pytest.mark.parametrize("dtype", np_tensordot_dtypes)
@pytest.mark.parametrize('num_charges', [1, 2, 3, 4])
def test_tensordot_empty_tensors(dtype, num_charges):
A, B, iA, iB = get_contractable_tensors(
R1=4,
R2=4,
cont=2,
dtype=dtype,
num_charges=num_charges,
DsA=[10, 0],
Dscomm=[0, 4],
DsB=[8, 0])
free_inds_A = np.sort(list(set(np.arange(len(A.shape))) - set(iA)))
free_inds_B = np.sort(list(set(np.arange(len(B.shape))) - set(iB)))
res = tensordot(A, B, (iA, iB))
assert len(res.data) == 0
for n in range(2):
assert charge_equal(res.charges[n][0], A.charges[free_inds_A[n]][0])
for n in range(2, 4):
assert charge_equal(res.charges[n][0], B.charges[free_inds_B[n - 2]][0])
def test_tensordot_raises():
R1 = 3
R2 = 3
R3 = 3
dtype = np.float64
np.random.seed(10)
Ds1 = np.arange(2, 2 + R1)
Ds2 = np.arange(2 + R1, 2 + R1 + R2)
Ds3 = np.arange(2 + R1, 2 + R1 + R3)
is1 = [
Index(U1Charge.random(dimension=Ds1[n], minval=-5, maxval=5), False)
for n in range(R1)
]
is2 = [
Index(U1Charge.random(dimension=Ds2[n], minval=-5, maxval=5), False)
for n in range(R2)
]
is3 = [
Index(U1Charge.random(dimension=Ds3[n], minval=-5, maxval=5), False)
for n in range(R3)
]
A = BlockSparseTensor.random(is1, dtype=dtype)
B = BlockSparseTensor.random(is2, dtype=dtype)
C = BlockSparseTensor.random(is3, dtype=dtype)
with pytest.raises(ValueError, match="same length"):
tensordot(A, B, ([0, 1, 2, 3], [1, 2]))
with pytest.raises(ValueError, match="same length"):
tensordot(A, B, ([0, 1], [0, 1, 2, 3]))
with pytest.raises(ValueError, match="same length"):
tensordot(A, B, ([0], [1, 2]))
with pytest.raises(ValueError, match='invalid input'):
tensordot(A, B, [0, [1, 2]])
with pytest.raises(ValueError, match="incompatible elementary flows"):
tensordot(A, B, ([0, 0], [1, 2]))
with pytest.raises(ValueError):
tensordot(A, B, ([0, 1], [1, 1]))
with pytest.raises(ValueError, match="rank of `tensor1` is smaller than "):
tensordot(A, B, ([0, 4], [1, 2]))
with pytest.raises(ValueError, match="rank of `tensor2` is smaller than "):
tensordot(A, B, ([0, 1], [0, 4]))
with pytest.raises(ValueError):
tensordot(A, B, ([0, 4], [1, 4]))
with pytest.raises(ValueError):
tensordot(A, B, ([0, 1], [0, 1]))
with pytest.raises(ValueError):
tensordot(A, A, ([0, 1], [0, 1]))
with pytest.raises(ValueError):
tensordot(A, A.conj(), ([0, 1], [1, 0]))
with pytest.raises(ValueError, match="is incompatible with `tensor1.shape"):
tensordot(A, A.conj(), ([0, 1, 2, 3], [0, 1, 2, 3]))
with pytest.raises(ValueError, match="is incompatible with `tensor1.shape"):
tensordot(A, C, ([0, 1, 2, 3], [0, 1, 2, 3]))
Ds1 = np.array([8, 9, 10, 11])
Ds2 = np.array([8, 9])
flows1 = [False] * len(Ds1)
flows2 = [False] * len(Ds2)
indices1 = [Index(U1Charge.random(D, -2, 2), f) for D, f in zip(Ds1, flows1)]
indices2 = [Index(U1Charge.random(D, -2, 2), f) for D, f in zip(Ds2, flows2)]
arr1 = BlockSparseTensor.random(indices1)
arr2 = BlockSparseTensor.random(indices2)
with pytest.raises(ValueError, match="axes2 = "):
tensordot(arr1, arr2, ([0, 1, 2], [0, 1, 2]))
Ds2 = np.array([8, 9, 2, 5, 11])
flows2 = [False] * len(Ds2)
indices1 = [Index(U1Charge.random(D, -2, 2), f) for D, f in zip(Ds1, flows1)]
indices2 = [Index(U1Charge.random(D, -2, 2), f) for D, f in zip(Ds2, flows2)]
arr1 = BlockSparseTensor.random(indices1)
arr2 = BlockSparseTensor.random(indices2).reshape(Ds1)
with pytest.raises(ValueError, match="incompatible elementary shapes "):
tensordot(arr1, arr2.conj(), ([2, 3], [2, 3]))
@pytest.mark.parametrize("dtype", np_dtypes)
@pytest.mark.parametrize('num_charges', [1, 2, 3, 4])
def test_tensordot_reshape(dtype, num_charges):
np.random.seed(10)
R1 = 4
R2 = 4
q = np.random.randint(-5, 5, (10, num_charges), dtype=np.int16)
charges1 = [
BaseCharge(q, charge_types=[U1Charge] * num_charges) for n in range(R1)
]
charges2 = [
BaseCharge(q, charge_types=[U1Charge] * num_charges) for n in range(R2)
]
flowsA = np.asarray([False] * R1)
flowsB = np.asarray([True] * R2)
A = BlockSparseTensor.random(
indices=[Index(charges1[n], flowsA[n]) for n in range(R1)], dtype=dtype)
B = BlockSparseTensor.random(
indices=[Index(charges2[n], flowsB[n]) for n in range(R2)], dtype=dtype)
Adense = A.todense().reshape((10, 10 * 10, 10))
Bdense = B.todense().reshape((10 * 10, 10, 10))
A = A.reshape((10, 10 * 10, 10))
B = B.reshape((10 * 10, 10, 10))
res = tensordot(A, B, ([0, 1], [2, 0]))
dense = np.tensordot(Adense, Bdense, ([0, 1], [2, 0]))
np.testing.assert_allclose(dense, res.todense())
@pytest.mark.parametrize("dtype", np_dtypes)
@pytest.mark.parametrize("R1, R2", [(2, 2), (3, 3), (4, 4), (1, 1)])
@pytest.mark.parametrize('num_charges', [1, 2, 3, 4])
def test_tensordot_inner(R1, R2, dtype, num_charges):
np.random.seed(10)
DsA = np.random.randint(3, 5, R1)
Dscomm = np.random.randint(3, 5, 0)
DsB = np.random.randint(3, 5, R2)
A, B, indsA, indsB = get_contractable_tensors(R1, R2, 0, dtype, num_charges,
DsA, Dscomm, DsB)
res = tensordot(A, B, (indsA, indsB))
dense_res = np.tensordot(A.todense(), B.todense(), (indsA, indsB))
np.testing.assert_allclose(dense_res, res.todense())
@pytest.mark.parametrize("dtype", np_dtypes)
@pytest.mark.parametrize("R1, R2", [(2, 2), (3, 3), (4, 4), (1, 1)])
@pytest.mark.parametrize('num_charges', [1, 2, 3, 4])
def test_tensordot_inner_transpose(R1, R2, dtype, num_charges):
np.random.seed(10)
DsA = np.random.randint(3, 5, R1)
Dscomm = np.random.randint(3, 5, 0)
DsB = np.random.randint(3, 5, R2)
A, B, indsA, indsB = get_contractable_tensors(R1, R2, 0, dtype, num_charges,
DsA, Dscomm, DsB)
orderA = np.arange(R1)
orderB = np.arange(R2)
np.random.shuffle(orderA)
np.random.shuffle(orderB)
A_ = A.transpose(orderA)
B_ = B.transpose(orderB)
_, indposA = np.unique(orderA, return_index=True)
_, indposB = np.unique(orderB, return_index=True)
indsA_ = indposA[indsA]
indsB_ = indposB[indsB]
res = tensordot(A_, B_, (indsA_, indsB_))
dense_res = np.tensordot(A_.todense(), B_.todense(), (indsA_, indsB_))
np.testing.assert_allclose(dense_res, res.todense())
@pytest.mark.parametrize("dtype", np_dtypes)
@pytest.mark.parametrize("R1, R2", [(2, 2), (2, 1), (1, 2), (1, 1)])
@pytest.mark.parametrize('num_charges', [1, 2, 3, 4])
def test_tensordot_outer(R1, R2, dtype, num_charges):
np.random.seed(10)
DsA = np.random.randint(3, 5, R1)
Dscomm = np.random.randint(3, 5, 0)
DsB = np.random.randint(3, 5, R2)
A, B, _, _ = get_contractable_tensors(R1, R2, 0, dtype, num_charges, DsA,
Dscomm, DsB)
res = tensordot(A, B, axes=0)
dense_res = np.tensordot(A.todense(), B.todense(), axes=0)
np.testing.assert_allclose(dense_res, res.todense())
for n in range(R1):
assert charge_equal(res.charges[n][0], A.charges[n][0])
for n in range(R1, R1 + R2):
assert charge_equal(res.charges[n][0], B.charges[n - R1][0])
|
import argparse
import logging
import sys
from typing import Optional
from typing import Sequence
from typing import Tuple
from paasta_tools.kubernetes.application.controller_wrappers import Application
from paasta_tools.kubernetes.application.controller_wrappers import (
get_application_wrapper,
)
from paasta_tools.kubernetes_tools import ensure_namespace
from paasta_tools.kubernetes_tools import InvalidKubernetesConfig
from paasta_tools.kubernetes_tools import KubeClient
from paasta_tools.kubernetes_tools import list_all_deployments
from paasta_tools.kubernetes_tools import load_kubernetes_service_config_no_cache
from paasta_tools.utils import decompose_job_id
from paasta_tools.utils import DEFAULT_SOA_DIR
from paasta_tools.utils import InvalidJobNameError
from paasta_tools.utils import load_system_paasta_config
from paasta_tools.utils import NoConfigurationForServiceError
from paasta_tools.utils import NoDeploymentsAvailable
from paasta_tools.utils import SPACER
log = logging.getLogger(__name__)
def parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser(description="Creates marathon jobs.")
parser.add_argument(
"service_instance_list",
nargs="+",
help="The list of marathon service instances to create or update",
metavar="SERVICE%sINSTANCE" % SPACER,
)
parser.add_argument(
"-d",
"--soa-dir",
dest="soa_dir",
metavar="SOA_DIR",
default=DEFAULT_SOA_DIR,
help="define a different soa config directory",
)
parser.add_argument(
"-c", "--cluster", dest="cluster", help="paasta cluster",
)
parser.add_argument(
"-v", "--verbose", action="store_true", dest="verbose", default=False
)
args = parser.parse_args()
return args
def main() -> None:
args = parse_args()
soa_dir = args.soa_dir
if args.verbose:
logging.basicConfig(level=logging.DEBUG)
else:
# filter out unwanted zookeeper messages in the log
logging.getLogger("kazoo").setLevel(logging.WARN)
logging.basicConfig(level=logging.INFO)
# system_paasta_config = load_system_paasta_config()
kube_client = KubeClient()
ensure_namespace(kube_client, namespace="paasta")
setup_kube_succeeded = setup_kube_deployments(
kube_client=kube_client,
service_instances=args.service_instance_list,
soa_dir=soa_dir,
cluster=args.cluster or load_system_paasta_config().get_cluster(),
)
sys.exit(0 if setup_kube_succeeded else 1)
def validate_job_name(service_instance: str) -> bool:
try:
service, instance, _, __ = decompose_job_id(service_instance)
except InvalidJobNameError:
log.error(
"Invalid service instance specified. Format is service%sinstance." % SPACER
)
return False
return True
def setup_kube_deployments(
kube_client: KubeClient,
service_instances: Sequence[str],
cluster: str,
soa_dir: str = DEFAULT_SOA_DIR,
) -> bool:
if service_instances:
existing_kube_deployments = set(list_all_deployments(kube_client))
existing_apps = {
(deployment.service, deployment.instance)
for deployment in existing_kube_deployments
}
service_instances_with_valid_names = [
decompose_job_id(service_instance)
for service_instance in service_instances
if validate_job_name(service_instance)
]
applications = [
create_application_object(
kube_client=kube_client,
service=service_instance[0],
instance=service_instance[1],
cluster=cluster,
soa_dir=soa_dir,
)
for service_instance in service_instances_with_valid_names
]
for _, app in applications:
if app:
if (
app.kube_deployment.service,
app.kube_deployment.instance,
) not in existing_apps:
log.info(f"Creating {app} because it does not exist yet.")
app.create(kube_client)
elif app.kube_deployment not in existing_kube_deployments:
log.info(f"Updating {app} because configs have changed.")
app.update(kube_client)
else:
log.info(f"{app} is up-to-date!")
log.info(f"Ensuring related API objects for {app} are in sync")
app.update_related_api_objects(kube_client)
return (False, None) not in applications and len(
service_instances_with_valid_names
) == len(service_instances)
def create_application_object(
kube_client: KubeClient, service: str, instance: str, cluster: str, soa_dir: str,
) -> Tuple[bool, Optional[Application]]:
try:
service_instance_config = load_kubernetes_service_config_no_cache(
service, instance, cluster, soa_dir=soa_dir,
)
except NoDeploymentsAvailable:
log.debug(
"No deployments found for %s.%s in cluster %s. Skipping."
% (service, instance, cluster)
)
return True, None
except NoConfigurationForServiceError:
        error_msg = (
            f"Could not read kubernetes configuration file for "
            f"{service}.{instance} in cluster {cluster}"
        )
log.error(error_msg)
return False, None
try:
formatted_application = service_instance_config.format_kubernetes_app()
except InvalidKubernetesConfig as e:
log.error(str(e))
return False, None
app = get_application_wrapper(formatted_application)
app.load_local_config(soa_dir, cluster)
return True, app
if __name__ == "__main__":
main()
|
import pytest
from PyQt5.QtCore import QUrl
pytest.importorskip('PyQt5.QtWebEngineCore')
from PyQt5.QtWebEngineCore import QWebEngineCookieStore
from PyQt5.QtWebEngineWidgets import QWebEngineProfile
from qutebrowser.browser.webengine import cookies
from qutebrowser.utils import urlmatch
@pytest.fixture
def filter_request():
request = QWebEngineCookieStore.FilterRequest()
request.firstPartyUrl = QUrl('https://example.com')
return request
@pytest.fixture(autouse=True)
def enable_cookie_logging(monkeypatch):
monkeypatch.setattr(cookies.objects, 'debug_flags', ['log-cookies'])
@pytest.mark.parametrize('setting, third_party, accepted', [
('all', False, True),
('never', False, False),
('no-3rdparty', False, True),
('no-3rdparty', True, False),
])
def test_accept_cookie(config_stub, filter_request, setting, third_party,
accepted):
"""Test that _accept_cookie respects content.cookies.accept."""
config_stub.val.content.cookies.accept = setting
filter_request.thirdParty = third_party
assert cookies._accept_cookie(filter_request) == accepted
@pytest.mark.parametrize('setting, pattern_setting, third_party, accepted', [
('never', 'all', False, True),
('all', 'never', False, False),
('no-3rdparty', 'all', True, True),
('all', 'no-3rdparty', True, False),
])
def test_accept_cookie_with_pattern(config_stub, filter_request, setting,
pattern_setting, third_party, accepted):
"""Test that _accept_cookie matches firstPartyUrl with the UrlPattern."""
filter_request.thirdParty = third_party
config_stub.set_str('content.cookies.accept', setting)
config_stub.set_str('content.cookies.accept', pattern_setting,
pattern=urlmatch.UrlPattern('https://*.example.com'))
assert cookies._accept_cookie(filter_request) == accepted
@pytest.mark.parametrize('global_value', ['never', 'all'])
def test_invalid_url(config_stub, filter_request, global_value):
"""Make sure we fall back to the global value with invalid URLs.
This can happen when there's a cookie request from an iframe, e.g. here:
https://developers.google.com/youtube/youtube_player_demo
"""
config_stub.val.content.cookies.accept = global_value
filter_request.firstPartyUrl = QUrl()
accepted = global_value == 'all'
assert cookies._accept_cookie(filter_request) == accepted
@pytest.mark.parametrize('enabled', [True, False])
def test_logging(monkeypatch, config_stub, filter_request, caplog, enabled):
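    """Cookie decisions are only logged when the log-cookies debug flag is set."""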
monkeypatch.setattr(cookies.objects, 'debug_flags',
['log-cookies'] if enabled else [])
config_stub.val.content.cookies.accept = 'all'
caplog.clear()
cookies._accept_cookie(filter_request)
if enabled:
expected = ("Cookie from origin <unknown> on https://example.com "
"(third party: False) -> applying setting all")
assert caplog.messages == [expected]
else:
assert not caplog.messages
class TestInstall:
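    """Tests for cookies.install_filter with real and fake profiles."""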
def test_real_profile(self):
profile = QWebEngineProfile()
cookies.install_filter(profile)
def test_fake_profile(self, stubs):
store = stubs.FakeCookieStore()
profile = stubs.FakeWebEngineProfile(cookie_store=store)
cookies.install_filter(profile)
assert store.cookie_filter is cookies._accept_cookie
|
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_CONNECTIVITY,
BinarySensorEntity,
)
from . import DOMAIN as ZONEMINDER_DOMAIN
async def async_setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the ZoneMinder binary sensor platform."""
sensors = []
for host_name, zm_client in hass.data[ZONEMINDER_DOMAIN].items():
sensors.append(ZMAvailabilitySensor(host_name, zm_client))
add_entities(sensors)
return True
class ZMAvailabilitySensor(BinarySensorEntity):
"""Representation of the availability of ZoneMinder as a binary sensor."""
def __init__(self, host_name, client):
"""Initialize availability sensor."""
self._state = None
self._name = host_name
self._client = client
@property
def name(self):
"""Return the name of this binary sensor."""
return self._name
@property
def is_on(self):
"""Return true if the binary sensor is on."""
return self._state
@property
def device_class(self):
"""Return the class of this device, from component DEVICE_CLASSES."""
return DEVICE_CLASS_CONNECTIVITY
def update(self):
"""Update the state of this sensor (availability of ZoneMinder)."""
self._state = self._client.is_available
|
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_CONNECTIVITY,
BinarySensorEntity,
)
from . import CONF_DOOR_WINDOW, HUB as hub
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Verisure binary sensors."""
sensors = []
hub.update_overview()
if int(hub.config.get(CONF_DOOR_WINDOW, 1)):
sensors.extend(
[
VerisureDoorWindowSensor(device_label)
for device_label in hub.get(
"$.doorWindow.doorWindowDevice[*].deviceLabel"
)
]
)
sensors.extend([VerisureEthernetStatus()])
add_entities(sensors)
class VerisureDoorWindowSensor(BinarySensorEntity):
"""Representation of a Verisure door window sensor."""
def __init__(self, device_label):
"""Initialize the Verisure door window sensor."""
self._device_label = device_label
@property
def name(self):
"""Return the name of the binary sensor."""
return hub.get_first(
"$.doorWindow.doorWindowDevice[?(@.deviceLabel=='%s')].area",
self._device_label,
)
@property
def is_on(self):
"""Return the state of the sensor."""
return (
hub.get_first(
"$.doorWindow.doorWindowDevice[?(@.deviceLabel=='%s')].state",
self._device_label,
)
== "OPEN"
)
@property
def available(self):
"""Return True if entity is available."""
return (
hub.get_first(
"$.doorWindow.doorWindowDevice[?(@.deviceLabel=='%s')]",
self._device_label,
)
is not None
)
# pylint: disable=no-self-use
def update(self):
"""Update the state of the sensor."""
hub.update_overview()
class VerisureEthernetStatus(BinarySensorEntity):
"""Representation of a Verisure VBOX internet status."""
@property
def name(self):
"""Return the name of the binary sensor."""
return "Verisure Ethernet status"
@property
def is_on(self):
"""Return the state of the sensor."""
return hub.get_first("$.ethernetConnectedNow")
@property
def available(self):
"""Return True if entity is available."""
return hub.get_first("$.ethernetConnectedNow") is not None
# pylint: disable=no-self-use
def update(self):
"""Update the state of the sensor."""
hub.update_overview()
@property
def device_class(self):
"""Return the class of this device, from component DEVICE_CLASSES."""
return DEVICE_CLASS_CONNECTIVITY
|
import urwid
class SelectablePudding(urwid.Widget):
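    """Selectable flow widget that fills the line with "pudding" (uppercased
    when focused) and removes matching letters as they are typed."""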
_sizing = frozenset(['flow'])
_selectable = True
def __init__(self):
self.pudding = "pudding"
def rows(self, size, focus=False):
return 1
def render(self, size, focus=False):
(maxcol,) = size
        num_pudding = maxcol // len(self.pudding)  # integer division: the repeat count must be an int
pudding = self.pudding
if focus:
pudding = pudding.upper()
return urwid.TextCanvas([pudding * num_pudding],
maxcol=maxcol)
def keypress(self, size, key):
(maxcol,) = size
if len(key) > 1:
return key
if key.lower() in self.pudding:
# remove letter from pudding
n = self.pudding.index(key.lower())
self.pudding = self.pudding[:n] + self.pudding[n+1:]
if not self.pudding:
self.pudding = "pudding"
self._invalidate()
else:
return key
|
from wled import WLEDConnectionError
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
from homeassistant.components.wled.const import (
ATTR_DURATION,
ATTR_FADE,
ATTR_TARGET_BRIGHTNESS,
ATTR_UDP_PORT,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_ICON,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_OFF,
STATE_ON,
STATE_UNAVAILABLE,
)
from homeassistant.core import HomeAssistant
from tests.async_mock import patch
from tests.components.wled import init_integration
from tests.test_util.aiohttp import AiohttpClientMocker
async def test_switch_state(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test the creation and values of the WLED switches."""
await init_integration(hass, aioclient_mock)
entity_registry = await hass.helpers.entity_registry.async_get_registry()
state = hass.states.get("switch.wled_rgb_light_nightlight")
assert state
assert state.attributes.get(ATTR_DURATION) == 60
assert state.attributes.get(ATTR_ICON) == "mdi:weather-night"
assert state.attributes.get(ATTR_TARGET_BRIGHTNESS) == 0
assert state.attributes.get(ATTR_FADE)
assert state.state == STATE_OFF
entry = entity_registry.async_get("switch.wled_rgb_light_nightlight")
assert entry
assert entry.unique_id == "aabbccddeeff_nightlight"
state = hass.states.get("switch.wled_rgb_light_sync_send")
assert state
assert state.attributes.get(ATTR_ICON) == "mdi:upload-network-outline"
assert state.attributes.get(ATTR_UDP_PORT) == 21324
assert state.state == STATE_OFF
entry = entity_registry.async_get("switch.wled_rgb_light_sync_send")
assert entry
assert entry.unique_id == "aabbccddeeff_sync_send"
state = hass.states.get("switch.wled_rgb_light_sync_receive")
assert state
assert state.attributes.get(ATTR_ICON) == "mdi:download-network-outline"
assert state.attributes.get(ATTR_UDP_PORT) == 21324
assert state.state == STATE_ON
entry = entity_registry.async_get("switch.wled_rgb_light_sync_receive")
assert entry
assert entry.unique_id == "aabbccddeeff_sync_receive"
async def test_switch_change_state(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test the change of state of the WLED switches."""
await init_integration(hass, aioclient_mock)
# Nightlight
with patch("wled.WLED.nightlight") as nightlight_mock:
await hass.services.async_call(
SWITCH_DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.wled_rgb_light_nightlight"},
blocking=True,
)
await hass.async_block_till_done()
nightlight_mock.assert_called_once_with(on=True)
with patch("wled.WLED.nightlight") as nightlight_mock:
await hass.services.async_call(
SWITCH_DOMAIN,
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: "switch.wled_rgb_light_nightlight"},
blocking=True,
)
await hass.async_block_till_done()
nightlight_mock.assert_called_once_with(on=False)
# Sync send
with patch("wled.WLED.sync") as sync_mock:
await hass.services.async_call(
SWITCH_DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.wled_rgb_light_sync_send"},
blocking=True,
)
await hass.async_block_till_done()
sync_mock.assert_called_once_with(send=True)
with patch("wled.WLED.sync") as sync_mock:
await hass.services.async_call(
SWITCH_DOMAIN,
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: "switch.wled_rgb_light_sync_send"},
blocking=True,
)
await hass.async_block_till_done()
sync_mock.assert_called_once_with(send=False)
# Sync receive
with patch("wled.WLED.sync") as sync_mock:
await hass.services.async_call(
SWITCH_DOMAIN,
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: "switch.wled_rgb_light_sync_receive"},
blocking=True,
)
await hass.async_block_till_done()
sync_mock.assert_called_once_with(receive=False)
with patch("wled.WLED.sync") as sync_mock:
await hass.services.async_call(
SWITCH_DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.wled_rgb_light_sync_receive"},
blocking=True,
)
await hass.async_block_till_done()
sync_mock.assert_called_once_with(receive=True)
async def test_switch_error(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, caplog
) -> None:
"""Test error handling of the WLED switches."""
aioclient_mock.post("http://192.168.1.123:80/json/state", text="", status=400)
await init_integration(hass, aioclient_mock)
with patch("homeassistant.components.wled.WLED.update"):
await hass.services.async_call(
SWITCH_DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.wled_rgb_light_nightlight"},
blocking=True,
)
await hass.async_block_till_done()
state = hass.states.get("switch.wled_rgb_light_nightlight")
assert state.state == STATE_OFF
assert "Invalid response from API" in caplog.text
async def test_switch_connection_error(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test error handling of the WLED switches."""
await init_integration(hass, aioclient_mock)
with patch("homeassistant.components.wled.WLED.update"), patch(
"homeassistant.components.wled.WLED.nightlight", side_effect=WLEDConnectionError
):
await hass.services.async_call(
SWITCH_DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.wled_rgb_light_nightlight"},
blocking=True,
)
await hass.async_block_till_done()
state = hass.states.get("switch.wled_rgb_light_nightlight")
assert state.state == STATE_UNAVAILABLE
|
from homeassistant.components.switch import SwitchEntity
from . import BleBoxEntity, create_blebox_entities
from .const import BLEBOX_TO_HASS_DEVICE_CLASSES
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up a BleBox switch entity."""
create_blebox_entities(
hass, config_entry, async_add_entities, BleBoxSwitchEntity, "switches"
)
class BleBoxSwitchEntity(BleBoxEntity, SwitchEntity):
"""Representation of a BleBox switch feature."""
@property
def device_class(self):
"""Return the device class."""
return BLEBOX_TO_HASS_DEVICE_CLASSES[self._feature.device_class]
@property
def is_on(self):
"""Return whether switch is on."""
return self._feature.is_on
async def async_turn_on(self, **kwargs):
"""Turn on the switch."""
await self._feature.async_turn_on()
async def async_turn_off(self, **kwargs):
"""Turn off the switch."""
await self._feature.async_turn_off()
|
import inspect
import logging
class EffectSync(object):
"""
Class which deals with receiving effect events from other devices
"""
def __init__(self, parent, device_number):
self._logger = logging.getLogger('razer.device{0}.effect_sync'.format(device_number))
self._parent = parent
self._parent.register_observer(self)
def __del__(self):
self.close()
def close(self):
self._parent.remove_observer(self)
def notify(self, msg):
"""
        Receive notifications from the device (we only care about effects)
:param msg: Notification
:type msg: tuple
"""
if not isinstance(msg, tuple):
self._logger.warning("Got msg that was not a tuple")
elif msg[0] == 'effect':
# We have a message directed at us
# MSG format
# 0 1 2 3
# ('effect', Device, 'effectName', 'effectparams'...)
# Device is the device the msg originated from (could be parent device)
if msg[1] is not self._parent:
# Msg from another device
self.run_effect(msg[2], *msg[3:])
def run_effect(self, effect_name, *args):
"""
Run the specified effect with the given arguments
:param effect_name: Name of the effect
:type effect_name: str
:param args: Arguments for the specified effect
:type args: list
"""
# Disable notifications
self._parent.disable_notify = True
try:
# Does parent have method
effect_func = getattr(self._parent, effect_name, None)
if effect_func is not None:
# We have method, does it have the correct num arguments
actual_args = self.get_num_arguments(effect_func)
if actual_args == len(args):
# method should be same
effect_func(*args)
else:
# Method same but wrong args, try alternatives
if effect_name == 'setStatic':
# Could be static from chroma to non chroma
if actual_args == 0:
# Chroma -> BW
effect_func()
else:
# BW -> Chroma
effect_func(0x0, 0xFF, 0x00) # Green
else:
# setNone sets active to false and needs to be re-enabled for effects to show - maybe a bit inefficient
if not effect_name == 'setNone':
effect_func = getattr(self._parent, 'setScrollActive', None)
if effect_func is not None:
effect_func(True)
effect_func = getattr(self._parent, 'setLogoActive', None)
if effect_func is not None:
effect_func(True)
effect_func = getattr(self._parent, 'setBacklightActive', None)
if effect_func is not None:
effect_func(True)
# The target device doesn't have these methods, use similar ones
if effect_name == 'setPulsate':
# setPulsate doesn't provide a color but we need one, take green.
pargs = (0x00, 0xFF, 0x00) # Green
effect_func = getattr(self._parent, 'setBreathSingle', None)
if effect_func is not None:
effect_func(*pargs)
effect_func = getattr(self._parent, 'setScrollPulsate', None)
if effect_func is not None:
effect_func(*pargs)
effect_func = getattr(self._parent, 'setLogoPulsate', None)
if effect_func is not None:
effect_func(*pargs)
effect_func = getattr(self._parent, 'setBacklightPulsate', None)
if effect_func is not None:
effect_func(*pargs)
elif effect_name in ('setBreathSingle', 'setBreathRandom', 'setBreathDual', 'setBreathTriple'):
if effect_name == 'setBreathRandom':
pargs = (0x00, 0xFF, 0x00) # Green
else:
pargs = args[0:3] # limit args to first 3, as setBreathDual gives 6 args and setBreathTriple gives 9 args
effect_func = getattr(self._parent, 'setPulsate', None)
if effect_func is not None:
# setPulsate doesn't take any argument
effect_func()
effect_func = getattr(self._parent, 'setScrollPulsate', None)
if effect_func is not None:
effect_func(*pargs)
effect_func = getattr(self._parent, 'setLogoPulsate', None)
if effect_func is not None:
effect_func(*pargs)
effect_func = getattr(self._parent, 'setBacklightPulsate', None)
if effect_func is not None:
effect_func(*pargs)
elif effect_name == 'setNone':
#print("setNone stub")
effect_func = getattr(self._parent, 'setScrollActive', None)
if effect_func is not None:
effect_func(False)
effect_func = getattr(self._parent, 'setLogoActive', None)
if effect_func is not None:
effect_func(False)
effect_func = getattr(self._parent, 'setBacklightActive', None)
if effect_func is not None:
effect_func(False)
elif effect_name == 'setSpectrum':
effect_func = getattr(self._parent, 'setScrollSpectrum', None)
if effect_func is not None:
effect_func()
effect_func = getattr(self._parent, 'setLogoSpectrum', None)
if effect_func is not None:
effect_func()
effect_func = getattr(self._parent, 'setBacklightSpectrum', None)
if effect_func is not None:
effect_func()
elif effect_name == 'setStatic':
effect_func = getattr(self._parent, 'setScrollStatic', None)
if effect_func is not None:
effect_func(*args)
effect_func = getattr(self._parent, 'setLogoStatic', None)
if effect_func is not None:
effect_func(*args)
effect_func = getattr(self._parent, 'setBacklightStatic', None)
if effect_func is not None:
effect_func(*args)
except Exception as err:
self._logger.exception("Caught exception trying to sync effects.", exc_info=err)
# Re-enable notifications
self._parent.disable_notify = False
@staticmethod
def get_num_arguments(func):
"""
Get number of arguments in a function
:param func: Function
:type func: callable
:return: Number of arguments
:rtype: int
"""
func_sig = inspect.signature(func)
return len(func_sig.parameters)
|
from __future__ import division
import collections
import glob
import optparse
import os
import re
import sys
import textwrap
import disgen
from coverage.parser import PythonParser
from coverage.python import get_python_source
opcode_counts = collections.Counter()
class ParserMain(object):
"""A main for code parsing experiments."""
def main(self, args):
"""A main function for trying the code from the command line."""
parser = optparse.OptionParser()
parser.add_option(
"-d", action="store_true", dest="dis",
help="Disassemble"
)
parser.add_option(
"-H", action="store_true", dest="histogram",
help="Count occurrences of opcodes"
)
parser.add_option(
"-R", action="store_true", dest="recursive",
help="Recurse to find source files"
)
parser.add_option(
"-s", action="store_true", dest="source",
help="Show analyzed source"
)
parser.add_option(
"-t", action="store_true", dest="tokens",
help="Show tokens"
)
options, args = parser.parse_args()
if options.recursive:
if args:
root = args[0]
else:
root = "."
for root, _, _ in os.walk(root):
for f in glob.glob(root + "/*.py"):
self.one_file(options, f)
elif not args:
parser.print_help()
else:
self.one_file(options, args[0])
if options.histogram:
total = sum(opcode_counts.values())
print("{} total opcodes".format(total))
for opcode, number in opcode_counts.most_common():
print("{:20s} {:6d} {:.1%}".format(opcode, number, number/total))
def one_file(self, options, filename):
"""Process just one file."""
# `filename` can have a line number suffix. In that case, extract those
# lines, dedent them, and use that. This is for trying test cases
# embedded in the test files.
match = re.search(r"^(.*):(\d+)-(\d+)$", filename)
if match:
filename, start, end = match.groups()
start, end = int(start), int(end)
else:
start = end = None
try:
text = get_python_source(filename)
if start is not None:
lines = text.splitlines(True)
text = textwrap.dedent("".join(lines[start-1:end]).replace("\\\\", "\\"))
pyparser = PythonParser(text, filename=filename, exclude=r"no\s*cover")
pyparser.parse_source()
except Exception as err:
print("%s" % (err,))
return
if options.dis:
print("Main code:")
self.disassemble(pyparser.byte_parser, histogram=options.histogram)
arcs = pyparser.arcs()
if options.source or options.tokens:
pyparser.show_tokens = options.tokens
pyparser.parse_source()
if options.source:
arc_chars = self.arc_ascii_art(arcs)
if arc_chars:
arc_width = max(len(a) for a in arc_chars.values())
exit_counts = pyparser.exit_counts()
for lineno, ltext in enumerate(pyparser.lines, start=1):
marks = [' ', ' ', ' ', ' ', ' ']
a = ' '
if lineno in pyparser.raw_statements:
marks[0] = '-'
if lineno in pyparser.statements:
marks[1] = '='
exits = exit_counts.get(lineno, 0)
if exits > 1:
marks[2] = str(exits)
if lineno in pyparser.raw_docstrings:
marks[3] = '"'
if lineno in pyparser.raw_classdefs:
marks[3] = 'C'
if lineno in pyparser.raw_excluded:
marks[4] = 'x'
if arc_chars:
a = arc_chars[lineno].ljust(arc_width)
else:
a = ""
print("%4d %s%s %s" % (lineno, "".join(marks), a, ltext))
def disassemble(self, byte_parser, histogram=False):
"""Disassemble code, for ad-hoc experimenting."""
for bp in byte_parser.child_parsers():
if bp.text:
srclines = bp.text.splitlines()
else:
srclines = None
print("\n%s: " % bp.code)
upto = None
for disline in disgen.disgen(bp.code):
if histogram:
opcode_counts[disline.opcode] += 1
continue
if disline.first:
if srclines:
upto = upto or disline.lineno-1
while upto <= disline.lineno-1:
print("%100s%s" % ("", srclines[upto]))
upto += 1
elif disline.offset > 0:
print("")
line = disgen.format_dis_line(disline)
print("%-70s" % (line,))
print("")
def arc_ascii_art(self, arcs):
"""Draw arcs as ascii art.
Returns a dictionary mapping line numbers to ascii strings to draw for
that line.
"""
plus_ones = set()
arc_chars = collections.defaultdict(str)
for lfrom, lto in sorted(arcs):
if lfrom < 0:
arc_chars[lto] += 'v'
elif lto < 0:
arc_chars[lfrom] += '^'
else:
if lfrom == lto - 1:
plus_ones.add(lfrom)
arc_chars[lfrom] += "" # ensure this line is in arc_chars
continue
if lfrom < lto:
l1, l2 = lfrom, lto
else:
l1, l2 = lto, lfrom
w = first_all_blanks(arc_chars[l] for l in range(l1, l2+1))
for l in range(l1, l2+1):
if l == lfrom:
ch = '<'
elif l == lto:
ch = '>'
else:
ch = '|'
arc_chars[l] = set_char(arc_chars[l], w, ch)
# Add the plusses as the first character
for lineno, arcs in arc_chars.items():
arc_chars[lineno] = (
("+" if lineno in plus_ones else " ") +
arcs
)
return arc_chars
def set_char(s, n, c):
"""Set the nth char of s to be c, extending s if needed."""
s = s.ljust(n)
return s[:n] + c + s[n+1:]
def blanks(s):
"""Return the set of positions where s is blank."""
return set(i for i, c in enumerate(s) if c == " ")
def first_all_blanks(ss):
"""Find the first position that is all blank in the strings ss."""
ss = list(ss)
blankss = blanks(ss[0])
for s in ss[1:]:
blankss &= blanks(s)
if blankss:
return min(blankss)
else:
return max(len(s) for s in ss)
if __name__ == '__main__':
ParserMain().main(sys.argv[1:])
|
from inflection import underscore
from sqlalchemy import exc, func, distinct
from sqlalchemy.orm import make_transient, lazyload
from sqlalchemy.sql import and_, or_
from lemur.exceptions import AttrNotFound, DuplicateError
from lemur.extensions import db
def filter_none(kwargs):
"""
    Remove all `None` values from a given dict. SQLAlchemy does not
like to have values that are None passed to it.
:param kwargs: Dict to filter
:return: Dict without any 'None' values
"""
n_kwargs = {}
for k, v in kwargs.items():
if v:
n_kwargs[k] = v
return n_kwargs
def session_query(model):
"""
Returns a SQLAlchemy query object for the specified `model`.
If `model` has a ``query`` attribute already, that object will be returned.
Otherwise a query will be created and returned based on `session`.
:param model: sqlalchemy model
:return: query object for model
"""
return model.query if hasattr(model, "query") else db.session.query(model)
def create_query(model, kwargs):
"""
    Returns a SQLAlchemy query object for the specified `model`,
    filtered by the kwargs passed.
:param model:
:param kwargs:
:return:
"""
s = session_query(model)
return s.filter_by(**kwargs)
def commit():
"""
Helper to commit the current session.
"""
db.session.commit()
def add(model):
"""
Helper to add a `model` to the current session.
:param model:
:return:
"""
db.session.add(model)
def get_model_column(model, field):
if field in getattr(model, "sensitive_fields", ()):
raise AttrNotFound(field)
column = model.__table__.columns._data.get(field, None)
if column is None:
raise AttrNotFound(field)
return column
def find_all(query, model, kwargs):
"""
Returns a query object that ensures that all kwargs
are present.
:param query:
:param model:
:param kwargs:
:return:
"""
conditions = []
kwargs = filter_none(kwargs)
for attr, value in kwargs.items():
if not isinstance(value, list):
value = value.split(",")
conditions.append(get_model_column(model, attr).in_(value))
return query.filter(and_(*conditions))
def find_any(query, model, kwargs):
"""
Returns a query object that allows any kwarg
to be present.
:param query:
:param model:
:param kwargs:
:return:
"""
or_args = []
for attr, value in kwargs.items():
or_args.append(or_(get_model_column(model, attr) == value))
exprs = or_(*or_args)
return query.filter(exprs)
def get(model, value, field="id"):
"""
Returns one object filtered by the field and value.
:param model:
:param value:
:param field:
:return:
"""
query = session_query(model)
return query.filter(get_model_column(model, field) == value).scalar()
def get_all(model, value, field="id"):
"""
    Returns a query object filtered by the given field and value.
:param model:
:param value:
:param field:
:return:
"""
query = session_query(model)
return query.filter(get_model_column(model, field) == value)
def create(model):
"""
Helper that attempts to create a new instance of an object.
:param model:
:return: :raise IntegrityError:
"""
try:
db.session.add(model)
commit()
except exc.IntegrityError as e:
raise DuplicateError(e.orig.diag.message_detail)
db.session.refresh(model)
return model
def update(model):
"""
Helper that attempts to update a model.
:param model:
:return:
"""
commit()
db.session.refresh(model)
return model
def delete(model):
"""
Helper that attempts to delete a model.
:param model:
"""
if model:
db.session.delete(model)
db.session.commit()
def filter(query, model, terms):
"""
    Helper that searches for 'like' strings in column values.
:param query:
:param model:
:param terms:
:return:
"""
column = get_model_column(model, underscore(terms[0]))
return query.filter(column.ilike("%{}%".format(terms[1])))
def sort(query, model, field, direction):
"""
Returns objects of the specified `model` in the field and direction
given
:param query:
:param model:
:param field:
:param direction:
"""
column = get_model_column(model, underscore(field))
return query.order_by(column.desc() if direction == "desc" else column.asc())
def paginate(query, page, count):
"""
Returns the items given the count and page specified
:param query:
:param page:
:param count:
"""
return query.paginate(page, count)
def update_list(model, model_attr, item_model, items):
"""
    Helper that correctly updates a model's items
depending on what has changed
:param model_attr:
:param item_model:
:param items:
:param model:
:return:
"""
ids = []
for i in getattr(model, model_attr):
if i.id not in ids:
getattr(model, model_attr).remove(i)
for i in items:
for item in getattr(model, model_attr):
if item.id == i["id"]:
break
else:
getattr(model, model_attr).append(get(item_model, i["id"]))
return model
def clone(model):
"""
    Clones the given model and removes its primary key
:param model:
:return:
"""
db.session.expunge(model)
make_transient(model)
model.id = None
return model
def get_count(q):
"""
Count the number of rows in a table. More efficient than count(*)
:param q:
:return:
"""
disable_group_by = False
if len(q._entities) > 1:
# currently support only one entity
raise Exception("only one entity is supported for get_count, got: %s" % q)
entity = q._entities[0]
if hasattr(entity, "column"):
# _ColumnEntity has column attr - on case: query(Model.column)...
col = entity.column
if q._group_by and q._distinct:
# which query can have both?
raise NotImplementedError
if q._group_by or q._distinct:
col = distinct(col)
if q._group_by:
# need to disable group_by and enable distinct - we can do this because we have only 1 entity
disable_group_by = True
count_func = func.count(col)
else:
# _MapperEntity doesn't have column attr - on case: query(Model)...
count_func = func.count()
if q._group_by and not disable_group_by:
count_func = count_func.over(None)
count_q = (
q.options(lazyload("*"))
.statement.with_only_columns([count_func])
.order_by(None)
)
if disable_group_by:
count_q = count_q.group_by(None)
count = q.session.execute(count_q).scalar()
return count
def sort_and_page(query, model, args):
"""
Helper that allows us to combine sorting and paging
:param query:
:param model:
:param args:
:return:
"""
sort_by = args.pop("sort_by")
sort_dir = args.pop("sort_dir")
page = args.pop("page")
count = args.pop("count")
    if args.get("user"):
        args.pop("user")
query = find_all(query, model, args)
if sort_by and sort_dir:
query = sort(query, model, sort_by, sort_dir)
total = get_count(query)
# offset calculated at zero
page -= 1
items = query.offset(count * page).limit(count).all()
return dict(items=items, total=total)
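# Rough usage sketch combining these helpers (the `Certificate` model and the
# filter terms below are hypothetical, not part of this module):
#     query = session_query(Certificate)
#     query = filter(query, Certificate, ["name", "wildcard"])
#     result = sort_and_page(
#         query, Certificate,
#         {"sort_by": "name", "sort_dir": "asc", "page": 1, "count": 25},
#     )
#     # result == {"items": [...], "total": <count>}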
|
import os
def boot(application, api_key, flavor, version):
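    """Attach the Bugsnag agent to the given Flask application when an API key is provided."""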
# Configure bugsnag
if api_key:
try:
import bugsnag
import bugsnag.flask
root_path = os.path.abspath(
os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
bugsnag.configure(api_key=api_key,
project_root=root_path,
release_stage=flavor,
notify_release_stages=[flavor],
app_version=version
)
bugsnag.flask.handle_exceptions(application)
except Exception as e:
raise Exception('Failed to init bugsnag agent %s' % e)
|
import os
from optional_django import staticfiles
from .exceptions import ComponentSourceFileNotFound
from .render_server import render_server
def render_component(path, props=None, to_static_markup=False, renderer=render_server, request_headers=None, timeout=None):
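    """Resolve `path` (via staticfiles when it is not absolute) and render the
    component with the given renderer; raises ComponentSourceFileNotFound if the
    source file cannot be located."""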
if not os.path.isabs(path):
abs_path = staticfiles.find(path)
if not abs_path:
raise ComponentSourceFileNotFound(path)
path = abs_path
if not os.path.exists(path):
raise ComponentSourceFileNotFound(path)
return renderer.render(path, props, to_static_markup, request_headers, timeout=timeout)
|
import numpy as np
import unittest
import chainer
from chainer import testing
from chainer.testing import attr
from chainercv.links import YOLOv2
@testing.parameterize(*testing.product({
'n_fg_class': [1, 5, 20],
}))
class TestYOLOv2(unittest.TestCase):
def setUp(self):
self.link = YOLOv2(n_fg_class=self.n_fg_class)
self.insize = 416
self.n_bbox = 13 * 13 * 5
def _check_call(self):
x = self.link.xp.array(
np.random.uniform(-1, 1, size=(1, 3, self.insize, self.insize)),
dtype=np.float32)
locs, objs, confs = self.link(x)
self.assertIsInstance(locs, chainer.Variable)
self.assertIsInstance(locs.array, self.link.xp.ndarray)
self.assertEqual(locs.shape, (1, self.n_bbox, 4))
self.assertIsInstance(objs, chainer.Variable)
self.assertIsInstance(objs.array, self.link.xp.ndarray)
self.assertEqual(objs.shape, (1, self.n_bbox))
self.assertIsInstance(confs, chainer.Variable)
self.assertIsInstance(confs.array, self.link.xp.ndarray)
self.assertEqual(confs.shape, (1, self.n_bbox, self.n_fg_class))
@attr.slow
def test_call_cpu(self):
self._check_call()
@attr.gpu
@attr.slow
def test_call_gpu(self):
self.link.to_gpu()
self._check_call()
@testing.parameterize(*testing.product({
'n_fg_class': [None, 10, 20],
'pretrained_model': ['voc0712'],
}))
class TestYOLOv2Pretrained(unittest.TestCase):
@attr.slow
def test_pretrained(self):
kwargs = {
'n_fg_class': self.n_fg_class,
'pretrained_model': self.pretrained_model,
}
if self.pretrained_model == 'voc0712':
valid = self.n_fg_class in {None, 20}
if valid:
YOLOv2(**kwargs)
else:
with self.assertRaises(ValueError):
YOLOv2(**kwargs)
testing.run_module(__name__, __file__)
|
import logging
import threading
from abc import ABC
from abc import abstractmethod
from time import sleep
from typing import Optional
from typing import Union
from kubernetes.client import V1beta1PodDisruptionBudget
from kubernetes.client import V1DeleteOptions
from kubernetes.client import V1Deployment
from kubernetes.client import V1StatefulSet
from kubernetes.client.rest import ApiException
from paasta_tools.autoscaling.autoscaling_service_lib import autoscaling_is_paused
from paasta_tools.kubernetes_tools import create_deployment
from paasta_tools.kubernetes_tools import create_pod_disruption_budget
from paasta_tools.kubernetes_tools import create_stateful_set
from paasta_tools.kubernetes_tools import force_delete_pods
from paasta_tools.kubernetes_tools import KubeClient
from paasta_tools.kubernetes_tools import KubeDeployment
from paasta_tools.kubernetes_tools import KubernetesDeploymentConfig
from paasta_tools.kubernetes_tools import list_all_deployments
from paasta_tools.kubernetes_tools import load_kubernetes_service_config_no_cache
from paasta_tools.kubernetes_tools import paasta_prefixed
from paasta_tools.kubernetes_tools import pod_disruption_budget_for_service_instance
from paasta_tools.kubernetes_tools import update_deployment
from paasta_tools.kubernetes_tools import update_stateful_set
from paasta_tools.utils import load_system_paasta_config
class Application(ABC):
def __init__(
self,
item: Union[V1Deployment, V1StatefulSet],
logging=logging.getLogger(__name__),
) -> None:
"""
        This Application wrapper is an interface for creating/deleting k8s deployments and statefulsets.
        soa_config is a KubernetesDeploymentConfig. It is not loaded in __init__ because it is not always required.
:param item: Kubernetes Object(V1Deployment/V1StatefulSet) that has already been filled up.
:param logging: where logs go
"""
if not item.metadata.namespace:
item.metadata.namespace = "paasta"
attrs = {
attr: item.metadata.labels[paasta_prefixed(attr)]
for attr in ["service", "instance", "git_sha", "config_sha"]
}
self.kube_deployment = KubeDeployment(replicas=item.spec.replicas, **attrs)
self.item = item
self.soa_config = None # type: KubernetesDeploymentConfig
self.logging = logging
def load_local_config(
self, soa_dir: str, cluster: str
) -> Optional[KubernetesDeploymentConfig]:
if not self.soa_config:
self.soa_config = load_kubernetes_service_config_no_cache(
service=self.kube_deployment.service,
instance=self.kube_deployment.instance,
cluster=cluster,
soa_dir=soa_dir,
)
return self.soa_config
def __str__(self):
service = self.kube_deployment.service
instance = self.kube_deployment.instance
git_sha = self.kube_deployment.git_sha
config_sha = self.kube_deployment.config_sha
return f"{service}-{instance}-{git_sha}-{config_sha}"
@abstractmethod
def deep_delete(self, kube_client: KubeClient) -> None:
"""
Remove all controllers, pods, and pod disruption budgets related to this application
:param kube_client:
"""
pass
def create(self, kube_client: KubeClient):
"""
Create all controllers, HPA, and pod disruption budgets related to this application
:param kube_client:
"""
pass
def update(self, kube_client: KubeClient):
"""
Update all controllers, HPA, and pod disruption budgets related to this application
:param kube_client:
"""
pass
def update_related_api_objects(self, kube_client: KubeClient) -> None:
"""
Update related Kubernetes API objects such as HPAs and Pod Disruption Budgets
:param kube_client:
"""
self.ensure_pod_disruption_budget(kube_client)
def delete_pod_disruption_budget(self, kube_client: KubeClient) -> None:
try:
kube_client.policy.delete_namespaced_pod_disruption_budget(
name=self.item.metadata.name,
namespace=self.item.metadata.namespace,
body=V1DeleteOptions(),
)
except ApiException as e:
if e.status == 404:
# Deployment does not exist, nothing to delete but
# we can consider this a success.
self.logging.debug(
"not deleting nonexistent pod disruption budget/{} from namespace/{}".format(
self.item.metadata.name, self.item.metadata.namespace
)
)
else:
raise
else:
self.logging.info(
"deleted pod disruption budget/{} from namespace/{}".format(
self.item.metadata.name, self.item.metadata.namespace
)
)
def ensure_pod_disruption_budget(
self, kube_client: KubeClient
) -> V1beta1PodDisruptionBudget:
max_unavailable: Union[str, int]
if "bounce_margin_factor" in self.soa_config.config_dict:
max_unavailable = (
f"{int((1 - self.soa_config.get_bounce_margin_factor()) * 100)}%"
)
else:
system_paasta_config = load_system_paasta_config()
max_unavailable = system_paasta_config.get_pdb_max_unavailable()
pdr = pod_disruption_budget_for_service_instance(
service=self.kube_deployment.service,
instance=self.kube_deployment.instance,
max_unavailable=max_unavailable,
)
try:
existing_pdr = kube_client.policy.read_namespaced_pod_disruption_budget(
name=pdr.metadata.name, namespace=pdr.metadata.namespace
)
except ApiException as e:
if e.status == 404:
existing_pdr = None
else:
raise
if existing_pdr:
if existing_pdr.spec.min_available is not None:
logging.info(
"Not updating poddisruptionbudget: can't have both "
"min_available and max_unavailable"
)
elif existing_pdr.spec.max_unavailable != pdr.spec.max_unavailable:
logging.info(f"Updating poddisruptionbudget {pdr.metadata.name}")
return kube_client.policy.patch_namespaced_pod_disruption_budget(
name=pdr.metadata.name, namespace=pdr.metadata.namespace, body=pdr
)
else:
logging.info(f"poddisruptionbudget {pdr.metadata.name} up to date")
else:
logging.info(f"creating poddisruptionbudget {pdr.metadata.name}")
return create_pod_disruption_budget(
kube_client=kube_client, pod_disruption_budget=pdr
)
class DeploymentWrapper(Application):
def deep_delete(self, kube_client: KubeClient) -> None:
"""
Remove all controllers, pods, and pod disruption budgets related to this application
:param kube_client:
"""
delete_options = V1DeleteOptions(propagation_policy="Foreground")
try:
kube_client.deployments.delete_namespaced_deployment(
self.item.metadata.name,
self.item.metadata.namespace,
body=delete_options,
)
except ApiException as e:
if e.status == 404:
# Deployment does not exist, nothing to delete but
# we can consider this a success.
self.logging.debug(
"not deleting nonexistent deploy/{} from namespace/{}".format(
self.item.metadata.name, self.item.metadata.namespace
)
)
else:
raise
else:
self.logging.info(
"deleted deploy/{} from namespace/{}".format(
self.item.metadata.name, self.item.metadata.namespace
)
)
self.delete_pod_disruption_budget(kube_client)
self.delete_horizontal_pod_autoscaler(kube_client)
def get_existing_app(self, kube_client: KubeClient):
return kube_client.deployments.read_namespaced_deployment(
name=self.item.metadata.name, namespace=self.item.metadata.namespace
)
def create(self, kube_client: KubeClient) -> None:
create_deployment(kube_client=kube_client, formatted_deployment=self.item)
self.ensure_pod_disruption_budget(kube_client)
self.sync_horizontal_pod_autoscaler(kube_client)
def deep_delete_and_create(self, kube_client: KubeClient) -> None:
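        # Delete the existing deployment, wait up to 60 seconds for it to disappear,
        # force-delete any leftover pods if it is still listed, then recreate it.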
self.deep_delete(kube_client)
timer = 0
while (
self.kube_deployment in set(list_all_deployments(kube_client))
and timer < 60
):
sleep(1)
timer += 1
if timer >= 60 and self.kube_deployment in set(
list_all_deployments(kube_client)
):
try:
force_delete_pods(
self.item.metadata.name,
self.kube_deployment.service,
self.kube_deployment.instance,
self.item.metadata.namespace,
kube_client,
)
except ApiException as e:
if e.status == 404:
# Deployment does not exist, nothing to delete but
# we can consider this a success.
self.logging.debug(
"not deleting nonexistent deploy/{} from namespace/{}".format(
self.kube_deployment.service, self.item.metadata.namespace
)
)
else:
raise
else:
self.logging.info(
"deleted deploy/{} from namespace/{}".format(
self.kube_deployment.service, self.item.metadata.namespace
)
)
self.create(kube_client=kube_client)
def update(self, kube_client: KubeClient) -> None:
# If HPA is enabled, do not update replicas.
        # In all other cases, replicas are set to max(instances, min_instances).
if self.soa_config.config_dict.get("bounce_method", "") == "brutal":
threading.Thread(
target=self.deep_delete_and_create, args=[KubeClient()]
).start()
return
update_deployment(kube_client=kube_client, formatted_deployment=self.item)
def update_related_api_objects(self, kube_client: KubeClient) -> None:
super().update_related_api_objects(kube_client)
self.sync_horizontal_pod_autoscaler(kube_client)
def sync_horizontal_pod_autoscaler(self, kube_client: KubeClient) -> None:
"""
        In order for autoscaling to work, the config needs both min_instances and
        max_instances set, and must not set instances.
"""
desired_hpa_spec = self.soa_config.get_autoscaling_metric_spec(
name=self.item.metadata.name,
cluster=self.soa_config.cluster,
kube_client=kube_client,
namespace=self.item.metadata.namespace,
)
hpa_exists = self.exists_hpa(kube_client)
should_have_hpa = desired_hpa_spec and not autoscaling_is_paused()
if not should_have_hpa:
self.logging.info(
f"No HPA required for {self.item.metadata.name}/name in {self.item.metadata.namespace}"
)
if hpa_exists:
self.logging.info(
f"Deleting HPA for {self.item.metadata.name}/name in {self.item.metadata.namespace}"
)
self.delete_horizontal_pod_autoscaler(kube_client)
return
self.logging.info(
f"Syncing HPA setting for {self.item.metadata.name}/name in {self.item.metadata.namespace}"
)
self.logging.debug(desired_hpa_spec)
if not hpa_exists:
self.logging.info(
f"Creating new HPA for {self.item.metadata.name}/name in {self.item.metadata.namespace}"
)
kube_client.autoscaling.create_namespaced_horizontal_pod_autoscaler(
namespace=self.item.metadata.namespace,
body=desired_hpa_spec,
pretty=True,
)
else:
self.logging.info(
f"Updating new HPA for {self.item.metadata.name}/name in {self.item.metadata.namespace}/namespace"
)
kube_client.autoscaling.replace_namespaced_horizontal_pod_autoscaler(
name=self.item.metadata.name,
namespace=self.item.metadata.namespace,
body=desired_hpa_spec,
pretty=True,
)
def exists_hpa(self, kube_client: KubeClient) -> bool:
return (
len(
kube_client.autoscaling.list_namespaced_horizontal_pod_autoscaler(
field_selector=f"metadata.name={self.item.metadata.name}",
namespace=self.item.metadata.namespace,
).items
)
> 0
)
def delete_horizontal_pod_autoscaler(self, kube_client: KubeClient) -> None:
try:
kube_client.autoscaling.delete_namespaced_horizontal_pod_autoscaler(
name=self.item.metadata.name,
namespace=self.item.metadata.namespace,
body=V1DeleteOptions(),
)
except ApiException as e:
if e.status == 404:
# Deployment does not exist, nothing to delete but
# we can consider this a success.
self.logging.debug(
f"not deleting nonexistent HPA/{self.item.metadata.name} from namespace/{self.item.metadata.namespace}"
)
else:
raise
else:
self.logging.info(
"deleted HPA/{} from namespace/{}".format(
self.item.metadata.name, self.item.metadata.namespace
)
)
class StatefulSetWrapper(Application):
def deep_delete(self, kube_client: KubeClient) -> None:
"""
Remove all controllers, pods, and pod disruption budgets related to this application
:param kube_client:
"""
delete_options = V1DeleteOptions(propagation_policy="Foreground")
try:
kube_client.deployments.delete_namespaced_stateful_set(
self.item.metadata.name,
self.item.metadata.namespace,
body=delete_options,
)
except ApiException as e:
if e.status == 404:
# StatefulSet does not exist, nothing to delete but
# we can consider this a success.
self.logging.debug(
"not deleting nonexistent statefulset/{} from namespace/{}".format(
self.item.metadata.name, self.item.metadata.namespace
)
)
else:
raise
else:
self.logging.info(
"deleted statefulset/{} from namespace/{}".format(
self.item.metadata.name, self.item.metadata.namespace
)
)
self.delete_pod_disruption_budget(kube_client)
def create(self, kube_client: KubeClient):
create_stateful_set(kube_client=kube_client, formatted_stateful_set=self.item)
self.ensure_pod_disruption_budget(kube_client)
def update(self, kube_client: KubeClient):
update_stateful_set(kube_client=kube_client, formatted_stateful_set=self.item)
def get_application_wrapper(
formatted_application: Union[V1Deployment, V1StatefulSet]
) -> Application:
app: Application
if isinstance(formatted_application, V1Deployment):
app = DeploymentWrapper(formatted_application)
elif isinstance(formatted_application, V1StatefulSet):
app = StatefulSetWrapper(formatted_application)
else:
raise Exception("Unknown kubernetes object to update")
return app
|
import os
import numpy as np
import pandas as pd
import pathos
from sklearn.base import BaseEstimator, TransformerMixin
class Ensembler(BaseEstimator, TransformerMixin):
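    """Combine predictions from several already-fitted sub-estimators into a
    single prediction per row using the configured ensemble_method
    (average/median/max/min)."""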
def __init__(self, ensemble_predictors, type_of_estimator, ensemble_method='average', num_classes=None):
self.ensemble_predictors = ensemble_predictors
self.type_of_estimator = type_of_estimator
self.ensemble_method = ensemble_method
self.num_classes = num_classes
# ################################
# Get a dataframe that is all the predictions from all the sub-models
# ################################
# Note that we will get these predictions in parallel (relatively quick)
def get_all_predictions(self, X):
def get_predictions_for_one_estimator(estimator, X):
estimator_name = estimator.name
if self.type_of_estimator == 'regressor':
predictions = estimator.predict(X)
else:
# For classifiers
predictions = list(estimator.predict_proba(X))
return_obj = {estimator_name: predictions}
return return_obj
# Don't bother parallelizing if this is a single dictionary
if X.shape[0] == 1:
predictions_from_all_estimators = map(lambda predictor: get_predictions_for_one_estimator(predictor, X), self.ensemble_predictors)
else:
# Pathos doesn't like datasets beyond a certain size. So fall back on single, non-parallel predictions instead.
if os.environ.get('is_test_suite', False) == 'True':
predictions_from_all_estimators = map(lambda predictor: get_predictions_for_one_estimator(predictor, X), self.ensemble_predictors)
else:
# Open a new multiprocessing pool
pool = pathos.multiprocessing.ProcessPool()
# Since we may have already closed the pool, try to restart it
try:
pool.restart()
except AssertionError as e:
pass
predictions_from_all_estimators = pool.map(lambda predictor: get_predictions_for_one_estimator(predictor, X), self.ensemble_predictors)
# Once we have gotten all we need from the pool, close it so it's not taking up unnecessary memory
pool.close()
try:
pool.join()
except AssertionError:
pass
predictions_from_all_estimators = list(predictions_from_all_estimators)
results = {}
for result_dict in predictions_from_all_estimators:
results.update(result_dict)
# if this is a single row we are getting predictions from, just return a dictionary with single values for all the predictions
if X.shape[0] == 1:
return results
else:
predictions_df = pd.DataFrame.from_dict(results, orient='columns')
return predictions_df
def fit(self, X, y):
return self
# ################################
# Public API to get a single prediction from each row, where that single prediction is somehow an ensemble of all our trained subpredictors
# ################################
def predict(self, X):
predictions = self.get_all_predictions(X)
# If this is just a single dictionary we're getting predictions from:
if X.shape[0] == 1:
# predictions is just a dictionary where all the values are the predicted values from one of our subpredictors. we'll want that as a list
predicted_vals = list(predictions.values())
if self.ensemble_method == 'median':
return np.median(predicted_vals)
elif self.ensemble_method == 'average' or self.ensemble_method == 'mean' or self.ensemble_method == 'avg':
return np.average(predicted_vals)
elif self.ensemble_method == 'max':
return np.max(predicted_vals)
elif self.ensemble_method == 'min':
return np.min(predicted_vals)
else:
if self.ensemble_method == 'median':
return predictions.apply(np.median, axis=1).values
elif self.ensemble_method == 'average' or self.ensemble_method == 'mean' or self.ensemble_method == 'avg':
return predictions.apply(np.average, axis=1).values
elif self.ensemble_method == 'max':
return predictions.apply(np.max, axis=1).values
elif self.ensemble_method == 'min':
return predictions.apply(np.min, axis=1).values
def get_predictions_by_class(self, predictions):
predictions_by_class = []
for class_idx in range(self.num_classes):
class_preds = [pred[class_idx] for pred in predictions]
predictions_by_class.append(class_preds)
return predictions_by_class
def predict_proba(self, X):
predictions = self.get_all_predictions(X)
# If this is just a single dictionary we're getting predictions from:
if X.shape[0] == 1:
# predictions is just a dictionary where all the values are the predicted values from one of our subpredictors. we'll want that as a list
predicted_vals = list(predictions.values())
predicted_vals = self.get_predictions_by_class(predicted_vals)
if self.ensemble_method == 'median':
return [np.median(class_preds) for class_preds in predicted_vals]
elif self.ensemble_method == 'average' or self.ensemble_method == 'mean' or self.ensemble_method == 'avg':
return [np.average(class_preds) for class_preds in predicted_vals]
elif self.ensemble_method == 'max':
return [np.max(class_preds) for class_preds in predicted_vals]
elif self.ensemble_method == 'min':
return [np.min(class_preds) for class_preds in predicted_vals]
else:
classed_predictions = predictions.apply(self.get_predictions_by_class, axis=1)
if self.ensemble_method == 'median':
return classed_predictions.apply(np.median, axis=1)
elif self.ensemble_method == 'average' or self.ensemble_method == 'mean' or self.ensemble_method == 'avg':
return classed_predictions.apply(np.average, axis=1)
elif self.ensemble_method == 'max':
return classed_predictions.apply(np.max, axis=1)
elif self.ensemble_method == 'min':
return classed_predictions.apply(np.min, axis=1)
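# Rough usage sketch (the estimator names below are hypothetical; each sub-model
# is assumed to be fitted already and to expose a `.name` attribute):
#     ensemble = Ensembler(
#         ensemble_predictors=[model_a, model_b],
#         type_of_estimator='regressor',
#         ensemble_method='median',
#     )
#     y_pred = ensemble.predict(X_test)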
|
import re
from typing import Iterable, Tuple
from PyQt5.QtCore import Qt, QSortFilterProxyModel, QRegExp
from PyQt5.QtGui import QStandardItem, QStandardItemModel
from PyQt5.QtWidgets import QWidget
from qutebrowser.completion.models import util
from qutebrowser.utils import qtutils, log
class ListCategory(QSortFilterProxyModel):
"""Expose a list of items as a category for the CompletionModel."""
def __init__(self,
name: str,
items: Iterable[Tuple[str, ...]],
sort: bool = True,
delete_func: util.DeleteFuncType = None,
parent: QWidget = None):
super().__init__(parent)
self.name = name
self.srcmodel = QStandardItemModel(parent=self)
self._pattern = ''
# ListCategory filters all columns
self.columns_to_filter = [0, 1, 2]
self.setFilterKeyColumn(-1)
for item in items:
self.srcmodel.appendRow([QStandardItem(x) for x in item])
self.setSourceModel(self.srcmodel)
self.delete_func = delete_func
self._sort = sort
def set_pattern(self, val):
"""Setter for pattern.
Args:
val: The value to set.
"""
self._pattern = val
val = re.sub(r' +', r' ', val) # See #1919
val = re.escape(val)
val = val.replace(r'\ ', '.*')
rx = QRegExp(val, Qt.CaseInsensitive)
self.setFilterRegExp(rx)
self.invalidate()
sortcol = 0
self.sort(sortcol)
def lessThan(self, lindex, rindex):
"""Custom sorting implementation.
Prefers all items which start with self._pattern. Other than that, uses
normal Python string sorting.
Args:
lindex: The QModelIndex of the left item (*left* < right)
rindex: The QModelIndex of the right item (left < *right*)
Return:
True if left < right, else False
"""
qtutils.ensure_valid(lindex)
qtutils.ensure_valid(rindex)
left = self.srcmodel.data(lindex)
right = self.srcmodel.data(rindex)
if left is None or right is None: # pragma: no cover
log.completion.warning("Got unexpected None value, "
"left={!r} right={!r} "
"lindex={!r} rindex={!r}"
.format(left, right, lindex, rindex))
return False
leftstart = left.startswith(self._pattern)
rightstart = right.startswith(self._pattern)
if leftstart and not rightstart:
return True
elif rightstart and not leftstart:
return False
elif self._sort:
return left < right
else:
return False
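# Rough usage sketch (the completion entries below are made up, not qutebrowser data):
#     cat = ListCategory("Bookmarks",
#                        [("https://example.org", "Example", "")],
#                        delete_func=None)
#     cat.set_pattern("exam")  # filters the rows and re-sorts, preferring prefix matches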
|
from typing import Dict, List
import voluptuous as vol
from homeassistant.components.device_automation.const import CONF_IS_OFF, CONF_IS_ON
from homeassistant.const import ATTR_DEVICE_CLASS, CONF_ENTITY_ID, CONF_FOR, CONF_TYPE
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import condition, config_validation as cv
from homeassistant.helpers.entity_registry import (
async_entries_for_device,
async_get_registry,
)
from homeassistant.helpers.typing import ConfigType
from . import (
DEVICE_CLASS_BATTERY,
DEVICE_CLASS_BATTERY_CHARGING,
DEVICE_CLASS_COLD,
DEVICE_CLASS_CONNECTIVITY,
DEVICE_CLASS_DOOR,
DEVICE_CLASS_GARAGE_DOOR,
DEVICE_CLASS_GAS,
DEVICE_CLASS_HEAT,
DEVICE_CLASS_LIGHT,
DEVICE_CLASS_LOCK,
DEVICE_CLASS_MOISTURE,
DEVICE_CLASS_MOTION,
DEVICE_CLASS_MOVING,
DEVICE_CLASS_OCCUPANCY,
DEVICE_CLASS_OPENING,
DEVICE_CLASS_PLUG,
DEVICE_CLASS_POWER,
DEVICE_CLASS_PRESENCE,
DEVICE_CLASS_PROBLEM,
DEVICE_CLASS_SAFETY,
DEVICE_CLASS_SMOKE,
DEVICE_CLASS_SOUND,
DEVICE_CLASS_VIBRATION,
DEVICE_CLASS_WINDOW,
DOMAIN,
)
DEVICE_CLASS_NONE = "none"
CONF_IS_BAT_LOW = "is_bat_low"
CONF_IS_NOT_BAT_LOW = "is_not_bat_low"
CONF_IS_CHARGING = "is_charging"
CONF_IS_NOT_CHARGING = "is_not_charging"
CONF_IS_COLD = "is_cold"
CONF_IS_NOT_COLD = "is_not_cold"
CONF_IS_CONNECTED = "is_connected"
CONF_IS_NOT_CONNECTED = "is_not_connected"
CONF_IS_GAS = "is_gas"
CONF_IS_NO_GAS = "is_no_gas"
CONF_IS_HOT = "is_hot"
CONF_IS_NOT_HOT = "is_not_hot"
CONF_IS_LIGHT = "is_light"
CONF_IS_NO_LIGHT = "is_no_light"
CONF_IS_LOCKED = "is_locked"
CONF_IS_NOT_LOCKED = "is_not_locked"
CONF_IS_MOIST = "is_moist"
CONF_IS_NOT_MOIST = "is_not_moist"
CONF_IS_MOTION = "is_motion"
CONF_IS_NO_MOTION = "is_no_motion"
CONF_IS_MOVING = "is_moving"
CONF_IS_NOT_MOVING = "is_not_moving"
CONF_IS_OCCUPIED = "is_occupied"
CONF_IS_NOT_OCCUPIED = "is_not_occupied"
CONF_IS_PLUGGED_IN = "is_plugged_in"
CONF_IS_NOT_PLUGGED_IN = "is_not_plugged_in"
CONF_IS_POWERED = "is_powered"
CONF_IS_NOT_POWERED = "is_not_powered"
CONF_IS_PRESENT = "is_present"
CONF_IS_NOT_PRESENT = "is_not_present"
CONF_IS_PROBLEM = "is_problem"
CONF_IS_NO_PROBLEM = "is_no_problem"
CONF_IS_UNSAFE = "is_unsafe"
CONF_IS_NOT_UNSAFE = "is_not_unsafe"
CONF_IS_SMOKE = "is_smoke"
CONF_IS_NO_SMOKE = "is_no_smoke"
CONF_IS_SOUND = "is_sound"
CONF_IS_NO_SOUND = "is_no_sound"
CONF_IS_VIBRATION = "is_vibration"
CONF_IS_NO_VIBRATION = "is_no_vibration"
CONF_IS_OPEN = "is_open"
CONF_IS_NOT_OPEN = "is_not_open"
IS_ON = [
CONF_IS_BAT_LOW,
CONF_IS_CHARGING,
CONF_IS_COLD,
CONF_IS_CONNECTED,
CONF_IS_GAS,
CONF_IS_HOT,
CONF_IS_LIGHT,
CONF_IS_NOT_LOCKED,
CONF_IS_MOIST,
CONF_IS_MOTION,
CONF_IS_MOVING,
CONF_IS_OCCUPIED,
CONF_IS_OPEN,
CONF_IS_PLUGGED_IN,
CONF_IS_POWERED,
CONF_IS_PRESENT,
CONF_IS_PROBLEM,
CONF_IS_SMOKE,
CONF_IS_SOUND,
CONF_IS_UNSAFE,
CONF_IS_VIBRATION,
CONF_IS_ON,
]
IS_OFF = [
CONF_IS_NOT_BAT_LOW,
CONF_IS_NOT_CHARGING,
CONF_IS_NOT_COLD,
CONF_IS_NOT_CONNECTED,
CONF_IS_NOT_HOT,
CONF_IS_LOCKED,
CONF_IS_NOT_MOIST,
CONF_IS_NOT_MOVING,
CONF_IS_NOT_OCCUPIED,
CONF_IS_NOT_OPEN,
CONF_IS_NOT_PLUGGED_IN,
CONF_IS_NOT_POWERED,
CONF_IS_NOT_PRESENT,
CONF_IS_NOT_UNSAFE,
CONF_IS_NO_GAS,
CONF_IS_NO_LIGHT,
CONF_IS_NO_MOTION,
CONF_IS_NO_PROBLEM,
CONF_IS_NO_SMOKE,
CONF_IS_NO_SOUND,
CONF_IS_NO_VIBRATION,
CONF_IS_OFF,
]
ENTITY_CONDITIONS = {
DEVICE_CLASS_BATTERY: [
{CONF_TYPE: CONF_IS_BAT_LOW},
{CONF_TYPE: CONF_IS_NOT_BAT_LOW},
],
DEVICE_CLASS_BATTERY_CHARGING: [
{CONF_TYPE: CONF_IS_CHARGING},
{CONF_TYPE: CONF_IS_NOT_CHARGING},
],
DEVICE_CLASS_COLD: [{CONF_TYPE: CONF_IS_COLD}, {CONF_TYPE: CONF_IS_NOT_COLD}],
DEVICE_CLASS_CONNECTIVITY: [
{CONF_TYPE: CONF_IS_CONNECTED},
{CONF_TYPE: CONF_IS_NOT_CONNECTED},
],
DEVICE_CLASS_DOOR: [{CONF_TYPE: CONF_IS_OPEN}, {CONF_TYPE: CONF_IS_NOT_OPEN}],
DEVICE_CLASS_GARAGE_DOOR: [
{CONF_TYPE: CONF_IS_OPEN},
{CONF_TYPE: CONF_IS_NOT_OPEN},
],
DEVICE_CLASS_GAS: [{CONF_TYPE: CONF_IS_GAS}, {CONF_TYPE: CONF_IS_NO_GAS}],
DEVICE_CLASS_HEAT: [{CONF_TYPE: CONF_IS_HOT}, {CONF_TYPE: CONF_IS_NOT_HOT}],
DEVICE_CLASS_LIGHT: [{CONF_TYPE: CONF_IS_LIGHT}, {CONF_TYPE: CONF_IS_NO_LIGHT}],
DEVICE_CLASS_LOCK: [{CONF_TYPE: CONF_IS_LOCKED}, {CONF_TYPE: CONF_IS_NOT_LOCKED}],
DEVICE_CLASS_MOISTURE: [{CONF_TYPE: CONF_IS_MOIST}, {CONF_TYPE: CONF_IS_NOT_MOIST}],
DEVICE_CLASS_MOTION: [{CONF_TYPE: CONF_IS_MOTION}, {CONF_TYPE: CONF_IS_NO_MOTION}],
DEVICE_CLASS_MOVING: [{CONF_TYPE: CONF_IS_MOVING}, {CONF_TYPE: CONF_IS_NOT_MOVING}],
DEVICE_CLASS_OCCUPANCY: [
{CONF_TYPE: CONF_IS_OCCUPIED},
{CONF_TYPE: CONF_IS_NOT_OCCUPIED},
],
DEVICE_CLASS_OPENING: [{CONF_TYPE: CONF_IS_OPEN}, {CONF_TYPE: CONF_IS_NOT_OPEN}],
DEVICE_CLASS_PLUG: [
{CONF_TYPE: CONF_IS_PLUGGED_IN},
{CONF_TYPE: CONF_IS_NOT_PLUGGED_IN},
],
DEVICE_CLASS_POWER: [
{CONF_TYPE: CONF_IS_POWERED},
{CONF_TYPE: CONF_IS_NOT_POWERED},
],
DEVICE_CLASS_PRESENCE: [
{CONF_TYPE: CONF_IS_PRESENT},
{CONF_TYPE: CONF_IS_NOT_PRESENT},
],
DEVICE_CLASS_PROBLEM: [
{CONF_TYPE: CONF_IS_PROBLEM},
{CONF_TYPE: CONF_IS_NO_PROBLEM},
],
DEVICE_CLASS_SAFETY: [{CONF_TYPE: CONF_IS_UNSAFE}, {CONF_TYPE: CONF_IS_NOT_UNSAFE}],
DEVICE_CLASS_SMOKE: [{CONF_TYPE: CONF_IS_SMOKE}, {CONF_TYPE: CONF_IS_NO_SMOKE}],
DEVICE_CLASS_SOUND: [{CONF_TYPE: CONF_IS_SOUND}, {CONF_TYPE: CONF_IS_NO_SOUND}],
DEVICE_CLASS_VIBRATION: [
{CONF_TYPE: CONF_IS_VIBRATION},
{CONF_TYPE: CONF_IS_NO_VIBRATION},
],
DEVICE_CLASS_WINDOW: [{CONF_TYPE: CONF_IS_OPEN}, {CONF_TYPE: CONF_IS_NOT_OPEN}],
DEVICE_CLASS_NONE: [{CONF_TYPE: CONF_IS_ON}, {CONF_TYPE: CONF_IS_OFF}],
}
CONDITION_SCHEMA = cv.DEVICE_CONDITION_BASE_SCHEMA.extend(
{
vol.Required(CONF_ENTITY_ID): cv.entity_id,
vol.Required(CONF_TYPE): vol.In(IS_OFF + IS_ON),
vol.Optional(CONF_FOR): cv.positive_time_period_dict,
}
)
async def async_get_conditions(
hass: HomeAssistant, device_id: str
) -> List[Dict[str, str]]:
"""List device conditions."""
conditions: List[Dict[str, str]] = []
entity_registry = await async_get_registry(hass)
entries = [
entry
for entry in async_entries_for_device(entity_registry, device_id)
if entry.domain == DOMAIN
]
for entry in entries:
device_class = DEVICE_CLASS_NONE
state = hass.states.get(entry.entity_id)
if state and ATTR_DEVICE_CLASS in state.attributes:
device_class = state.attributes[ATTR_DEVICE_CLASS]
templates = ENTITY_CONDITIONS.get(
device_class, ENTITY_CONDITIONS[DEVICE_CLASS_NONE]
)
conditions.extend(
{
**template,
"condition": "device",
"device_id": device_id,
"entity_id": entry.entity_id,
"domain": DOMAIN,
}
for template in templates
)
return conditions
@callback
def async_condition_from_config(
config: ConfigType, config_validation: bool
) -> condition.ConditionCheckerType:
"""Evaluate state based on configuration."""
if config_validation:
config = CONDITION_SCHEMA(config)
condition_type = config[CONF_TYPE]
if condition_type in IS_ON:
stat = "on"
else:
stat = "off"
state_config = {
condition.CONF_CONDITION: "state",
condition.CONF_ENTITY_ID: config[CONF_ENTITY_ID],
condition.CONF_STATE: stat,
}
if CONF_FOR in config:
state_config[CONF_FOR] = config[CONF_FOR]
return condition.state_from_config(state_config)
async def async_get_condition_capabilities(hass: HomeAssistant, config: dict) -> dict:
"""List condition capabilities."""
return {
"extra_fields": vol.Schema(
{vol.Optional(CONF_FOR): cv.positive_time_period_dict}
)
}
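# Illustrative note (added commentary, not part of the original module):
# async_condition_from_config() translates a device condition into an
# equivalent state condition. For example, a config along the lines of
#     {"condition": "device", "domain": DOMAIN, "device_id": "...",
#      "entity_id": "binary_sensor.front_door", "type": "is_open",
#      "for": {"minutes": 5}}
# becomes the state condition
#     {"condition": "state", "entity_id": "binary_sensor.front_door",
#      "state": "on", "for": {"minutes": 5}}
# because "is_open" (CONF_IS_OPEN) is a member of IS_ON. The entity id and
# duration above are made-up values used only for illustration.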
|
import platform
import sys
from types import CodeType
from . import TemplateSyntaxError
from .utils import internal_code
from .utils import missing
def rewrite_traceback_stack(source=None):
"""Rewrite the current exception to replace any tracebacks from
within compiled template code with tracebacks that look like they
came from the template source.
This must be called within an ``except`` block.
:param source: For ``TemplateSyntaxError``, the original source if
known.
:return: The original exception with the rewritten traceback.
"""
_, exc_value, tb = sys.exc_info()
if isinstance(exc_value, TemplateSyntaxError) and not exc_value.translated:
exc_value.translated = True
exc_value.source = source
# Remove the old traceback, otherwise the frames from the
# compiler still show up.
exc_value.with_traceback(None)
# Outside of runtime, so the frame isn't executing template
# code, but it still needs to point at the template.
tb = fake_traceback(
exc_value, None, exc_value.filename or "<unknown>", exc_value.lineno
)
else:
# Skip the frame for the render function.
tb = tb.tb_next
stack = []
    # Build the stack of traceback objects, replacing any in template
# code with the source file and line information.
while tb is not None:
# Skip frames decorated with @internalcode. These are internal
# calls that aren't useful in template debugging output.
if tb.tb_frame.f_code in internal_code:
tb = tb.tb_next
continue
template = tb.tb_frame.f_globals.get("__jinja_template__")
if template is not None:
lineno = template.get_corresponding_lineno(tb.tb_lineno)
fake_tb = fake_traceback(exc_value, tb, template.filename, lineno)
stack.append(fake_tb)
else:
stack.append(tb)
tb = tb.tb_next
tb_next = None
# Assign tb_next in reverse to avoid circular references.
for tb in reversed(stack):
tb_next = tb_set_next(tb, tb_next)
return exc_value.with_traceback(tb_next)
def fake_traceback(exc_value, tb, filename, lineno):
"""Produce a new traceback object that looks like it came from the
template source instead of the compiled code. The filename, line
number, and location name will point to the template, and the local
variables will be the current template context.
:param exc_value: The original exception to be re-raised to create
the new traceback.
:param tb: The original traceback to get the local variables and
code info from.
:param filename: The template filename.
:param lineno: The line number in the template source.
"""
if tb is not None:
# Replace the real locals with the context that would be
# available at that point in the template.
locals = get_template_locals(tb.tb_frame.f_locals)
locals.pop("__jinja_exception__", None)
else:
locals = {}
globals = {
"__name__": filename,
"__file__": filename,
"__jinja_exception__": exc_value,
}
# Raise an exception at the correct line number.
code = compile("\n" * (lineno - 1) + "raise __jinja_exception__", filename, "exec")
# Build a new code object that points to the template file and
# replaces the location with a block name.
try:
location = "template"
if tb is not None:
function = tb.tb_frame.f_code.co_name
if function == "root":
location = "top-level template code"
elif function.startswith("block_"):
location = f"block {function[6:]!r}"
# Collect arguments for the new code object. CodeType only
# accepts positional arguments, and arguments were inserted in
# new Python versions.
code_args = []
for attr in (
"argcount",
"posonlyargcount", # Python 3.8
"kwonlyargcount",
"nlocals",
"stacksize",
"flags",
"code", # codestring
"consts", # constants
"names",
"varnames",
("filename", filename),
("name", location),
"firstlineno",
"lnotab",
"freevars",
"cellvars",
):
if isinstance(attr, tuple):
# Replace with given value.
code_args.append(attr[1])
continue
try:
# Copy original value if it exists.
code_args.append(getattr(code, "co_" + attr))
except AttributeError:
# Some arguments were added later.
continue
code = CodeType(*code_args)
except Exception:
# Some environments such as Google App Engine don't support
# modifying code objects.
pass
# Execute the new code, which is guaranteed to raise, and return
# the new traceback without this frame.
try:
exec(code, globals, locals)
except BaseException:
return sys.exc_info()[2].tb_next
def get_template_locals(real_locals):
"""Based on the runtime locals, get the context that would be
available at that point in the template.
"""
# Start with the current template context.
ctx = real_locals.get("context")
if ctx:
data = ctx.get_all().copy()
else:
data = {}
# Might be in a derived context that only sets local variables
# rather than pushing a context. Local variables follow the scheme
# l_depth_name. Find the highest-depth local that has a value for
# each name.
local_overrides = {}
for name, value in real_locals.items():
if not name.startswith("l_") or value is missing:
# Not a template variable, or no longer relevant.
continue
try:
_, depth, name = name.split("_", 2)
depth = int(depth)
except ValueError:
continue
cur_depth = local_overrides.get(name, (-1,))[0]
if cur_depth < depth:
local_overrides[name] = (depth, value)
# Modify the context with any derived context.
for name, (_, value) in local_overrides.items():
if value is missing:
data.pop(name, None)
else:
data[name] = value
return data
if sys.version_info >= (3, 7):
# tb_next is directly assignable as of Python 3.7
def tb_set_next(tb, tb_next):
tb.tb_next = tb_next
return tb
elif platform.python_implementation() == "PyPy":
# PyPy might have special support, and won't work with ctypes.
try:
import tputil
except ImportError:
# Without tproxy support, use the original traceback.
def tb_set_next(tb, tb_next):
return tb
else:
# With tproxy support, create a proxy around the traceback that
# returns the new tb_next.
def tb_set_next(tb, tb_next):
def controller(op):
if op.opname == "__getattribute__" and op.args[0] == "tb_next":
return tb_next
return op.delegate()
return tputil.make_proxy(controller, obj=tb)
else:
# Use ctypes to assign tb_next at the C level since it's read-only
# from Python.
import ctypes
class _CTraceback(ctypes.Structure):
_fields_ = [
# Extra PyObject slots when compiled with Py_TRACE_REFS.
("PyObject_HEAD", ctypes.c_byte * object().__sizeof__()),
# Only care about tb_next as an object, not a traceback.
("tb_next", ctypes.py_object),
]
def tb_set_next(tb, tb_next):
c_tb = _CTraceback.from_address(id(tb))
# Clear out the old tb_next.
if tb.tb_next is not None:
c_tb_next = ctypes.py_object(tb.tb_next)
c_tb.tb_next = ctypes.py_object()
ctypes.pythonapi.Py_DecRef(c_tb_next)
# Assign the new tb_next.
if tb_next is not None:
c_tb_next = ctypes.py_object(tb_next)
ctypes.pythonapi.Py_IncRef(c_tb_next)
c_tb.tb_next = c_tb_next
return tb
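if __name__ == "__main__":
    # Minimal sketch added for illustration; it is not part of the original
    # module. It demonstrates how get_template_locals() resolves the
    # ``l_<depth>_<name>`` naming scheme used inside compiled templates,
    # keeping the highest-depth value for each variable name. Run it as
    # ``python -m jinja2.debug`` so the relative imports above resolve.
    demo_locals = {
        "l_0_item": "outer",
        "l_1_item": "inner",
        "l_0_other": 42,
        "not_a_template_var": object(),
    }
    # Expected output: {'item': 'inner', 'other': 42}
    print(get_template_locals(demo_locals))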
|
import logging
import pytest
import hypothesis
from hypothesis import strategies
from qutebrowser.browser.webkit import http, rfc6266
@pytest.mark.parametrize('template', [
'{}',
'attachment; filename="{}"',
'inline; {}',
'attachment; {}="foo"',
"attachment; filename*=iso-8859-1''{}",
'attachment; filename*={}',
])
@hypothesis.given(strategies.text(alphabet=[chr(x) for x in range(255)]))
def test_parse_content_disposition(caplog, template, stubs, s):
"""Test parsing headers based on templates which hypothesis completes."""
header = template.format(s)
reply = stubs.FakeNetworkReply(headers={'Content-Disposition': header})
with caplog.at_level(logging.ERROR, 'rfc6266'):
http.parse_content_disposition(reply)
@hypothesis.given(strategies.binary())
def test_content_disposition_directly(s):
"""Test rfc6266.parse_headers directly with binary data."""
try:
cd = rfc6266.parse_headers(s)
cd.filename()
except (SyntaxError, UnicodeDecodeError, rfc6266.Error):
pass
@hypothesis.given(strategies.text())
def test_parse_content_type(stubs, s):
reply = stubs.FakeNetworkReply(headers={'Content-Type': s})
http.parse_content_type(reply)
|
from binascii import hexlify, unhexlify
import logging
from serial import Serial, SerialException
import voluptuous as vol
from xbee_helper import ZigBee
import xbee_helper.const as xb_const
from xbee_helper.device import convert_adc
from xbee_helper.exceptions import ZigBeeException, ZigBeeTxFailure
from homeassistant.const import (
CONF_ADDRESS,
CONF_DEVICE,
CONF_NAME,
CONF_PIN,
EVENT_HOMEASSISTANT_STOP,
PERCENTAGE,
)
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_connect, dispatcher_send
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
DOMAIN = "xbee"
SIGNAL_XBEE_FRAME_RECEIVED = "xbee_frame_received"
CONF_BAUD = "baud"
DEFAULT_DEVICE = "/dev/ttyUSB0"
DEFAULT_BAUD = 9600
DEFAULT_ADC_MAX_VOLTS = 1.2
ATTR_FRAME = "frame"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Optional(CONF_BAUD, default=DEFAULT_BAUD): cv.string,
vol.Optional(CONF_DEVICE, default=DEFAULT_DEVICE): cv.string,
}
)
},
extra=vol.ALLOW_EXTRA,
)
PLATFORM_SCHEMA = vol.Schema(
{
vol.Required(CONF_NAME): cv.string,
vol.Optional(CONF_PIN): cv.positive_int,
vol.Optional(CONF_ADDRESS): cv.string,
},
extra=vol.ALLOW_EXTRA,
)
def setup(hass, config):
"""Set up the connection to the XBee Zigbee device."""
usb_device = config[DOMAIN].get(CONF_DEVICE, DEFAULT_DEVICE)
baud = int(config[DOMAIN].get(CONF_BAUD, DEFAULT_BAUD))
try:
ser = Serial(usb_device, baud)
except SerialException as exc:
_LOGGER.exception("Unable to open serial port for XBee: %s", exc)
return False
zigbee_device = ZigBee(ser)
def close_serial_port(*args):
"""Close the serial port we're using to communicate with the XBee."""
zigbee_device.zb.serial.close()
def _frame_received(frame):
"""Run when a XBee Zigbee frame is received.
Pickles the frame, then encodes it into base64 since it contains
non JSON serializable binary.
"""
dispatcher_send(hass, SIGNAL_XBEE_FRAME_RECEIVED, frame)
hass.data[DOMAIN] = zigbee_device
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, close_serial_port)
zigbee_device.add_frame_rx_handler(_frame_received)
return True
def frame_is_relevant(entity, frame):
"""Test whether the frame is relevant to the entity."""
if frame.get("source_addr_long") != entity.config.address:
return False
return "samples" in frame
class XBeeConfig:
"""Handle the fetching of configuration from the config file."""
def __init__(self, config):
"""Initialize the configuration."""
self._config = config
self._should_poll = config.get("poll", True)
@property
def name(self):
"""Return the name given to the entity."""
return self._config["name"]
@property
def address(self):
"""Return the address of the device.
If an address has been provided, unhexlify it, otherwise return None
as we're talking to our local XBee device.
"""
address = self._config.get("address")
if address is not None:
address = unhexlify(address)
return address
@property
def should_poll(self):
"""Return the polling state."""
return self._should_poll
class XBeePinConfig(XBeeConfig):
"""Handle the fetching of configuration from the configuration file."""
@property
def pin(self):
"""Return the GPIO pin number."""
return self._config["pin"]
class XBeeDigitalInConfig(XBeePinConfig):
"""A subclass of XBeePinConfig."""
def __init__(self, config):
"""Initialise the XBee Zigbee Digital input config."""
super().__init__(config)
self._bool2state, self._state2bool = self.boolean_maps
@property
def boolean_maps(self):
"""Create mapping dictionaries for potential inversion of booleans.
Create dicts to map the pin state (true/false) to potentially inverted
values depending on the on_state config value which should be set to
"low" or "high".
"""
if self._config.get("on_state", "").lower() == "low":
bool2state = {True: False, False: True}
else:
bool2state = {True: True, False: False}
state2bool = {v: k for k, v in bool2state.items()}
return bool2state, state2bool
@property
def bool2state(self):
"""Return a dictionary mapping the internal value to the Zigbee value.
For the translation of on/off as being pin high or low.
"""
return self._bool2state
@property
def state2bool(self):
"""Return a dictionary mapping the Zigbee value to the internal value.
For the translation of pin high/low as being on or off.
"""
return self._state2bool
class XBeeDigitalOutConfig(XBeePinConfig):
"""A subclass of XBeePinConfig.
Set _should_poll to default as False instead of True. The value will
still be overridden by the presence of a 'poll' config entry.
"""
def __init__(self, config):
"""Initialize the XBee Zigbee Digital out."""
super().__init__(config)
self._bool2state, self._state2bool = self.boolean_maps
self._should_poll = config.get("poll", False)
@property
def boolean_maps(self):
"""Create dicts to map booleans to pin high/low and vice versa.
Depends on the config item "on_state" which should be set to "low"
or "high".
"""
if self._config.get("on_state", "").lower() == "low":
bool2state = {
True: xb_const.GPIO_DIGITAL_OUTPUT_LOW,
False: xb_const.GPIO_DIGITAL_OUTPUT_HIGH,
}
else:
bool2state = {
True: xb_const.GPIO_DIGITAL_OUTPUT_HIGH,
False: xb_const.GPIO_DIGITAL_OUTPUT_LOW,
}
state2bool = {v: k for k, v in bool2state.items()}
return bool2state, state2bool
@property
def bool2state(self):
"""Return a dictionary mapping booleans to GPIOSetting objects.
For the translation of on/off as being pin high or low.
"""
return self._bool2state
@property
def state2bool(self):
"""Return a dictionary mapping GPIOSetting objects to booleans.
For the translation of pin high/low as being on or off.
"""
return self._state2bool
class XBeeAnalogInConfig(XBeePinConfig):
"""Representation of a XBee Zigbee GPIO pin set to analog in."""
@property
def max_voltage(self):
"""Return the voltage for ADC to report its highest value."""
return float(self._config.get("max_volts", DEFAULT_ADC_MAX_VOLTS))
class XBeeDigitalIn(Entity):
"""Representation of a GPIO pin configured as a digital input."""
def __init__(self, config, device):
"""Initialize the device."""
self._config = config
self._device = device
self._state = False
async def async_added_to_hass(self):
"""Register callbacks."""
def handle_frame(frame):
"""Handle an incoming frame.
Handle an incoming frame and update our status if it contains
information relating to this device.
"""
if not frame_is_relevant(self, frame):
return
sample = next(iter(frame["samples"]))
pin_name = xb_const.DIGITAL_PINS[self._config.pin]
if pin_name not in sample:
# Doesn't contain information about our pin
return
# Set state to the value of sample, respecting any inversion
# logic from the on_state config variable.
self._state = self._config.state2bool[
self._config.bool2state[sample[pin_name]]
]
self.schedule_update_ha_state()
async_dispatcher_connect(self.hass, SIGNAL_XBEE_FRAME_RECEIVED, handle_frame)
@property
def name(self):
"""Return the name of the input."""
return self._config.name
@property
def config(self):
"""Return the entity's configuration."""
return self._config
@property
def should_poll(self):
"""Return the state of the polling, if needed."""
return self._config.should_poll
@property
def is_on(self):
"""Return True if the Entity is on, else False."""
return self._state
def update(self):
"""Ask the Zigbee device what state its input pin is in."""
try:
sample = self._device.get_sample(self._config.address)
except ZigBeeTxFailure:
_LOGGER.warning(
"Transmission failure when attempting to get sample from "
"Zigbee device at address: %s",
hexlify(self._config.address),
)
return
except ZigBeeException as exc:
_LOGGER.exception("Unable to get sample from Zigbee device: %s", exc)
return
pin_name = xb_const.DIGITAL_PINS[self._config.pin]
if pin_name not in sample:
_LOGGER.warning(
"Pin %s (%s) was not in the sample provided by Zigbee device %s",
self._config.pin,
pin_name,
hexlify(self._config.address),
)
return
self._state = self._config.state2bool[sample[pin_name]]
class XBeeDigitalOut(XBeeDigitalIn):
"""Representation of a GPIO pin configured as a digital input."""
def _set_state(self, state):
"""Initialize the XBee Zigbee digital out device."""
try:
self._device.set_gpio_pin(
self._config.pin, self._config.bool2state[state], self._config.address
)
except ZigBeeTxFailure:
_LOGGER.warning(
"Transmission failure when attempting to set output pin on "
"Zigbee device at address: %s",
hexlify(self._config.address),
)
return
except ZigBeeException as exc:
_LOGGER.exception("Unable to set digital pin on XBee device: %s", exc)
return
self._state = state
if not self.should_poll:
self.schedule_update_ha_state()
def turn_on(self, **kwargs):
"""Set the digital output to its 'on' state."""
self._set_state(True)
def turn_off(self, **kwargs):
"""Set the digital output to its 'off' state."""
self._set_state(False)
def update(self):
"""Ask the XBee device what its output is set to."""
try:
pin_state = self._device.get_gpio_pin(
self._config.pin, self._config.address
)
except ZigBeeTxFailure:
_LOGGER.warning(
"Transmission failure when attempting to get output pin status"
" from Zigbee device at address: %s",
hexlify(self._config.address),
)
return
except ZigBeeException as exc:
_LOGGER.exception(
"Unable to get output pin status from XBee device: %s", exc
)
return
self._state = self._config.state2bool[pin_state]
class XBeeAnalogIn(Entity):
"""Representation of a GPIO pin configured as an analog input."""
def __init__(self, config, device):
"""Initialize the XBee analog in device."""
self._config = config
self._device = device
self._value = None
async def async_added_to_hass(self):
"""Register callbacks."""
def handle_frame(frame):
"""Handle an incoming frame.
Handle an incoming frame and update our status if it contains
information relating to this device.
"""
if not frame_is_relevant(self, frame):
return
sample = frame["samples"].pop()
pin_name = xb_const.ANALOG_PINS[self._config.pin]
if pin_name not in sample:
# Doesn't contain information about our pin
return
self._value = convert_adc(
sample[pin_name], xb_const.ADC_PERCENTAGE, self._config.max_voltage
)
self.schedule_update_ha_state()
async_dispatcher_connect(self.hass, SIGNAL_XBEE_FRAME_RECEIVED, handle_frame)
@property
def name(self):
"""Return the name of the input."""
return self._config.name
@property
def config(self):
"""Return the entity's configuration."""
return self._config
@property
def should_poll(self):
"""Return the polling state, if needed."""
return self._config.should_poll
@property
def state(self):
"""Return the state of the entity."""
return self._value
@property
def unit_of_measurement(self):
"""Return the unit this state is expressed in."""
return PERCENTAGE
def update(self):
"""Get the latest reading from the ADC."""
try:
self._value = self._device.read_analog_pin(
self._config.pin,
self._config.max_voltage,
self._config.address,
xb_const.ADC_PERCENTAGE,
)
except ZigBeeTxFailure:
_LOGGER.warning(
"Transmission failure when attempting to get sample from "
"Zigbee device at address: %s",
hexlify(self._config.address),
)
except ZigBeeException as exc:
_LOGGER.exception("Unable to get sample from Zigbee device: %s", exc)
|
from textwrap import wrap
from weblate.checks.format import BaseFormatCheck
from weblate.checks.models import CHECKS
from weblate.utils.management.base import BaseCommand
def sorter(check):
if isinstance(check, BaseFormatCheck):
pos = 1
elif check.name < "Formatted strings":
pos = 0
else:
pos = 2
return (check.source, pos, check.name.lower())
class Command(BaseCommand):
help = "List installed checks"
def flush_lines(self, lines):
self.stdout.writelines(lines)
lines.clear()
def handle(self, *args, **options):
"""List installed checks."""
ignores = []
lines = []
for check in sorted(CHECKS.values(), key=sorter):
is_format = isinstance(check, BaseFormatCheck)
if not is_format and lines:
self.flush_lines(lines)
# Output immediately
self.stdout.write(f".. _{check.doc_id}:\n")
if not lines:
lines.append("\n")
lines.append(str(check.name))
if is_format:
lines.append("*" * len(check.name))
else:
lines.append("~" * len(check.name))
lines.append("\n")
lines.append("\n".join(wrap(f"*{check.description}*", 79)))
lines.append("\n")
if not is_format:
self.flush_lines(lines)
ignores.append(f"``{check.ignore_string}``")
ignores.append(f' Skip the "{check.name}" quality check.')
self.stdout.write("\n")
self.stdout.writelines(ignores)
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from jbossapi import JbossApiCollector
###############################################################################
class TestJbossApiCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('JbossApiCollector', {
})
self.collector = JbossApiCollector(config, None)
def test_import(self):
self.assertTrue(JbossApiCollector)
###############################################################################
if __name__ == "__main__":
unittest.main()
|
import asyncio
from datetime import timedelta
from pyflunearyou import Client
from pyflunearyou.errors import FluNearYouError
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE
from homeassistant.core import callback
from homeassistant.helpers import aiohttp_client, config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.event import async_track_time_interval
from .const import (
CATEGORY_CDC_REPORT,
CATEGORY_USER_REPORT,
DATA_CLIENT,
DOMAIN,
LOGGER,
SENSORS,
TOPIC_UPDATE,
)
DATA_LISTENER = "listener"
DEFAULT_SCAN_INTERVAL = timedelta(minutes=30)
CONFIG_SCHEMA = cv.deprecated(DOMAIN, invalidation_version="0.119")
@callback
def async_get_api_category(sensor_type):
"""Get the category that a particular sensor type belongs to."""
try:
return next(
(
category
for category, sensors in SENSORS.items()
for sensor in sensors
if sensor[0] == sensor_type
)
)
except StopIteration as err:
raise ValueError(f"Can't find category sensor type: {sensor_type}") from err
async def async_setup(hass, config):
"""Set up the Flu Near You component."""
hass.data[DOMAIN] = {DATA_CLIENT: {}, DATA_LISTENER: {}}
return True
async def async_setup_entry(hass, config_entry):
"""Set up Flu Near You as config entry."""
websession = aiohttp_client.async_get_clientsession(hass)
fny = FluNearYouData(
hass,
Client(websession),
config_entry.data.get(CONF_LATITUDE, hass.config.latitude),
config_entry.data.get(CONF_LONGITUDE, hass.config.longitude),
)
await fny.async_update()
hass.data[DOMAIN][DATA_CLIENT][config_entry.entry_id] = fny
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, "sensor")
)
async def refresh(event_time):
"""Refresh data from Flu Near You."""
await fny.async_update()
hass.data[DOMAIN][DATA_LISTENER][config_entry.entry_id] = async_track_time_interval(
hass, refresh, DEFAULT_SCAN_INTERVAL
)
return True
async def async_unload_entry(hass, config_entry):
"""Unload an Flu Near You config entry."""
hass.data[DOMAIN][DATA_CLIENT].pop(config_entry.entry_id)
remove_listener = hass.data[DOMAIN][DATA_LISTENER].pop(config_entry.entry_id)
remove_listener()
await hass.config_entries.async_forward_entry_unload(config_entry, "sensor")
return True
class FluNearYouData:
"""Define a data object to retrieve info from Flu Near You."""
def __init__(self, hass, client, latitude, longitude):
"""Initialize."""
self._async_cancel_time_interval_listener = None
self._client = client
self._hass = hass
self.data = {}
self.latitude = latitude
self.longitude = longitude
self._api_category_count = {
CATEGORY_CDC_REPORT: 0,
CATEGORY_USER_REPORT: 0,
}
self._api_category_locks = {
CATEGORY_CDC_REPORT: asyncio.Lock(),
CATEGORY_USER_REPORT: asyncio.Lock(),
}
async def _async_get_data_from_api(self, api_category):
"""Update and save data for a particular API category."""
if self._api_category_count[api_category] == 0:
return
if api_category == CATEGORY_CDC_REPORT:
api_coro = self._client.cdc_reports.status_by_coordinates(
self.latitude, self.longitude
)
else:
api_coro = self._client.user_reports.status_by_coordinates(
self.latitude, self.longitude
)
try:
self.data[api_category] = await api_coro
except FluNearYouError as err:
LOGGER.error("Unable to get %s data: %s", api_category, err)
self.data[api_category] = None
async def _async_update_listener_action(self, now):
"""Define an async_track_time_interval action to update data."""
await self.async_update()
@callback
    def async_deregister_api_interest(self, sensor_type):
        """Decrement the number of entities with data needs from an API category."""
        api_category = async_get_api_category(sensor_type)
        self._api_category_count[api_category] -= 1
        # If this deregistration leaves us with no registrations at all, remove
        # the time interval listener:
        if sum(self._api_category_count.values()) == 0:
            if self._async_cancel_time_interval_listener:
                self._async_cancel_time_interval_listener()
                self._async_cancel_time_interval_listener = None
async def async_register_api_interest(self, sensor_type):
"""Increment the number of entities with data needs from an API category."""
# If this is the first registration we have, start a time interval:
if not self._async_cancel_time_interval_listener:
self._async_cancel_time_interval_listener = async_track_time_interval(
self._hass,
self._async_update_listener_action,
DEFAULT_SCAN_INTERVAL,
)
api_category = async_get_api_category(sensor_type)
self._api_category_count[api_category] += 1
# If a sensor registers interest in a particular API call and the data doesn't
# exist for it yet, make the API call and grab the data:
async with self._api_category_locks[api_category]:
if api_category not in self.data:
await self._async_get_data_from_api(api_category)
async def async_update(self):
"""Update Flu Near You data."""
tasks = [
self._async_get_data_from_api(api_category)
for api_category in self._api_category_count
]
await asyncio.gather(*tasks)
LOGGER.debug("Received new data")
async_dispatcher_send(self._hass, TOPIC_UPDATE)
|
import logging
from omnilogic import LoginException, OmniLogic, OmniLogicException
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import callback
from homeassistant.helpers import aiohttp_client
from .const import CONF_SCAN_INTERVAL, DOMAIN # pylint:disable=unused-import
_LOGGER = logging.getLogger(__name__)
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Omnilogic."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
@staticmethod
@callback
def async_get_options_flow(config_entry):
"""Get the options flow for this handler."""
return OptionsFlowHandler(config_entry)
async def async_step_user(self, user_input=None):
"""Handle the initial step."""
        config_entry = self.hass.config_entries.async_entries(DOMAIN)
        if config_entry:
            return self.async_abort(reason="single_instance_allowed")
        errors = {}
if user_input is not None:
username = user_input[CONF_USERNAME]
password = user_input[CONF_PASSWORD]
session = aiohttp_client.async_get_clientsession(self.hass)
omni = OmniLogic(username, password, session)
try:
await omni.connect()
except LoginException:
errors["base"] = "invalid_auth"
except OmniLogicException:
errors["base"] = "cannot_connect"
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
await self.async_set_unique_id(user_input["username"])
self._abort_if_unique_id_configured()
return self.async_create_entry(title="Omnilogic", data=user_input)
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(CONF_USERNAME): str,
vol.Required(CONF_PASSWORD): str,
}
),
errors=errors,
)
class OptionsFlowHandler(config_entries.OptionsFlow):
"""Handle Omnilogic client options."""
def __init__(self, config_entry):
"""Initialize options flow."""
self.config_entry = config_entry
async def async_step_init(self, user_input=None):
"""Manage options."""
if user_input is not None:
return self.async_create_entry(title="", data=user_input)
return self.async_show_form(
step_id="init",
data_schema=vol.Schema(
{
vol.Optional(
CONF_SCAN_INTERVAL,
default=6,
): int,
}
),
)
|
from math import isclose
from arcam.fmj import DecodeMode2CH, DecodeModeMCH, IncomingAudioFormat, SourceCodes
import pytest
from homeassistant.components.media_player.const import (
ATTR_INPUT_SOURCE,
MEDIA_TYPE_MUSIC,
SERVICE_SELECT_SOURCE,
)
from homeassistant.const import ATTR_ENTITY_ID
from .conftest import MOCK_HOST, MOCK_NAME, MOCK_PORT, MOCK_UUID
from tests.async_mock import ANY, MagicMock, Mock, PropertyMock, patch
MOCK_TURN_ON = {
"service": "switch.turn_on",
"data": {"entity_id": "switch.test"},
}
async def update(player, force_refresh=False):
"""Force a update of player and return current state data."""
await player.async_update_ha_state(force_refresh=force_refresh)
return player.hass.states.get(player.entity_id)
async def test_properties(player, state):
"""Test standard properties."""
assert player.unique_id == f"{MOCK_UUID}-1"
assert player.device_info == {
"name": f"Arcam FMJ ({MOCK_HOST})",
"identifiers": {("arcam_fmj", MOCK_UUID), ("arcam_fmj", MOCK_HOST, MOCK_PORT)},
"model": "Arcam FMJ AVR",
"manufacturer": "Arcam",
}
assert not player.should_poll
async def test_powered_off(hass, player, state):
"""Test properties in powered off state."""
state.get_source.return_value = None
state.get_power.return_value = None
data = await update(player)
assert "source" not in data.attributes
assert data.state == "off"
async def test_powered_on(player, state):
"""Test properties in powered on state."""
state.get_source.return_value = SourceCodes.PVR
state.get_power.return_value = True
data = await update(player)
assert data.attributes["source"] == "PVR"
assert data.state == "on"
async def test_supported_features(player, state):
"""Test supported features."""
data = await update(player)
assert data.attributes["supported_features"] == 200588
async def test_turn_on(player, state):
"""Test turn on service."""
state.get_power.return_value = None
await player.async_turn_on()
state.set_power.assert_not_called()
state.get_power.return_value = False
await player.async_turn_on()
state.set_power.assert_called_with(True)
async def test_turn_off(player, state):
"""Test command to turn off."""
await player.async_turn_off()
state.set_power.assert_called_with(False)
@pytest.mark.parametrize("mute", [True, False])
async def test_mute_volume(player, state, mute):
"""Test mute functionality."""
await player.async_mute_volume(mute)
state.set_mute.assert_called_with(mute)
player.async_write_ha_state.assert_called_with()
async def test_name(player):
"""Test name."""
assert player.name == f"{MOCK_NAME} - Zone: 1"
async def test_update(player, state):
"""Test update."""
await update(player, force_refresh=True)
state.update.assert_called_with()
@pytest.mark.parametrize(
"fmt, result",
[
(None, True),
(IncomingAudioFormat.PCM, True),
(IncomingAudioFormat.ANALOGUE_DIRECT, True),
(IncomingAudioFormat.DOLBY_DIGITAL, False),
],
)
async def test_2ch(player, state, fmt, result):
"""Test selection of 2ch mode."""
state.get_incoming_audio_format.return_value = (fmt, None)
assert player._get_2ch() == result # pylint: disable=W0212
@pytest.mark.parametrize(
"source, value",
[("PVR", SourceCodes.PVR), ("BD", SourceCodes.BD), ("INVALID", None)],
)
async def test_select_source(hass, player_setup, state, source, value):
"""Test selection of source."""
await hass.services.async_call(
"media_player",
SERVICE_SELECT_SOURCE,
service_data={ATTR_ENTITY_ID: player_setup, ATTR_INPUT_SOURCE: source},
blocking=True,
)
if value:
state.set_source.assert_called_with(value)
else:
state.set_source.assert_not_called()
async def test_source_list(player, state):
"""Test source list."""
state.get_source_list.return_value = [SourceCodes.BD]
data = await update(player)
assert data.attributes["source_list"] == ["BD"]
@pytest.mark.parametrize(
"mode, mode_sel, mode_2ch, mode_mch",
[
("STEREO", True, DecodeMode2CH.STEREO, None),
("STEREO", False, None, None),
("STEREO", False, None, None),
],
)
async def test_select_sound_mode(player, state, mode, mode_sel, mode_2ch, mode_mch):
"""Test selection sound mode."""
player._get_2ch = Mock(return_value=mode_sel) # pylint: disable=W0212
await player.async_select_sound_mode(mode)
if mode_2ch:
state.set_decode_mode_2ch.assert_called_with(mode_2ch)
else:
state.set_decode_mode_2ch.assert_not_called()
if mode_mch:
state.set_decode_mode_mch.assert_called_with(mode_mch)
else:
state.set_decode_mode_mch.assert_not_called()
async def test_volume_up(player, state):
"""Test mute functionality."""
await player.async_volume_up()
state.inc_volume.assert_called_with()
player.async_write_ha_state.assert_called_with()
async def test_volume_down(player, state):
"""Test mute functionality."""
await player.async_volume_down()
state.dec_volume.assert_called_with()
player.async_write_ha_state.assert_called_with()
@pytest.mark.parametrize(
"mode, mode_sel, mode_2ch, mode_mch",
[
("STEREO", True, DecodeMode2CH.STEREO, None),
("STEREO_DOWNMIX", False, None, DecodeModeMCH.STEREO_DOWNMIX),
(None, False, None, None),
],
)
async def test_sound_mode(player, state, mode, mode_sel, mode_2ch, mode_mch):
"""Test selection sound mode."""
player._get_2ch = Mock(return_value=mode_sel) # pylint: disable=W0212
state.get_decode_mode_2ch.return_value = mode_2ch
state.get_decode_mode_mch.return_value = mode_mch
assert player.sound_mode == mode
async def test_sound_mode_list(player, state):
"""Test sound mode list."""
player._get_2ch = Mock(return_value=True) # pylint: disable=W0212
assert sorted(player.sound_mode_list) == sorted([x.name for x in DecodeMode2CH])
player._get_2ch = Mock(return_value=False) # pylint: disable=W0212
assert sorted(player.sound_mode_list) == sorted([x.name for x in DecodeModeMCH])
async def test_sound_mode_zone_x(player, state):
"""Test second zone sound mode."""
state.zn = 2
assert player.sound_mode is None
assert player.sound_mode_list is None
async def test_is_volume_muted(player, state):
"""Test muted."""
state.get_mute.return_value = True
assert player.is_volume_muted is True # pylint: disable=singleton-comparison
state.get_mute.return_value = False
assert player.is_volume_muted is False # pylint: disable=singleton-comparison
state.get_mute.return_value = None
assert player.is_volume_muted is None
async def test_volume_level(player, state):
"""Test volume."""
state.get_volume.return_value = 0
assert isclose(player.volume_level, 0.0)
state.get_volume.return_value = 50
assert isclose(player.volume_level, 50.0 / 99)
state.get_volume.return_value = 99
assert isclose(player.volume_level, 1.0)
state.get_volume.return_value = None
assert player.volume_level is None
@pytest.mark.parametrize("volume, call", [(0.0, 0), (0.5, 50), (1.0, 99)])
async def test_set_volume_level(player, state, volume, call):
"""Test setting volume."""
await player.async_set_volume_level(volume)
state.set_volume.assert_called_with(call)
@pytest.mark.parametrize(
"source, media_content_type",
[
(SourceCodes.DAB, MEDIA_TYPE_MUSIC),
(SourceCodes.FM, MEDIA_TYPE_MUSIC),
(SourceCodes.PVR, None),
(None, None),
],
)
async def test_media_content_type(player, state, source, media_content_type):
"""Test content type deduction."""
state.get_source.return_value = source
assert player.media_content_type == media_content_type
@pytest.mark.parametrize(
"source, dab, rds, channel",
[
(SourceCodes.DAB, "dab", "rds", "dab"),
(SourceCodes.DAB, None, None, None),
(SourceCodes.FM, "dab", "rds", "rds"),
(SourceCodes.FM, None, None, None),
(SourceCodes.PVR, "dab", "rds", None),
],
)
async def test_media_channel(player, state, source, dab, rds, channel):
"""Test media channel."""
state.get_dab_station.return_value = dab
state.get_rds_information.return_value = rds
state.get_source.return_value = source
assert player.media_channel == channel
@pytest.mark.parametrize(
"source, dls, artist",
[
(SourceCodes.DAB, "dls", "dls"),
(SourceCodes.FM, "dls", None),
(SourceCodes.DAB, None, None),
],
)
async def test_media_artist(player, state, source, dls, artist):
"""Test media artist."""
state.get_dls_pdt.return_value = dls
state.get_source.return_value = source
assert player.media_artist == artist
@pytest.mark.parametrize(
"source, channel, title",
[
(SourceCodes.DAB, "channel", "DAB - channel"),
(SourceCodes.DAB, None, "DAB"),
(None, None, None),
],
)
async def test_media_title(player, state, source, channel, title):
"""Test media title."""
from homeassistant.components.arcam_fmj.media_player import ArcamFmj
state.get_source.return_value = source
with patch.object(
ArcamFmj, "media_channel", new_callable=PropertyMock
) as media_channel:
media_channel.return_value = channel
data = await update(player)
if title is None:
assert "media_title" not in data.attributes
else:
assert data.attributes["media_title"] == title
async def test_added_to_hass(player, state):
"""Test addition to hass."""
from homeassistant.components.arcam_fmj.const import (
SIGNAL_CLIENT_DATA,
SIGNAL_CLIENT_STARTED,
SIGNAL_CLIENT_STOPPED,
)
connectors = {}
def _connect(signal, fun):
connectors[signal] = fun
player.hass = MagicMock()
    player.hass.helpers.dispatcher.async_dispatcher_connect.side_effect = _connect
await player.async_added_to_hass()
state.start.assert_called_with()
player.hass.helpers.dispatcher.async_dispatcher_connect.assert_any_call(
SIGNAL_CLIENT_DATA, ANY
)
player.hass.helpers.dispatcher.async_dispatcher_connect.assert_any_call(
SIGNAL_CLIENT_STARTED, ANY
)
player.hass.helpers.dispatcher.async_dispatcher_connect.assert_any_call(
SIGNAL_CLIENT_STOPPED, ANY
)
|
from sqlalchemy import Column, Integer, String, Text
from sqlalchemy_utils import JSONType
from lemur.database import db
from lemur.plugins.base import plugins
class Destination(db.Model):
__tablename__ = "destinations"
id = Column(Integer, primary_key=True)
label = Column(String(32))
options = Column(JSONType)
description = Column(Text())
plugin_name = Column(String(32))
@property
def plugin(self):
return plugins.get(self.plugin_name)
def __repr__(self):
return "Destination(label={label})".format(label=self.label)
|
from __future__ import unicode_literals
from lib.data.data import pystrs
from lib.fun.decorator import magic
from lib.fun.fun import range_compatible
def pid8_magic(*args):
"""chinese id card last 8 digit"""
posrule = lambda _: str(_) if _ >= 10 else "0" + str(_)
# month
value1112 = " ".join(posrule(x) for x in range_compatible(1, 13))
# day
value1314 = " ".join(posrule(x) for x in range_compatible(1, 32))
value1516 = " ".join(posrule(x) for x in range_compatible(1, 100))
post18 = ("0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "X")
value1718 = ""
if pystrs.default_sex == pystrs.sex_range[0]:
rand = ("1", "3", "5", "7", "9")
for _ in rand:
for _p in post18:
value1718 += _ + _p + " "
elif pystrs.default_sex == pystrs.sex_range[1]:
rand = ("0", "2", "4", "6", "8")
for _ in rand:
for _p in post18:
value1718 += _ + _p + " "
elif pystrs.default_sex == pystrs.sex_range[2]:
rand = " ".join(str(_) for _ in range_compatible(0, 10))
for _ in rand.split(" "):
for _p in post18:
value1718 += _ + _p + " "
@magic
def pid8():
for v1112 in value1112.split(" "):
for v1314 in value1314.split(" "):
for v1516 in value1516.split(" "):
for v1718 in value1718.split(" "):
if v1718 != "":
yield "".join(v1112 + v1314 + v1516 + v1718)
|
import json
from django.core.serializers.json import DjangoJSONEncoder
from django.db import models
class JSONField(models.TextField):
"""JSON serializaed TextField."""
def __init__(self, **kwargs):
if "default" not in kwargs:
kwargs["default"] = {}
super().__init__(**kwargs)
def to_python(self, value):
"""Convert a string from the database to a Python value."""
if not value:
return None
try:
return json.loads(value)
except (ValueError, TypeError):
return value
def get_prep_value(self, value):
"""Convert the value to a string that can be stored in the database."""
if not value:
return None
if isinstance(value, (dict, list)):
return json.dumps(value, cls=DjangoJSONEncoder)
return super().get_prep_value(value)
def from_db_value(self, value, *args, **kwargs):
return self.to_python(value)
def get_db_prep_save(self, value, *args, **kwargs):
if value is None:
value = {}
return json.dumps(value, cls=DjangoJSONEncoder)
def value_from_object(self, obj):
value = super().value_from_object(obj)
return json.dumps(value, cls=DjangoJSONEncoder)
class CaseInsensitiveFieldMixin:
"""Field mixin that uses case-insensitive lookup alternatives if they exist."""
LOOKUP_CONVERSIONS = {
"exact": "iexact",
"contains": "icontains",
"startswith": "istartswith",
"endswith": "iendswith",
"regex": "iregex",
}
def get_lookup(self, lookup_name):
converted = self.LOOKUP_CONVERSIONS.get(lookup_name, lookup_name)
return super().get_lookup(converted)
class UsernameField(CaseInsensitiveFieldMixin, models.CharField):
pass
class EmailField(CaseInsensitiveFieldMixin, models.EmailField):
pass
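if __name__ == "__main__":
    # Minimal sketch added for illustration; it is not part of the original
    # module. CaseInsensitiveFieldMixin rewrites lookup names before the
    # normal field resolution, so an ``exact`` lookup on UsernameField is
    # expected to resolve to Django's case-insensitive IExact lookup class.
    # This is intended to need only an importable Django, not a configured
    # project, since Field.get_lookup() works on unbound field instances.
    field = UsernameField(max_length=150)
    print(field.get_lookup("exact"))     # -> django.db.models.lookups.IExact
    print(field.get_lookup("contains"))  # -> django.db.models.lookups.IContains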
|
from __future__ import division
import unittest
import numpy as np
from chainer.backends import cuda
from chainer import testing
from chainer.testing import attr
from chainercv.utils import mask_to_bbox
@testing.parameterize(
{'mask': np.array(
[[[False, False, False, False],
[False, True, True, True],
[False, True, True, True]
]]),
'expected': np.array([[1, 1, 3, 4]], dtype=np.float32)
},
{'mask': np.array(
[[[False, False],
[False, True]],
[[True, False],
[False, True]]]),
'expected': np.array([[1, 1, 2, 2], [0, 0, 2, 2]], dtype=np.float32)
},
{'mask': np.array(
[[[False, False],
[False, False]],
[[True, False],
[False, True]]]),
'expected': np.array([[0, 0, 0, 0], [0, 0, 2, 2]], dtype=np.float32)
},
)
class TestMaskToBbox(unittest.TestCase):
def check(self, mask, expected):
bbox = mask_to_bbox(mask)
self.assertIsInstance(bbox, type(expected))
self.assertEqual(bbox.dtype, expected.dtype)
np.testing.assert_equal(
cuda.to_cpu(bbox),
cuda.to_cpu(expected))
def test_mask_to_bbox_cpu(self):
self.check(self.mask, self.expected)
@attr.gpu
def test_mask_to_bbox_gpu(self):
self.check(
cuda.to_gpu(self.mask),
cuda.to_gpu(self.expected))
testing.run_module(__name__, __file__)
|
from datetime import timedelta
from pythinkingcleaner import Discovery, ThinkingCleaner
import voluptuous as vol
from homeassistant import util
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_HOST, PERCENTAGE
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
MIN_TIME_BETWEEN_SCANS = timedelta(seconds=10)
MIN_TIME_BETWEEN_FORCED_SCANS = timedelta(milliseconds=100)
SENSOR_TYPES = {
"battery": ["Battery", PERCENTAGE, "mdi:battery"],
"state": ["State", None, None],
"capacity": ["Capacity", None, None],
}
STATES = {
"st_base": "On homebase: Not Charging",
"st_base_recon": "On homebase: Reconditioning Charging",
"st_base_full": "On homebase: Full Charging",
"st_base_trickle": "On homebase: Trickle Charging",
"st_base_wait": "On homebase: Waiting",
"st_plug": "Plugged in: Not Charging",
"st_plug_recon": "Plugged in: Reconditioning Charging",
"st_plug_full": "Plugged in: Full Charging",
"st_plug_trickle": "Plugged in: Trickle Charging",
"st_plug_wait": "Plugged in: Waiting",
"st_stopped": "Stopped",
"st_clean": "Cleaning",
"st_cleanstop": "Stopped with cleaning",
"st_clean_spot": "Spot cleaning",
"st_clean_max": "Max cleaning",
"st_delayed": "Delayed cleaning will start soon",
"st_dock": "Searching Homebase",
"st_pickup": "Roomba picked up",
"st_remote": "Remote control driving",
"st_wait": "Waiting for command",
"st_off": "Off",
"st_error": "Error",
"st_locate": "Find me!",
"st_unknown": "Unknown state",
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({vol.Optional(CONF_HOST): cv.string})
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the ThinkingCleaner platform."""
host = config.get(CONF_HOST)
if host:
devices = [ThinkingCleaner(host, "unknown")]
else:
discovery = Discovery()
devices = discovery.discover()
@util.Throttle(MIN_TIME_BETWEEN_SCANS, MIN_TIME_BETWEEN_FORCED_SCANS)
def update_devices():
"""Update all devices."""
for device_object in devices:
device_object.update()
dev = []
for device in devices:
for type_name in SENSOR_TYPES:
dev.append(ThinkingCleanerSensor(device, type_name, update_devices))
add_entities(dev)
class ThinkingCleanerSensor(Entity):
"""Representation of a ThinkingCleaner Sensor."""
def __init__(self, tc_object, sensor_type, update_devices):
"""Initialize the ThinkingCleaner."""
self.type = sensor_type
self._tc_object = tc_object
self._update_devices = update_devices
self._unit_of_measurement = SENSOR_TYPES[sensor_type][1]
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return "{} {}".format(self._tc_object.name, SENSOR_TYPES[self.type][0])
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return SENSOR_TYPES[self.type][2]
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement
def update(self):
"""Update the sensor."""
self._update_devices()
if self.type == "battery":
self._state = self._tc_object.battery
elif self.type == "state":
self._state = STATES[self._tc_object.status]
elif self.type == "capacity":
self._state = self._tc_object.capacity
|
import socket
import struct
import numpy
VERSION = 1
PUT_HDR = 0x101
PUT_DAT = 0x102
PUT_EVT = 0x103
PUT_OK = 0x104
PUT_ERR = 0x105
GET_HDR = 0x201
GET_DAT = 0x202
GET_EVT = 0x203
GET_OK = 0x204
GET_ERR = 0x205
FLUSH_HDR = 0x301
FLUSH_DAT = 0x302
FLUSH_EVT = 0x303
FLUSH_OK = 0x304
FLUSH_ERR = 0x305
WAIT_DAT = 0x402
WAIT_OK = 0x404
WAIT_ERR = 0x405
DATATYPE_CHAR = 0
DATATYPE_UINT8 = 1
DATATYPE_UINT16 = 2
DATATYPE_UINT32 = 3
DATATYPE_UINT64 = 4
DATATYPE_INT8 = 5
DATATYPE_INT16 = 6
DATATYPE_INT32 = 7
DATATYPE_INT64 = 8
DATATYPE_FLOAT32 = 9
DATATYPE_FLOAT64 = 10
DATATYPE_UNKNOWN = 0xFFFFFFFF
CHUNK_UNSPECIFIED = 0
CHUNK_CHANNEL_NAMES = 1
CHUNK_CHANNEL_FLAGS = 2
CHUNK_RESOLUTIONS = 3
CHUNK_ASCII_KEYVAL = 4
CHUNK_NIFTI1 = 5
CHUNK_SIEMENS_AP = 6
CHUNK_CTF_RES4 = 7
CHUNK_NEUROMAG_FIF = 8
# List for converting FieldTrip datatypes to Numpy datatypes
numpyType = ['int8', 'uint8', 'uint16', 'uint32', 'uint64',
'int8', 'int16', 'int32', 'int64', 'float32', 'float64']
# Corresponding word sizes
wordSize = [1, 1, 2, 4, 8, 1, 2, 4, 8, 4, 8]
# FieldTrip data type as indexed by numpy dtype.num
# this goes 0 => nothing, 1..4 => int8, uint8, int16, uint16, 7..10 =>
# int32, uint32, int64, uint64 11..12 => float32, float64
dataType = [-1, 5, 1, 6, 2, -1, -1, 7, 3, 8, 4, 9, 10]
def serialize(A):
"""
Returns Fieldtrip data type and string representation of the given
object, if possible.
"""
if isinstance(A, str):
return (0, A)
if isinstance(A, numpy.ndarray):
dt = A.dtype
if not(dt.isnative) or dt.num < 1 or dt.num >= len(dataType):
return (DATATYPE_UNKNOWN, None)
ft = dataType[dt.num]
if ft == -1:
return (DATATYPE_UNKNOWN, None)
if A.flags['C_CONTIGUOUS']:
# great, just use the array's buffer interface
return (ft, str(A.data))
# otherwise, we need a copy to C order
AC = A.copy('C')
return (ft, str(AC.data))
if isinstance(A, int):
return (DATATYPE_INT32, struct.pack('i', A))
if isinstance(A, float):
return (DATATYPE_FLOAT64, struct.pack('d', A))
return (DATATYPE_UNKNOWN, None)
class Chunk:
def __init__(self):
self.type = 0
self.size = 0
self.buf = ''
class Header:
"""Class for storing header information in the FieldTrip buffer format"""
def __init__(self):
self.nChannels = 0
self.nSamples = 0
self.nEvents = 0
self.fSample = 0.0
self.dataType = 0
self.chunks = {}
self.labels = []
def __str__(self):
return ('Channels.: %i\nSamples..: %i\nEvents...: %i\nSampFreq.: '
'%f\nDataType.: %s\n'
% (self.nChannels, self.nSamples, self.nEvents,
self.fSample, numpyType[self.dataType]))
class Event:
"""Class for storing events in the FieldTrip buffer format"""
def __init__(self, S=None):
if S is None:
self.type = ''
self.value = ''
self.sample = 0
self.offset = 0
self.duration = 0
else:
self.deserialize(S)
def __str__(self):
return ('Type.....: %s\nValue....: %s\nSample...: %i\nOffset...: '
'%i\nDuration.: %i\n' % (str(self.type), str(self.value),
self.sample, self.offset,
self.duration))
def deserialize(self, buf):
bufsize = len(buf)
if bufsize < 32:
return 0
(type_type, type_numel, value_type, value_numel, sample,
offset, duration, bsiz) = struct.unpack('IIIIIiiI', buf[0:32])
self.sample = sample
self.offset = offset
self.duration = duration
st = type_numel * wordSize[type_type]
sv = value_numel * wordSize[value_type]
if bsiz + 32 > bufsize or st + sv > bsiz:
raise IOError(
'Invalid event definition -- does not fit in given buffer')
raw_type = buf[32:32 + st]
raw_value = buf[32 + st:32 + st + sv]
if type_type == 0:
self.type = raw_type
else:
self.type = numpy.ndarray(
(type_numel), dtype=numpyType[type_type], buffer=raw_type)
if value_type == 0:
self.value = raw_value
else:
self.value = numpy.ndarray(
(value_numel), dtype=numpyType[value_type], buffer=raw_value)
return bsiz + 32
def serialize(self):
"""
        Returns the contents of this event as a byte string, ready to
send over the network, or None in case of conversion problems.
"""
type_type, type_buf = serialize(self.type)
if type_type == DATATYPE_UNKNOWN:
return None
type_size = len(type_buf)
        type_numel = type_size // wordSize[type_type]
value_type, value_buf = serialize(self.value)
if value_type == DATATYPE_UNKNOWN:
return None
value_size = len(value_buf)
        value_numel = value_size // wordSize[value_type]
bufsize = type_size + value_size
S = struct.pack('IIIIIiiI', type_type, type_numel, value_type,
value_numel, int(self.sample), int(self.offset),
int(self.duration), bufsize)
return S + type_buf + value_buf
class Client:
"""Class for managing a client connection to a FieldTrip buffer."""
def __init__(self):
self.isConnected = False
self.sock = []
def connect(self, hostname, port=1972):
"""
connect(hostname [, port]) -- make a connection, default port is
1972.
"""
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.connect((hostname, port))
self.sock.setblocking(True)
self.isConnected = True
def disconnect(self):
"""disconnect() -- close a connection."""
if self.isConnected:
self.sock.close()
self.sock = []
self.isConnected = False
def sendRaw(self, request):
"""Send all bytes of the string 'request' out to socket."""
if not(self.isConnected):
raise IOError('Not connected to FieldTrip buffer')
N = len(request)
nw = self.sock.send(request)
while nw < N:
nw += self.sock.send(request[nw:])
def sendRequest(self, command, payload=None):
if payload is None:
request = struct.pack('HHI', VERSION, command, 0)
else:
request = struct.pack(
'HHI', VERSION, command, len(payload)) + payload
self.sendRaw(request)
def receiveResponse(self, minBytes=0):
"""
Receive response from server on socket 's' and return it as
(status,bufsize,payload).
"""
resp_hdr = self.sock.recv(8)
while len(resp_hdr) < 8:
resp_hdr += self.sock.recv(8 - len(resp_hdr))
(version, command, bufsize) = struct.unpack('HHI', resp_hdr)
if version != VERSION:
self.disconnect()
raise IOError('Bad response from buffer server - disconnecting')
if bufsize > 0:
payload = self.sock.recv(bufsize)
while len(payload) < bufsize:
payload += self.sock.recv(bufsize - len(payload))
else:
payload = None
return (command, bufsize, payload)
def getHeader(self):
"""
        getHeader() -- grabs header information from the buffer and returns
it as a Header object.
"""
self.sendRequest(GET_HDR)
(status, bufsize, payload) = self.receiveResponse()
if status == GET_ERR:
return None
if status != GET_OK:
self.disconnect()
raise IOError('Bad response from buffer server - disconnecting')
if bufsize < 24:
self.disconnect()
raise IOError('Invalid HEADER packet received (too few bytes) - '
'disconnecting')
(nchans, nsamp, nevt, fsamp, dtype,
bfsiz) = struct.unpack('IIIfII', payload[0:24])
H = Header()
H.nChannels = nchans
H.nSamples = nsamp
H.nEvents = nevt
H.fSample = fsamp
H.dataType = dtype
if bfsiz > 0:
offset = 24
while offset + 8 < bufsize:
(chunk_type, chunk_len) = struct.unpack(
'II', payload[offset:offset + 8])
offset += 8
if offset + chunk_len > bufsize:
break
H.chunks[chunk_type] = payload[offset:offset + chunk_len]
offset += chunk_len
if CHUNK_CHANNEL_NAMES in H.chunks:
L = H.chunks[CHUNK_CHANNEL_NAMES].split(b'\0')
numLab = len(L)
if numLab >= H.nChannels:
H.labels = [x.decode('utf-8') for x in L[0:H.nChannels]]
return H
def putHeader(self, nChannels, fSample, dataType, labels=None,
chunks=None):
haveLabels = False
        extras = b''
        if not(labels is None):
            serLabels = b''
            try:
                for n in range(0, nChannels):
                    # Encode each label so it can be appended to the packed
                    # (bytes) chunk header under Python 3.
                    serLabels += labels[n].encode('utf-8') + b'\0'
            except Exception:
                raise ValueError('Channel names (labels), if given,'
                                 ' must be a list of N=numChannels strings')
            extras = struct.pack('II', CHUNK_CHANNEL_NAMES,
                                 len(serLabels)) + serLabels
            haveLabels = True
if not(chunks is None):
for chunk_type, chunk_data in chunks:
if haveLabels and chunk_type == CHUNK_CHANNEL_NAMES:
# ignore channel names chunk in case we got labels
continue
extras += struct.pack('II', chunk_type,
len(chunk_data)) + chunk_data
sizeChunks = len(extras)
hdef = struct.pack('IIIfII', nChannels, 0, 0,
fSample, dataType, sizeChunks)
request = struct.pack('HHI', VERSION, PUT_HDR,
sizeChunks + len(hdef)) + hdef + extras
self.sendRaw(request)
(status, bufsize, resp_buf) = self.receiveResponse()
if status != PUT_OK:
raise IOError('Header could not be written')
def getData(self, index=None):
"""
getData([indices]) -- retrieve data samples and return them as a
Numpy array, samples in rows(!). The 'indices' argument is optional,
and if given, must be a tuple or list with inclusive, zero-based
start/end indices.
"""
if index is None:
request = struct.pack('HHI', VERSION, GET_DAT, 0)
else:
indS = int(index[0])
indE = int(index[1])
request = struct.pack('HHIII', VERSION, GET_DAT, 8, indS, indE)
self.sendRaw(request)
(status, bufsize, payload) = self.receiveResponse()
if status == GET_ERR:
return None
if status != GET_OK:
self.disconnect()
raise IOError('Bad response from buffer server - disconnecting')
if bufsize < 16:
self.disconnect()
raise IOError('Invalid DATA packet received (too few bytes)')
(nchans, nsamp, datype, bfsiz) = struct.unpack('IIII', payload[0:16])
if bfsiz < bufsize - 16 or datype >= len(numpyType):
raise IOError('Invalid DATA packet received')
raw = payload[16:bfsiz + 16]
D = numpy.ndarray((nsamp, nchans), dtype=numpyType[datype], buffer=raw)
return D
def getEvents(self, index=None):
"""
getEvents([indices]) -- retrieve events and return them as a list
of Event objects. The 'indices' argument is optional, and if given,
must be a tuple or list with inclusive, zero-based start/end indices.
The 'type' and 'value' fields of the event will be converted to strings
or Numpy arrays.
"""
if index is None:
request = struct.pack('HHI', VERSION, GET_EVT, 0)
else:
indS = int(index[0])
indE = int(index[1])
request = struct.pack('HHIII', VERSION, GET_EVT, 8, indS, indE)
self.sendRaw(request)
(status, bufsize, resp_buf) = self.receiveResponse()
if status == GET_ERR:
return []
if status != GET_OK:
self.disconnect()
raise IOError('Bad response from buffer server - disconnecting')
offset = 0
E = []
while 1:
e = Event()
nextOffset = e.deserialize(resp_buf[offset:])
if nextOffset == 0:
break
E.append(e)
offset = offset + nextOffset
return E
def putEvents(self, E):
"""
putEvents(E) -- writes a single or multiple events, depending on
whether an 'Event' object, or a list of 'Event' objects is
given as an argument.
"""
if isinstance(E, Event):
buf = E.serialize()
else:
            buf = b''
            num = 0
            for e in E:
                if not(isinstance(e, Event)):
                    # Raising a bare string is invalid in Python 3.
                    raise TypeError(
                        'Element %i in given list is not an Event' % num)
buf = buf + e.serialize()
num = num + 1
self.sendRequest(PUT_EVT, buf)
(status, bufsize, resp_buf) = self.receiveResponse()
if status != PUT_OK:
raise IOError('Events could not be written.')
def putData(self, D):
"""
putData(D) -- writes samples that must be given as a NUMPY array,
samples x channels. The type of the samples (D) and the number of
channels must match the corresponding quantities in the FieldTrip
buffer.
"""
if not(isinstance(D, numpy.ndarray)) or len(D.shape) != 2:
raise ValueError(
'Data must be given as a NUMPY array (samples x channels)')
nSamp = D.shape[0]
nChan = D.shape[1]
(dataType, dataBuf) = serialize(D)
dataBufSize = len(dataBuf)
request = struct.pack('HHI', VERSION, PUT_DAT, 16 + dataBufSize)
dataDef = struct.pack('IIII', nChan, nSamp, dataType, dataBufSize)
self.sendRaw(request + dataDef + dataBuf)
(status, bufsize, resp_buf) = self.receiveResponse()
if status != PUT_OK:
raise IOError('Samples could not be written.')
def poll(self):
"""
poll() -- return the current number of samples and events in the buffer
as a tuple (nsamples, nevents), without blocking.
"""
request = struct.pack('HHIIII', VERSION, WAIT_DAT, 12, 0, 0, 0)
self.sendRaw(request)
(status, bufsize, resp_buf) = self.receiveResponse()
if status != WAIT_OK or bufsize < 8:
raise IOError('Polling failed.')
return struct.unpack('II', resp_buf[0:8])
def wait(self, nsamples, nevents, timeout):
"""
wait(nsamples, nevents, timeout) -- block until the buffer holds more
samples than 'nsamples' or more events than 'nevents', or until 'timeout'
milliseconds have passed; returns the new (nsamples, nevents) counts.
"""
request = struct.pack('HHIIII', VERSION, WAIT_DAT,
12, int(nsamples), int(nevents), int(timeout))
self.sendRaw(request)
(status, bufsize, resp_buf) = self.receiveResponse()
if status != WAIT_OK or bufsize < 8:
raise IOError('Wait request failed.')
return struct.unpack('II', resp_buf[0:8])
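# Minimal usage sketch for the request methods above (illustrative only and
# never called): wait for the buffer to grow past the counts reported by
# poll(), then fetch just the newly arrived samples with inclusive indices.
# Assumes `client` is an already-connected instance of the buffer client
# defined above.
def _example_fetch_new_samples(client, timeout_ms=2000):
    nsamp, nevt = client.poll()                      # current sample/event counts
    new_nsamp, _ = client.wait(nsamp, nevt, timeout_ms)
    if new_nsamp <= nsamp:
        return None                                  # timed out, nothing new
    # getData takes inclusive, zero-based start/end indices
    return client.getData((nsamp, new_nsamp - 1))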
|
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import activations, initializers, regularizers
from tensorflow.keras.layers import Layer
from tensorflow.python.keras.utils import conv_utils #pylint: disable=no-name-in-module
from typing import List, Tuple, Text, Optional, Union
import numpy as np
import tensornetwork as tn
import math
# pytype: disable=module-attr
@tf.keras.utils.register_keras_serializable(package='tensornetwork')
# pytype: enable=module-attr
class Conv2DMPO(Layer):
"""2D Convolutional Matrix Product Operator (MPO) TN layer.
This layer recreates the functionality of a traditional convolutional
layer, but stores the 'kernel' as a network of nodes forming an MPO.
The bond dimension of the MPO can be adjusted to increase or decrease the
number of parameters independently of the input and output dimensions.
When the layer is called, the MPO is contracted into a traditional kernel
and convolved with the layer input to produce a tensor of outputs.
Example:
::
# as first layer in a sequential model:
model = Sequential()
model.add(
Conv2DMPO(256,
kernel_size=3,
num_nodes=4,
bond_dim=16,
activation='relu',
input_shape=(32, 32, 256)))
# now the model will take as input tensors of shape (*, 32, 32, 256)
# and output arrays of shape (*, 32, 32, 256).
# After the first layer, you don't need to specify
# the size of the input anymore:
model.add(Conv2DMPO(256, 3, num_nodes=4, bond_dim=8, activation='relu'))
Args:
filters: Integer, the dimensionality of the output space
(i.e. the number of output filters in the convolution).
kernel_size: An integer or tuple/list of 2 integers, specifying the
height and width of the 2D convolution window.
Can be a single integer to specify the same value for
all spatial dimensions.
num_nodes: Positive integer, number of nodes in the MPO.
Note input_shape[-1]**(1. / num_nodes) and filters**(1. / num_nodes)
must both be round.
bond_dim: Positive integer, size of the MPO bond dimension (between nodes).
Lower bond dimension means more parameter compression.
strides: An integer or tuple/list of 2 integers,
specifying the strides of the convolution
along the height and width.
Can be a single integer to specify the same value for
all spatial dimensions.
Specifying any stride value != 1 is incompatible with specifying
any `dilation_rate` value != 1.
padding: one of `"valid"` or `"same"`
data_format: A string,
one of `"channels_last"` or `"channels_first"`.
The ordering of the dimensions in the inputs.
`"channels_last"` corresponds to inputs with shape
`(batch, height, width, channels)` while `"channels_first"`
corresponds to inputs with shape
`(batch, channels, height, width)`.
It defaults to "channels_last".
dilation_rate: an integer or tuple/list of 2 integers, specifying
the dilation rate to use for dilated convolution.
Can be a single integer to specify the same value for
all spatial dimensions.
Currently, specifying any `dilation_rate` value != 1 is
incompatible with specifying any stride value != 1.
activation: Activation function to use.
If you don't specify anything, no activation is applied
(ie. "linear" activation: `a(x) = x`).
use_bias: Boolean, whether the layer uses a bias vector.
kernel_initializer: Initializer for the node weight matrices.
bias_initializer: Initializer for the bias vector.
kernel_regularizer: Regularizer for the node weight matrices.
bias_regularizer: Regularizer for the bias vector.
Input shape:
4D tensor with shape: `(batch_size, h, w, channels)`.
Output shape:
4D tensor with shape: `(batch_size, h_out, w_out, filters)`.
"""
def __init__(self,
filters: int,
kernel_size: Union[int, Tuple[int, int]],
num_nodes: int,
bond_dim: int,
strides: Union[int, Tuple[int, int]] = 1,
padding: Text = "same",
data_format: Optional[Text] = "channels_last",
dilation_rate: Union[int, Tuple[int, int]] = (1, 1),
activation: Optional[Text] = None,
use_bias: bool = True,
kernel_initializer: Text = "glorot_uniform",
bias_initializer: Text = "zeros",
kernel_regularizer: Optional[Text] = None,
bias_regularizer: Optional[Text] = None,
**kwargs) -> None:
if num_nodes < 2:
raise ValueError('Need at least 2 nodes to create MPO')
if padding not in ('same', 'valid'):
raise ValueError('Padding must be "same" or "valid"')
if data_format not in ['channels_first', 'channels_last']:
raise ValueError('Invalid data_format string provided')
super().__init__(**kwargs)
self.nodes = []
self.filters = filters
self.kernel_size = conv_utils.normalize_tuple(kernel_size, 2, 'kernel_size')
self.num_nodes = num_nodes
self.bond_dim = bond_dim
self.strides = conv_utils.normalize_tuple(strides, 2, 'strides')
self.padding = padding
self.data_format = data_format
self.dilation_rate = conv_utils.normalize_tuple(dilation_rate,
2, 'dilation_rate')
self.activation = activations.get(activation)
self.use_bias = use_bias
self.kernel_initializer = initializers.get(kernel_initializer)
self.bias_initializer = initializers.get(bias_initializer)
self.kernel_regularizer = regularizers.get(kernel_regularizer)
self.bias_regularizer = regularizers.get(bias_regularizer)
def build(self, input_shape: List[int]) -> None:
# Disable the attribute-defined-outside-init violations in this function
# pylint: disable=attribute-defined-outside-init
if self.data_format == 'channels_first':
channel_axis = 1
else:
channel_axis = -1
if input_shape[channel_axis] is None:
raise ValueError('The channel dimension of the inputs '
'should be defined. Found `None`.')
def is_perfect_root(n, n_nodes):
root = n**(1. / n_nodes)
return round(root)**n_nodes == n
channels = input_shape[channel_axis]
# Ensure the input and output dimensions factor evenly across the MPO nodes
assert is_perfect_root(channels, self.num_nodes), (
f'Input dim incorrect. '
f'{input_shape[-1]}**(1. / {self.num_nodes}) must be round.')
assert is_perfect_root(self.filters, self.num_nodes), (
f'Output dim incorrect. '
f'{self.filters}**(1. / {self.num_nodes}) must be round.')
super().build(input_shape)
in_leg_dim = math.ceil(channels**(1. / self.num_nodes))
out_leg_dim = math.ceil(self.filters**(1. / self.num_nodes))
self.nodes.append(
self.add_weight(name='end_node_first',
shape=(in_leg_dim, self.kernel_size[0],
self.bond_dim, out_leg_dim),
trainable=True,
initializer=self.kernel_initializer,
regularizer=self.kernel_regularizer))
for i in range(self.num_nodes - 2):
self.nodes.append(
self.add_weight(name=f'middle_node_{i}',
shape=(in_leg_dim, self.bond_dim, self.bond_dim,
out_leg_dim),
trainable=True,
initializer=self.kernel_initializer,
regularizer=self.kernel_regularizer))
self.nodes.append(
self.add_weight(name='end_node_last',
shape=(in_leg_dim, self.bond_dim,
self.kernel_size[1], out_leg_dim),
trainable=True,
initializer=self.kernel_initializer,
regularizer=self.kernel_regularizer))
if self.use_bias:
self.bias_var = self.add_weight(
name='bias',
shape=(self.filters,),
trainable=True,
initializer=self.bias_initializer,
regularizer=self.bias_regularizer)
else:
self.bias_var = None
def call(self, inputs: tf.Tensor, **kwargs) -> tf.Tensor:
tn_nodes = [tn.Node(n, backend='tensorflow') for n in self.nodes]
for i in range(len(tn_nodes) - 1):
tn_nodes[i][2] ^ tn_nodes[i+1][1]
input_edges = [n[0] for n in tn_nodes]
output_edges = [n[3] for n in tn_nodes]
edges = [tn_nodes[0][1], tn_nodes[-1][2]] + input_edges + output_edges
contracted = tn.contractors.greedy(tn_nodes, edges)
tn.flatten_edges(input_edges)
tn.flatten_edges(output_edges)
tf_df = 'NCHW' if self.data_format == 'channels_first' else 'NHWC'
result = tf.nn.conv2d(inputs,
contracted.tensor,
self.strides,
self.padding.upper(),
data_format=tf_df,
dilations=self.dilation_rate)
if self.use_bias:
bias = tf.reshape(self.bias_var, (1, self.filters,))
result += bias
if self.activation is not None:
result = self.activation(result)
return result
def compute_output_shape(self, input_shape: List[int]) -> Tuple[
int, int, int, int]:
if self.data_format == 'channels_first':
space = input_shape[2:]
else:
space = input_shape[1:-1]
new_space = []
for i, _ in enumerate(space):
new_dim = conv_utils.conv_output_length(
space[i],
self.kernel_size[i],
padding=self.padding,
stride=self.strides[i],
dilation=self.dilation_rate[i])
new_space.append(new_dim)
if self.data_format == 'channels_first':
return (input_shape[0], self.filters) + tuple(new_space)
return (input_shape[0],) + tuple(new_space) + (self.filters,)
def get_config(self) -> dict:
config = {
'filters': self.filters,
'kernel_size': self.kernel_size,
'num_nodes': self.num_nodes,
'bond_dim': self.bond_dim,
'strides': self.strides,
'padding': self.padding,
'data_format': self.data_format,
'dilation_rate': self.dilation_rate,
'activation': activations.serialize(self.activation),
'use_bias': self.use_bias,
'kernel_initializer': initializers.serialize(self.kernel_initializer),
'bias_initializer': initializers.serialize(self.bias_initializer),
'kernel_regularizer': regularizers.serialize(self.kernel_regularizer),
'bias_regularizer': regularizers.serialize(self.bias_regularizer),
}
base_config = super().get_config()
config.update(base_config)
return config
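# Rough parameter-count comparison (illustrative only, never called) between a
# dense Conv2D kernel and the MPO factorisation built above: two end nodes of
# shape (in_leg, k, bond, out_leg) / (in_leg, bond, k, out_leg) plus
# (num_nodes - 2) middle nodes of shape (in_leg, bond, bond, out_leg).
def _example_mpo_param_count(channels=256, filters=256, kernel_size=3,
                             num_nodes=4, bond_dim=16):
    in_leg = round(channels ** (1.0 / num_nodes))
    out_leg = round(filters ** (1.0 / num_nodes))
    dense = kernel_size * kernel_size * channels * filters
    mpo = (2 * in_leg * kernel_size * bond_dim * out_leg +
           (num_nodes - 2) * in_leg * bond_dim * bond_dim * out_leg)
    return dense, mpo   # 589824 vs. 9728 for these defaults (excluding bias)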
|
from Handler import Handler
from diamond.collector import get_hostname
import os
HAVE_SSL = True
try:
import ssl
except ImportError:
HAVE_SSL = False
try:
import mosquitto
except ImportError:
mosquitto = None
__author__ = 'Jan-Piet Mens'
__email__ = '[email protected]'
class MQTTHandler(Handler):
"""
"""
def __init__(self, config=None):
"""
Create a new instance of the MQTTHandler class
"""
# Initialize Handler
Handler.__init__(self, config)
# Initialize Data
self.mqttc = None
self.hostname = get_hostname(self.config)
self.client_id = "%s_%s" % (self.hostname, os.getpid())
# Initialize Options
self.host = self.config.get('host', 'localhost')
self.port = 0
self.qos = int(self.config.get('qos', 0))
self.prefix = self.config.get('prefix', "")
self.tls = self.config.get('tls', False)
self.timestamp = 0
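# Note on the block below: the timestamp is appended to the published
# payload when the 'timestamp' option is absent or falsy, and omitted when
# the option is set to a truthy value (see process() further down).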
try:
self.timestamp = self.config['timestamp']
if not self.timestamp:
self.timestamp = 1
else:
self.timestamp = 0
except:
self.timestamp = 1
if not mosquitto:
self.log.error('mosquitto import failed. Handler disabled')
self.enabled = False
return
# Initialize
self.mqttc = mosquitto.Mosquitto(self.client_id, clean_session=True)
if not self.tls:
self.port = int(self.config.get('port', 1883))
else:
# Set up TLS if requested
self.port = int(self.config.get('port', 8883))
self.cafile = self.config.get('cafile', None)
self.certfile = self.config.get('certfile', None)
self.keyfile = self.config.get('keyfile', None)
if None in [self.cafile, self.certfile, self.keyfile]:
self.log.error("MQTTHandler: TLS configuration missing.")
return
try:
self.mqttc.tls_set(
self.cafile,
certfile=self.certfile,
keyfile=self.keyfile,
cert_reqs=ssl.CERT_REQUIRED,
tls_version=3,
ciphers=None)
except:
self.log.error("MQTTHandler: Cannot set up TLS " +
"configuration. Files missing?")
self.mqttc.will_set("clients/diamond/%s" % (self.hostname),
payload="Adios!", qos=0, retain=False)
self.mqttc.connect(self.host, self.port, 60)
self.mqttc.on_disconnect = self._disconnect
def get_default_config_help(self):
"""
Returns the help text for the configuration options for this handler
"""
config = super(MQTTHandler, self).get_default_config_help()
config.update({
})
return config
def get_default_config(self):
"""
Return the default config for the handler
"""
config = super(MQTTHandler, self).get_default_config()
config.update({
})
return config
def process(self, metric):
"""
Process a metric by converting the metric name to an MQTT topic name;
the payload is the metric value, optionally followed by the timestamp.
"""
if not mosquitto:
return
line = str(metric)
topic, value, timestamp = line.split()
if len(self.prefix):
topic = "%s/%s" % (self.prefix, topic)
topic = topic.replace('.', '/')
topic = topic.replace('#', '&') # Topic must not contain wildcards
if self.timestamp == 0:
self.mqttc.publish(topic, "%s" % (value), self.qos)
else:
self.mqttc.publish(topic, "%s %s" % (value, timestamp), self.qos)
def _disconnect(self, mosq, obj, rc):
self.log.debug("MQTTHandler: reconnecting to broker...")
mosq.reconnect()
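# Illustrative sketch (never called) of the topic/payload mapping performed by
# process() above, without the mosquitto publish step:
def _example_metric_to_topic(line, prefix=""):
    topic, value, timestamp = line.split()
    if prefix:
        topic = "%s/%s" % (prefix, topic)
    topic = topic.replace('.', '/').replace('#', '&')
    return topic, "%s %s" % (value, timestamp)
# e.g. _example_metric_to_topic("servers.web1.cpu.idle 99.0 1416000000")
# returns ("servers/web1/cpu/idle", "99.0 1416000000")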
|
from astral import Astral
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_NAME
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
import homeassistant.util.dt as dt_util
DEFAULT_NAME = "Moon"
STATE_FIRST_QUARTER = "first_quarter"
STATE_FULL_MOON = "full_moon"
STATE_LAST_QUARTER = "last_quarter"
STATE_NEW_MOON = "new_moon"
STATE_WANING_CRESCENT = "waning_crescent"
STATE_WANING_GIBBOUS = "waning_gibbous"
STATE_WAXING_GIBBOUS = "waxing_gibbous"
STATE_WAXING_CRESCENT = "waxing_crescent"
MOON_ICONS = {
STATE_FIRST_QUARTER: "mdi:moon-first-quarter",
STATE_FULL_MOON: "mdi:moon-full",
STATE_LAST_QUARTER: "mdi:moon-last-quarter",
STATE_NEW_MOON: "mdi:moon-new",
STATE_WANING_CRESCENT: "mdi:moon-waning-crescent",
STATE_WANING_GIBBOUS: "mdi:moon-waning-gibbous",
STATE_WAXING_CRESCENT: "mdi:moon-waxing-crescent",
STATE_WAXING_GIBBOUS: "mdi:moon-waxing-gibbous",
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Moon sensor."""
name = config.get(CONF_NAME)
async_add_entities([MoonSensor(name)], True)
class MoonSensor(Entity):
"""Representation of a Moon sensor."""
def __init__(self, name):
"""Initialize the moon sensor."""
self._name = name
self._state = None
@property
def name(self):
"""Return the name of the entity."""
return self._name
@property
def device_class(self):
"""Return the device class of the entity."""
return "moon__phase"
@property
def state(self):
"""Return the state of the device."""
if self._state == 0:
return STATE_NEW_MOON
if self._state < 7:
return STATE_WAXING_CRESCENT
if self._state == 7:
return STATE_FIRST_QUARTER
if self._state < 14:
return STATE_WAXING_GIBBOUS
if self._state == 14:
return STATE_FULL_MOON
if self._state < 21:
return STATE_WANING_GIBBOUS
if self._state == 21:
return STATE_LAST_QUARTER
return STATE_WANING_CRESCENT
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return MOON_ICONS.get(self.state)
async def async_update(self):
"""Get the time and updates the states."""
today = dt_util.as_local(dt_util.utcnow()).date()
self._state = Astral().moon_phase(today)
|
try:
import dask
import dask.array
from dask.array.utils import meta_from_array
from dask.highlevelgraph import HighLevelGraph
except ImportError:
pass
import collections
import itertools
import operator
from typing import (
Any,
Callable,
DefaultDict,
Dict,
Hashable,
Iterable,
List,
Mapping,
Sequence,
Tuple,
TypeVar,
Union,
)
import numpy as np
from .alignment import align
from .dataarray import DataArray
from .dataset import Dataset
T_DSorDA = TypeVar("T_DSorDA", DataArray, Dataset)
def unzip(iterable):
return zip(*iterable)
def assert_chunks_compatible(a: Dataset, b: Dataset):
a = a.unify_chunks()
b = b.unify_chunks()
for dim in set(a.chunks).intersection(set(b.chunks)):
if a.chunks[dim] != b.chunks[dim]:
raise ValueError(f"Chunk sizes along dimension {dim!r} are not equal.")
def check_result_variables(
result: Union[DataArray, Dataset], expected: Mapping[str, Any], kind: str
):
if kind == "coords":
nice_str = "coordinate"
elif kind == "data_vars":
nice_str = "data"
# check that coords and data variables are as expected
missing = expected[kind] - set(getattr(result, kind))
if missing:
raise ValueError(
"Result from applying user function does not contain "
f"{nice_str} variables {missing}."
)
extra = set(getattr(result, kind)) - expected[kind]
if extra:
raise ValueError(
"Result from applying user function has unexpected "
f"{nice_str} variables {extra}."
)
def dataset_to_dataarray(obj: Dataset) -> DataArray:
if not isinstance(obj, Dataset):
raise TypeError("Expected Dataset, got %s" % type(obj))
if len(obj.data_vars) > 1:
raise TypeError(
"Trying to convert Dataset with more than one data variable to DataArray"
)
return next(iter(obj.data_vars.values()))
def dataarray_to_dataset(obj: DataArray) -> Dataset:
# only using _to_temp_dataset would break
# func = lambda x: x.to_dataset()
# since that relies on preserving name.
if obj.name is None:
dataset = obj._to_temp_dataset()
else:
dataset = obj.to_dataset()
return dataset
def make_meta(obj):
"""If obj is a DataArray or Dataset, return a new object of the same type and with
the same variables and dtypes, but where all variables have size 0 and numpy
backend.
If obj is neither a DataArray nor Dataset, return it unaltered.
"""
if isinstance(obj, DataArray):
obj_array = obj
obj = obj._to_temp_dataset()
elif isinstance(obj, Dataset):
obj_array = None
else:
return obj
meta = Dataset()
for name, variable in obj.variables.items():
meta_obj = meta_from_array(variable.data, ndim=variable.ndim)
meta[name] = (variable.dims, meta_obj, variable.attrs)
meta.attrs = obj.attrs
meta = meta.set_coords(obj.coords)
if obj_array is not None:
return obj_array._from_temp_dataset(meta)
return meta
def infer_template(
func: Callable[..., T_DSorDA], obj: Union[DataArray, Dataset], *args, **kwargs
) -> T_DSorDA:
"""Infer return object by running the function on meta objects."""
meta_args = [make_meta(arg) for arg in (obj,) + args]
try:
template = func(*meta_args, **kwargs)
except Exception as e:
raise Exception(
"Cannot infer object returned from running user provided function. "
"Please supply the 'template' kwarg to map_blocks."
) from e
if not isinstance(template, (Dataset, DataArray)):
raise TypeError(
"Function must return an xarray DataArray or Dataset. Instead it returned "
f"{type(template)}"
)
return template
def make_dict(x: Union[DataArray, Dataset]) -> Dict[Hashable, Any]:
"""Map variable name to numpy(-like) data
(Dataset.to_dict() is too complicated).
"""
if isinstance(x, DataArray):
x = x._to_temp_dataset()
return {k: v.data for k, v in x.variables.items()}
def _get_chunk_slicer(dim: Hashable, chunk_index: Mapping, chunk_bounds: Mapping):
if dim in chunk_index:
which_chunk = chunk_index[dim]
return slice(chunk_bounds[dim][which_chunk], chunk_bounds[dim][which_chunk + 1])
return slice(None)
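# Illustrative sketch (never called) of the chunk bookkeeping used below: for
# chunks (2, 3, 2) along "x" the cumulative bounds are (0, 2, 5, 7), so chunk
# index 1 maps to slice(2, 5).
def _example_chunk_slicer():
    chunks = {"x": (2, 3, 2)}
    chunk_bounds = {dim: np.cumsum((0,) + c) for dim, c in chunks.items()}
    return _get_chunk_slicer("x", {"x": 1}, chunk_bounds)  # slice(2, 5)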
def map_blocks(
func: Callable[..., T_DSorDA],
obj: Union[DataArray, Dataset],
args: Sequence[Any] = (),
kwargs: Mapping[str, Any] = None,
template: Union[DataArray, Dataset] = None,
) -> T_DSorDA:
"""Apply a function to each block of a DataArray or Dataset.
.. warning::
This function is experimental and its signature may change.
Parameters
----------
func : callable
User-provided function that accepts a DataArray or Dataset as its first
parameter ``obj``. The function will receive a subset or 'block' of ``obj`` (see below),
corresponding to one chunk along each chunked dimension. ``func`` will be
executed as ``func(subset_obj, *subset_args, **kwargs)``.
This function must return either a single DataArray or a single Dataset.
This function cannot add a new chunked dimension.
obj : DataArray, Dataset
Passed to the function as its first argument, one block at a time.
args : sequence
Passed to func after unpacking and subsetting any xarray objects by blocks.
xarray objects in args must be aligned with obj, otherwise an error is raised.
kwargs : mapping
Passed verbatim to func after unpacking. xarray objects, if any, will not be
subset to blocks. Passing dask collections in kwargs is not allowed.
template : DataArray or Dataset, optional
xarray object representing the final result after compute is called. If not provided,
the function will first be run on mocked-up data that looks like ``obj`` but
has sizes 0, to determine properties of the returned object such as dtype,
variable names, attributes, new dimensions and new indexes (if any).
``template`` must be provided if the function changes the size of existing dimensions.
When provided, ``attrs`` on variables in `template` are copied over to the result. Any
``attrs`` set by ``func`` will be ignored.
Returns
-------
A single DataArray or Dataset with dask backend, reassembled from the outputs of the
function.
Notes
-----
This function is designed for when ``func`` needs to manipulate a whole xarray object
subset to each block. In the more common case where ``func`` can work on numpy arrays, it is
recommended to use ``apply_ufunc``.
If none of the variables in ``obj`` is backed by dask arrays, calling this function is
equivalent to calling ``func(obj, *args, **kwargs)``.
See Also
--------
dask.array.map_blocks, xarray.apply_ufunc, xarray.Dataset.map_blocks,
xarray.DataArray.map_blocks
Examples
--------
Calculate an anomaly from climatology using ``.groupby()``. Using
``xr.map_blocks()`` allows for parallel operations with knowledge of ``xarray``,
its indices, and its methods like ``.groupby()``.
>>> def calculate_anomaly(da, groupby_type="time.month"):
... gb = da.groupby(groupby_type)
... clim = gb.mean(dim="time")
... return gb - clim
...
>>> time = xr.cftime_range("1990-01", "1992-01", freq="M")
>>> month = xr.DataArray(time.month, coords={"time": time}, dims=["time"])
>>> np.random.seed(123)
>>> array = xr.DataArray(
... np.random.rand(len(time)),
... dims=["time"],
... coords={"time": time, "month": month},
... ).chunk()
>>> array.map_blocks(calculate_anomaly, template=array).compute()
<xarray.DataArray (time: 24)>
array([ 0.12894847, 0.11323072, -0.0855964 , -0.09334032, 0.26848862,
0.12382735, 0.22460641, 0.07650108, -0.07673453, -0.22865714,
-0.19063865, 0.0590131 , -0.12894847, -0.11323072, 0.0855964 ,
0.09334032, -0.26848862, -0.12382735, -0.22460641, -0.07650108,
0.07673453, 0.22865714, 0.19063865, -0.0590131 ])
Coordinates:
* time (time) object 1990-01-31 00:00:00 ... 1991-12-31 00:00:00
month (time) int64 1 2 3 4 5 6 7 8 9 10 11 12 1 2 3 4 5 6 7 8 9 10 11 12
Note that one must explicitly use ``args=[]`` and ``kwargs={}`` to pass arguments
to the function being applied in ``xr.map_blocks()``:
>>> array.map_blocks(
... calculate_anomaly,
... kwargs={"groupby_type": "time.year"},
... template=array,
... ) # doctest: +ELLIPSIS
<xarray.DataArray (time: 24)>
dask.array<calculate_anomaly-...-<this, shape=(24,), dtype=float64, chunksize=(24,), chunktype=numpy.ndarray>
Coordinates:
* time (time) object 1990-01-31 00:00:00 ... 1991-12-31 00:00:00
month (time) int64 dask.array<chunksize=(24,), meta=np.ndarray>
"""
def _wrapper(
func: Callable,
args: List,
kwargs: dict,
arg_is_array: Iterable[bool],
expected: dict,
):
"""
Wrapper function that receives datasets in args; converts to dataarrays when necessary;
passes these to the user function `func` and checks returned objects for expected shapes/sizes/etc.
"""
converted_args = [
dataset_to_dataarray(arg) if is_array else arg
for is_array, arg in zip(arg_is_array, args)
]
result = func(*converted_args, **kwargs)
# check all dims are present
missing_dimensions = set(expected["shapes"]) - set(result.sizes)
if missing_dimensions:
raise ValueError(
f"Dimensions {missing_dimensions} missing on returned object."
)
# check that index lengths and values are as expected
for name, index in result.indexes.items():
if name in expected["shapes"]:
if len(index) != expected["shapes"][name]:
raise ValueError(
f"Received dimension {name!r} of length {len(index)}. Expected length {expected['shapes'][name]}."
)
if name in expected["indexes"]:
expected_index = expected["indexes"][name]
if not index.equals(expected_index):
raise ValueError(
f"Expected index {name!r} to be {expected_index!r}. Received {index!r} instead."
)
# check that all expected variables were returned
check_result_variables(result, expected, "coords")
if isinstance(result, Dataset):
check_result_variables(result, expected, "data_vars")
return make_dict(result)
if template is not None and not isinstance(template, (DataArray, Dataset)):
raise TypeError(
f"template must be a DataArray or Dataset. Received {type(template).__name__} instead."
)
if not isinstance(args, Sequence):
raise TypeError("args must be a sequence (for example, a list or tuple).")
if kwargs is None:
kwargs = {}
elif not isinstance(kwargs, Mapping):
raise TypeError("kwargs must be a mapping (for example, a dict)")
for value in kwargs.values():
if dask.is_dask_collection(value):
raise TypeError(
"Cannot pass dask collections in kwargs yet. Please compute or "
"load values before passing to map_blocks."
)
if not dask.is_dask_collection(obj):
return func(obj, *args, **kwargs)
all_args = [obj] + list(args)
is_xarray = [isinstance(arg, (Dataset, DataArray)) for arg in all_args]
is_array = [isinstance(arg, DataArray) for arg in all_args]
# there should be a better way to group this. partition?
xarray_indices, xarray_objs = unzip(
(index, arg) for index, arg in enumerate(all_args) if is_xarray[index]
)
others = [
(index, arg) for index, arg in enumerate(all_args) if not is_xarray[index]
]
# all xarray objects must be aligned. This is consistent with apply_ufunc.
aligned = align(*xarray_objs, join="exact")
xarray_objs = tuple(
dataarray_to_dataset(arg) if is_da else arg
for is_da, arg in zip(is_array, aligned)
)
_, npargs = unzip(
sorted(list(zip(xarray_indices, xarray_objs)) + others, key=lambda x: x[0])
)
# check that chunk sizes are compatible
input_chunks = dict(npargs[0].chunks)
input_indexes = dict(npargs[0].indexes)
for arg in xarray_objs[1:]:
assert_chunks_compatible(npargs[0], arg)
input_chunks.update(arg.chunks)
input_indexes.update(arg.indexes)
if template is None:
# infer template by providing zero-shaped arrays
template = infer_template(func, aligned[0], *args, **kwargs)
template_indexes = set(template.indexes)
preserved_indexes = template_indexes & set(input_indexes)
new_indexes = template_indexes - set(input_indexes)
indexes = {dim: input_indexes[dim] for dim in preserved_indexes}
indexes.update({k: template.indexes[k] for k in new_indexes})
output_chunks = {
dim: input_chunks[dim] for dim in template.dims if dim in input_chunks
}
else:
# template xarray object has been provided with proper sizes and chunk shapes
indexes = dict(template.indexes)
if isinstance(template, DataArray):
output_chunks = dict(zip(template.dims, template.chunks)) # type: ignore
else:
output_chunks = dict(template.chunks)
for dim in output_chunks:
if dim in input_chunks and len(input_chunks[dim]) != len(output_chunks[dim]):
raise ValueError(
"map_blocks requires that one block of the input maps to one block of output. "
f"Expected number of output chunks along dimension {dim!r} to be {len(input_chunks[dim])}. "
f"Received {len(output_chunks[dim])} instead. Please provide template if not provided, or "
"fix the provided template."
)
if isinstance(template, DataArray):
result_is_array = True
template_name = template.name
template = template._to_temp_dataset()
elif isinstance(template, Dataset):
result_is_array = False
else:
raise TypeError(
f"func output must be DataArray or Dataset; got {type(template)}"
)
# We're building a new HighLevelGraph hlg. We'll have one new layer
# for each variable in the dataset, which is the result of the
# func applied to the values.
graph: Dict[Any, Any] = {}
new_layers: DefaultDict[str, Dict[Any, Any]] = collections.defaultdict(dict)
gname = "{}-{}".format(
dask.utils.funcname(func), dask.base.tokenize(npargs[0], args, kwargs)
)
# map dims to list of chunk indexes
ichunk = {dim: range(len(chunks_v)) for dim, chunks_v in input_chunks.items()}
# mapping from chunk index to slice bounds
input_chunk_bounds = {
dim: np.cumsum((0,) + chunks_v) for dim, chunks_v in input_chunks.items()
}
output_chunk_bounds = {
dim: np.cumsum((0,) + chunks_v) for dim, chunks_v in output_chunks.items()
}
def subset_dataset_to_block(
graph: dict, gname: str, dataset: Dataset, input_chunk_bounds, chunk_index
):
"""
Creates a task that subsets an xarray dataset to a block determined by chunk_index.
Block extents are determined by input_chunk_bounds.
It also creates subtasks that subset the constituent variables of the dataset.
"""
# this will become [[name1, variable1],
# [name2, variable2],
# ...]
# which is passed to dict and then to Dataset
data_vars = []
coords = []
chunk_tuple = tuple(chunk_index.values())
for name, variable in dataset.variables.items():
# make a task that creates tuple of (dims, chunk)
if dask.is_dask_collection(variable.data):
# recursively index into dask_keys nested list to get chunk
chunk = variable.__dask_keys__()
for dim in variable.dims:
chunk = chunk[chunk_index[dim]]
chunk_variable_task = (f"{gname}-{name}-{chunk[0]}",) + chunk_tuple
graph[chunk_variable_task] = (
tuple,
[variable.dims, chunk, variable.attrs],
)
else:
# non-dask array possibly with dimensions chunked on other variables
# index into variable appropriately
subsetter = {
dim: _get_chunk_slicer(dim, chunk_index, input_chunk_bounds)
for dim in variable.dims
}
subset = variable.isel(subsetter)
chunk_variable_task = (
"{}-{}".format(gname, dask.base.tokenize(subset)),
) + chunk_tuple
graph[chunk_variable_task] = (
tuple,
[subset.dims, subset, subset.attrs],
)
# this task creates dict mapping variable name to above tuple
if name in dataset._coord_names:
coords.append([name, chunk_variable_task])
else:
data_vars.append([name, chunk_variable_task])
return (Dataset, (dict, data_vars), (dict, coords), dataset.attrs)
# iterate over all possible chunk combinations
for chunk_tuple in itertools.product(*ichunk.values()):
# mapping from dimension name to chunk index
chunk_index = dict(zip(ichunk.keys(), chunk_tuple))
blocked_args = [
subset_dataset_to_block(graph, gname, arg, input_chunk_bounds, chunk_index)
if isxr
else arg
for isxr, arg in zip(is_xarray, npargs)
]
# expected["shapes", "coords", "data_vars", "indexes"] are used to
# raise nice error messages in _wrapper
expected = {}
# input chunk 0 along a dimension maps to output chunk 0 along the same dimension
# even if length of dimension is changed by the applied function
expected["shapes"] = {
k: output_chunks[k][v] for k, v in chunk_index.items() if k in output_chunks
}
expected["data_vars"] = set(template.data_vars.keys()) # type: ignore
expected["coords"] = set(template.coords.keys()) # type: ignore
expected["indexes"] = {
dim: indexes[dim][_get_chunk_slicer(dim, chunk_index, output_chunk_bounds)]
for dim in indexes
}
from_wrapper = (gname,) + chunk_tuple
graph[from_wrapper] = (_wrapper, func, blocked_args, kwargs, is_array, expected)
# mapping from variable name to dask graph key
var_key_map: Dict[Hashable, str] = {}
for name, variable in template.variables.items():
if name in indexes:
continue
gname_l = f"{gname}-{name}"
var_key_map[name] = gname_l
key: Tuple[Any, ...] = (gname_l,)
for dim in variable.dims:
if dim in chunk_index:
key += (chunk_index[dim],)
else:
# unchunked dimensions in the input have one chunk in the result
# output can have new dimensions with exactly one chunk
key += (0,)
# We're adding multiple new layers to the graph:
# The first new layer is the result of the computation on
# the array.
# Then we add one layer per variable, which extracts the
# result for that variable, and depends on just the first new
# layer.
new_layers[gname_l][key] = (operator.getitem, from_wrapper, name)
hlg = HighLevelGraph.from_collections(
gname,
graph,
dependencies=[arg for arg in npargs if dask.is_dask_collection(arg)],
)
for gname_l, layer in new_layers.items():
# This adds in the getitems for each variable in the dataset.
hlg.dependencies[gname_l] = {gname}
hlg.layers[gname_l] = layer
result = Dataset(coords=indexes, attrs=template.attrs)
for index in result.indexes:
result[index].attrs = template[index].attrs
result[index].encoding = template[index].encoding
for name, gname_l in var_key_map.items():
dims = template[name].dims
var_chunks = []
for dim in dims:
if dim in output_chunks:
var_chunks.append(output_chunks[dim])
elif dim in indexes:
var_chunks.append((len(indexes[dim]),))
elif dim in template.dims:
# new unindexed dimension
var_chunks.append((template.sizes[dim],))
data = dask.array.Array(
hlg, name=gname_l, chunks=var_chunks, dtype=template[name].dtype
)
result[name] = (dims, data, template[name].attrs)
result[name].encoding = template[name].encoding
result = result.set_coords(template._coord_names)
if result_is_array:
da = dataset_to_dataarray(result)
da.name = template_name
return da # type: ignore
return result # type: ignore
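# Illustrative usage sketch (never called): xarray objects passed via ``args``
# are aligned with ``obj`` and split into matching blocks before ``func`` runs,
# so each call sees one block of each. Assumes ``array`` is a chunked DataArray
# such as the one constructed in the docstring above.
def _example_map_blocks_with_args(array):
    def add(da, other):
        return da + other
    return map_blocks(add, array, args=[array], template=array)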
|
import getopt
import os
import re
import sys
import time
import cherrypy
from cherrypy import _cperror, _cpmodpy
from cherrypy.lib import httputil
curdir = os.path.join(os.getcwd(), os.path.dirname(__file__))
AB_PATH = ''
APACHE_PATH = 'apache'
SCRIPT_NAME = '/cpbench/users/rdelon/apps/blog'
__all__ = ['ABSession', 'Root', 'print_report',
'run_standard_benchmarks', 'safe_threads',
'size_report', 'thread_report',
]
size_cache = {}
class Root:
@cherrypy.expose
def index(self):
return """<html>
<head>
<title>CherryPy Benchmark</title>
</head>
<body>
<ul>
<li><a href="hello">Hello, world! (14 byte dynamic)</a></li>
<li><a href="static/index.html">Static file (14 bytes static)</a></li>
<li><form action="sizer">Response of length:
<input type='text' name='size' value='10' /></form>
</li>
</ul>
</body>
</html>"""
@cherrypy.expose
def hello(self):
return 'Hello, world\r\n'
@cherrypy.expose
def sizer(self, size):
resp = size_cache.get(size, None)
if resp is None:
size_cache[size] = resp = 'X' * int(size)
return resp
def init():
cherrypy.config.update({
'log.error.file': '',
'environment': 'production',
'server.socket_host': '127.0.0.1',
'server.socket_port': 54583,
'server.max_request_header_size': 0,
'server.max_request_body_size': 0,
})
# Cheat mode on ;)
del cherrypy.config['tools.log_tracebacks.on']
del cherrypy.config['tools.log_headers.on']
del cherrypy.config['tools.trailing_slash.on']
appconf = {
'/static': {
'tools.staticdir.on': True,
'tools.staticdir.dir': 'static',
'tools.staticdir.root': curdir,
},
}
globals().update(
app=cherrypy.tree.mount(Root(), SCRIPT_NAME, appconf),
)
class NullRequest:
"""A null HTTP request class, returning 200 and an empty body."""
def __init__(self, local, remote, scheme='http'):
pass
def close(self):
pass
def run(self, method, path, query_string, protocol, headers, rfile):
cherrypy.response.status = '200 OK'
cherrypy.response.header_list = [('Content-Type', 'text/html'),
('Server', 'Null CherryPy'),
('Date', httputil.HTTPDate()),
('Content-Length', '0'),
]
cherrypy.response.body = ['']
return cherrypy.response
class NullResponse:
pass
class ABSession:
"""A session of 'ab', the Apache HTTP server benchmarking tool.
Example output from ab:
This is ApacheBench, Version 2.0.40-dev <$Revision: 1.121.2.1 $> apache-2.0
Copyright (c) 1996 Adam Twiss, Zeus Technology Ltd, http://www.zeustech.net/
Copyright (c) 1998-2002 The Apache Software Foundation, http://www.apache.org/
Benchmarking 127.0.0.1 (be patient)
Completed 100 requests
Completed 200 requests
Completed 300 requests
Completed 400 requests
Completed 500 requests
Completed 600 requests
Completed 700 requests
Completed 800 requests
Completed 900 requests
Server Software: CherryPy/3.1beta
Server Hostname: 127.0.0.1
Server Port: 54583
Document Path: /static/index.html
Document Length: 14 bytes
Concurrency Level: 10
Time taken for tests: 9.643867 seconds
Complete requests: 1000
Failed requests: 0
Write errors: 0
Total transferred: 189000 bytes
HTML transferred: 14000 bytes
Requests per second: 103.69 [#/sec] (mean)
Time per request: 96.439 [ms] (mean)
Time per request: 9.644 [ms] (mean, across all concurrent requests)
Transfer rate: 19.08 [Kbytes/sec] received
Connection Times (ms)
min mean[+/-sd] median max
Connect: 0 0 2.9 0 10
Processing: 20 94 7.3 90 130
Waiting: 0 43 28.1 40 100
Total: 20 95 7.3 100 130
Percentage of the requests served within a certain time (ms)
50% 100
66% 100
75% 100
80% 100
90% 100
95% 100
98% 100
99% 110
100% 130 (longest request)
Finished 1000 requests
"""
parse_patterns = [
('complete_requests', 'Completed',
br'^Complete requests:\s*(\d+)'),
('failed_requests', 'Failed',
br'^Failed requests:\s*(\d+)'),
('requests_per_second', 'req/sec',
br'^Requests per second:\s*([0-9.]+)'),
('time_per_request_concurrent', 'msec/req',
br'^Time per request:\s*([0-9.]+).*concurrent requests\)$'),
('transfer_rate', 'KB/sec',
br'^Transfer rate:\s*([0-9.]+)')
]
def __init__(self, path=SCRIPT_NAME + '/hello', requests=1000,
concurrency=10):
self.path = path
self.requests = requests
self.concurrency = concurrency
def args(self):
port = cherrypy.server.socket_port
assert self.concurrency > 0
assert self.requests > 0
# Don't use "localhost".
# Cf
# http://mail.python.org/pipermail/python-win32/2008-March/007050.html
return ('-k -n %s -c %s http://127.0.0.1:%s%s' %
(self.requests, self.concurrency, port, self.path))
def run(self):
# Parse output of ab, setting attributes on self
try:
self.output = _cpmodpy.read_process(AB_PATH or 'ab', self.args())
except Exception:
print(_cperror.format_exc())
raise
for attr, name, pattern in self.parse_patterns:
val = re.search(pattern, self.output, re.MULTILINE)
if val:
val = val.group(1)
setattr(self, attr, val)
else:
setattr(self, attr, None)
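# Illustrative sketch (never called): driving a single ab run by hand instead
# of the report helpers below. Assumes ``ab`` is on the PATH and the CherryPy
# app configured by init() is already listening.
def _example_single_session():
    sess = ABSession(path=SCRIPT_NAME + '/hello', requests=100, concurrency=5)
    sess.run()                       # runs ab and parses its output
    return sess.requests_per_second  # parsed from the "Requests per second" line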
safe_threads = (25, 50, 100, 200, 400)
if sys.platform in ('win32',):
# For some reason, ab crashes with > 50 threads on my Win2k laptop.
safe_threads = (10, 20, 30, 40, 50)
def thread_report(path=SCRIPT_NAME + '/hello', concurrency=safe_threads):
sess = ABSession(path)
attrs, names, patterns = list(zip(*sess.parse_patterns))
avg = dict.fromkeys(attrs, 0.0)
yield ('threads',) + names
for c in concurrency:
sess.concurrency = c
sess.run()
row = [c]
for attr in attrs:
val = getattr(sess, attr)
if val is None:
print(sess.output)
row = None
break
val = float(val)
avg[attr] += float(val)
row.append(val)
if row:
yield row
# Add a row of averages.
yield ['Average'] + [str(avg[attr] / len(concurrency)) for attr in attrs]
def size_report(sizes=(10, 100, 1000, 10000, 100000, 100000000),
concurrency=50):
sess = ABSession(concurrency=concurrency)
attrs, names, patterns = list(zip(*sess.parse_patterns))
yield ('bytes',) + names
for sz in sizes:
sess.path = '%s/sizer?size=%s' % (SCRIPT_NAME, sz)
sess.run()
yield [sz] + [getattr(sess, attr) for attr in attrs]
def print_report(rows):
for row in rows:
print('')
for val in row:
sys.stdout.write(str(val).rjust(10) + ' | ')
print('')
def run_standard_benchmarks():
print('')
print('Client Thread Report (1000 requests, 14 byte response body, '
'%s server threads):' % cherrypy.server.thread_pool)
print_report(thread_report())
print('')
print('Client Thread Report (1000 requests, 14 bytes via staticdir, '
'%s server threads):' % cherrypy.server.thread_pool)
print_report(thread_report('%s/static/index.html' % SCRIPT_NAME))
print('')
print('Size Report (1000 requests, 50 client threads, '
'%s server threads):' % cherrypy.server.thread_pool)
print_report(size_report())
# modpython and other WSGI #
def startup_modpython(req=None):
"""Start the CherryPy app server in 'serverless' mode (for modpython/WSGI).
"""
if cherrypy.engine.state == cherrypy._cpengine.STOPPED:
if req:
if 'nullreq' in req.get_options():
cherrypy.engine.request_class = NullRequest
cherrypy.engine.response_class = NullResponse
ab_opt = req.get_options().get('ab', '')
if ab_opt:
global AB_PATH
AB_PATH = ab_opt
cherrypy.engine.start()
if cherrypy.engine.state == cherrypy._cpengine.STARTING:
cherrypy.engine.wait()
return 0 # apache.OK
def run_modpython(use_wsgi=False):
print('Starting mod_python...')
pyopts = []
# Pass the null and ab=path options through Apache
if '--null' in opts:
pyopts.append(('nullreq', ''))
if '--ab' in opts:
pyopts.append(('ab', opts['--ab']))
s = _cpmodpy.ModPythonServer
if use_wsgi:
pyopts.append(('wsgi.application', 'cherrypy::tree'))
pyopts.append(
('wsgi.startup', 'cherrypy.test.benchmark::startup_modpython'))
handler = 'modpython_gateway::handler'
s = s(port=54583, opts=pyopts,
apache_path=APACHE_PATH, handler=handler)
else:
pyopts.append(
('cherrypy.setup', 'cherrypy.test.benchmark::startup_modpython'))
s = s(port=54583, opts=pyopts, apache_path=APACHE_PATH)
try:
s.start()
run()
finally:
s.stop()
if __name__ == '__main__':
init()
longopts = ['cpmodpy', 'modpython', 'null', 'notests',
'help', 'ab=', 'apache=']
try:
switches, args = getopt.getopt(sys.argv[1:], '', longopts)
opts = dict(switches)
except getopt.GetoptError:
print(__doc__)
sys.exit(2)
if '--help' in opts:
print(__doc__)
sys.exit(0)
if '--ab' in opts:
AB_PATH = opts['--ab']
if '--notests' in opts:
# Return without stopping the server, so that the pages
# can be tested from a standard web browser.
def run():
port = cherrypy.server.socket_port
print('You may now open http://127.0.0.1:%s%s/' %
(port, SCRIPT_NAME))
if '--null' in opts:
print('Using null Request object')
else:
def run():
end = time.time() - start
print('Started in %s seconds' % end)
if '--null' in opts:
print('\nUsing null Request object')
try:
try:
run_standard_benchmarks()
except Exception:
print(_cperror.format_exc())
raise
finally:
cherrypy.engine.exit()
print('Starting CherryPy app server...')
class NullWriter(object):
"""Suppresses the printing of socket errors."""
def write(self, data):
pass
sys.stderr = NullWriter()
start = time.time()
if '--cpmodpy' in opts:
run_modpython()
elif '--modpython' in opts:
run_modpython(use_wsgi=True)
else:
if '--null' in opts:
cherrypy.server.request_class = NullRequest
cherrypy.server.response_class = NullResponse
cherrypy.engine.start_with_callback(run)
cherrypy.engine.block()
|
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_OCCUPANCY,
BinarySensorEntity,
)
from .const import _LOGGER, DOMAIN, ECOBEE_MODEL_TO_NAME, MANUFACTURER
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up ecobee binary (occupancy) sensors."""
data = hass.data[DOMAIN]
dev = []
for index in range(len(data.ecobee.thermostats)):
for sensor in data.ecobee.get_remote_sensors(index):
for item in sensor["capability"]:
if item["type"] != "occupancy":
continue
dev.append(EcobeeBinarySensor(data, sensor["name"], index))
async_add_entities(dev, True)
class EcobeeBinarySensor(BinarySensorEntity):
"""Representation of an Ecobee sensor."""
def __init__(self, data, sensor_name, sensor_index):
"""Initialize the Ecobee sensor."""
self.data = data
self._name = f"{sensor_name} Occupancy"
self.sensor_name = sensor_name
self.index = sensor_index
self._state = None
@property
def name(self):
"""Return the name of the Ecobee sensor."""
return self._name.rstrip()
@property
def unique_id(self):
"""Return a unique identifier for this sensor."""
for sensor in self.data.ecobee.get_remote_sensors(self.index):
if sensor["name"] == self.sensor_name:
if "code" in sensor:
return f"{sensor['code']}-{self.device_class}"
thermostat = self.data.ecobee.get_thermostat(self.index)
return f"{thermostat['identifier']}-{sensor['id']}-{self.device_class}"
@property
def device_info(self):
"""Return device information for this sensor."""
identifier = None
model = None
for sensor in self.data.ecobee.get_remote_sensors(self.index):
if sensor["name"] != self.sensor_name:
continue
if "code" in sensor:
identifier = sensor["code"]
model = "ecobee Room Sensor"
else:
thermostat = self.data.ecobee.get_thermostat(self.index)
identifier = thermostat["identifier"]
try:
model = (
f"{ECOBEE_MODEL_TO_NAME[thermostat['modelNumber']]} Thermostat"
)
except KeyError:
_LOGGER.error(
"Model number for ecobee thermostat %s not recognized. "
"Please visit this link and provide the following information: "
"https://github.com/home-assistant/core/issues/27172 "
"Unrecognized model number: %s",
thermostat["name"],
thermostat["modelNumber"],
)
break
if identifier is not None and model is not None:
return {
"identifiers": {(DOMAIN, identifier)},
"name": self.sensor_name,
"manufacturer": MANUFACTURER,
"model": model,
}
return None
@property
def is_on(self):
"""Return the status of the sensor."""
return self._state == "true"
@property
def device_class(self):
"""Return the class of this sensor, from DEVICE_CLASSES."""
return DEVICE_CLASS_OCCUPANCY
async def async_update(self):
"""Get the latest state of the sensor."""
await self.data.update()
for sensor in self.data.ecobee.get_remote_sensors(self.index):
if sensor["name"] != self.sensor_name:
continue
for item in sensor["capability"]:
if item["type"] != "occupancy":
continue
self._state = item["value"]
break
|
import os
from flask import Flask
from flask_testing import LiveServerTestCase
from kalliope.core import LifoManager
from kalliope.core.ConfigurationManager import BrainLoader
from kalliope.core.ConfigurationManager import SettingLoader
from kalliope.core.Models import Singleton
from kalliope.core.RestAPI.FlaskAPI import FlaskAPI
class RestAPITestBase(LiveServerTestCase):
def tearDown(self):
Singleton._instances = {}
# clean the lifo
LifoManager.clean_saved_lifo()
def create_app(self):
"""
executed once at the beginning of the test
"""
# make sure no singleton instances have been loaded before
Singleton._instances = {}
current_path = os.getcwd()
if "/Tests" in current_path:
full_path_brain_to_test = current_path + os.sep + os.pardir + os.sep + "brains/brain_test_api.yml"
self.audio_file = "files/bonjour.wav"
else:
full_path_brain_to_test = current_path + os.sep + "Tests/brains/brain_test_api.yml"
self.audio_file = "Tests/files/bonjour.wav"
# rest api config
self.sl = SettingLoader()
self.settings = self.sl.settings
self.settings.rest_api.password_protected = False
self.settings.active = True
self.settings.port = 5000
self.settings.allowed_cors_origin = "*"
self.settings.default_synapse = None
self.settings.hooks["on_order_not_found"] = "order-not-found-synapse"
# prepare a test brain
brain_to_test = full_path_brain_to_test
brain_loader = BrainLoader(file_path=brain_to_test)
brain = brain_loader.brain
self.app = Flask(__name__)
self.app.config['TESTING'] = True
self.flask_api = FlaskAPI(self.app, port=5000, brain=brain)
self.client = self.app.test_client()
return self.flask_api.app
|
import functools
import email.utils
import re
import builtins
from binascii import b2a_base64
from cgi import parse_header
from email.header import decode_header
from http.server import BaseHTTPRequestHandler
from urllib.parse import unquote_plus
import jaraco.collections
import cherrypy
from cherrypy._cpcompat import ntob, ntou
response_codes = BaseHTTPRequestHandler.responses.copy()
# From https://github.com/cherrypy/cherrypy/issues/361
response_codes[500] = ('Internal Server Error',
'The server encountered an unexpected condition '
'which prevented it from fulfilling the request.')
response_codes[503] = ('Service Unavailable',
'The server is currently unable to handle the '
'request due to a temporary overloading or '
'maintenance of the server.')
HTTPDate = functools.partial(email.utils.formatdate, usegmt=True)
def urljoin(*atoms):
r"""Return the given path \*atoms, joined into a single URL.
This will correctly join a SCRIPT_NAME and PATH_INFO into the
original URL, even if either atom is blank.
"""
url = '/'.join([x for x in atoms if x])
while '//' in url:
url = url.replace('//', '/')
# Special-case the final url of "", and return "/" instead.
return url or '/'
def urljoin_bytes(*atoms):
"""Return the given path `*atoms`, joined into a single URL.
This will correctly join a SCRIPT_NAME and PATH_INFO into the
original URL, even if either atom is blank.
"""
url = b'/'.join([x for x in atoms if x])
while b'//' in url:
url = url.replace(b'//', b'/')
# Special-case the final url of "", and return "/" instead.
return url or b'/'
def protocol_from_http(protocol_str):
"""Return a protocol tuple from the given 'HTTP/x.y' string."""
return int(protocol_str[5]), int(protocol_str[7])
def get_ranges(headervalue, content_length):
"""Return a list of (start, stop) indices from a Range header, or None.
Each (start, stop) tuple will be composed of two ints, which are suitable
for use in a slicing operation. That is, the header "Range: bytes=3-6",
if applied against a Python string, is requesting resource[3:7]. This
function will return the list [(3, 7)].
If this function returns an empty list, you should return HTTP 416.
"""
if not headervalue:
return None
result = []
bytesunit, byteranges = headervalue.split('=', 1)
for brange in byteranges.split(','):
start, stop = [x.strip() for x in brange.split('-', 1)]
if start:
if not stop:
stop = content_length - 1
start, stop = int(start), int(stop)
if start >= content_length:
# From rfc 2616 sec 14.16:
# "If the server receives a request (other than one
# including an If-Range request-header field) with an
# unsatisfiable Range request-header field (that is,
# all of whose byte-range-spec values have a first-byte-pos
# value greater than the current length of the selected
# resource), it SHOULD return a response code of 416
# (Requested range not satisfiable)."
continue
if stop < start:
# From rfc 2616 sec 14.16:
# "If the server ignores a byte-range-spec because it
# is syntactically invalid, the server SHOULD treat
# the request as if the invalid Range header field
# did not exist. (Normally, this means return a 200
# response containing the full entity)."
return None
result.append((start, stop + 1))
else:
if not stop:
# See rfc quote above.
return None
# Negative subscript (last N bytes)
#
# RFC 2616 Section 14.35.1:
# If the entity is shorter than the specified suffix-length,
# the entire entity-body is used.
if int(stop) > content_length:
result.append((0, content_length))
else:
result.append((content_length - int(stop), content_length))
return result
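# A few concrete inputs for get_ranges() above (illustrative only, never called):
def _example_get_ranges():
    assert get_ranges('bytes=3-6', 8) == [(3, 7)]    # inclusive header -> slice bounds
    assert get_ranges('bytes=-4', 8) == [(4, 8)]     # suffix range: last 4 bytes
    assert get_ranges('bytes=10-', 8) == []          # unsatisfiable -> empty list (416)
    assert get_ranges(None, 8) is None               # no header -> None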
class HeaderElement(object):
"""An element (with parameters) from an HTTP header's element list."""
def __init__(self, value, params=None):
self.value = value
if params is None:
params = {}
self.params = params
def __cmp__(self, other):
return builtins.cmp(self.value, other.value)
def __lt__(self, other):
return self.value < other.value
def __str__(self):
p = [';%s=%s' % (k, v) for k, v in self.params.items()]
return str('%s%s' % (self.value, ''.join(p)))
def __bytes__(self):
return ntob(self.__str__())
def __unicode__(self):
return ntou(self.__str__())
@staticmethod
def parse(elementstr):
"""Transform 'token;key=val' to ('token', {'key': 'val'})."""
initial_value, params = parse_header(elementstr)
return initial_value, params
@classmethod
def from_str(cls, elementstr):
"""Construct an instance from a string of the form 'token;key=val'."""
ival, params = cls.parse(elementstr)
return cls(ival, params)
q_separator = re.compile(r'; *q *=')
class AcceptElement(HeaderElement):
"""An element (with parameters) from an Accept* header's element list.
AcceptElement objects are comparable; the more-preferred object will be
"less than" the less-preferred object. They are also therefore sortable;
if you sort a list of AcceptElement objects, they will be listed in
priority order; the most preferred value will be first. Yes, it should
have been the other way around, but it's too late to fix now.
"""
@classmethod
def from_str(cls, elementstr):
qvalue = None
# The first "q" parameter (if any) separates the initial
# media-range parameter(s) (if any) from the accept-params.
atoms = q_separator.split(elementstr, 1)
media_range = atoms.pop(0).strip()
if atoms:
# The qvalue for an Accept header can have extensions. The other
# headers cannot, but it's easier to parse them as if they did.
qvalue = HeaderElement.from_str(atoms[0].strip())
media_type, params = cls.parse(media_range)
if qvalue is not None:
params['q'] = qvalue
return cls(media_type, params)
@property
def qvalue(self):
'The qvalue, or priority, of this value.'
val = self.params.get('q', '1')
if isinstance(val, HeaderElement):
val = val.value
try:
return float(val)
except ValueError as val_err:
"""Fail client requests with invalid quality value.
Ref: https://github.com/cherrypy/cherrypy/issues/1370
"""
raise cherrypy.HTTPError(
400,
'Malformed HTTP header: `{}`'.
format(str(self)),
) from val_err
def __cmp__(self, other):
diff = builtins.cmp(self.qvalue, other.qvalue)
if diff == 0:
diff = builtins.cmp(str(self), str(other))
return diff
def __lt__(self, other):
if self.qvalue == other.qvalue:
return str(self) < str(other)
else:
return self.qvalue < other.qvalue
RE_HEADER_SPLIT = re.compile(',(?=(?:[^"]*"[^"]*")*[^"]*$)')
def header_elements(fieldname, fieldvalue):
"""Return a sorted HeaderElement list from a comma-separated header string.
"""
if not fieldvalue:
return []
result = []
for element in RE_HEADER_SPLIT.split(fieldvalue):
if fieldname.startswith('Accept') or fieldname == 'TE':
hv = AcceptElement.from_str(element)
else:
hv = HeaderElement.from_str(element)
result.append(hv)
return list(reversed(sorted(result)))
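# Illustrative sketch (never called): Accept* values come back sorted by
# qvalue, most preferred first.
def _example_accept_sorting():
    els = header_elements('Accept', 'text/html;q=0.5, application/json')
    return [e.value for e in els]   # ['application/json', 'text/html']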
def decode_TEXT(value):
r"""
Decode :rfc:`2047` TEXT
>>> decode_TEXT("=?utf-8?q?f=C3=BCr?=") == b'f\xfcr'.decode('latin-1')
True
"""
atoms = decode_header(value)
decodedvalue = ''
for atom, charset in atoms:
if charset is not None:
atom = atom.decode(charset)
decodedvalue += atom
return decodedvalue
def decode_TEXT_maybe(value):
"""
Decode the text but only if '=?' appears in it.
"""
return decode_TEXT(value) if '=?' in value else value
def valid_status(status):
"""Return legal HTTP status Code, Reason-phrase and Message.
The status arg must be an int, a str that begins with an int
or the constant from ``http.client`` stdlib module.
If no reason-phrase is supplied, a default reason-phrase
will be provided.
>>> import http.client
>>> from http.server import BaseHTTPRequestHandler
>>> valid_status(http.client.ACCEPTED) == (
... int(http.client.ACCEPTED),
... ) + BaseHTTPRequestHandler.responses[http.client.ACCEPTED]
True
"""
if not status:
status = 200
code, reason = status, None
if isinstance(status, str):
code, _, reason = status.partition(' ')
reason = reason.strip() or None
try:
code = int(code)
except (TypeError, ValueError):
raise ValueError('Illegal response status from server '
'(%s is non-numeric).' % repr(code))
if code < 100 or code > 599:
raise ValueError('Illegal response status from server '
'(%s is out of range).' % repr(code))
if code not in response_codes:
# code is unknown but not illegal
default_reason, message = '', ''
else:
default_reason, message = response_codes[code]
if reason is None:
reason = default_reason
return code, reason, message
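# A few more concrete valid_status() inputs (illustrative only, never called),
# complementing the doctest above:
def _example_valid_status():
    assert valid_status(404) == (404,) + response_codes[404]
    assert valid_status('299 Custom OK') == (299, 'Custom OK', '')  # unknown but legal code
    assert valid_status(None)[:2] == (200, 'OK')                    # falsy status defaults to 200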
# NOTE: the parse_qs functions that follow are modified version of those
# in the python3.0 source - we need to pass through an encoding to the unquote
# method, but the default parse_qs function doesn't allow us to. These do.
def _parse_qs(qs, keep_blank_values=0, strict_parsing=0, encoding='utf-8'):
"""Parse a query given as a string argument.
Arguments:
qs: URL-encoded query string to be parsed
keep_blank_values: flag indicating whether blank values in
URL encoded queries should be treated as blank strings. A
true value indicates that blanks should be retained as blank
strings. The default false value indicates that blank values
are to be ignored and treated as if they were not included.
strict_parsing: flag indicating what to do with parsing errors. If
false (the default), errors are silently ignored. If true,
errors raise a ValueError exception.
Returns a dict, as G-d intended.
"""
pairs = [s2 for s1 in qs.split('&') for s2 in s1.split(';')]
d = {}
for name_value in pairs:
if not name_value and not strict_parsing:
continue
nv = name_value.split('=', 1)
if len(nv) != 2:
if strict_parsing:
raise ValueError('bad query field: %r' % (name_value,))
# Handle case of a control-name with no equal sign
if keep_blank_values:
nv.append('')
else:
continue
if len(nv[1]) or keep_blank_values:
name = unquote_plus(nv[0], encoding, errors='strict')
value = unquote_plus(nv[1], encoding, errors='strict')
if name in d:
if not isinstance(d[name], list):
d[name] = [d[name]]
d[name].append(value)
else:
d[name] = value
return d
image_map_pattern = re.compile(r'[0-9]+,[0-9]+')
def parse_query_string(query_string, keep_blank_values=True, encoding='utf-8'):
"""Build a params dictionary from a query_string.
Duplicate key/value pairs in the provided query_string will be
returned as {'key': [val1, val2, ...]}. Single key/values will
be returned as strings: {'key': 'value'}.
"""
if image_map_pattern.match(query_string):
# Server-side image map. Map the coords to 'x' and 'y'
# (like CGI::Request does).
pm = query_string.split(',')
pm = {'x': int(pm[0]), 'y': int(pm[1])}
else:
pm = _parse_qs(query_string, keep_blank_values, encoding=encoding)
return pm
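# Illustrative sketch (never called): duplicate keys collapse into lists, and a
# bare "x,y" query string is treated as a server-side image map.
def _example_parse_query_string():
    assert parse_query_string('a=1&a=2&b=3') == {'a': ['1', '2'], 'b': '3'}
    assert parse_query_string('10,20') == {'x': 10, 'y': 20}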
class CaseInsensitiveDict(jaraco.collections.KeyTransformingDict):
"""A case-insensitive dict subclass.
Each key is changed on entry to title case.
"""
@staticmethod
def transform_key(key):
if key is None:
# TODO(#1830): why?
return 'None'
return key.title()
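# Illustrative sketch, not part of the original module: keys are normalized
# to title case on entry, so lookups behave case-insensitively, and a None
# key is stored under the literal string 'None' (see the TODO above).
def _demo_case_insensitive_dict():
    d = CaseInsensitiveDict()
    d['content-type'] = 'text/html'
    assert d['CONTENT-TYPE'] == 'text/html'
    assert list(d) == ['Content-Type']
    d[None] = 'x'
    assert d['None'] == 'x'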
# TEXT = <any OCTET except CTLs, but including LWS>
#
# A CRLF is allowed in the definition of TEXT only as part of a header
# field continuation. It is expected that the folding LWS will be
# replaced with a single SP before interpretation of the TEXT value.
if str == bytes:
header_translate_table = ''.join([chr(i) for i in range(256)])
header_translate_deletechars = ''.join(
[chr(i) for i in range(32)]) + chr(127)
else:
header_translate_table = None
header_translate_deletechars = bytes(range(32)) + bytes([127])
class HeaderMap(CaseInsensitiveDict):
"""A dict subclass for HTTP request and response headers.
Each key is changed on entry to str(key).title(). This allows headers
to be case-insensitive and avoid duplicates.
Values are header values (decoded according to :rfc:`2047` if necessary).
"""
protocol = (1, 1)
encodings = ['ISO-8859-1']
# Someday, when http-bis is done, this will probably get dropped
# since few servers, clients, or intermediaries do it. But until then,
# we're going to obey the spec as is.
# "Words of *TEXT MAY contain characters from character sets other than
# ISO-8859-1 only when encoded according to the rules of RFC 2047."
use_rfc_2047 = True
def elements(self, key):
"""Return a sorted list of HeaderElements for the given header."""
return header_elements(self.transform_key(key), self.get(key))
def values(self, key):
"""Return a sorted list of HeaderElement.value for the given header."""
return [e.value for e in self.elements(key)]
def output(self):
"""Transform self into a list of (name, value) tuples."""
return list(self.encode_header_items(self.items()))
@classmethod
def encode_header_items(cls, header_items):
"""
Prepare the sequence of name, value tuples into a form suitable for
transmitting on the wire for HTTP.
"""
for k, v in header_items:
if not isinstance(v, str) and not isinstance(v, bytes):
v = str(v)
yield tuple(map(cls.encode_header_item, (k, v)))
@classmethod
def encode_header_item(cls, item):
if isinstance(item, str):
item = cls.encode(item)
# See header_translate_* constants above.
# Replace only if you really know what you're doing.
return item.translate(
header_translate_table, header_translate_deletechars)
@classmethod
def encode(cls, v):
"""Return the given header name or value, encoded for HTTP output."""
for enc in cls.encodings:
try:
return v.encode(enc)
except UnicodeEncodeError:
continue
if cls.protocol == (1, 1) and cls.use_rfc_2047:
# Encode RFC-2047 TEXT
# (e.g. u"\u8200" -> "=?utf-8?b?6IiA?=").
# We do our own here instead of using the email module
# because we never want to fold lines--folding has
# been deprecated by the HTTP working group.
v = b2a_base64(v.encode('utf-8'))
return (b'=?utf-8?b?' + v.strip(b'\n') + b'?=')
raise ValueError('Could not encode header part %r using '
'any of the encodings %r.' %
(v, cls.encodings))
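# Illustrative sketch, not part of the original module: HeaderMap title-cases
# keys and encodes name/value pairs to bytes for the wire; values outside
# ISO-8859-1 fall back to the RFC 2047 encoded-word form.
def _demo_header_map():
    h = HeaderMap()
    h['content-type'] = 'text/html'
    assert (b'Content-Type', b'text/html') in h.output()
    assert HeaderMap.encode('\u8200') == b'=?utf-8?b?6IiA?='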
class Host(object):
"""An internet address.
name
Should be the client's host name. If not available (because no DNS
lookup is performed), the IP address should be used instead.
"""
ip = '0.0.0.0'
port = 80
name = 'unknown.tld'
def __init__(self, ip, port, name=None):
self.ip = ip
self.port = port
if name is None:
name = ip
self.name = name
def __repr__(self):
return 'httputil.Host(%r, %r, %r)' % (self.ip, self.port, self.name)
|
from homeassistant.components import axis
from homeassistant.components.axis.const import CONF_MODEL, DOMAIN as AXIS_DOMAIN
from homeassistant.const import (
CONF_DEVICE,
CONF_HOST,
CONF_MAC,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_USERNAME,
)
from homeassistant.setup import async_setup_component
from .test_device import MAC, setup_axis_integration
from tests.async_mock import AsyncMock, Mock, patch
from tests.common import MockConfigEntry
async def test_setup_no_config(hass):
"""Test setup without configuration."""
assert await async_setup_component(hass, AXIS_DOMAIN, {})
assert AXIS_DOMAIN not in hass.data
async def test_setup_entry(hass):
"""Test successful setup of entry."""
await setup_axis_integration(hass)
assert len(hass.data[AXIS_DOMAIN]) == 1
assert MAC in hass.data[AXIS_DOMAIN]
async def test_setup_entry_fails(hass):
"""Test successful setup of entry."""
config_entry = MockConfigEntry(
domain=AXIS_DOMAIN, data={CONF_MAC: "0123"}, version=2
)
config_entry.add_to_hass(hass)
mock_device = Mock()
mock_device.async_setup = AsyncMock(return_value=False)
with patch.object(axis, "AxisNetworkDevice") as mock_device_class:
mock_device_class.return_value = mock_device
assert not await hass.config_entries.async_setup(config_entry.entry_id)
assert not hass.data[AXIS_DOMAIN]
async def test_unload_entry(hass):
"""Test successful unload of entry."""
config_entry = await setup_axis_integration(hass)
device = hass.data[AXIS_DOMAIN][config_entry.unique_id]
assert hass.data[AXIS_DOMAIN]
assert await hass.config_entries.async_unload(device.config_entry.entry_id)
assert not hass.data[AXIS_DOMAIN]
async def test_migrate_entry(hass):
"""Test successful migration of entry data."""
legacy_config = {
CONF_DEVICE: {
CONF_HOST: "1.2.3.4",
CONF_USERNAME: "username",
CONF_PASSWORD: "password",
CONF_PORT: 80,
},
CONF_MAC: "mac",
CONF_MODEL: "model",
CONF_NAME: "name",
}
entry = MockConfigEntry(domain=AXIS_DOMAIN, data=legacy_config)
assert entry.data == legacy_config
assert entry.version == 1
await entry.async_migrate(hass)
assert entry.data == {
CONF_DEVICE: {
CONF_HOST: "1.2.3.4",
CONF_USERNAME: "username",
CONF_PASSWORD: "password",
CONF_PORT: 80,
},
CONF_HOST: "1.2.3.4",
CONF_USERNAME: "username",
CONF_PASSWORD: "password",
CONF_PORT: 80,
CONF_MAC: "mac",
CONF_MODEL: "model",
CONF_NAME: "name",
}
assert entry.version == 2
|
import os.path as op
import pytest
from mne import read_selection
from mne.io import read_raw_fif
from mne.utils import run_tests_if_main
test_path = op.join(op.split(__file__)[0], '..', 'io', 'tests', 'data')
raw_fname = op.join(test_path, 'test_raw.fif')
raw_new_fname = op.join(test_path, 'test_chpi_raw_sss.fif')
def test_read_selection():
"""Test reading of selections."""
# test one channel for each selection
ch_names = ['MEG 2211', 'MEG 0223', 'MEG 1312', 'MEG 0412', 'MEG 1043',
'MEG 2042', 'MEG 2032', 'MEG 0522', 'MEG 1031']
sel_names = ['Vertex', 'Left-temporal', 'Right-temporal', 'Left-parietal',
'Right-parietal', 'Left-occipital', 'Right-occipital',
'Left-frontal', 'Right-frontal']
raw = read_raw_fif(raw_fname)
for i, name in enumerate(sel_names):
sel = read_selection(name)
assert ch_names[i] in sel
sel_info = read_selection(name, info=raw.info)
assert sel == sel_info
# test some combinations
all_ch = read_selection(['L', 'R'])
left = read_selection('L')
right = read_selection('R')
assert len(all_ch) == len(left) + len(right)
assert len(set(left).intersection(set(right))) == 0
frontal = read_selection('frontal')
occipital = read_selection('Right-occipital')
assert len(set(frontal).intersection(set(occipital))) == 0
ch_names_new = [ch.replace(' ', '') for ch in ch_names]
raw_new = read_raw_fif(raw_new_fname)
for i, name in enumerate(sel_names):
sel = read_selection(name, info=raw_new.info)
assert ch_names_new[i] in sel
pytest.raises(TypeError, read_selection, name, info='foo')
run_tests_if_main()
|
from __future__ import print_function, absolute_import
import sys
from functools import reduce
from .names import color_names, default_styles
from .styles import ColorNotFound
__all__ = ['ColorFactory', 'StyleFactory']
class ColorFactory(object):
"""This creates color names given fg = True/False. It usually will
be called as part of a StyleFactory."""
def __init__(self, fg, style):
self._fg = fg
self._style = style
self.reset = style.from_color(style.color_class(fg=fg))
# Adding the color name shortcuts for foreground colors
for item in color_names[:16]:
setattr(
self, item,
style.from_color(style.color_class.from_simple(item, fg=fg)))
def __getattr__(self, item):
"""Full color names work, but do not populate __dir__."""
try:
return self._style.from_color(
self._style.color_class(item, fg=self._fg))
except ColorNotFound:
raise AttributeError(item)
def full(self, name):
"""Gets the style for a color, using standard name procedure: either full
color name, html code, or number."""
return self._style.from_color(
self._style.color_class.from_full(name, fg=self._fg))
def simple(self, name):
"""Return the extended color scheme color for a value or name."""
return self._style.from_color(
self._style.color_class.from_simple(name, fg=self._fg))
def rgb(self, r, g=None, b=None):
"""Return the extended color scheme color for a value."""
if g is None and b is None:
return self.hex(r)
else:
return self._style.from_color(
self._style.color_class(r, g, b, fg=self._fg))
def hex(self, hexcode):
"""Return the extended color scheme color for a value."""
return self._style.from_color(
self._style.color_class.from_hex(hexcode, fg=self._fg))
def ansi(self, ansiseq):
"""Make a style from an ansi text sequence"""
return self._style.from_ansi(ansiseq)
def __getitem__(self, val):
"""\
        Shortcut to access colors numerically, by slice, or by RGB tuple.
        If a slice ends at or below 16, the simple ANSI colors are used."""
if isinstance(val, slice):
(start, stop, stride) = val.indices(256)
if stop <= 16:
return [self.simple(v) for v in range(start, stop, stride)]
else:
return [self.full(v) for v in range(start, stop, stride)]
elif isinstance(val, tuple):
return self.rgb(*val)
try:
return self.full(val)
except ColorNotFound:
return self.hex(val)
def __call__(self, val_or_r=None, g=None, b=None):
"""Shortcut to provide way to access colors."""
if val_or_r is None or (isinstance(val_or_r, str) and val_or_r == ''):
return self._style()
if isinstance(val_or_r, self._style):
return self._style(val_or_r)
if isinstance(val_or_r, str) and '\033' in val_or_r:
return self.ansi(val_or_r)
return self._style.from_color(
self._style.color_class(val_or_r, g, b, fg=self._fg))
def __iter__(self):
"""Iterates through all colors in extended colorset."""
return (self.full(i) for i in range(256))
def __invert__(self):
"""Allows clearing a color with ~"""
return self.reset
def __enter__(self):
"""This will reset the color on leaving the with statement."""
return self
def __exit__(self, type, value, traceback):
"""This resets a FG/BG color or all styles,
due to different definition of RESET for the
factories."""
self.reset.now()
return False
def __repr__(self):
"""Simple representation of the class by name."""
return "<{0}>".format(self.__class__.__name__)
class StyleFactory(ColorFactory):
"""Factory for styles. Holds font styles, FG and BG objects representing colors, and
imitates the FG ColorFactory to a large degree."""
def __init__(self, style):
super(StyleFactory, self).__init__(True, style)
self.fg = ColorFactory(True, style)
self.bg = ColorFactory(False, style)
self.do_nothing = style()
self.reset = style(reset=True)
for item in style.attribute_names:
setattr(self, item, style(attributes={item: True}))
self.load_stylesheet(default_styles)
@property
def use_color(self):
"""Shortcut for setting color usage on Style"""
return self._style.use_color
@use_color.setter
def use_color(self, val):
self._style.use_color = val
def from_ansi(self, ansi_sequence):
"""Calling this is a shortcut for creating a style from an ANSI sequence."""
return self._style.from_ansi(ansi_sequence)
@property
def stdout(self):
"""This is a shortcut for getting stdout from a class without an instance."""
return self._style._stdout if self._style._stdout is not None else sys.stdout
@stdout.setter
def stdout(self, newout):
self._style._stdout = newout
def get_colors_from_string(self, color=''):
"""
        Build a style from a string. Use `.` or a space as the separator;
        color names, numbers, fg/bg prefixes, HTML codes, etc. are all
        accepted (as strings).
"""
names = color.replace('.', ' ').split()
prev = self
styleslist = []
for name in names:
try:
prev = getattr(prev, name)
except AttributeError:
try:
prev = prev(int(name))
except (ColorNotFound, ValueError):
prev = prev(name)
if isinstance(prev, self._style):
styleslist.append(prev)
prev = self
if styleslist:
prev = reduce(lambda a, b: a & b, styleslist)
return prev if isinstance(prev, self._style) else prev.reset
def filter(self, colored_string):
"""Filters out colors in a string, returning only the name."""
if isinstance(colored_string, self._style):
return colored_string
return self._style.string_filter_ansi(colored_string)
def contains_colors(self, colored_string):
"""Checks to see if a string contains colors."""
return self._style.string_contains_colors(colored_string)
def extract(self, colored_string):
"""Gets colors from an ansi string, returns those colors"""
return self._style.from_ansi(colored_string, True)
    def load_stylesheet(self, stylesheet=default_styles):
        """Attach each style named in ``stylesheet`` to this factory."""
for item in stylesheet:
setattr(self, item, self.get_colors_from_string(stylesheet[item]))
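# Illustrative sketch, not part of the original module: resolving dotted color
# strings with get_colors_from_string(). ``styles`` is assumed to be a
# StyleFactory instance whose style class defines a 'bold' attribute, as
# plumbum's ANSI styles do.
def _demo_get_colors_from_string(styles):
    emphasised = styles.get_colors_from_string('fg.red.bold')  # red fg + bold
    background = styles.get_colors_from_string('bg.blue')      # blue background
    return emphasised, background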
|
import os
import shutil
import uuid
from queue import Empty
import tempfile
from time import monotonic
from . import virtual
from kombu.exceptions import ChannelError
from kombu.utils.encoding import bytes_to_str, str_to_bytes
from kombu.utils.json import loads, dumps
from kombu.utils.objects import cached_property
VERSION = (1, 0, 0)
__version__ = '.'.join(map(str, VERSION))
# needs win32all to work on Windows
if os.name == 'nt':
import win32con
import win32file
import pywintypes
LOCK_EX = win32con.LOCKFILE_EXCLUSIVE_LOCK
# 0 is the default
LOCK_SH = 0 # noqa
LOCK_NB = win32con.LOCKFILE_FAIL_IMMEDIATELY # noqa
__overlapped = pywintypes.OVERLAPPED()
def lock(file, flags):
"""Create file lock."""
hfile = win32file._get_osfhandle(file.fileno())
win32file.LockFileEx(hfile, flags, 0, 0xffff0000, __overlapped)
def unlock(file):
"""Remove file lock."""
hfile = win32file._get_osfhandle(file.fileno())
win32file.UnlockFileEx(hfile, 0, 0xffff0000, __overlapped)
elif os.name == 'posix':
import fcntl
from fcntl import LOCK_EX, LOCK_SH, LOCK_NB # noqa
def lock(file, flags): # noqa
"""Create file lock."""
fcntl.flock(file.fileno(), flags)
def unlock(file): # noqa
"""Remove file lock."""
fcntl.flock(file.fileno(), fcntl.LOCK_UN)
else:
raise RuntimeError(
'Filesystem plugin only defined for NT and POSIX platforms')
class Channel(virtual.Channel):
"""Filesystem Channel."""
    def _put(self, queue, payload, **kwargs):
        """Put `payload` onto `queue`."""
        filename = '{}_{}.{}.msg'.format(int(round(monotonic() * 1000)),
                                         uuid.uuid4(), queue)
        filename = os.path.join(self.data_folder_out, filename)
        try:
            with open(filename, 'wb') as f:
                lock(f, LOCK_EX)
                try:
                    f.write(str_to_bytes(dumps(payload)))
                finally:
                    # always release the lock; the context manager closes
                    # the file even if the write fails
                    unlock(f)
        except OSError:
            raise ChannelError(
                f'Cannot add file {filename!r} to directory')
def _get(self, queue):
"""Get next message from `queue`."""
queue_find = '.' + queue + '.msg'
folder = os.listdir(self.data_folder_in)
folder = sorted(folder)
while len(folder) > 0:
filename = folder.pop(0)
# only handle message for the requested queue
if filename.find(queue_find) < 0:
continue
if self.store_processed:
processed_folder = self.processed_folder
else:
processed_folder = tempfile.gettempdir()
try:
# move the file to the tmp/processed folder
shutil.move(os.path.join(self.data_folder_in, filename),
processed_folder)
except OSError:
pass # file could be locked, or removed in meantime so ignore
filename = os.path.join(processed_folder, filename)
            try:
                with open(filename, 'rb') as f:
                    payload = f.read()
                if not self.store_processed:
                    os.remove(filename)
            except OSError:
                raise ChannelError(
                    f'Cannot read file {filename!r} from queue.')
return loads(bytes_to_str(payload))
raise Empty()
def _purge(self, queue):
"""Remove all messages from `queue`."""
count = 0
queue_find = '.' + queue + '.msg'
folder = os.listdir(self.data_folder_in)
while len(folder) > 0:
filename = folder.pop()
try:
# only purge messages for the requested queue
if filename.find(queue_find) < 0:
continue
filename = os.path.join(self.data_folder_in, filename)
os.remove(filename)
count += 1
except OSError:
# we simply ignore its existence, as it was probably
# processed by another worker
pass
return count
def _size(self, queue):
"""Return the number of messages in `queue` as an :class:`int`."""
count = 0
queue_find = f'.{queue}.msg'
folder = os.listdir(self.data_folder_in)
while len(folder) > 0:
filename = folder.pop()
# only handle message for the requested queue
if filename.find(queue_find) < 0:
continue
count += 1
return count
@property
def transport_options(self):
return self.connection.client.transport_options
@cached_property
def data_folder_in(self):
return self.transport_options.get('data_folder_in', 'data_in')
@cached_property
def data_folder_out(self):
return self.transport_options.get('data_folder_out', 'data_out')
@cached_property
def store_processed(self):
return self.transport_options.get('store_processed', False)
@cached_property
def processed_folder(self):
return self.transport_options.get('processed_folder', 'processed')
class Transport(virtual.Transport):
"""Filesystem Transport."""
Channel = Channel
default_port = 0
driver_type = 'filesystem'
driver_name = 'filesystem'
def driver_version(self):
return 'N/A'
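# Illustrative usage sketch, not part of the original module: exchanging one
# message over the filesystem transport via kombu. Folder names below are
# placeholders; both options point at the same directory so a single
# connection can produce and consume.
def _demo_filesystem_transport():
    from kombu import Connection
    os.makedirs('broker_data', exist_ok=True)
    options = {'data_folder_in': 'broker_data',
               'data_folder_out': 'broker_data'}
    with Connection('filesystem://', transport_options=options) as conn:
        queue = conn.SimpleQueue('demo_queue')
        queue.put({'hello': 'world'})
        message = queue.get(block=True, timeout=1)
        message.ack()
        return message.payload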
|
import doctest
import io
import sys, os, tempfile, shutil
from stat import S_IWRITE
from os.path import join
from logilab.common.testlib import TestCase, unittest_main, unittest
from logilab.common.fileutils import *
DATA_DIR = join(os.path.abspath(os.path.dirname(__file__)), 'data')
NEWLINES_TXT = join(DATA_DIR, 'newlines.txt')
class FirstleveldirectoryTC(TestCase):
def test_known_values_first_level_directory(self):
"""return the first level directory of a path"""
self.assertEqual(first_level_directory('truc/bidule/chouette'), 'truc', None)
self.assertEqual(first_level_directory('/truc/bidule/chouette'), '/', None)
class IsBinaryTC(TestCase):
def test(self):
self.assertEqual(is_binary('toto.txt'), 0)
#self.assertEqual(is_binary('toto.xml'), 0)
self.assertEqual(is_binary('toto.bin'), 1)
self.assertEqual(is_binary('toto.sxi'), 1)
self.assertEqual(is_binary('toto.whatever'), 1)
class GetModeTC(TestCase):
def test(self):
self.assertEqual(write_open_mode('toto.txt'), 'w')
#self.assertEqual(write_open_mode('toto.xml'), 'w')
self.assertEqual(write_open_mode('toto.bin'), 'wb')
self.assertEqual(write_open_mode('toto.sxi'), 'wb')
class NormReadTC(TestCase):
def test_known_values_norm_read(self):
with io.open(NEWLINES_TXT) as f:
data = f.read()
self.assertEqual(data.strip(), '\n'.join(['# mixed new lines', '1', '2', '3']))
class LinesTC(TestCase):
def test_known_values_lines(self):
self.assertEqual(lines(NEWLINES_TXT),
['# mixed new lines', '1', '2', '3'])
def test_known_values_lines_comment(self):
self.assertEqual(lines(NEWLINES_TXT, comments='#'),
['1', '2', '3'])
class ExportTC(TestCase):
def setUp(self):
        # mkdtemp() creates the directory atomically, avoiding the race
        # inherent in the deprecated tempfile.mktemp()
        self.tempdir = tempfile.mkdtemp()
def test(self):
export(DATA_DIR, self.tempdir, verbose=0)
self.assertTrue(exists(join(self.tempdir, '__init__.py')))
self.assertTrue(exists(join(self.tempdir, 'sub')))
self.assertTrue(not exists(join(self.tempdir, '__init__.pyc')))
self.assertTrue(not exists(join(self.tempdir, 'CVS')))
def tearDown(self):
shutil.rmtree(self.tempdir)
class ProtectedFileTC(TestCase):
def setUp(self):
self.rpath = join(DATA_DIR, 'write_protected_file.txt')
self.rwpath = join(DATA_DIR, 'normal_file.txt')
# Make sure rpath is not writable !
os.chmod(self.rpath, 33060)
# Make sure rwpath is writable !
os.chmod(self.rwpath, 33188)
def test_mode_change(self):
"""tests that mode is changed when needed"""
# test on non-writable file
#self.assertTrue(not os.access(self.rpath, os.W_OK))
self.assertTrue(not os.stat(self.rpath).st_mode & S_IWRITE)
wp_file = ProtectedFile(self.rpath, 'w')
self.assertTrue(os.stat(self.rpath).st_mode & S_IWRITE)
self.assertTrue(os.access(self.rpath, os.W_OK))
# test on writable-file
self.assertTrue(os.stat(self.rwpath).st_mode & S_IWRITE)
self.assertTrue(os.access(self.rwpath, os.W_OK))
wp_file = ProtectedFile(self.rwpath, 'w')
self.assertTrue(os.stat(self.rwpath).st_mode & S_IWRITE)
self.assertTrue(os.access(self.rwpath, os.W_OK))
def test_restore_on_close(self):
"""tests original mode is restored on close"""
# test on non-writable file
#self.assertTrue(not os.access(self.rpath, os.W_OK))
self.assertTrue(not os.stat(self.rpath).st_mode & S_IWRITE)
ProtectedFile(self.rpath, 'w').close()
#self.assertTrue(not os.access(self.rpath, os.W_OK))
self.assertTrue(not os.stat(self.rpath).st_mode & S_IWRITE)
# test on writable-file
self.assertTrue(os.access(self.rwpath, os.W_OK))
self.assertTrue(os.stat(self.rwpath).st_mode & S_IWRITE)
ProtectedFile(self.rwpath, 'w').close()
self.assertTrue(os.access(self.rwpath, os.W_OK))
self.assertTrue(os.stat(self.rwpath).st_mode & S_IWRITE)
def test_mode_change_on_append(self):
"""tests that mode is changed when file is opened in 'a' mode"""
#self.assertTrue(not os.access(self.rpath, os.W_OK))
self.assertTrue(not os.stat(self.rpath).st_mode & S_IWRITE)
wp_file = ProtectedFile(self.rpath, 'a')
self.assertTrue(os.access(self.rpath, os.W_OK))
self.assertTrue(os.stat(self.rpath).st_mode & S_IWRITE)
wp_file.close()
#self.assertTrue(not os.access(self.rpath, os.W_OK))
self.assertTrue(not os.stat(self.rpath).st_mode & S_IWRITE)
if sys.version_info < (3, 0):
def load_tests(loader, tests, ignore):
from logilab.common import fileutils
tests.addTests(doctest.DocTestSuite(fileutils))
return tests
if __name__ == '__main__':
unittest_main()
|