from __future__ import print_function
import argparse
import os
import sys
def main(args):
p = argparse.ArgumentParser(description=__doc__)
p.add_argument("-c", "--no-create", action="store_true", help="do not create nonexistant files")
p.add_argument("file", action="store", nargs="+", help="one or more files to be touched")
ns = p.parse_args(args)
status = 0
for filename in ns.file:
try:
            if not os.path.exists(filename):
                # With --no-create, silently skip files that do not exist.
                if ns.no_create:
                    continue
                open(filename, "wb").close()
            os.utime(filename, None)
except Exception as err:
print("touch: {}: {!s}".format(type(err).__name__, err), file=sys.stderr)
status = 1
sys.exit(status)
if __name__ == "__main__":
main(sys.argv[1:])
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import patch
from diamond.collector import Collector
from openvpn import OpenVPNCollector
##########################################################################
class TestOpenVPNCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('OpenVPNCollector', {
'interval': 10,
'method': None,
'instances': 'file://' + self.getFixturePath('status.log'),
})
self.collector = OpenVPNCollector(config, None)
def test_import(self):
self.assertTrue(OpenVPNCollector)
@patch.object(Collector, 'publish')
def test_should_work_with_real_data(self, publish_mock):
self.collector.collect()
metrics = {
'status.clients.a_example_org.bytes_rx': 109619579.000000,
'status.clients.a_example_org.bytes_tx': 935436488.000000,
'status.clients.b_example_org.bytes_rx': 25067295.000000,
'status.clients.b_example_org.bytes_tx': 10497532.000000,
'status.clients.c_example_org.bytes_rx': 21842093.000000,
'status.clients.c_example_org.bytes_tx': 20185134.000000,
'status.clients.d_example_org.bytes_rx': 4559242.000000,
'status.clients.d_example_org.bytes_tx': 11133831.000000,
'status.clients.e_example_org.bytes_rx': 13090090.000000,
'status.clients.e_example_org.bytes_tx': 13401853.000000,
'status.clients.connected': 5,
'status.global.max_bcast-mcast_queue_length': 14.000000,
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
##########################################################################
if __name__ == "__main__":
unittest.main()
|
from typing import List, Optional
from pyinsteon.constants import ThermostatMode
from pyinsteon.operating_flag import CELSIUS
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
CURRENT_HVAC_COOL,
CURRENT_HVAC_FAN,
CURRENT_HVAC_HEAT,
CURRENT_HVAC_IDLE,
DOMAIN as CLIMATE_DOMAIN,
HVAC_MODE_AUTO,
HVAC_MODE_COOL,
HVAC_MODE_FAN_ONLY,
HVAC_MODE_HEAT,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_OFF,
SUPPORT_FAN_MODE,
SUPPORT_TARGET_HUMIDITY,
SUPPORT_TARGET_TEMPERATURE,
SUPPORT_TARGET_TEMPERATURE_RANGE,
)
from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS, TEMP_FAHRENHEIT
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import SIGNAL_ADD_ENTITIES
from .insteon_entity import InsteonEntity
from .utils import async_add_insteon_entities
COOLING = 1
HEATING = 2
DEHUMIDIFYING = 3
HUMIDIFYING = 4
TEMPERATURE = 10
HUMIDITY = 11
SYSTEM_MODE = 12
FAN_MODE = 13
COOL_SET_POINT = 14
HEAT_SET_POINT = 15
HUMIDITY_HIGH = 16
HUMIDITY_LOW = 17
HVAC_MODES = {
0: HVAC_MODE_OFF,
1: HVAC_MODE_HEAT,
2: HVAC_MODE_COOL,
3: HVAC_MODE_HEAT_COOL,
}
FAN_MODES = {4: HVAC_MODE_AUTO, 8: HVAC_MODE_FAN_ONLY}
SUPPORTED_FEATURES = (
SUPPORT_FAN_MODE
| SUPPORT_TARGET_HUMIDITY
| SUPPORT_TARGET_TEMPERATURE
| SUPPORT_TARGET_TEMPERATURE_RANGE
)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Insteon climate entities from a config entry."""
def add_entities(discovery_info=None):
"""Add the Insteon entities for the platform."""
async_add_insteon_entities(
hass,
CLIMATE_DOMAIN,
InsteonClimateEntity,
async_add_entities,
discovery_info,
)
signal = f"{SIGNAL_ADD_ENTITIES}_{CLIMATE_DOMAIN}"
async_dispatcher_connect(hass, signal, add_entities)
add_entities()
class InsteonClimateEntity(InsteonEntity, ClimateEntity):
"""A Class for an Insteon climate entity."""
@property
def supported_features(self):
"""Return the supported features for this entity."""
return SUPPORTED_FEATURES
@property
def temperature_unit(self) -> str:
"""Return the unit of measurement."""
if self._insteon_device.properties[CELSIUS].value:
return TEMP_CELSIUS
return TEMP_FAHRENHEIT
@property
def current_humidity(self) -> Optional[int]:
"""Return the current humidity."""
return self._insteon_device.groups[HUMIDITY].value
@property
def hvac_mode(self) -> str:
"""Return hvac operation ie. heat, cool mode."""
return HVAC_MODES[self._insteon_device.groups[SYSTEM_MODE].value]
@property
def hvac_modes(self) -> List[str]:
"""Return the list of available hvac operation modes."""
return list(HVAC_MODES.values())
@property
def current_temperature(self) -> Optional[float]:
"""Return the current temperature."""
return self._insteon_device.groups[TEMPERATURE].value
@property
def target_temperature(self) -> Optional[float]:
"""Return the temperature we try to reach."""
if self._insteon_device.groups[SYSTEM_MODE].value == ThermostatMode.HEAT:
return self._insteon_device.groups[HEAT_SET_POINT].value
if self._insteon_device.groups[SYSTEM_MODE].value == ThermostatMode.COOL:
return self._insteon_device.groups[COOL_SET_POINT].value
return None
@property
def target_temperature_high(self) -> Optional[float]:
"""Return the highbound target temperature we try to reach."""
if self._insteon_device.groups[SYSTEM_MODE].value == ThermostatMode.AUTO:
return self._insteon_device.groups[COOL_SET_POINT].value
return None
@property
def target_temperature_low(self) -> Optional[float]:
"""Return the lowbound target temperature we try to reach."""
if self._insteon_device.groups[SYSTEM_MODE].value == ThermostatMode.AUTO:
return self._insteon_device.groups[HEAT_SET_POINT].value
return None
@property
def fan_mode(self) -> Optional[str]:
"""Return the fan setting."""
return FAN_MODES[self._insteon_device.groups[FAN_MODE].value]
@property
def fan_modes(self) -> Optional[List[str]]:
"""Return the list of available fan modes."""
return list(FAN_MODES.values())
@property
def target_humidity(self) -> Optional[int]:
"""Return the humidity we try to reach."""
high = self._insteon_device.groups[HUMIDITY_HIGH].value
low = self._insteon_device.groups[HUMIDITY_LOW].value
# May not be loaded yet so return a default if required
return (high + low) / 2 if high and low else None
@property
def min_humidity(self) -> int:
"""Return the minimum humidity."""
return 1
@property
def hvac_action(self) -> Optional[str]:
"""Return the current running hvac operation if supported.
Need to be one of CURRENT_HVAC_*.
"""
if self._insteon_device.groups[COOLING].value:
return CURRENT_HVAC_COOL
if self._insteon_device.groups[HEATING].value:
return CURRENT_HVAC_HEAT
if self._insteon_device.groups[FAN_MODE].value == ThermostatMode.FAN_ALWAYS_ON:
return CURRENT_HVAC_FAN
return CURRENT_HVAC_IDLE
@property
def device_state_attributes(self):
"""Provide attributes for display on device card."""
attr = super().device_state_attributes
humidifier = "off"
if self._insteon_device.groups[DEHUMIDIFYING].value:
humidifier = "dehumidifying"
if self._insteon_device.groups[HUMIDIFYING].value:
humidifier = "humidifying"
attr["humidifier"] = humidifier
return attr
async def async_set_temperature(self, **kwargs) -> None:
"""Set new target temperature."""
target_temp = kwargs.get(ATTR_TEMPERATURE)
target_temp_low = kwargs.get(ATTR_TARGET_TEMP_LOW)
target_temp_high = kwargs.get(ATTR_TARGET_TEMP_HIGH)
if target_temp is not None:
if self._insteon_device.groups[SYSTEM_MODE].value == ThermostatMode.HEAT:
await self._insteon_device.async_set_heat_set_point(target_temp)
elif self._insteon_device.groups[SYSTEM_MODE].value == ThermostatMode.COOL:
await self._insteon_device.async_set_cool_set_point(target_temp)
else:
await self._insteon_device.async_set_heat_set_point(target_temp_low)
await self._insteon_device.async_set_cool_set_point(target_temp_high)
async def async_set_fan_mode(self, fan_mode: str) -> None:
"""Set new target fan mode."""
mode = list(FAN_MODES)[list(FAN_MODES.values()).index(fan_mode)]
await self._insteon_device.async_set_mode(mode)
async def async_set_hvac_mode(self, hvac_mode: str) -> None:
"""Set new target hvac mode."""
mode = list(HVAC_MODES)[list(HVAC_MODES.values()).index(hvac_mode)]
await self._insteon_device.async_set_mode(mode)
async def async_set_humidity(self, humidity):
"""Set new humidity level."""
change = humidity - self.target_humidity
high = self._insteon_device.groups[HUMIDITY_HIGH].value + change
low = self._insteon_device.groups[HUMIDITY_LOW].value + change
await self._insteon_device.async_set_humidity_low_set_point(low)
await self._insteon_device.async_set_humidity_high_set_point(high)
async def async_added_to_hass(self):
"""Register INSTEON update events."""
await super().async_added_to_hass()
await self._insteon_device.async_read_op_flags()
for group in [
COOLING,
HEATING,
DEHUMIDIFYING,
HUMIDIFYING,
HEAT_SET_POINT,
FAN_MODE,
SYSTEM_MODE,
TEMPERATURE,
HUMIDITY,
HUMIDITY_HIGH,
HUMIDITY_LOW,
]:
self._insteon_device.groups[group].subscribe(self.async_entity_update)
|
import os
import numpy as np
from ..core import indexing
from ..core.utils import Frozen, FrozenDict, close_on_error
from ..core.variable import Variable
from .common import AbstractDataStore, BackendArray, BackendEntrypoint
from .locks import SerializableLock, ensure_lock
from .store import open_backend_dataset_store
# FIXME: Add a dedicated lock, even if ecCodes is supposed to be thread-safe
# in most circumstances. See:
# https://confluence.ecmwf.int/display/ECC/Frequently+Asked+Questions
ECCODES_LOCK = SerializableLock()
class CfGribArrayWrapper(BackendArray):
def __init__(self, datastore, array):
self.datastore = datastore
self.shape = array.shape
self.dtype = array.dtype
self.array = array
def __getitem__(self, key):
return indexing.explicit_indexing_adapter(
key, self.shape, indexing.IndexingSupport.OUTER, self._getitem
)
def _getitem(self, key):
with self.datastore.lock:
return self.array[key]
class CfGribDataStore(AbstractDataStore):
"""
Implements the ``xr.AbstractDataStore`` read-only API for a GRIB file.
"""
def __init__(self, filename, lock=None, **backend_kwargs):
import cfgrib
if lock is None:
lock = ECCODES_LOCK
self.lock = ensure_lock(lock)
self.ds = cfgrib.open_file(filename, **backend_kwargs)
def open_store_variable(self, name, var):
if isinstance(var.data, np.ndarray):
data = var.data
else:
wrapped_array = CfGribArrayWrapper(self, var.data)
data = indexing.LazilyOuterIndexedArray(wrapped_array)
encoding = self.ds.encoding.copy()
encoding["original_shape"] = var.data.shape
return Variable(var.dimensions, data, var.attributes, encoding)
def get_variables(self):
return FrozenDict(
(k, self.open_store_variable(k, v)) for k, v in self.ds.variables.items()
)
def get_attrs(self):
return Frozen(self.ds.attributes)
def get_dimensions(self):
return Frozen(self.ds.dimensions)
def get_encoding(self):
dims = self.get_dimensions()
encoding = {"unlimited_dims": {k for k, v in dims.items() if v is None}}
return encoding
def guess_can_open_cfgrib(store_spec):
try:
_, ext = os.path.splitext(store_spec)
except TypeError:
return False
return ext in {".grib", ".grib2", ".grb", ".grb2"}
def open_backend_dataset_cfgrib(
filename_or_obj,
*,
mask_and_scale=True,
decode_times=None,
concat_characters=None,
decode_coords=None,
drop_variables=None,
use_cftime=None,
decode_timedelta=None,
lock=None,
indexpath="{path}.{short_hash}.idx",
filter_by_keys={},
read_keys=[],
encode_cf=("parameter", "time", "geography", "vertical"),
squeeze=True,
time_dims=("time", "step"),
):
store = CfGribDataStore(
filename_or_obj,
indexpath=indexpath,
filter_by_keys=filter_by_keys,
read_keys=read_keys,
encode_cf=encode_cf,
squeeze=squeeze,
time_dims=time_dims,
lock=lock,
)
with close_on_error(store):
ds = open_backend_dataset_store(
store,
mask_and_scale=mask_and_scale,
decode_times=decode_times,
concat_characters=concat_characters,
decode_coords=decode_coords,
drop_variables=drop_variables,
use_cftime=use_cftime,
decode_timedelta=decode_timedelta,
)
return ds
cfgrib_backend = BackendEntrypoint(
open_dataset=open_backend_dataset_cfgrib, guess_can_open=guess_can_open_cfgrib
)
|
import logging
import pytest_bdd as bdd
bdd.scenarios('prompts.feature')
@bdd.when("I load an SSL page")
def load_ssl_page(quteproc, ssl_server):
# We don't wait here as we can get an SSL question.
quteproc.open_path('/', port=ssl_server.port, https=True, wait=False,
new_tab=True)
@bdd.when("I wait until the SSL page finished loading")
def wait_ssl_page_finished_loading(quteproc, ssl_server):
quteproc.wait_for_load_finished('/', port=ssl_server.port, https=True,
load_status='warn')
@bdd.when("I wait for a prompt")
def wait_for_prompt(quteproc):
quteproc.wait_for(message='Asking question *')
@bdd.then("no prompt should be shown")
def no_prompt_shown(quteproc):
quteproc.ensure_not_logged(message='Entering mode KeyMode.* (reason: '
'question asked)')
@bdd.then("a SSL error page should be shown")
def ssl_error_page(request, quteproc):
if request.config.webengine:
quteproc.wait_for(message="Certificate error: *")
msg = quteproc.wait_for(message="Load error: *")
msg.expected = True
assert msg.message == 'Load error: ERR_CERT_AUTHORITY_INVALID'
else:
line = quteproc.wait_for(message='Error while loading *: SSL handshake failed')
line.expected = True
quteproc.wait_for(message="Changing title for idx * to 'Error loading page: *'")
content = quteproc.get_content().strip()
assert "Unable to load page" in content
def test_certificate_error_load_status(request, quteproc, ssl_server):
"""If we load the same page twice, we should get a 'warn' status twice."""
quteproc.set_setting('content.ssl_strict', 'false')
for i in range(2):
quteproc.open_path('/', port=ssl_server.port, https=True, wait=False,
new_tab=True)
if i == 0 or not request.config.webengine:
# Error is only logged on the first error with QtWebEngine
quteproc.mark_expected(category='message',
loglevel=logging.ERROR,
message="Certificate error: *")
quteproc.wait_for_load_finished('/', port=ssl_server.port, https=True,
load_status='warn')
|
import pytest
from homeassistant.components.huawei_lte import sensor
from homeassistant.const import (
SIGNAL_STRENGTH_DECIBELS,
SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
)
@pytest.mark.parametrize(
("value", "expected"),
(
("-71 dBm", (-71, SIGNAL_STRENGTH_DECIBELS_MILLIWATT)),
("15dB", (15, SIGNAL_STRENGTH_DECIBELS)),
(">=-51dBm", (-51, SIGNAL_STRENGTH_DECIBELS_MILLIWATT)),
),
)
def test_format_default(value, expected):
"""Test that default formatter copes with expected values."""
assert sensor.format_default(value) == expected
|
import numpy as np
import unittest
from chainercv.utils import assert_is_bbox
from chainercv.utils import testing
@testing.parameterize(
{
'tl': np.random.uniform(-1, 1, size=(10, 2)).astype(np.float32),
'hw': np.random.uniform(0.1, 1, size=(10, 2)).astype(np.float32),
'valid': True},
{
'tl': np.random.uniform(0, 32, size=(10, 2)).astype(np.float32),
'hw': np.random.uniform(0.1, 16, size=(10, 2)).astype(np.float32),
'size': (48, 64),
'valid': True},
{
'bbox': np.zeros((0, 4), dtype=np.float32),
'size': (48, 64),
'valid': True},
{
'tl': np.random.uniform(20, 32, size=(10, 2)).astype(np.float32),
'hw': np.random.uniform(16, 32, size=(10, 2)).astype(np.float32),
'size': (16, 24),
'valid': False},
{
'tl': np.random.uniform(-1, 1, size=(10, 2)).astype(float),
'hw': np.random.uniform(0.1, 1, size=(10, 2)).astype(float),
'valid': False},
{
'tl': np.random.uniform(-1, 1, size=(10, 2)).astype(np.float32),
'hw': np.random.uniform(-1, 0, size=(10, 2)).astype(np.float32),
'valid': False},
{
'bbox': np.random.uniform(-1, 1, size=(10, 5)).astype(np.float32),
'valid': False},
{
'bbox': np.random.uniform(-1, 1, size=(10, 4, 1)).astype(np.float32),
'valid': False},
{
'bbox': ((0, 1, 2, 3), (1, 2, 3, 4)),
'valid': False},
{
'bbox': np.zeros((0, 3), dtype=np.float32),
'size': (48, 64),
'valid': False},
)
class TestAssertIsBbox(unittest.TestCase):
def setUp(self):
if not hasattr(self, 'bbox'):
self.bbox = np.hstack((self.tl, self.tl + self.hw))
if not hasattr(self, 'size'):
self.size = None
def test_assert_is_bbox(self):
if self.valid:
assert_is_bbox(self.bbox, self.size)
else:
with self.assertRaises(AssertionError):
assert_is_bbox(self.bbox, self.size)
testing.run_module(__name__, __file__)
|
from gi.repository import GObject, Gtk, GtkSource, Pango
from meld.conf import _
from meld.ui.bufferselectors import EncodingSelector, SourceLangSelector
class MeldStatusMenuButton(Gtk.MenuButton):
"""Compact menu button with arrow indicator for use in a status bar
Implementation based on gedit-status-menu-button.c
Copyright (C) 2008 - Jesse van den Kieboom
"""
__gtype_name__ = "MeldStatusMenuButton"
style = b"""
* {
padding: 1px 8px 2px 4px;
border: 0;
outline-width: 0;
}
"""
css_provider = Gtk.CssProvider()
css_provider.load_from_data(style)
def get_label(self):
return self._label.get_text()
def set_label(self, markup):
if markup == self._label.get_text():
return
self._label.set_markup(markup)
label = GObject.Property(
type=str,
default=None,
getter=get_label,
setter=set_label,
)
def __init__(self):
super().__init__()
style_context = self.get_style_context()
style_context.add_provider(
self.css_provider, Gtk.STYLE_PROVIDER_PRIORITY_APPLICATION)
style_context.add_class('flat')
# Ideally this would be a template child, but there's still no
# Python support for this.
label = Gtk.Label()
label.props.single_line_mode = True
label.props.halign = Gtk.Align.START
label.props.valign = Gtk.Align.BASELINE
label.props.xalign = 1.0
arrow = Gtk.Image.new_from_icon_name(
'pan-down-symbolic', Gtk.IconSize.SMALL_TOOLBAR)
arrow.props.valign = Gtk.Align.BASELINE
box = Gtk.Box()
box.set_spacing(3)
box.add(label)
box.add(arrow)
box.show_all()
self.remove(self.get_child())
self.add(box)
self._label = label
def set_label_width(self, width):
self._label.set_width_chars(width)
class MeldStatusBar(Gtk.Statusbar):
__gtype_name__ = "MeldStatusBar"
__gsignals__ = {
'start-go-to-line': (
GObject.SignalFlags.ACTION, None, tuple()),
'go-to-line': (
GObject.SignalFlags.RUN_FIRST, None, (int,)),
'encoding-changed': (
GObject.SignalFlags.RUN_FIRST, None, (GtkSource.Encoding,)),
}
cursor_position = GObject.Property(
type=object,
nick="The position of the cursor displayed in the status bar",
default=None,
)
source_encoding = GObject.Property(
type=GtkSource.Encoding,
nick="The file encoding displayed in the status bar",
default=GtkSource.Encoding.get_utf8(),
)
source_language = GObject.Property(
type=GtkSource.Language,
nick="The GtkSourceLanguage displayed in the status bar",
default=None,
)
# Abbreviation for line, column so that it will fit in the status bar
_line_column_text = _("Ln %i, Col %i")
def __init__(self):
super().__init__()
self.props.margin = 0
self.props.spacing = 6
        # On macOS, make the status bar font slightly smaller.
        try:
            from AppKit import NSFont
            system_font = NSFont.systemFontOfSize_(9)
            self.modify_font(
                Pango.FontDescription(system_font.displayName() + " 9"))
        except Exception:
            # AppKit is only available on macOS; skip silently elsewhere.
            pass
hbox = self.get_message_area()
label = hbox.get_children()[0]
hbox.props.spacing = 6
label.props.ellipsize = Pango.EllipsizeMode.NONE
hbox.remove(label)
hbox.pack_end(label, False, True, 0)
def do_realize(self):
Gtk.Statusbar.do_realize(self)
self.box_box = Gtk.HBox(homogeneous=False, spacing=6)
self.pack_end(self.box_box, False, True, 0)
self.box_box.pack_end(
self.construct_line_display(), False, True, 0)
self.box_box.pack_end(
self.construct_highlighting_selector(), False, True, 0)
self.box_box.pack_end(
self.construct_encoding_selector(), False, True, 0)
self.box_box.pack_end(
self.construct_display_popover(), False, True, 0)
self.box_box.show_all()
def construct_line_display(self):
# Note that we're receiving one-based line numbers from the
        # user and storing and emitting zero-based line numbers.
def go_to_line_text(text):
try:
line = int(text)
except ValueError:
return
self.emit('go-to-line', max(0, line - 1))
def line_entry_mapped(entry):
line, offset = self.props.cursor_position
entry.set_text(str(line + 1))
# This handler causes a failed assertion due to the `position`
# out param (see pygobject#12), but we don't need it here.
def line_entry_insert_text(entry, new_text, length, position):
if not new_text.isdigit():
GObject.signal_stop_emission_by_name(entry, 'insert-text')
return
def line_entry_changed(entry):
go_to_line_text(entry.get_text())
def line_entry_activated(entry):
go_to_line_text(entry.get_text())
pop.popdown()
entry = Gtk.Entry()
entry.set_tooltip_text(_('Line you want to move the cursor to'))
entry.set_icon_from_icon_name(
Gtk.EntryIconPosition.PRIMARY, 'go-jump-symbolic')
entry.set_icon_activatable(Gtk.EntryIconPosition.PRIMARY, False)
entry.set_input_purpose(Gtk.InputPurpose.DIGITS)
entry.connect('map', line_entry_mapped)
entry.connect('insert-text', line_entry_insert_text)
entry.connect('changed', line_entry_changed)
entry.connect('activate', line_entry_activated)
selector = Gtk.Grid()
selector.set_border_width(6)
selector.add(entry)
selector.show_all()
pop = Gtk.Popover()
pop.set_position(Gtk.PositionType.TOP)
pop.add(selector)
def format_cursor_position(binding, cursor):
line, offset = cursor
return self._line_column_text % (line + 1, offset + 1)
button = MeldStatusMenuButton()
self.bind_property(
'cursor_position', button, 'label', GObject.BindingFlags.DEFAULT,
format_cursor_position)
self.connect('start-go-to-line', lambda *args: button.clicked())
button.set_popover(pop)
# Set a label width to avoid other widgets moving on cursor change
reasonable_width = len(format_cursor_position(None, (1000, 100))) - 2
button.set_label_width(reasonable_width)
button.show()
return button
def construct_encoding_selector(self):
def change_encoding(selector, encoding):
self.emit('encoding-changed', encoding)
pop.hide()
def set_initial_encoding(selector):
selector.select_value(self.props.source_encoding)
selector = EncodingSelector()
selector.connect('encoding-selected', change_encoding)
selector.connect('map', set_initial_encoding)
pop = Gtk.Popover()
pop.set_position(Gtk.PositionType.TOP)
pop.add(selector)
button = MeldStatusMenuButton()
self.bind_property(
'source-encoding', button, 'label',
GObject.BindingFlags.DEFAULT | GObject.BindingFlags.SYNC_CREATE,
lambda binding, enc: selector.get_value_label(enc))
button.set_popover(pop)
button.show()
return button
def construct_highlighting_selector(self):
def change_language(selector, lang):
# TODO: Our other GObject properties are expected to be
# updated through a bound state from our parent. This is
# the only place where we assign to them instead of
# emitting a signal, and it makes the class logic as a
# whole kind of confusing.
self.props.source_language = lang
pop.hide()
def set_initial_language(selector):
selector.select_value(self.props.source_language)
selector = SourceLangSelector()
selector.connect('language-selected', change_language)
selector.connect('map', set_initial_language)
pop = Gtk.Popover()
pop.set_position(Gtk.PositionType.TOP)
pop.add(selector)
button = MeldStatusMenuButton()
self.bind_property(
'source-language', button, 'label',
GObject.BindingFlags.DEFAULT | GObject.BindingFlags.SYNC_CREATE,
lambda binding, enc: selector.get_value_label(enc))
button.set_popover(pop)
button.show()
return button
def construct_display_popover(self):
builder = Gtk.Builder.new_from_resource(
'/org/gnome/meld/ui/statusbar-menu.ui')
menu = builder.get_object('statusbar-menu')
pop = Gtk.Popover()
pop.bind_model(menu, 'view-local')
pop.set_position(Gtk.PositionType.TOP)
button = MeldStatusMenuButton()
# TRANSLATORS: This is the status bar label for a group of settings,
# such as text wrapping, show line numbers, whitespace, etc.
button.set_label(_('Display'))
button.set_popover(pop)
button.show()
return button
|
import voluptuous as vol
from homeassistant.components.device_automation import TRIGGER_BASE_SCHEMA
from homeassistant.components.device_automation.exceptions import (
InvalidDeviceAutomationConfig,
)
from homeassistant.components.homeassistant.triggers import event as event_trigger
from homeassistant.const import CONF_DEVICE_ID, CONF_DOMAIN, CONF_PLATFORM, CONF_TYPE
from . import DOMAIN
from .core.helpers import async_get_zha_device
CONF_SUBTYPE = "subtype"
DEVICE = "device"
DEVICE_IEEE = "device_ieee"
ZHA_EVENT = "zha_event"
TRIGGER_SCHEMA = TRIGGER_BASE_SCHEMA.extend(
{vol.Required(CONF_TYPE): str, vol.Required(CONF_SUBTYPE): str}
)
async def async_validate_trigger_config(hass, config):
"""Validate config."""
config = TRIGGER_SCHEMA(config)
if "zha" in hass.config.components:
trigger = (config[CONF_TYPE], config[CONF_SUBTYPE])
try:
zha_device = await async_get_zha_device(hass, config[CONF_DEVICE_ID])
except (KeyError, AttributeError) as err:
raise InvalidDeviceAutomationConfig from err
if (
zha_device.device_automation_triggers is None
or trigger not in zha_device.device_automation_triggers
):
raise InvalidDeviceAutomationConfig
return config
async def async_attach_trigger(hass, config, action, automation_info):
"""Listen for state changes based on configuration."""
trigger = (config[CONF_TYPE], config[CONF_SUBTYPE])
try:
zha_device = await async_get_zha_device(hass, config[CONF_DEVICE_ID])
except (KeyError, AttributeError):
return None
if trigger not in zha_device.device_automation_triggers:
return None
trigger = zha_device.device_automation_triggers[trigger]
event_config = {
event_trigger.CONF_PLATFORM: "event",
event_trigger.CONF_EVENT_TYPE: ZHA_EVENT,
event_trigger.CONF_EVENT_DATA: {DEVICE_IEEE: str(zha_device.ieee), **trigger},
}
event_config = event_trigger.TRIGGER_SCHEMA(event_config)
return await event_trigger.async_attach_trigger(
hass, event_config, action, automation_info, platform_type="device"
)
async def async_get_triggers(hass, device_id):
"""List device triggers.
    Make sure the device supports device automations and,
    if it does, return the trigger list.
"""
zha_device = await async_get_zha_device(hass, device_id)
if not zha_device.device_automation_triggers:
return
triggers = []
for trigger, subtype in zha_device.device_automation_triggers.keys():
triggers.append(
{
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_PLATFORM: DEVICE,
CONF_TYPE: trigger,
CONF_SUBTYPE: subtype,
}
)
return triggers
|
import asyncio
from datetime import timedelta
import logging
from aio_georss_gdacs import GdacsFeedManager
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import (
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_RADIUS,
CONF_SCAN_INTERVAL,
CONF_UNIT_SYSTEM_IMPERIAL,
LENGTH_MILES,
)
from homeassistant.core import callback
from homeassistant.helpers import aiohttp_client, config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.util.unit_system import METRIC_SYSTEM
from .const import (
CONF_CATEGORIES,
DEFAULT_RADIUS,
DEFAULT_SCAN_INTERVAL,
DOMAIN,
FEED,
PLATFORMS,
VALID_CATEGORIES,
)
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Inclusive(CONF_LATITUDE, "coordinates"): cv.latitude,
vol.Inclusive(CONF_LONGITUDE, "coordinates"): cv.longitude,
vol.Optional(CONF_RADIUS, default=DEFAULT_RADIUS): vol.Coerce(float),
vol.Optional(
CONF_SCAN_INTERVAL, default=DEFAULT_SCAN_INTERVAL
): cv.time_period,
vol.Optional(CONF_CATEGORIES, default=[]): vol.All(
cv.ensure_list, [vol.In(VALID_CATEGORIES)]
),
}
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, config):
"""Set up the GDACS component."""
if DOMAIN not in config:
return True
conf = config[DOMAIN]
latitude = conf.get(CONF_LATITUDE, hass.config.latitude)
longitude = conf.get(CONF_LONGITUDE, hass.config.longitude)
scan_interval = conf[CONF_SCAN_INTERVAL]
categories = conf[CONF_CATEGORIES]
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={
CONF_LATITUDE: latitude,
CONF_LONGITUDE: longitude,
CONF_RADIUS: conf[CONF_RADIUS],
CONF_SCAN_INTERVAL: scan_interval,
CONF_CATEGORIES: categories,
},
)
)
return True
async def async_setup_entry(hass, config_entry):
"""Set up the GDACS component as config entry."""
hass.data.setdefault(DOMAIN, {})
feeds = hass.data[DOMAIN].setdefault(FEED, {})
radius = config_entry.data[CONF_RADIUS]
if hass.config.units.name == CONF_UNIT_SYSTEM_IMPERIAL:
radius = METRIC_SYSTEM.length(radius, LENGTH_MILES)
# Create feed entity manager for all platforms.
manager = GdacsFeedEntityManager(hass, config_entry, radius)
feeds[config_entry.entry_id] = manager
_LOGGER.debug("Feed entity manager added for %s", config_entry.entry_id)
await manager.async_init()
return True
async def async_unload_entry(hass, config_entry):
"""Unload an GDACS component config entry."""
manager = hass.data[DOMAIN][FEED].pop(config_entry.entry_id)
await manager.async_stop()
await asyncio.wait(
[
hass.config_entries.async_forward_entry_unload(config_entry, domain)
for domain in PLATFORMS
]
)
return True
class GdacsFeedEntityManager:
"""Feed Entity Manager for GDACS feed."""
def __init__(self, hass, config_entry, radius_in_km):
"""Initialize the Feed Entity Manager."""
self._hass = hass
self._config_entry = config_entry
coordinates = (
config_entry.data[CONF_LATITUDE],
config_entry.data[CONF_LONGITUDE],
)
categories = config_entry.data[CONF_CATEGORIES]
websession = aiohttp_client.async_get_clientsession(hass)
self._feed_manager = GdacsFeedManager(
websession,
self._generate_entity,
self._update_entity,
self._remove_entity,
coordinates,
filter_radius=radius_in_km,
filter_categories=categories,
status_async_callback=self._status_update,
)
self._config_entry_id = config_entry.entry_id
self._scan_interval = timedelta(seconds=config_entry.data[CONF_SCAN_INTERVAL])
self._track_time_remove_callback = None
self._status_info = None
self.listeners = []
async def async_init(self):
"""Schedule initial and regular updates based on configured time interval."""
for domain in PLATFORMS:
self._hass.async_create_task(
self._hass.config_entries.async_forward_entry_setup(
self._config_entry, domain
)
)
async def update(event_time):
"""Update."""
await self.async_update()
# Trigger updates at regular intervals.
self._track_time_remove_callback = async_track_time_interval(
self._hass, update, self._scan_interval
)
_LOGGER.debug("Feed entity manager initialized")
async def async_update(self):
"""Refresh data."""
await self._feed_manager.update()
_LOGGER.debug("Feed entity manager updated")
async def async_stop(self):
"""Stop this feed entity manager from refreshing."""
for unsub_dispatcher in self.listeners:
unsub_dispatcher()
self.listeners = []
if self._track_time_remove_callback:
self._track_time_remove_callback()
_LOGGER.debug("Feed entity manager stopped")
@callback
def async_event_new_entity(self):
"""Return manager specific event to signal new entity."""
return f"gdacs_new_geolocation_{self._config_entry_id}"
def get_entry(self, external_id):
"""Get feed entry by external id."""
return self._feed_manager.feed_entries.get(external_id)
def status_info(self):
"""Return latest status update info received."""
return self._status_info
async def _generate_entity(self, external_id):
"""Generate new entity."""
async_dispatcher_send(
self._hass,
self.async_event_new_entity(),
self,
self._config_entry.unique_id,
external_id,
)
async def _update_entity(self, external_id):
"""Update entity."""
async_dispatcher_send(self._hass, f"gdacs_update_{external_id}")
async def _remove_entity(self, external_id):
"""Remove entity."""
async_dispatcher_send(self._hass, f"gdacs_delete_{external_id}")
async def _status_update(self, status_info):
"""Propagate status update."""
_LOGGER.debug("Status update received: %s", status_info)
self._status_info = status_info
async_dispatcher_send(self._hass, f"gdacs_status_{self._config_entry_id}")
|
import pytest
from unittest.mock import MagicMock
from yandextank.plugins.ShellExec import Plugin
def test_plugin_execute():
plugin = Plugin(MagicMock(), {}, 'shellexec')
assert plugin.execute('echo foo') == 0
def test_plugin_execute_raises():
plugin = Plugin(MagicMock(), {}, 'shellexec')
with pytest.raises(RuntimeError) as error:
plugin.execute('echo "foo')
    assert 'Subprocess returned 2' in str(error.value)
|
from homeassistant.components.gios.const import DOMAIN
from homeassistant.config_entries import (
ENTRY_STATE_LOADED,
ENTRY_STATE_NOT_LOADED,
ENTRY_STATE_SETUP_RETRY,
)
from homeassistant.const import STATE_UNAVAILABLE
from tests.async_mock import patch
from tests.common import MockConfigEntry
from tests.components.gios import init_integration
async def test_async_setup_entry(hass):
"""Test a successful setup entry."""
await init_integration(hass)
state = hass.states.get("air_quality.home")
assert state is not None
assert state.state != STATE_UNAVAILABLE
assert state.state == "4"
async def test_config_not_ready(hass):
"""Test for setup failure if connection to GIOS is missing."""
entry = MockConfigEntry(
domain=DOMAIN,
title="Home",
unique_id=123,
data={"station_id": 123, "name": "Home"},
)
with patch(
"homeassistant.components.gios.Gios._get_stations",
side_effect=ConnectionError(),
):
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
assert entry.state == ENTRY_STATE_SETUP_RETRY
async def test_unload_entry(hass):
"""Test successful unload of entry."""
entry = await init_integration(hass)
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
assert entry.state == ENTRY_STATE_LOADED
assert await hass.config_entries.async_unload(entry.entry_id)
await hass.async_block_till_done()
assert entry.state == ENTRY_STATE_NOT_LOADED
assert not hass.data.get(DOMAIN)
|
import os
from unittest import mock
import pytest
import lxml.html
from nikola import metadata_extractors
from nikola.plugins.task.sitemap import get_base_path as sitemap_get_base_path
from nikola.post import get_meta
from nikola.utils import (
TemplateHookRegistry,
TranslatableSetting,
demote_headers,
get_asset_path,
get_crumbs,
get_theme_chain,
get_translation_candidate,
write_metadata,
)
def test_getting_metadata_from_content(post):
post.source_path = "file_with_metadata"
post.metadata_path = "file_with_metadata.meta"
file_content = """\
.. title: Nikola needs more tests!
.. slug: write-tests-now
.. date: 2012/09/15 19:52:05
.. tags:
.. link:
.. description:
Post content
"""
opener_mock = mock.mock_open(read_data=file_content)
with mock.patch("nikola.post.io.open", opener_mock, create=True):
meta = get_meta(post, None)[0]
assert "Nikola needs more tests!" == meta["title"]
assert "write-tests-now" == meta["slug"]
assert "2012/09/15 19:52:05" == meta["date"]
assert "tags" not in meta
assert "link" not in meta
assert "description" not in meta
def test_get_title_from_fname(post):
post.source_path = "file_with_metadata"
post.metadata_path = "file_with_metadata.meta"
file_content = """\
.. slug: write-tests-now
.. date: 2012/09/15 19:52:05
.. tags:
.. link:
.. description:
"""
opener_mock = mock.mock_open(read_data=file_content)
with mock.patch("nikola.post.io.open", opener_mock, create=True):
meta = get_meta(post, None)[0]
assert "file_with_metadata" == meta["title"]
assert "write-tests-now" == meta["slug"]
assert "2012/09/15 19:52:05" == meta["date"]
assert "tags" not in meta
assert "link" not in meta
assert "description" not in meta
def test_use_filename_as_slug_fallback(post):
post.source_path = "Slugify this"
post.metadata_path = "Slugify this.meta"
file_content = """\
.. title: Nikola needs more tests!
.. date: 2012/09/15 19:52:05
.. tags:
.. link:
.. description:
Post content
"""
opener_mock = mock.mock_open(read_data=file_content)
with mock.patch("nikola.post.io.open", opener_mock, create=True):
meta = get_meta(post, None)[0]
assert "Nikola needs more tests!" == meta["title"]
assert "slugify-this" == meta["slug"]
assert "2012/09/15 19:52:05" == meta["date"]
assert "tags" not in meta
assert "link" not in meta
assert "description" not in meta
@pytest.mark.parametrize(
"unslugify, expected_title", [(True, "Dub dub title"), (False, "dub_dub_title")]
)
def test_extracting_metadata_from_filename(post, unslugify, expected_title):
post.source_path = "2013-01-23-the_slug-dub_dub_title.md"
post.metadata_path = "2013-01-23-the_slug-dub_dub_title.meta"
post.config[
"FILE_METADATA_REGEXP"
] = r"(?P<date>\d{4}-\d{2}-\d{2})-(?P<slug>.*)-(?P<title>.*)\.md"
post.config["FILE_METADATA_UNSLUGIFY_TITLES"] = unslugify
no_metadata_opener = mock.mock_open(read_data="No metadata in the file!")
with mock.patch("nikola.post.io.open", no_metadata_opener, create=True):
meta = get_meta(post, None)[0]
assert expected_title == meta["title"]
assert "the_slug" == meta["slug"]
assert "2013-01-23" == meta["date"]
def test_get_meta_slug_only_from_filename(post):
post.source_path = "some/path/the_slug.md"
post.metadata_path = "some/path/the_slug.meta"
no_metadata_opener = mock.mock_open(read_data="No metadata in the file!")
with mock.patch("nikola.post.io.open", no_metadata_opener, create=True):
meta = get_meta(post, None)[0]
assert "the_slug" == meta["slug"]
@pytest.mark.parametrize(
"level, input_str, expected_output",
[
pytest.param(
0,
"""
<h1>header 1</h1>
<h2>header 2</h2>
<h3>header 3</h3>
<h4>header 4</h4>
<h5>header 5</h5>
<h6>header 6</h6>
""",
"""
<h1>header 1</h1>
<h2>header 2</h2>
<h3>header 3</h3>
<h4>header 4</h4>
<h5>header 5</h5>
<h6>header 6</h6>
""",
id="by zero",
),
pytest.param(
1,
"""
<h1>header 1</h1>
<h2>header 2</h2>
<h3>header 3</h3>
<h4>header 4</h4>
<h5>header 5</h5>
<h6>header 6</h6>
""",
"""
<h2>header 1</h2>
<h3>header 2</h3>
<h4>header 3</h4>
<h5>header 4</h5>
<h6>header 5</h6>
<h6>header 6</h6>
""",
id="by one",
),
pytest.param(
2,
"""
<h1>header 1</h1>
<h2>header 2</h2>
<h3>header 3</h3>
<h4>header 4</h4>
<h5>header 5</h5>
<h6>header 6</h6>
""",
"""
<h3>header 1</h3>
<h4>header 2</h4>
<h5>header 3</h5>
<h6>header 4</h6>
<h6>header 5</h6>
<h6>header 6</h6>
""",
id="by two",
),
pytest.param(
-1,
"""
<h1>header 1</h1>
<h2>header 2</h2>
<h3>header 3</h3>
<h4>header 4</h4>
<h5>header 5</h5>
<h6>header 6</h6>
""",
"""
<h1>header 1</h1>
<h1>header 2</h1>
<h2>header 3</h2>
<h3>header 4</h3>
<h4>header 5</h4>
<h5>header 6</h5>
""",
id="by minus one",
),
],
)
def test_demoting_headers(level, input_str, expected_output):
doc = lxml.html.fromstring(input_str)
outdoc = lxml.html.fromstring(expected_output)
demote_headers(doc, level)
assert lxml.html.tostring(outdoc) == lxml.html.tostring(doc)
def test_TranslatableSettingsTest_with_string_input():
"""Test ing translatable settings with string input."""
inp = "Fancy Blog"
setting = TranslatableSetting("TestSetting", inp, {"xx": ""})
setting.default_lang = "xx"
setting.lang = "xx"
assert inp == str(setting)
assert inp == setting() # no language specified
assert inp == setting("xx") # real language specified
assert inp == setting("zz") # fake language specified
assert setting.lang == "xx"
assert setting.default_lang == "xx"
def test_TranslatableSetting_with_dict_input():
"""Tests for translatable setting with dict input."""
inp = {"xx": "Fancy Blog", "zz": "Schmancy Blog"}
setting = TranslatableSetting("TestSetting", inp, {"xx": "", "zz": ""})
setting.default_lang = "xx"
setting.lang = "xx"
assert inp["xx"] == str(setting)
assert inp["xx"] == setting() # no language specified
assert inp["xx"] == setting("xx") # real language specified
assert inp["zz"] == setting("zz") # fake language specified
assert inp["xx"] == setting("ff")
def test_TranslatableSetting_with_language_change():
"""Test translatable setting with language change along the way."""
inp = {"xx": "Fancy Blog", "zz": "Schmancy Blog"}
setting = TranslatableSetting("TestSetting", inp, {"xx": "", "zz": ""})
setting.default_lang = "xx"
setting.lang = "xx"
assert inp["xx"] == str(setting)
assert inp["xx"] == setting()
# Change the language.
# WARNING: DO NOT set lang locally in real code! Set it globally
# instead! (TranslatableSetting.lang = ...)
# WARNING: TranslatableSetting.lang is used to override the current
# locale settings returned by LocaleBorg! Use with care!
setting.lang = "zz"
assert inp["zz"] == str(setting)
assert inp["zz"] == setting()
@pytest.mark.parametrize(
"path, files_folders, expected_path_end",
[
(
"assets/css/nikola_rst.css",
{"files": ""}, # default files_folders
"nikola/data/themes/base/assets/css/nikola_rst.css",
),
(
"assets/css/theme.css",
{"files": ""}, # default files_folders
"nikola/data/themes/bootstrap4/assets/css/theme.css",
),
("nikola.py", {"nikola": ""}, "nikola/nikola.py"),
("nikola/nikola.py", {"nikola": "nikola"}, "nikola/nikola.py"),
("nikola.py", {"nikola": "nikola"}, None),
],
)
def test_get_asset_path(path, files_folders, expected_path_end):
theme_chain = get_theme_chain("bootstrap4", ["themes"])
asset_path = get_asset_path(path, theme_chain, files_folders)
if expected_path_end:
asset_path = asset_path.replace("\\", "/")
assert asset_path.endswith(expected_path_end)
else:
assert asset_path is None
@pytest.mark.parametrize(
"path, is_file, expected_crumbs",
[
("galleries", False, [["#", "galleries"]]),
(
os.path.join("galleries", "demo"),
False,
[["..", "galleries"], ["#", "demo"]],
),
(
os.path.join("listings", "foo", "bar"),
True,
[["..", "listings"], [".", "foo"], ["#", "bar"]],
),
],
)
def test_get_crumbs(path, is_file, expected_crumbs):
crumbs = get_crumbs(path, is_file=is_file)
assert len(crumbs) == len(expected_crumbs)
for crumb, expected_crumb in zip(crumbs, expected_crumbs):
assert crumb == expected_crumb
@pytest.mark.parametrize(
"pattern, path, lang, expected_path",
[
("{path}.{lang}.{ext}", "*.rst", "es", "*.es.rst"),
("{path}.{lang}.{ext}", "fancy.post.rst", "es", "fancy.post.es.rst"),
("{path}.{lang}.{ext}", "*.es.rst", "es", "*.es.rst"),
("{path}.{lang}.{ext}", "*.es.rst", "en", "*.rst"),
(
"{path}.{lang}.{ext}",
"cache/posts/fancy.post.es.html",
"en",
"cache/posts/fancy.post.html",
),
(
"{path}.{lang}.{ext}",
"cache/posts/fancy.post.html",
"es",
"cache/posts/fancy.post.es.html",
),
(
"{path}.{lang}.{ext}",
"cache/pages/charts.html",
"es",
"cache/pages/charts.es.html",
),
(
"{path}.{lang}.{ext}",
"cache/pages/charts.html",
"en",
"cache/pages/charts.html",
),
("{path}.{ext}.{lang}", "*.rst", "es", "*.rst.es"),
("{path}.{ext}.{lang}", "*.rst.es", "es", "*.rst.es"),
("{path}.{ext}.{lang}", "*.rst.es", "en", "*.rst"),
(
"{path}.{ext}.{lang}",
"cache/posts/fancy.post.html.es",
"en",
"cache/posts/fancy.post.html",
),
(
"{path}.{ext}.{lang}",
"cache/posts/fancy.post.html",
"es",
"cache/posts/fancy.post.html.es",
),
],
)
def test_get_translation_candidate(pattern, path, lang, expected_path):
config = {
"TRANSLATIONS_PATTERN": pattern,
"DEFAULT_LANG": "en",
"TRANSLATIONS": {"es": "1", "en": 1},
}
assert get_translation_candidate(config, path, lang) == expected_path
def test_TemplateHookRegistry():
r = TemplateHookRegistry("foo", None)
r.append("Hello!")
r.append(lambda x: "Hello " + x + "!", False, "world")
assert r() == "Hello!\nHello world!"
@pytest.mark.parametrize(
"base, expected_path",
[
("http://some.site", "/"),
("http://some.site/", "/"),
("http://some.site/some/sub-path", "/some/sub-path/"),
("http://some.site/some/sub-path/", "/some/sub-path/"),
],
)
def test_sitemap_get_base_path(base, expected_path):
assert expected_path == sitemap_get_base_path(base)
@pytest.mark.parametrize(
"metadata_format, expected_result",
[
(
"nikola",
"""\
.. title: Hello, world!
.. slug: hello-world
.. a: 1
.. b: 2
""",
),
(
"yaml",
"""\
---
a: '1'
b: '2'
slug: hello-world
title: Hello, world!
---
""",
),
],
)
def test_write_metadata_with_formats(metadata_format, expected_result):
"""
Test writing metadata with different formats.
YAML is expected to be sorted alphabetically.
Nikola sorts by putting the defaults first and then sorting the rest
alphabetically.
"""
data = {"slug": "hello-world", "title": "Hello, world!", "b": "2", "a": "1"}
assert write_metadata(data, metadata_format) == expected_result
def test_write_metadata_with_format_toml():
"""
Test writing metadata in TOML format.
TOML is sorted randomly in Python 3.5 or older and by insertion
order since Python 3.6.
"""
data = {"slug": "hello-world", "title": "Hello, world!", "b": "2", "a": "1"}
toml = write_metadata(data, "toml")
assert toml.startswith("+++\n")
assert toml.endswith("+++\n")
assert 'slug = "hello-world"' in toml
assert 'title = "Hello, world!"' in toml
assert 'b = "2"' in toml
assert 'a = "1"' in toml
@pytest.mark.parametrize(
"wrap, expected_result",
[
(
False,
"""\
.. title: Hello, world!
.. slug: hello-world
""",
),
(
True,
"""\
<!--
.. title: Hello, world!
.. slug: hello-world
-->
""",
),
(
("111", "222"),
"""\
111
.. title: Hello, world!
.. slug: hello-world
222
""",
),
],
)
def test_write_metadata_comment_wrap(wrap, expected_result):
data = {"title": "Hello, world!", "slug": "hello-world"}
assert write_metadata(data, "nikola", wrap) == expected_result
@pytest.mark.parametrize(
"metadata_format, expected_results",
[
(
"rest_docinfo",
[
"""=============
Hello, world!
=============
:slug: hello-world
"""
],
),
(
"markdown_meta",
[
"""title: Hello, world!
slug: hello-world
""",
"""slug: hello-world
title: Hello, world!
""",
],
),
],
)
def test_write_metadata_compiler(metadata_format, expected_results):
"""
Test writing metadata with different formats.
We test for multiple results because some compilers might produce
unordered output.
"""
data = {"title": "Hello, world!", "slug": "hello-world"}
assert write_metadata(data, metadata_format) in expected_results
@pytest.mark.parametrize(
"post_format, expected_metadata",
[
("rest", "==\nxx\n==\n\n"),
("markdown", "title: xx\n\n"),
("html", ".. title: xx\n\n"),
],
)
def test_write_metadata_pelican_detection(post, post_format, expected_metadata):
post.name = post_format
data = {"title": "xx"}
assert write_metadata(data, "pelican", compiler=post) == expected_metadata
def test_write_metadata_pelican_detection_default():
data = {"title": "xx"}
assert write_metadata(data, "pelican", compiler=None) == ".. title: xx\n\n"
def test_write_metadata_from_site(post):
post.config = {"METADATA_FORMAT": "yaml"}
data = {"title": "xx"}
assert write_metadata(data, site=post) == "---\ntitle: xx\n---\n"
def test_write_metadata_default(post):
data = {"title": "xx"}
assert write_metadata(data) == ".. title: xx\n\n"
@pytest.mark.parametrize("arg", ["foo", "filename_regex"])
def test_write_metadata_fallbacks(post, arg):
data = {"title": "xx"}
assert write_metadata(data, arg) == ".. title: xx\n\n"
@pytest.fixture
def post():
return FakePost()
class FakePost:
default_lang = "en"
metadata_extractors_by = metadata_extractors.default_metadata_extractors_by()
config = {
"TRANSLATIONS_PATTERN": "{path}.{lang}.{ext}",
"TRANSLATIONS": {"en": "./"},
"DEFAULT_LANG": "en",
}
def __init__(self):
metadata_extractors.load_defaults(self, self.metadata_extractors_by)
|
import numpy as np
import matplotlib.pyplot as plt
import mne
from mne.decoding import ReceptiveField, TimeDelayingRidge
from scipy.stats import multivariate_normal
from scipy.io import loadmat
from sklearn.preprocessing import scale
rng = np.random.RandomState(1337) # To make this example reproducible
###############################################################################
# Load audio data
# ---------------
#
# We'll read in the audio data from [3]_ in order to simulate a response.
#
# In addition, we'll downsample the data along the time dimension in order to
# speed up computation. Note that depending on the input values, this may
# not be desired, for example if your input stimulus varies more quickly than
# 1/2 the sampling rate to which we are downsampling.
# Read in audio that's been recorded in epochs.
path_audio = mne.datasets.mtrf.data_path()
data = loadmat(path_audio + '/speech_data.mat')
audio = data['spectrogram'].T
sfreq = float(data['Fs'][0, 0])
n_decim = 2
audio = mne.filter.resample(audio, down=n_decim, npad='auto')
sfreq /= n_decim
###############################################################################
# Create a receptive field
# ------------------------
#
# We'll simulate a linear receptive field for a theoretical neural signal. This
# defines how the signal will respond to power in this receptive field space.
n_freqs = 20
tmin, tmax = -0.1, 0.4
# To simulate the data we'll create explicit delays here
delays_samp = np.arange(np.round(tmin * sfreq),
np.round(tmax * sfreq) + 1).astype(int)
delays_sec = delays_samp / sfreq
freqs = np.linspace(50, 5000, n_freqs)
grid = np.array(np.meshgrid(delays_sec, freqs))
# We need data to be shaped as n_epochs, n_features, n_times, so swap axes here
grid = grid.swapaxes(0, -1).swapaxes(0, 1)
# Simulate a temporal receptive field with a Gabor filter
means_high = [.1, 500]
means_low = [.2, 2500]
cov = [[.001, 0], [0, 500000]]
gauss_high = multivariate_normal.pdf(grid, means_high, cov)
gauss_low = -1 * multivariate_normal.pdf(grid, means_low, cov)
weights = gauss_high + gauss_low # Combine to create the "true" STRF
kwargs = dict(vmax=np.abs(weights).max(), vmin=-np.abs(weights).max(),
cmap='RdBu_r', shading='gouraud')
fig, ax = plt.subplots()
ax.pcolormesh(delays_sec, freqs, weights, **kwargs)
ax.set(title='Simulated STRF', xlabel='Time Lags (s)', ylabel='Frequency (Hz)')
plt.setp(ax.get_xticklabels(), rotation=45)
plt.autoscale(tight=True)
mne.viz.tight_layout()
###############################################################################
# Simulate a neural response
# --------------------------
#
# Using this receptive field, we'll create an artificial neural response to
# a stimulus.
#
# To do this, we'll create a time-delayed version of the receptive field, and
# then calculate the dot product between this and the stimulus. Note that this
# is effectively doing a convolution between the stimulus and the receptive
# field. See `here <https://en.wikipedia.org/wiki/Convolution>`_ for more
# information.
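# As a quick illustration of that equivalence (a toy sketch on made-up data,
# independent of the variables above): weighting zero-padded, delayed copies of
# a 1-D stimulus and summing over lags gives the same result as convolving the
# stimulus with the filter.
_toy_x = np.array([0., 1., 0., 0., 0.])   # impulse stimulus
_toy_w = np.array([.5, .3, .2])           # filter over three positive lags
_toy_y = np.array([sum(_toy_w[d] * (_toy_x[t - d] if t - d >= 0 else 0.)
                       for d in range(len(_toy_w)))
                   for t in range(len(_toy_x))])
assert np.allclose(_toy_y, np.convolve(_toy_x, _toy_w)[:len(_toy_x)])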
# Reshape audio to split into epochs, then make epochs the first dimension.
n_epochs, n_seconds = 16, 5
audio = audio[:, :int(n_seconds * sfreq * n_epochs)]
X = audio.reshape([n_freqs, n_epochs, -1]).swapaxes(0, 1)
n_times = X.shape[-1]
# Delay the spectrogram according to delays so it can be combined w/ the STRF
# Lags will now be in axis 1, then we reshape to vectorize
delays = np.arange(np.round(tmin * sfreq),
np.round(tmax * sfreq) + 1).astype(int)
# Iterate through indices and append
X_del = np.zeros((len(delays),) + X.shape)
for ii, ix_delay in enumerate(delays):
# These arrays will take/put particular indices in the data
take = [slice(None)] * X.ndim
put = [slice(None)] * X.ndim
if ix_delay > 0:
take[-1] = slice(None, -ix_delay)
put[-1] = slice(ix_delay, None)
elif ix_delay < 0:
take[-1] = slice(-ix_delay, None)
put[-1] = slice(None, ix_delay)
X_del[ii][tuple(put)] = X[tuple(take)]
# Now set the delayed axis to the 2nd dimension
X_del = np.rollaxis(X_del, 0, 3)
X_del = X_del.reshape([n_epochs, -1, n_times])
n_features = X_del.shape[1]
weights_sim = weights.ravel()
# Simulate a neural response to the sound, given this STRF
y = np.zeros((n_epochs, n_times))
for ii, iep in enumerate(X_del):
# Simulate this epoch and add random noise
noise_amp = .002
y[ii] = np.dot(weights_sim, iep) + noise_amp * rng.randn(n_times)
# Plot the first 2 trials of audio and the simulated electrode activity
X_plt = scale(np.hstack(X[:2]).T).T
y_plt = scale(np.hstack(y[:2]))
time = np.arange(X_plt.shape[-1]) / sfreq
_, (ax1, ax2) = plt.subplots(2, 1, figsize=(6, 6), sharex=True)
ax1.pcolormesh(time, freqs, X_plt, vmin=0, vmax=4, cmap='Reds',
shading='gouraud')
ax1.set_title('Input auditory features')
ax1.set(ylim=[freqs.min(), freqs.max()], ylabel='Frequency (Hz)')
ax2.plot(time, y_plt)
ax2.set(xlim=[time.min(), time.max()], title='Simulated response',
xlabel='Time (s)', ylabel='Activity (a.u.)')
mne.viz.tight_layout()
###############################################################################
# Fit a model to recover this receptive field
# -------------------------------------------
#
# Finally, we'll use the :class:`mne.decoding.ReceptiveField` class to recover
# the linear receptive field of this signal. Note that properties of the
# receptive field (e.g. smoothness) will depend on the autocorrelation in the
# inputs and outputs.
# Create training and testing data
train, test = np.arange(n_epochs - 1), n_epochs - 1
X_train, X_test, y_train, y_test = X[train], X[test], y[train], y[test]
X_train, X_test, y_train, y_test = [np.rollaxis(ii, -1, 0) for ii in
(X_train, X_test, y_train, y_test)]
# Model the simulated data as a function of the spectrogram input
alphas = np.logspace(-3, 3, 7)
scores = np.zeros_like(alphas)
models = []
for ii, alpha in enumerate(alphas):
rf = ReceptiveField(tmin, tmax, sfreq, freqs, estimator=alpha)
rf.fit(X_train, y_train)
# Now make predictions about the model output, given input stimuli.
scores[ii] = rf.score(X_test, y_test)
models.append(rf)
times = rf.delays_ / float(rf.sfreq)
# Choose the model that performed best on the held out data
ix_best_alpha = np.argmax(scores)
best_mod = models[ix_best_alpha]
coefs = best_mod.coef_[0]
best_pred = best_mod.predict(X_test)[:, 0]
# Plot the original STRF, and the one that we recovered with modeling.
_, (ax1, ax2) = plt.subplots(1, 2, figsize=(6, 3), sharey=True, sharex=True)
ax1.pcolormesh(delays_sec, freqs, weights, **kwargs)
ax2.pcolormesh(times, rf.feature_names, coefs, **kwargs)
ax1.set_title('Original STRF')
ax2.set_title('Best Reconstructed STRF')
plt.setp([iax.get_xticklabels() for iax in [ax1, ax2]], rotation=45)
plt.autoscale(tight=True)
mne.viz.tight_layout()
# Plot the actual response and the predicted response on a held out stimulus
time_pred = np.arange(best_pred.shape[0]) / sfreq
fig, ax = plt.subplots()
ax.plot(time_pred, y_test, color='k', alpha=.2, lw=4)
ax.plot(time_pred, best_pred, color='r', lw=1)
ax.set(title='Original and predicted activity', xlabel='Time (s)')
ax.legend(['Original', 'Predicted'])
plt.autoscale(tight=True)
mne.viz.tight_layout()
###############################################################################
# Visualize the effects of regularization
# ---------------------------------------
#
# Above we fit a :class:`mne.decoding.ReceptiveField` model for one of many
# values for the ridge regularization parameter. Here we will plot the model
# score as well as the model coefficients for each value, in order to
# visualize how coefficients change with different levels of regularization.
# These issues as well as the STRF pipeline are described in detail
# in [1]_, [2]_, and [4]_.
# Plot model score for each ridge parameter
fig = plt.figure(figsize=(10, 4))
ax = plt.subplot2grid([2, len(alphas)], [1, 0], 1, len(alphas))
ax.plot(np.arange(len(alphas)), scores, marker='o', color='r')
ax.annotate('Best parameter', (ix_best_alpha, scores[ix_best_alpha]),
(ix_best_alpha, scores[ix_best_alpha] - .1),
arrowprops={'arrowstyle': '->'})
plt.xticks(np.arange(len(alphas)), ["%.0e" % ii for ii in alphas])
ax.set(xlabel="Ridge regularization value", ylabel="Score ($R^2$)",
xlim=[-.4, len(alphas) - .6])
mne.viz.tight_layout()
# Plot the STRF of each ridge parameter
for ii, (rf, i_alpha) in enumerate(zip(models, alphas)):
ax = plt.subplot2grid([2, len(alphas)], [0, ii], 1, 1)
ax.pcolormesh(times, rf.feature_names, rf.coef_[0], **kwargs)
plt.xticks([], [])
plt.yticks([], [])
plt.autoscale(tight=True)
fig.suptitle('Model coefficients / scores for many ridge parameters', y=1)
mne.viz.tight_layout()
###############################################################################
# Using different regularization types
# ------------------------------------
# In addition to the standard ridge regularization, the
# :class:`mne.decoding.TimeDelayingRidge` class also exposes a
# `Laplacian <https://en.wikipedia.org/wiki/Laplacian_matrix>`_ regularization
# term, defined as:
#
# .. math::
# \left[\begin{matrix}
# 1 & -1 & & & & \\
# -1 & 2 & -1 & & & \\
# & -1 & 2 & -1 & & \\
# & & \ddots & \ddots & \ddots & \\
# & & & -1 & 2 & -1 \\
# & & & & -1 & 1\end{matrix}\right]
#
# This imposes a smoothness constraint on nearby time samples and/or features.
# Quoting [3]_:
#
# Tikhonov [identity] regularization (Equation 5) reduces overfitting by
# smoothing the TRF estimate in a way that is insensitive to
# the amplitude of the signal of interest. However, the Laplacian
# approach (Equation 6) reduces off-sample error whilst preserving
# signal amplitude (Lalor et al., 2006). As a result, this approach
# usually leads to an improved estimate of the system’s response (as
# indexed by MSE) compared to Tikhonov regularization.
#
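# As an aside (a minimal sketch, not part of the original pipeline), the
# matrix above is simply a second-difference operator. Assuming a
# hypothetical number of time lags ``n_lags_example``, it could be
# constructed with NumPy as follows:
n_lags_example = 5  # illustrative value only, unrelated to the fits below
laplacian_example = (2 * np.eye(n_lags_example) -
                     np.eye(n_lags_example, k=1) -
                     np.eye(n_lags_example, k=-1))
laplacian_example[0, 0] = laplacian_example[-1, -1] = 1  # boundary terms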
scores_lap = np.zeros_like(alphas)
models_lap = []
for ii, alpha in enumerate(alphas):
estimator = TimeDelayingRidge(tmin, tmax, sfreq, reg_type='laplacian',
alpha=alpha)
rf = ReceptiveField(tmin, tmax, sfreq, freqs, estimator=estimator)
rf.fit(X_train, y_train)
# Now make predictions about the model output, given input stimuli.
scores_lap[ii] = rf.score(X_test, y_test)
models_lap.append(rf)
ix_best_alpha_lap = np.argmax(scores_lap)
###############################################################################
# Compare model performance
# -------------------------
# Below we visualize the model performance of each regularization method
# (ridge vs. Laplacian) for different levels of alpha. As you can see, the
# Laplacian method performs better in general, because it imposes a smoothness
# constraint along the time and feature dimensions of the coefficients.
# This matches the "true" receptive field structure and results in a better
# model fit.
fig = plt.figure(figsize=(10, 6))
ax = plt.subplot2grid([3, len(alphas)], [2, 0], 1, len(alphas))
ax.plot(np.arange(len(alphas)), scores_lap, marker='o', color='r')
ax.plot(np.arange(len(alphas)), scores, marker='o', color='0.5', ls=':')
ax.annotate('Best Laplacian', (ix_best_alpha_lap,
scores_lap[ix_best_alpha_lap]),
(ix_best_alpha_lap, scores_lap[ix_best_alpha_lap] - .1),
arrowprops={'arrowstyle': '->'})
ax.annotate('Best Ridge', (ix_best_alpha, scores[ix_best_alpha]),
(ix_best_alpha, scores[ix_best_alpha] - .1),
arrowprops={'arrowstyle': '->'})
plt.xticks(np.arange(len(alphas)), ["%.0e" % ii for ii in alphas])
ax.set(xlabel="Laplacian regularization value", ylabel="Score ($R^2$)",
xlim=[-.4, len(alphas) - .6])
mne.viz.tight_layout()
# Plot the STRF for each regularization parameter (Laplacian on top, ridge below)
xlim = times[[0, -1]]
for ii, (rf_lap, rf, i_alpha) in enumerate(zip(models_lap, models, alphas)):
ax = plt.subplot2grid([3, len(alphas)], [0, ii], 1, 1)
ax.pcolormesh(times, rf_lap.feature_names, rf_lap.coef_[0], **kwargs)
ax.set(xticks=[], yticks=[], xlim=xlim)
if ii == 0:
ax.set(ylabel='Laplacian')
ax = plt.subplot2grid([3, len(alphas)], [1, ii], 1, 1)
ax.pcolormesh(times, rf.feature_names, rf.coef_[0], **kwargs)
ax.set(xticks=[], yticks=[], xlim=xlim)
if ii == 0:
ax.set(ylabel='Ridge')
fig.suptitle('Model coefficients / scores for Laplacian regularization', y=1)
mne.viz.tight_layout()
###############################################################################
# Plot the original STRF, and the one that we recovered with modeling.
rf = models[ix_best_alpha]
rf_lap = models_lap[ix_best_alpha_lap]
_, (ax1, ax2, ax3) = plt.subplots(1, 3, figsize=(9, 3),
sharey=True, sharex=True)
ax1.pcolormesh(delays_sec, freqs, weights, **kwargs)
ax2.pcolormesh(times, rf.feature_names, rf.coef_[0], **kwargs)
ax3.pcolormesh(times, rf_lap.feature_names, rf_lap.coef_[0], **kwargs)
ax1.set_title('Original STRF')
ax2.set_title('Best Ridge STRF')
ax3.set_title('Best Laplacian STRF')
plt.setp([iax.get_xticklabels() for iax in [ax1, ax2, ax3]], rotation=45)
plt.autoscale(tight=True)
mne.viz.tight_layout()
|
from python_awair.exceptions import AuthError, AwairError
from homeassistant import data_entry_flow
from homeassistant.components.awair.const import DOMAIN
from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER
from homeassistant.const import CONF_ACCESS_TOKEN
from .const import CONFIG, DEVICES_FIXTURE, NO_DEVICES_FIXTURE, UNIQUE_ID, USER_FIXTURE
from tests.async_mock import patch
from tests.common import MockConfigEntry
async def test_show_form(hass):
"""Test that the form is served with no input."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == SOURCE_USER
async def test_invalid_access_token(hass):
"""Test that errors are shown when the access token is invalid."""
with patch("python_awair.AwairClient.query", side_effect=AuthError()):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data=CONFIG
)
assert result["errors"] == {CONF_ACCESS_TOKEN: "invalid_access_token"}
async def test_unexpected_api_error(hass):
"""Test that we abort on generic errors."""
with patch("python_awair.AwairClient.query", side_effect=AwairError()):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data=CONFIG
)
assert result["type"] == "abort"
assert result["reason"] == "unknown"
async def test_duplicate_error(hass):
"""Test that errors are shown when adding a duplicate config."""
with patch(
"python_awair.AwairClient.query", side_effect=[USER_FIXTURE, DEVICES_FIXTURE]
), patch(
"homeassistant.components.awair.sensor.async_setup_entry",
return_value=True,
):
MockConfigEntry(domain=DOMAIN, unique_id=UNIQUE_ID, data=CONFIG).add_to_hass(
hass
)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data=CONFIG
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
async def test_no_devices_error(hass):
"""Test that errors are shown when the API returns no devices."""
with patch(
"python_awair.AwairClient.query", side_effect=[USER_FIXTURE, NO_DEVICES_FIXTURE]
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data=CONFIG
)
assert result["type"] == "abort"
assert result["reason"] == "no_devices_found"
async def test_import(hass):
"""Test config.yaml import."""
with patch(
"python_awair.AwairClient.query", side_effect=[USER_FIXTURE, DEVICES_FIXTURE]
), patch(
"homeassistant.components.awair.sensor.async_setup_entry",
return_value=True,
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={CONF_ACCESS_TOKEN: CONFIG[CONF_ACCESS_TOKEN]},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "[email protected] (32406)"
assert result["data"][CONF_ACCESS_TOKEN] == CONFIG[CONF_ACCESS_TOKEN]
assert result["result"].unique_id == UNIQUE_ID
async def test_import_aborts_on_api_error(hass):
"""Test config.yaml imports on api error."""
with patch("python_awair.AwairClient.query", side_effect=AwairError()):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={CONF_ACCESS_TOKEN: CONFIG[CONF_ACCESS_TOKEN]},
)
assert result["type"] == "abort"
assert result["reason"] == "unknown"
async def test_import_aborts_if_configured(hass):
"""Test config import doesn't re-import unnecessarily."""
with patch(
"python_awair.AwairClient.query", side_effect=[USER_FIXTURE, DEVICES_FIXTURE]
), patch(
"homeassistant.components.awair.sensor.async_setup_entry",
return_value=True,
):
MockConfigEntry(domain=DOMAIN, unique_id=UNIQUE_ID, data=CONFIG).add_to_hass(
hass
)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={CONF_ACCESS_TOKEN: CONFIG[CONF_ACCESS_TOKEN]},
)
assert result["type"] == "abort"
assert result["reason"] == "already_setup"
async def test_reauth(hass):
"""Test reauth flow."""
with patch(
"python_awair.AwairClient.query", side_effect=[USER_FIXTURE, DEVICES_FIXTURE]
), patch(
"homeassistant.components.awair.sensor.async_setup_entry",
return_value=True,
):
mock_config = MockConfigEntry(domain=DOMAIN, unique_id=UNIQUE_ID, data=CONFIG)
mock_config.add_to_hass(hass)
hass.config_entries.async_update_entry(
mock_config, data={**CONFIG, CONF_ACCESS_TOKEN: "blah"}
)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": "reauth", "unique_id": UNIQUE_ID},
data=CONFIG,
)
assert result["type"] == "abort"
assert result["reason"] == "reauth_successful"
with patch("python_awair.AwairClient.query", side_effect=AuthError()):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": "reauth", "unique_id": UNIQUE_ID},
data=CONFIG,
)
assert result["errors"] == {CONF_ACCESS_TOKEN: "invalid_access_token"}
with patch("python_awair.AwairClient.query", side_effect=AwairError()):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": "reauth", "unique_id": UNIQUE_ID},
data=CONFIG,
)
assert result["type"] == "abort"
assert result["reason"] == "unknown"
async def test_create_entry(hass):
"""Test overall flow."""
with patch(
"python_awair.AwairClient.query", side_effect=[USER_FIXTURE, DEVICES_FIXTURE]
), patch(
"homeassistant.components.awair.sensor.async_setup_entry",
return_value=True,
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data=CONFIG
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "[email protected] (32406)"
assert result["data"][CONF_ACCESS_TOKEN] == CONFIG[CONF_ACCESS_TOKEN]
assert result["result"].unique_id == UNIQUE_ID
|
import voluptuous as vol
from homeassistant.const import CONF_PLATFORM
from homeassistant.core import HassJob, callback
from homeassistant.helpers import config_validation as cv
# mypy: allow-untyped-defs
CONF_EVENT_TYPE = "event_type"
CONF_EVENT_DATA = "event_data"
CONF_EVENT_CONTEXT = "context"
TRIGGER_SCHEMA = vol.Schema(
{
vol.Required(CONF_PLATFORM): "event",
vol.Required(CONF_EVENT_TYPE): cv.string,
vol.Optional(CONF_EVENT_DATA): dict,
vol.Optional(CONF_EVENT_CONTEXT): dict,
}
)
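# For illustration (an assumed example, not taken from the original module),
# a trigger configuration matching TRIGGER_SCHEMA might look like:
#
#   platform: event
#   event_type: my_custom_event
#   event_data:
#     mood: happy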
def _schema_value(value):
if isinstance(value, list):
return vol.In(value)
return value
async def async_attach_trigger(
hass, config, action, automation_info, *, platform_type="event"
):
"""Listen for events based on configuration."""
event_type = config.get(CONF_EVENT_TYPE)
event_data_schema = None
if config.get(CONF_EVENT_DATA):
event_data_schema = vol.Schema(
{
vol.Required(key): value
for key, value in config.get(CONF_EVENT_DATA).items()
},
extra=vol.ALLOW_EXTRA,
)
event_context_schema = None
if config.get(CONF_EVENT_CONTEXT):
event_context_schema = vol.Schema(
{
vol.Required(key): _schema_value(value)
for key, value in config.get(CONF_EVENT_CONTEXT).items()
},
extra=vol.ALLOW_EXTRA,
)
job = HassJob(action)
@callback
def handle_event(event):
"""Listen for events and calls the action when data matches."""
try:
# Check that the event data and context match the configured
# schema if one was provided
if event_data_schema:
event_data_schema(event.data)
if event_context_schema:
event_context_schema(event.context.as_dict())
except vol.Invalid:
# If event doesn't match, skip event
return
hass.async_run_hass_job(
job,
{
"trigger": {
"platform": platform_type,
"event": event,
"description": f"event '{event.event_type}'",
}
},
event.context,
)
return hass.bus.async_listen(event_type, handle_event)
|
import pytest
from tests.async_mock import AsyncMock, patch
from tests.components.nws.const import DEFAULT_FORECAST, DEFAULT_OBSERVATION
@pytest.fixture()
def mock_simple_nws():
"""Mock pynws SimpleNWS with default values."""
with patch("homeassistant.components.nws.SimpleNWS") as mock_nws:
instance = mock_nws.return_value
instance.set_station = AsyncMock(return_value=None)
instance.update_observation = AsyncMock(return_value=None)
instance.update_forecast = AsyncMock(return_value=None)
instance.update_forecast_hourly = AsyncMock(return_value=None)
instance.station = "ABC"
instance.stations = ["ABC"]
instance.observation = DEFAULT_OBSERVATION
instance.forecast = DEFAULT_FORECAST
instance.forecast_hourly = DEFAULT_FORECAST
yield mock_nws
@pytest.fixture()
def mock_simple_nws_config():
"""Mock pynws SimpleNWS with default values in config_flow."""
with patch("homeassistant.components.nws.config_flow.SimpleNWS") as mock_nws:
instance = mock_nws.return_value
instance.set_station = AsyncMock(return_value=None)
instance.station = "ABC"
instance.stations = ["ABC"]
yield mock_nws
|
import argparse
import logging
import time
from kalliope.core import Utils
from kalliope.core.ConfigurationManager import SettingLoader
from kalliope.core.ConfigurationManager.BrainLoader import BrainLoader
from kalliope.core.SignalLauncher import SignalLauncher
from flask import Flask
from kalliope.core.RestAPI.FlaskAPI import FlaskAPI
from ._version import version_str
import signal
import sys
from kalliope.core.ResourcesManager import ResourcesManager
from kalliope.core.SynapseLauncher import SynapseLauncher
from kalliope.core.OrderAnalyser import OrderAnalyser
logging.basicConfig()
logger = logging.getLogger("kalliope")
def signal_handler(signal, frame):
"""
    Used to catch a keyboard signal like Ctrl+C in order to kill the Kalliope program.
    :param signal: signal number
:param frame: execution frame
"""
print("\n")
Utils.print_info("Ctrl+C pressed. Killing Kalliope")
sys.exit(0)
# actions available
ACTION_LIST = ["start", "gui", "install", "uninstall"]
def parse_args(args):
"""
Parsing function
:param args: arguments passed from the command line
    :return: the parsed arguments (argparse.Namespace)
"""
# create arguments
parser = argparse.ArgumentParser(description='Kalliope')
parser.add_argument("action", help="[start|install|uninstall]")
parser.add_argument("--run-synapse",
help="Name of a synapse to load surrounded by quote")
parser.add_argument("--run-order", help="order surrounded by a quote")
parser.add_argument("--brain-file", help="Full path of a brain file")
parser.add_argument("--debug", action='store_true',
help="Show debug output")
parser.add_argument("--git-url", help="Git URL of the neuron to install")
parser.add_argument("--neuron-name", help="Neuron name to uninstall")
parser.add_argument("--stt-name", help="STT name to uninstall")
parser.add_argument("--tts-name", help="TTS name to uninstall")
parser.add_argument("--trigger-name", help="Trigger name to uninstall")
parser.add_argument("--signal-name", help="Signal name to uninstall")
parser.add_argument("--deaf", action='store_true', help="Starts Kalliope deaf")
parser.add_argument('-v', '--version', action='version',
version='Kalliope ' + version_str)
# parse arguments from script parameters
return parser.parse_args(args)
def main():
"""Entry point of Kalliope program."""
    # parse arguments; the script name is removed
try:
parser = parse_args(sys.argv[1:])
except SystemExit:
sys.exit(1)
# check if we want debug
configure_logging(debug=parser.debug)
logger.debug("kalliope args: %s" % parser)
# by default, no brain file is set.
# Use the default one: brain.yml in the root path
brain_file = None
# check if user set a brain.yml file
if parser.brain_file:
brain_file = parser.brain_file
    # check that the user provided a valid action
if parser.action not in ACTION_LIST:
Utils.print_warning("%s is not a recognised action\n" % parser.action)
sys.exit(1)
# install modules
if parser.action == "install":
if not parser.git_url:
Utils.print_danger("You must specify the git url")
sys.exit(1)
else:
parameters = {
"git_url": parser.git_url
}
res_manager = ResourcesManager(**parameters)
res_manager.install()
return
# uninstall modules
if parser.action == "uninstall":
if not parser.neuron_name \
and not parser.stt_name \
and not parser.tts_name \
and not parser.trigger_name \
and not parser.signal_name:
Utils.print_danger("You must specify a module name with "
"--neuron-name "
"or --stt-name "
"or --tts-name "
"or --trigger-name "
"or --signal-name")
sys.exit(1)
else:
res_manager = ResourcesManager()
res_manager.uninstall(neuron_name=parser.neuron_name,
stt_name=parser.stt_name,
tts_name=parser.tts_name,
trigger_name=parser.trigger_name,
signal_name=parser.signal_name)
return
# load the brain once
brain_loader = BrainLoader(file_path=brain_file)
brain = brain_loader.brain
# load settings
# get global configuration once
settings_loader = SettingLoader()
settings = settings_loader.settings
if parser.action == "start":
# user set a synapse to start
if parser.run_synapse is not None:
SynapseLauncher.start_synapse_by_list_name([parser.run_synapse],
brain=brain)
if parser.run_order is not None:
SynapseLauncher.run_matching_synapse_from_order(parser.run_order,
brain=brain,
settings=settings,
is_api_call=False)
if (parser.run_synapse is None) and (parser.run_order is None):
# if --deaf
if parser.deaf:
settings.options.deaf = True
# start rest api
start_rest_api(settings, brain)
start_kalliope(settings, brain)
class AppFilter(logging.Filter):
"""
Class used to add a custom entry into the logger
"""
def filter(self, record):
record.app_version = "kalliope-%s" % version_str
return True
def configure_logging(debug=None):
"""
Prepare log folder in current home directory.
    :param debug: If true, set the log level to debug
"""
logger = logging.getLogger("kalliope")
logger.addFilter(AppFilter())
logger.propagate = False
syslog = logging.StreamHandler()
syslog.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s :: %(app_version)s :: %(message)s', "%Y-%m-%d %H:%M:%S")
syslog.setFormatter(formatter)
if debug:
logger.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
# add the handlers to logger
logger.addHandler(syslog)
logger.debug("Logger ready")
def get_list_signal_class_to_load(brain):
"""
Return a list of signal class name
    For all synapses, each signal type is added to the list only if it is not already present
:param brain: Brain object
:type brain: Brain
:return: set of signal class
"""
list_signal_class_name = set()
for synapse in brain.synapses:
for signal_object in synapse.signals:
list_signal_class_name.add(signal_object.name)
logger.debug("[Kalliope entrypoint] List of signal class to load: %s" % list_signal_class_name)
return list_signal_class_name
def start_rest_api(settings, brain):
"""
Start the Rest API if asked in the user settings
"""
    # run the API if the user wants it
if settings.rest_api.active:
Utils.print_info("Starting REST API Listening port: %s" % settings.rest_api.port)
app = Flask(__name__)
flask_api = FlaskAPI(app=app,
port=settings.rest_api.port,
brain=brain,
allowed_cors_origin=settings.rest_api.allowed_cors_origin)
flask_api.daemon = True
flask_api.start()
def start_kalliope(settings, brain):
"""
Start all signals declared in the brain
"""
# start kalliope
Utils.print_success("Starting Kalliope")
Utils.print_info("Press Ctrl+C for stopping")
# catch signal for killing on Ctrl+C pressed
signal.signal(signal.SIGINT, signal_handler)
# get a list of signal class to load from declared synapse in the brain
    # this list will contain strings of signal class types.
    # For example, if the brain contains the signal type "order" multiple times, the list will be ["order"]
    # If the brain contains some synapses with "order" and "event", the list will be ["order", "event"]
list_signals_class_to_load = get_list_signal_class_to_load(brain)
# start each class name
for signal_class_name in list_signals_class_to_load:
signal_instance = SignalLauncher.launch_signal_class_by_name(signal_name=signal_class_name,
settings=settings)
if signal_instance is not None:
signal_instance.daemon = True
signal_instance.start()
while True: # keep main thread alive
time.sleep(0.1)
|
from collections import OrderedDict
import yaml
from .objects import NodeListClass
# mypy: allow-untyped-calls, no-warn-return-any
def dump(_dict: dict) -> str:
"""Dump YAML to a string and remove null."""
return yaml.safe_dump(
_dict, default_flow_style=False, allow_unicode=True, sort_keys=False
).replace(": null\n", ":\n")
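# For illustration (not part of the original module): dump({"a": None, "b": 1})
# returns "a:\nb: 1\n", since the ": null" suffixes are stripped above.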
def save_yaml(path: str, data: dict) -> None:
"""Save YAML to a file."""
# Dump before writing to not truncate the file if dumping fails
str_data = dump(data)
with open(path, "w", encoding="utf-8") as outfile:
outfile.write(str_data)
# From: https://gist.github.com/miracle2k/3184458
def represent_odict( # type: ignore
dumper, tag, mapping, flow_style=None
) -> yaml.MappingNode:
"""Like BaseRepresenter.represent_mapping but does not issue the sort()."""
value: list = []
node = yaml.MappingNode(tag, value, flow_style=flow_style)
if dumper.alias_key is not None:
dumper.represented_objects[dumper.alias_key] = node
best_style = True
if hasattr(mapping, "items"):
mapping = mapping.items()
for item_key, item_value in mapping:
node_key = dumper.represent_data(item_key)
node_value = dumper.represent_data(item_value)
if not (isinstance(node_key, yaml.ScalarNode) and not node_key.style):
best_style = False
if not (isinstance(node_value, yaml.ScalarNode) and not node_value.style):
best_style = False
value.append((node_key, node_value))
if flow_style is None:
if dumper.default_flow_style is not None:
node.flow_style = dumper.default_flow_style
else:
node.flow_style = best_style
return node
yaml.SafeDumper.add_representer(
OrderedDict,
lambda dumper, value: represent_odict(dumper, "tag:yaml.org,2002:map", value),
)
yaml.SafeDumper.add_representer(
NodeListClass,
lambda dumper, value: dumper.represent_sequence("tag:yaml.org,2002:seq", value),
)
|
import logging
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import async_call_later
from .const import (
CONF_SERVER_IDENTIFIER,
DISPATCHERS,
DOMAIN as PLEX_DOMAIN,
NAME_FORMAT,
PLEX_UPDATE_PLATFORMS_SIGNAL,
PLEX_UPDATE_SENSOR_SIGNAL,
SERVERS,
)
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Plex sensor from a config entry."""
server_id = config_entry.data[CONF_SERVER_IDENTIFIER]
plexserver = hass.data[PLEX_DOMAIN][SERVERS][server_id]
sensor = PlexSensor(plexserver)
async_add_entities([sensor])
class PlexSensor(Entity):
"""Representation of a Plex now playing sensor."""
def __init__(self, plex_server):
"""Initialize the sensor."""
self.sessions = []
self._state = None
self._now_playing = []
self._server = plex_server
self._name = NAME_FORMAT.format(plex_server.friendly_name)
self._unique_id = f"sensor-{plex_server.machine_identifier}"
async def async_added_to_hass(self):
"""Run when about to be added to hass."""
server_id = self._server.machine_identifier
unsub = async_dispatcher_connect(
self.hass,
PLEX_UPDATE_SENSOR_SIGNAL.format(server_id),
self.async_refresh_sensor,
)
self.hass.data[PLEX_DOMAIN][DISPATCHERS][server_id].append(unsub)
async def async_refresh_sensor(self, sessions):
"""Set instance object and trigger an entity state update."""
_LOGGER.debug("Refreshing sensor [%s]", self.unique_id)
self.sessions = sessions
update_failed = False
@callback
def update_plex(_):
async_dispatcher_send(
self.hass,
PLEX_UPDATE_PLATFORMS_SIGNAL.format(self._server.machine_identifier),
)
now_playing = []
for sess in self.sessions:
if sess.TYPE == "photo":
_LOGGER.debug("Photo session detected, skipping: %s", sess)
continue
if not sess.usernames:
_LOGGER.debug(
"Session temporarily incomplete, will try again: %s", sess
)
update_failed = True
continue
user = sess.usernames[0]
device = sess.players[0].title
now_playing_user = f"{user} - {device}"
now_playing_title = ""
if sess.TYPE == "episode":
# example:
# "Supernatural (2005) - s01e13 - Route 666"
def sync_io_attributes(session):
year = None
try:
year = session.show().year
except TypeError:
pass
return (year, session.seasonEpisode)
year, season_episode = await self.hass.async_add_executor_job(
sync_io_attributes, sess
)
season_title = sess.grandparentTitle
if year is not None:
season_title += f" ({year!s})"
episode_title = sess.title
now_playing_title = (
f"{season_title} - {season_episode} - {episode_title}"
)
elif sess.TYPE == "track":
# example:
# "Billy Talent - Afraid of Heights - Afraid of Heights"
track_artist = sess.grandparentTitle
track_album = sess.parentTitle
track_title = sess.title
now_playing_title = f"{track_artist} - {track_album} - {track_title}"
elif sess.TYPE == "movie":
# example:
# "picture_of_last_summer_camp (2015)"
# "The Incredible Hulk (2008)"
now_playing_title = sess.title
year = await self.hass.async_add_executor_job(getattr, sess, "year")
if year is not None:
now_playing_title += f" ({year})"
else:
now_playing_title = sess.title
now_playing.append((now_playing_user, now_playing_title))
self._state = len(self.sessions)
self._now_playing = now_playing
self.async_write_ha_state()
if update_failed:
async_call_later(self.hass, 5, update_plex)
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unique_id(self):
"""Return the id of this plex client."""
return self._unique_id
@property
def should_poll(self):
"""Return True if entity has to be polled for state."""
return False
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit this state is expressed in."""
return "Watching"
@property
def icon(self):
"""Return the icon of the sensor."""
return "mdi:plex"
@property
def device_state_attributes(self):
"""Return the state attributes."""
return {content[0]: content[1] for content in self._now_playing}
@property
def device_info(self):
"""Return a device description for device registry."""
if self.unique_id is None:
return None
return {
"identifiers": {(PLEX_DOMAIN, self._server.machine_identifier)},
"manufacturer": "Plex",
"model": "Plex Media Server",
"name": "Activity Sensor",
"sw_version": self._server.version,
}
|
import asyncio
import json
import logging
import os
import sys
import re
from copy import deepcopy
from pathlib import Path
from typing import Dict, Any, Optional, Union
import appdirs
import click
from redbot.core.cli import confirm
from redbot.core.utils._internal_utils import safe_delete, create_backup as red_create_backup
from redbot.core import config, data_manager, drivers
from redbot.core.drivers import BackendType, IdentifierData
conversion_log = logging.getLogger("red.converter")
config_dir = None
appdir = appdirs.AppDirs("Red-DiscordBot")
if sys.platform == "linux":
if 0 < os.getuid() < 1000: # pylint: disable=no-member # Non-exist on win
config_dir = Path(appdir.site_data_dir)
if not config_dir:
config_dir = Path(appdir.user_config_dir)
try:
config_dir.mkdir(parents=True, exist_ok=True)
except PermissionError:
print("You don't have permission to write to '{}'\nExiting...".format(config_dir))
sys.exit(1)
config_file = config_dir / "config.json"
def load_existing_config():
if not config_file.exists():
return {}
with config_file.open(encoding="utf-8") as fs:
return json.load(fs)
instance_data = load_existing_config()
if instance_data is None:
instance_list = []
else:
instance_list = list(instance_data.keys())
def save_config(name, data, remove=False):
_config = load_existing_config()
if remove and name in _config:
_config.pop(name)
else:
_config[name] = data
with config_file.open("w", encoding="utf-8") as fs:
json.dump(_config, fs, indent=4)
def get_data_dir(instance_name: str):
data_path = Path(appdir.user_data_dir) / "data" / instance_name
print()
print(
"We've attempted to figure out a sane default data location which is printed below."
" If you don't want to change this default please press [ENTER],"
" otherwise input your desired data location."
)
print()
print("Default: {}".format(data_path))
data_path_input = input("> ")
if data_path_input != "":
data_path = Path(data_path_input)
try:
exists = data_path.exists()
except OSError:
print(
"We were unable to check your chosen directory."
" Provided path may contain an invalid character."
)
sys.exit(1)
if not exists:
try:
data_path.mkdir(parents=True, exist_ok=True)
except OSError:
print(
"We were unable to create your chosen directory."
" You may need to restart this process with admin"
" privileges."
)
sys.exit(1)
print("You have chosen {} to be your data directory.".format(data_path))
if not click.confirm("Please confirm", default=True):
print("Please start the process over.")
sys.exit(0)
return str(data_path.resolve())
def get_storage_type():
storage_dict = {1: "JSON", 2: "PostgreSQL"}
storage = None
while storage is None:
print()
print("Please choose your storage backend (if you're unsure, just choose 1).")
print("1. JSON (file storage, requires no database).")
print("2. PostgreSQL (Requires a database server)")
storage = input("> ")
try:
storage = int(storage)
except ValueError:
storage = None
else:
if storage not in storage_dict:
storage = None
return storage
def get_name() -> str:
name = ""
while len(name) == 0:
print(
"Please enter a name for your instance,"
" it will be used to run your bot from here on out.\n"
"This name is case-sensitive and should only include characters"
" A-z, numbers, underscores (_) and periods (.)."
)
name = input("> ")
if re.fullmatch(r"[A-Za-z0-9_\.\-]*", name) is None:
print(
"ERROR: Instance names can only include characters A-z, numbers, "
"underscores (_) and periods (.)."
)
name = ""
elif "-" in name and not confirm(
"Hyphens (-) in instance names may cause issues. Are you sure you want to continue with this instance name?",
default=False,
):
name = ""
print() # new line for aesthetics
return name
def basic_setup():
"""
    Gather the basic configuration for a new instance (name, data path and
    storage backend) and save it to the shared config file.
"""
print(
"Hello! Before we begin, we need to gather some initial information for the new instance."
)
name = get_name()
default_data_dir = get_data_dir(name)
default_dirs = deepcopy(data_manager.basic_config_default)
default_dirs["DATA_PATH"] = default_data_dir
storage = get_storage_type()
storage_dict = {1: BackendType.JSON, 2: BackendType.POSTGRES}
storage_type: BackendType = storage_dict.get(storage, BackendType.JSON)
default_dirs["STORAGE_TYPE"] = storage_type.value
driver_cls = drivers.get_driver_class(storage_type)
default_dirs["STORAGE_DETAILS"] = driver_cls.get_config_details()
if name in instance_data:
print(
"WARNING: An instance already exists with this name. "
"Continuing will overwrite the existing instance config."
)
if not click.confirm("Are you absolutely certain you want to continue?", default=False):
print("Not continuing")
sys.exit(0)
save_config(name, default_dirs)
print()
print(
"Your basic configuration has been saved. Please run `redbot <name>` to"
" continue your setup process and to run the bot.\n\n"
"First time? Read the quickstart guide:\n"
"https://docs.discord.red/en/stable/getting_started.html"
)
def get_current_backend(instance) -> BackendType:
return BackendType(instance_data[instance]["STORAGE_TYPE"])
def get_target_backend(backend) -> BackendType:
if backend == "json":
return BackendType.JSON
elif backend == "postgres":
return BackendType.POSTGRES
async def do_migration(
current_backend: BackendType, target_backend: BackendType
) -> Dict[str, Any]:
cur_driver_cls = drivers._get_driver_class_include_old(current_backend)
new_driver_cls = drivers.get_driver_class(target_backend)
cur_storage_details = data_manager.storage_details()
new_storage_details = new_driver_cls.get_config_details()
await cur_driver_cls.initialize(**cur_storage_details)
await new_driver_cls.initialize(**new_storage_details)
await config.migrate(cur_driver_cls, new_driver_cls)
await cur_driver_cls.teardown()
await new_driver_cls.teardown()
return new_storage_details
async def create_backup(instance: str, destination_folder: Path = Path.home()) -> None:
data_manager.load_basic_configuration(instance)
backend_type = get_current_backend(instance)
if backend_type != BackendType.JSON:
await do_migration(backend_type, BackendType.JSON)
print("Backing up the instance's data...")
backup_fpath = await red_create_backup(destination_folder)
if backup_fpath is not None:
print(f"A backup of {instance} has been made. It is at {backup_fpath}")
else:
print("Creating the backup failed.")
async def remove_instance(
instance,
interactive: bool = False,
delete_data: Optional[bool] = None,
_create_backup: Optional[bool] = None,
drop_db: Optional[bool] = None,
remove_datapath: Optional[bool] = None,
):
data_manager.load_basic_configuration(instance)
if interactive is True and delete_data is None:
delete_data = click.confirm(
"Would you like to delete this instance's data?", default=False
)
if interactive is True and _create_backup is None:
_create_backup = click.confirm(
"Would you like to make a backup of the data for this instance?", default=False
)
if _create_backup is True:
await create_backup(instance)
backend = get_current_backend(instance)
driver_cls = drivers.get_driver_class(backend)
await driver_cls.initialize(**data_manager.storage_details())
try:
if delete_data is True:
await driver_cls.delete_all_data(interactive=interactive, drop_db=drop_db)
if interactive is True and remove_datapath is None:
remove_datapath = click.confirm(
"Would you like to delete the instance's entire datapath?", default=False
)
if remove_datapath is True:
data_path = data_manager.core_data_path().parent
safe_delete(data_path)
save_config(instance, {}, remove=True)
finally:
await driver_cls.teardown()
print("The instance {} has been removed\n".format(instance))
async def remove_instance_interaction():
if not instance_list:
print("No instances have been set up!")
return
print(
"You have chosen to remove an instance. The following "
"is a list of instances that currently exist:\n"
)
for instance in instance_data.keys():
print("{}\n".format(instance))
print("Please select one of the above by entering its name")
selected = input("> ")
if selected not in instance_data.keys():
print("That isn't a valid instance!")
return
await remove_instance(selected, interactive=True)
@click.group(invoke_without_command=True)
@click.option("--debug", type=bool)
@click.pass_context
def cli(ctx, debug):
"""Create a new instance."""
level = logging.DEBUG if debug else logging.INFO
base_logger = logging.getLogger("red")
base_logger.setLevel(level)
formatter = logging.Formatter(
"[{asctime}] [{levelname}] {name}: {message}", datefmt="%Y-%m-%d %H:%M:%S", style="{"
)
stdout_handler = logging.StreamHandler(sys.stdout)
stdout_handler.setFormatter(formatter)
base_logger.addHandler(stdout_handler)
if ctx.invoked_subcommand is None:
basic_setup()
@cli.command()
@click.argument("instance", type=click.Choice(instance_list), metavar="<INSTANCE_NAME>")
@click.option(
"--no-prompt",
"interactive",
is_flag=True,
default=True,
help="Don't ask for user input during the process.",
)
@click.option(
"--delete-data/--no-delete-data",
"delete_data",
is_flag=True,
default=None,
help=(
"Delete this instance's data. "
"If these options and --no-prompt are omitted, you will be asked about this."
),
)
@click.option(
"--backup/--no-backup",
"_create_backup",
is_flag=True,
default=None,
help=(
"Create backup of this instance's data. "
"If these options and --no-prompt are omitted, you will be asked about this."
),
)
@click.option(
"--drop-db/--no-drop-db",
is_flag=True,
default=None,
help=(
"Drop the entire database containing this instance's data. Has no effect on JSON "
"instances, or if --no-delete-data is set. If these options and --no-prompt are omitted,"
"you will be asked about this."
),
)
@click.option(
"--remove-datapath/--no-remove-datapath",
is_flag=True,
default=None,
help=(
"Remove this entire instance's datapath. If these options and --no-prompt are omitted, "
"you will be asked about this. NOTE: --remove-datapath will override --no-delete-data "
"for JSON instances."
),
)
def delete(
instance: str,
interactive: bool,
delete_data: Optional[bool],
_create_backup: Optional[bool],
drop_db: Optional[bool],
remove_datapath: Optional[bool],
):
"""Removes an instance."""
asyncio.run(
remove_instance(
instance, interactive, delete_data, _create_backup, drop_db, remove_datapath
)
)
@cli.command()
@click.argument("instance", type=click.Choice(instance_list), metavar="<INSTANCE_NAME>")
@click.argument("backend", type=click.Choice(["json", "postgres"]))
def convert(instance, backend):
"""Convert data backend of an instance."""
current_backend = get_current_backend(instance)
target = get_target_backend(backend)
data_manager.load_basic_configuration(instance)
default_dirs = deepcopy(data_manager.basic_config_default)
default_dirs["DATA_PATH"] = str(Path(instance_data[instance]["DATA_PATH"]))
if current_backend == BackendType.MONGOV1:
raise RuntimeError("Please see the 3.2 release notes for upgrading a bot using mongo.")
else:
new_storage_details = asyncio.run(do_migration(current_backend, target))
if new_storage_details is not None:
default_dirs["STORAGE_TYPE"] = target.value
default_dirs["STORAGE_DETAILS"] = new_storage_details
save_config(instance, default_dirs)
conversion_log.info(f"Conversion to {target} complete.")
else:
conversion_log.info(
f"Cannot convert {current_backend.value} to {target.value} at this time."
)
@cli.command()
@click.argument("instance", type=click.Choice(instance_list), metavar="<INSTANCE_NAME>")
@click.argument(
"destination_folder",
type=click.Path(
exists=False, dir_okay=True, file_okay=False, resolve_path=True, writable=True
),
default=Path.home(),
)
def backup(instance: str, destination_folder: Union[str, Path]) -> None:
"""Backup instance's data."""
asyncio.run(create_backup(instance, Path(destination_folder)))
def run_cli():
# Setuptools entry point script stuff...
try:
cli() # pylint: disable=no-value-for-parameter # click
except KeyboardInterrupt:
print("Exiting...")
else:
print("Exiting...")
if __name__ == "__main__":
run_cli()
|
from homeassistant.components.camera import SUPPORT_STREAM
from homeassistant.components.mjpeg.camera import (
CONF_MJPEG_URL,
CONF_STILL_IMAGE_URL,
MjpegCamera,
filter_urllib3_logging,
)
from homeassistant.const import (
CONF_AUTHENTICATION,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_USERNAME,
HTTP_DIGEST_AUTHENTICATION,
)
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .axis_base import AxisEntityBase
from .const import DEFAULT_STREAM_PROFILE, DOMAIN as AXIS_DOMAIN
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Axis camera video stream."""
filter_urllib3_logging()
device = hass.data[AXIS_DOMAIN][config_entry.unique_id]
if not device.api.vapix.params.image_format:
return
async_add_entities([AxisCamera(device)])
class AxisCamera(AxisEntityBase, MjpegCamera):
"""Representation of a Axis camera."""
def __init__(self, device):
"""Initialize Axis Communications camera component."""
AxisEntityBase.__init__(self, device)
config = {
CONF_NAME: device.config_entry.data[CONF_NAME],
CONF_USERNAME: device.config_entry.data[CONF_USERNAME],
CONF_PASSWORD: device.config_entry.data[CONF_PASSWORD],
CONF_MJPEG_URL: self.mjpeg_source,
CONF_STILL_IMAGE_URL: self.image_source,
CONF_AUTHENTICATION: HTTP_DIGEST_AUTHENTICATION,
}
MjpegCamera.__init__(self, config)
async def async_added_to_hass(self):
"""Subscribe camera events."""
self.async_on_remove(
async_dispatcher_connect(
self.hass, self.device.signal_new_address, self._new_address
)
)
await super().async_added_to_hass()
@property
def supported_features(self):
"""Return supported features."""
return SUPPORT_STREAM
def _new_address(self):
"""Set new device address for video stream."""
self._mjpeg_url = self.mjpeg_source
self._still_image_url = self.image_source
@property
def unique_id(self):
"""Return a unique identifier for this device."""
return f"{self.device.serial}-camera"
@property
def image_source(self):
"""Return still image URL for device."""
return f"http://{self.device.host}:{self.device.config_entry.data[CONF_PORT]}/axis-cgi/jpg/image.cgi"
@property
def mjpeg_source(self):
"""Return mjpeg URL for device."""
options = ""
if self.device.option_stream_profile != DEFAULT_STREAM_PROFILE:
options = f"?&streamprofile={self.device.option_stream_profile}"
return f"http://{self.device.host}:{self.device.config_entry.data[CONF_PORT]}/axis-cgi/mjpg/video.cgi{options}"
async def stream_source(self):
"""Return the stream source."""
options = ""
if self.device.option_stream_profile != DEFAULT_STREAM_PROFILE:
options = f"&streamprofile={self.device.option_stream_profile}"
return f"rtsp://{self.device.config_entry.data[CONF_USERNAME]}:{self.device.config_entry.data[CONF_PASSWORD]}@{self.device.host}/axis-media/media.amp?videocodec=h264{options}"
|
import pytest
from lemur.tests.vectors import (
SAN_CERT,
SAN_CERT_STR,
INTERMEDIATE_CERT,
ROOTCA_CERT,
EC_CERT_EXAMPLE,
ECDSA_PRIME256V1_CERT,
ECDSA_SECP384r1_CERT,
ECDSA_SECP384r1_CERT_STR,
DSA_CERT,
CERT_CHAIN_PKCS7_PEM
)
def test_get_key_type_from_ec_curve():
from lemur.common.utils import get_key_type_from_ec_curve
assert get_key_type_from_ec_curve("secp256r1") == "ECCPRIME256V1"
def test_generate_private_key():
from lemur.common.utils import generate_private_key
assert generate_private_key("RSA2048")
assert generate_private_key("RSA4096")
assert generate_private_key("ECCPRIME192V1")
assert generate_private_key("ECCPRIME256V1")
assert generate_private_key("ECCSECP192R1")
assert generate_private_key("ECCSECP224R1")
assert generate_private_key("ECCSECP256R1")
assert generate_private_key("ECCSECP384R1")
assert generate_private_key("ECCSECP521R1")
assert generate_private_key("ECCSECP256K1")
assert generate_private_key("ECCSECT163K1")
assert generate_private_key("ECCSECT233K1")
assert generate_private_key("ECCSECT283K1")
assert generate_private_key("ECCSECT409K1")
assert generate_private_key("ECCSECT571K1")
assert generate_private_key("ECCSECT163R2")
assert generate_private_key("ECCSECT233R1")
assert generate_private_key("ECCSECT283R1")
assert generate_private_key("ECCSECT409R1")
assert generate_private_key("ECCSECT571R2")
with pytest.raises(Exception):
generate_private_key("LEMUR")
def test_get_authority_key():
"""test get authority key function"""
from lemur.common.utils import get_authority_key
test_cert = """-----BEGIN CERTIFICATE-----
MIIGYjCCBEqgAwIBAgIUVS7mn6LR5XlQyEGxQ4w9YAWL/XIwDQYJKoZIhvcNAQEN
BQAweTELMAkGA1UEBhMCREUxDTALBgNVBAgTBEJvbm4xEDAOBgNVBAcTB0dlcm1h
bnkxITAfBgNVBAoTGFRlbGVrb20gRGV1dHNjaGxhbmQgR21iSDELMAkGA1UECxMC
UEQxGTAXBgNVBAMTEERldk9wc0xhYiBTdWIgQ0EwHhcNMTcxMTI3MTMwMDAwWhcN
MjAxMTI2MTMwMDAwWjB+MQswCQYDVQQGEwJERTENMAsGA1UECBMEQm9ubjEQMA4G
A1UEBxMHR2VybWFueTEhMB8GA1UEChMYVGVsZWtvbSBEZXV0c2NobGFuZCBHbWJI
MQswCQYDVQQLEwJQRDEeMBwGA1UEAxMVRGV2T3BzTGFiIE9DU1AgU2VydmVyMIIC
IjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAvltiCxxqrlw4/utS4YRspnRR
cusQVesXUKPlxT0GrnofyRpKVVBKmEXo3LE4+5XrzbKbSAL67MInp2/yNM8In66r
iCIvgEwwB1sdQOVXQ4UTHG3o39ATYY9/1YHUly0nKXBg2McwShJcxgh5+eFbl3CD
kr4oTM8mk3YoYK6RqTofV5Hv0zjpXaiL07z2gLVMAtWgZCuxRbUbZBJPhI8qKlVq
vJVFc6vWusbWUFRMK3ozFzCMtrrcCcUGh//XAIT/bb9+aASF4Cj7HBrMZMTZDu1o
uhHxtpEnBoLoc6ikxQvP/kgt0znEusJke76dFygzId5PXY4SWwyetuq+J10HOuEf
Sqr1qLw7r3MJbp2hAoPJXwU60IPlXfmbfiaR+lu0IPDYq6QmoXng4fXzzrgSx1dG
Q+YIHonxa5lHMB/jqguc+nPvsdPJe3SdVul4A9V2wgC/+UFkXM5gm7DJBxhNWQNy
AtVH7JT+j3n+YYydSQFvnUK/ELnYVJ+HFQaflOhXMGVOHGFdMOkcm6u+x3Q1DNcw
ckhh8r2VUtCC9Le8mSUk/2mx6FJuQr6YiPYRSxpDvIpbEhXMKHmweAkmajzHNFTk
6B4v5ZqrEmRyu/3oNcTeZ0Y+Ki8RZDcuG6RsfrX8g4xj0tvW4iyMHJYmibL8Serv
43+EEw4SvmtMmOwXt5cCAwEAAaOB3DCB2TAOBgNVHQ8BAf8EBAMCB4AwEwYDVR0l
BAwwCgYIKwYBBQUHAwkwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQULCXtfXT5zwRE
ktOVHbzueQIcj8EwHwYDVR0jBBgwFoAU/qy1Qb6BdxKTr/pBLY3J9mo+u4AwLAYI
KwYBBQUHAQEEIDAeMBwGCCsGAQUFBzABhhBodHRwOi8vb2NzcDo4MDgwMA8GA1Ud
EQQIMAaCBG9jc3AwJQYDVR0fBB4wHDAaoBigFoYUaHR0cDovL29jc3A6ODA4MC9j
cmwwDQYJKoZIhvcNAQENBQADggIBAAjpqomMtSQE8nDBm4scvR6yoGI1d4fHXcjY
qQfbGgkV5L+5Iaavjk4HpcUokOS36c9oGsOkPmU2tk3nmE51lN+advA8uN9HgZ8b
r+hq1TA+G9IRVjtryA3W6jmQ5Vn2a4H9vjqVhpahGUrQ7ty5Ed3gl5GYXkX/XJba
n7KXnaG4ULB295VTpRmXp1sN7O8nJ/EiyCTVzkX31MwLfBKGggCkF0FZSb3IVEAb
0nzdRO/hZPoinLWh85BddRc942xW4RU3TmEzdXb5gTMVASi3wA+PyQKLmJCd4fPZ
Mq14nKFX3y7qrQh4Bm8CuEWbNAQ+9DBICW6Lp4LAS2bVoQC5T+U5ygeCaF+EdGRR
NfKK+5NvX+A2Jt6GxkMtMlI92em8+oofIZCGN2iRd+QEQHY/mk4vpMi8VPLggGSG
zc75IDsn5wP6A3KflduWW7ri0bYUiKe5higMcbUM0aXzTEAVxsxPk8aEsR9dazF7
031+Oj1unq+Tl4ADNUVEODEJ1Uo6iDEfAmCApajgk7vs8KYJ/hYUrbEoBhDpPoRv
y4L/msew3UjFE3ovDHgStjWM1NBMxuIvJEbWOsiB2WA2l3FiT8HvFi0eX/0hbkGi
5LL+oz7nvm9Of7te/BV6Rq0rXWN4d6asO+QlLkTqbmAH6rwunmPCY7MbLXXtP/qM
KFfxwrO1
-----END CERTIFICATE-----"""
authority_key = get_authority_key(test_cert)
assert authority_key == "feacb541be81771293affa412d8dc9f66a3ebb80"
def test_is_selfsigned(selfsigned_cert):
from lemur.common.utils import is_selfsigned
assert is_selfsigned(selfsigned_cert) is True
assert is_selfsigned(SAN_CERT) is False
assert is_selfsigned(INTERMEDIATE_CERT) is False
# Root CA certificates are also technically self-signed
assert is_selfsigned(ROOTCA_CERT) is True
assert is_selfsigned(EC_CERT_EXAMPLE) is False
# selfsigned certs
assert is_selfsigned(ECDSA_PRIME256V1_CERT) is True
assert is_selfsigned(ECDSA_SECP384r1_CERT) is True
# unsupported algorithm (DSA)
with pytest.raises(Exception):
is_selfsigned(DSA_CERT)
def test_get_key_type_from_certificate():
from lemur.common.utils import get_key_type_from_certificate
assert (get_key_type_from_certificate(SAN_CERT_STR) == "RSA2048")
assert (get_key_type_from_certificate(ECDSA_SECP384r1_CERT_STR) == "ECCSECP384R1")
def test_convert_pkcs7_bytes_to_pem():
from lemur.common.utils import convert_pkcs7_bytes_to_pem
from lemur.common.utils import parse_certificate
cert_chain = convert_pkcs7_bytes_to_pem(CERT_CHAIN_PKCS7_PEM)
assert(len(cert_chain) == 3)
leaf = cert_chain[1]
root = cert_chain[2]
assert(parse_certificate("\n".join(str(root).splitlines())) == ROOTCA_CERT)
assert (parse_certificate("\n".join(str(leaf).splitlines())) == INTERMEDIATE_CERT)
|
import mock
from pytest import raises
from paasta_tools.cli.cmds import performance_check
@mock.patch(
"paasta_tools.cli.cmds.performance_check.validate_service_name", autospec=True
)
@mock.patch("requests.post", autospec=True)
@mock.patch(
"paasta_tools.cli.cmds.performance_check.load_performance_check_config",
autospec=True,
)
def test_submit_performance_check_job_happy(
mock_load_performance_check_config, mock_requests_post, mock_validate_service_name
):
fake_endpoint = "http://foo:1234/submit"
mock_load_performance_check_config.return_value = {
"endpoint": fake_endpoint,
"fake_param": "fake_value",
}
mock_validate_service_name.return_value = True
performance_check.submit_performance_check_job("fake_service", "fake_soa_dir")
mock_requests_post.assert_called_once_with(
url=fake_endpoint, params={"fake_param": "fake_value"}
)
@mock.patch(
"paasta_tools.cli.cmds.performance_check.validate_service_name", autospec=True
)
@mock.patch(
"paasta_tools.cli.cmds.performance_check.submit_performance_check_job",
autospec=True,
)
def test_main_safely_returns_when_exceptions(
mock_submit_performance_check_job, mock_validate_service_name
):
mock_validate_service_name.return_value = True
fake_args = mock.Mock()
fake_args.service = "services-fake_service"
fake_args.soa_dir = "fake_soa_dir"
mock_submit_performance_check_job.side_effect = raises(Exception)
performance_check.perform_performance_check(fake_args)
mock_submit_performance_check_job.assert_called_once_with(
service="fake_service", soa_dir="fake_soa_dir"
)
|
from datetime import timedelta
import logging
from prayer_times_calculator import PrayerTimesCalculator, exceptions
from requests.exceptions import ConnectionError as ConnError
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.event import async_call_later, async_track_point_in_time
import homeassistant.util.dt as dt_util
from .const import (
CALC_METHODS,
CONF_CALC_METHOD,
DATA_UPDATED,
DEFAULT_CALC_METHOD,
DOMAIN,
)
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: {
vol.Optional(CONF_CALC_METHOD, default=DEFAULT_CALC_METHOD): vol.In(
CALC_METHODS
),
}
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, config):
"""Import the Islamic Prayer component from config."""
if DOMAIN in config:
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=config[DOMAIN]
)
)
return True
async def async_setup_entry(hass, config_entry):
"""Set up the Islamic Prayer Component."""
client = IslamicPrayerClient(hass, config_entry)
if not await client.async_setup():
return False
hass.data.setdefault(DOMAIN, client)
return True
async def async_unload_entry(hass, config_entry):
"""Unload Islamic Prayer entry from config_entry."""
if hass.data[DOMAIN].event_unsub:
hass.data[DOMAIN].event_unsub()
hass.data.pop(DOMAIN)
await hass.config_entries.async_forward_entry_unload(config_entry, "sensor")
return True
class IslamicPrayerClient:
"""Islamic Prayer Client Object."""
def __init__(self, hass, config_entry):
"""Initialize the Islamic Prayer client."""
self.hass = hass
self.config_entry = config_entry
self.prayer_times_info = {}
self.available = True
self.event_unsub = None
@property
def calc_method(self):
"""Return the calculation method."""
return self.config_entry.options[CONF_CALC_METHOD]
def get_new_prayer_times(self):
"""Fetch prayer times for today."""
calc = PrayerTimesCalculator(
latitude=self.hass.config.latitude,
longitude=self.hass.config.longitude,
calculation_method=self.calc_method,
date=str(dt_util.now().date()),
)
return calc.fetch_prayer_times()
async def async_schedule_future_update(self):
"""Schedule future update for sensors.
Midnight is a calculated time. The specifics of the calculation
        depend on the method of the prayer time calculation. This calculated
        midnight is the time at which the time to pray the Isha prayer has
        expired.
Calculated Midnight: The Islamic midnight.
Traditional Midnight: 12:00AM
Update logic for prayer times:
If the Calculated Midnight is before the traditional midnight then wait
until the traditional midnight to run the update. This way the day
will have changed over and we don't need to do any fancy calculations.
If the Calculated Midnight is after the traditional midnight, then wait
until after the calculated Midnight. We don't want to update the prayer
times too early or else the timings might be incorrect.
Example:
calculated midnight = 11:23PM (before traditional midnight)
Update time: 12:00AM
calculated midnight = 1:35AM (after traditional midnight)
update time: 1:36AM.
"""
_LOGGER.debug("Scheduling next update for Islamic prayer times")
now = dt_util.utcnow()
midnight_dt = self.prayer_times_info["Midnight"]
if now > dt_util.as_utc(midnight_dt):
next_update_at = midnight_dt + timedelta(days=1, minutes=1)
_LOGGER.debug(
"Midnight is after day the changes so schedule update for after Midnight the next day"
)
else:
_LOGGER.debug(
"Midnight is before the day changes so schedule update for the next start of day"
)
next_update_at = dt_util.start_of_local_day(now + timedelta(days=1))
_LOGGER.info("Next update scheduled for: %s", next_update_at)
self.event_unsub = async_track_point_in_time(
self.hass, self.async_update, next_update_at
)
async def async_update(self, *_):
"""Update sensors with new prayer times."""
try:
prayer_times = await self.hass.async_add_executor_job(
self.get_new_prayer_times
)
self.available = True
except (exceptions.InvalidResponseError, ConnError):
self.available = False
_LOGGER.debug("Error retrieving prayer times")
async_call_later(self.hass, 60, self.async_update)
return
for prayer, time in prayer_times.items():
self.prayer_times_info[prayer] = dt_util.parse_datetime(
f"{dt_util.now().date()} {time}"
)
await self.async_schedule_future_update()
_LOGGER.debug("New prayer times retrieved. Updating sensors")
async_dispatcher_send(self.hass, DATA_UPDATED)
async def async_setup(self):
"""Set up the Islamic prayer client."""
await self.async_add_options()
try:
await self.hass.async_add_executor_job(self.get_new_prayer_times)
except (exceptions.InvalidResponseError, ConnError) as err:
raise ConfigEntryNotReady from err
await self.async_update()
self.config_entry.add_update_listener(self.async_options_updated)
self.hass.async_create_task(
self.hass.config_entries.async_forward_entry_setup(
self.config_entry, "sensor"
)
)
return True
async def async_add_options(self):
"""Add options for entry."""
if not self.config_entry.options:
data = dict(self.config_entry.data)
calc_method = data.pop(CONF_CALC_METHOD, DEFAULT_CALC_METHOD)
self.hass.config_entries.async_update_entry(
self.config_entry, data=data, options={CONF_CALC_METHOD: calc_method}
)
@staticmethod
async def async_options_updated(hass, entry):
"""Triggered by config entry options updates."""
if hass.data[DOMAIN].event_unsub:
hass.data[DOMAIN].event_unsub()
await hass.data[DOMAIN].async_update()
|
from django.utils.translation import gettext_lazy as _
from cms.app_base import CMSApp
class CatalogListCMSApp(CMSApp):
name = _("Catalog List")
def get_urls(self, page=None, language=None, **kwargs):
raise NotImplementedError("{} must implement method `.get_urls()`.".format(self.__class__))
class CatalogSearchApp(CMSApp):
"""
This CMS apphook shall be used to render the list view for generic search results.
These results are just determined by the search query and not influenced by other means,
such as filters and categories. Usually this `Catalog Search` app is attached to a CMS
page named "Search Results". That CMS page must be tagged with the ID: 'shop-search-product'.
"""
name = _("Catalog Search")
def get_urls(self, page=None, language=None, **kwargs):
from django.conf.urls import url
from shop.search.mixins import CatalogSearchViewMixin
from shop.views.catalog import ProductListView
SearchView = type('SearchView', (CatalogSearchViewMixin, ProductListView), {})
return [
url(r'^', SearchView.as_view(
filter_backends=[],
search_fields=['product_name', 'product_code', 'body']
)),
]
class OrderApp(CMSApp):
name = _("View Orders")
cache_placeholders = False
def get_urls(self, page=None, language=None, **kwargs):
from django.conf.urls import url
from shop.views.order import OrderView
return [
url(r'^(?P<slug>[\w-]+)/(?P<secret>[\w-]+)', OrderView.as_view(many=False)), # publicly accessible
url(r'^(?P<slug>[\w-]+)', OrderView.as_view(many=False)), # requires authentication
url(r'^', OrderView.as_view()), # requires authentication
]
class PasswordResetApp(CMSApp):
name = _("Password Reset Confirm")
def get_urls(self, page=None, language=None, **kwargs):
from django.conf.urls import url
from shop.views.auth import PasswordResetConfirmView
return [
url(r'^(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})',
PasswordResetConfirmView.as_view(),
)
]
|
import json
import logging
import os
import sys
import pandas as pd
from datetime import timedelta, datetime
import io
from collections import defaultdict
from ...common.interfaces import AbstractPlugin, AggregateResultListener
logger = logging.getLogger(__name__) # pylint: disable=C0103
def calc_overall_times(overall, quantiles):
    """Return (quantile, time) pairs computed from the cumulative response time histogram."""
    cumulative = overall.cumsum()
total = cumulative.max()
positions = cumulative.searchsorted([float(i) / 100 * total for i in quantiles])
all_times = [cumulative.index[i] / 1000. for i in positions]
overall_times = zip(quantiles, all_times)
return overall_times
def calc_duration(first_ts, last_ts):
first_time = timedelta(seconds=first_ts)
last_time = timedelta(seconds=last_ts)
return str(last_time - first_time + timedelta(seconds=1))
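# Note (illustrative, not part of the original module): the extra second makes the
# duration inclusive of the last timestamp, e.g. calc_duration(100, 159) -> '0:01:00'.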
def make_resp_json(overall_times, overall_proto_code, overall_net_code, duration, loadscheme, time_start,
autostop_info):
quant = {}
for q, t in overall_times:
quant['q' + str(q)] = t
res = {
"duration": duration,
"time_start": time_start,
"loadscheme": loadscheme,
"quantiles": quant,
"proto_code": overall_proto_code,
"net_code": overall_net_code
}
if autostop_info:
res['autostop_rps'] = autostop_info['rps']
res['autostop_reason'] = autostop_info['reason']
try:
response = json.dumps(res, indent=2, sort_keys=False)
except ValueError as e:
logger.warning('Can\'t convert offline report to json format: %s', e, exc_info=True)
response = None
return response
def make_resp_text(overall_times, overall_proto_code, overall_net_code, duration, loadscheme, time_start,
autostop_info):
res = ['Duration: {:>8}\n'.format(duration)]
res.append('Loadscheme: {}\n'.format(loadscheme))
if autostop_info:
res.append('Autostop rps: {}\n'.format(autostop_info['rps']))
res.append('Autostop reason: {}\n'.format(autostop_info['reason']))
res.append('Start time: {}\n'.format(time_start))
res.append('Percentiles all ms:\n')
for q, t in overall_times:
res.append('{:>5}% < {:>5}\n'.format(q, t))
res.append('HTTP codes(code/count):\n')
for q, t in overall_proto_code.items():
res.append('{:>5}: {:<7}\n'.format(q, t))
res.append('Net codes(code/count):\n')
for q, t in overall_net_code.items():
res.append('{:>5}: {:<7}\n'.format(q, t))
return ''.join(res)
class Plugin(AbstractPlugin, AggregateResultListener):
# pylint:disable=R0902
SECTION = 'offline_report'
def __init__(self, core, cfg, name):
super(Plugin, self).__init__(core, cfg, name)
try:
self.data_and_stats_stream = io.open(os.path.join(self.core.artifacts_dir,
self.get_option('offline_data_log')),
mode='w')
self.add_cleanup(lambda: self.data_and_stats_stream.close())
self.overall_json_stream = io.open(os.path.join(self.core.artifacts_dir,
self.get_option('offline_json_report')),
mode='w')
self.add_cleanup(lambda: self.overall_json_stream.close())
self.overall_text_stream = io.open(os.path.join(self.core.artifacts_dir,
self.get_option('offline_text_report')),
mode='w')
self.add_cleanup(lambda: self.overall_text_stream.close())
except Exception:
            logger.exception('Failed to open offline report file')
raise OSError('Error opening OfflineReport log file')
self.overall = None
self.overall_proto_code = defaultdict(int)
self.overall_net_code = defaultdict(int)
self.quantiles = [10, 20, 30, 40, 50, 60, 70, 75, 80, 85, 90, 95, 99, 99.5, 100]
self.print_report = self.get_option("print_report")
self.first_ts = None
self.last_ts = None
self.stats = None
def get_available_options(self):
return ['offline_data_log', 'offline_json_report', 'offline_text_report', 'print_report']
def configure(self):
self.core.job.subscribe_plugin(self)
def prepare_test(self):
self.data_and_stats_stream.write('[')
def on_aggregated_data(self, data, stats):
"""
@data: aggregated data
@stats: stats about gun
"""
last_proto_code = data['overall']['proto_code']['count']
for code, count in last_proto_code.items():
self.overall_proto_code[code] += count
last_net_code = data['overall']['net_code']['count']
for code, count in last_net_code.items():
self.overall_net_code[code] += count
self.data_and_stats_stream.write(
'%s,\n' % json.dumps({
'ts': stats['ts'],
'instances': stats['metrics']['instances'],
'reqps': stats['metrics']['reqps'],
'quantiles': {
"q50": int(data['overall']['interval_real']['q']['value'][0]),
"q75": int(data['overall']['interval_real']['q']['value'][1]),
"q80": int(data['overall']['interval_real']['q']['value'][2]),
"q85": int(data['overall']['interval_real']['q']['value'][3]),
"q90": int(data['overall']['interval_real']['q']['value'][4]),
"q95": int(data['overall']['interval_real']['q']['value'][5]),
"q98": int(data['overall']['interval_real']['q']['value'][6]),
"q99": int(data['overall']['interval_real']['q']['value'][7]),
"q100": int(data['overall']['interval_real']['q']['value'][8]),
},
'proto_code': last_proto_code,
'net_code': data['overall']['net_code']['count']
}))
incoming_hist = data['overall']['interval_real']['hist']
dist = pd.Series(incoming_hist['data'], index=incoming_hist['bins'])
if self.overall is None:
self.overall = dist
else:
self.overall = self.overall.add(dist, fill_value=0)
if self.first_ts is None:
self.first_ts = stats['ts']
else:
self.last_ts = stats['ts']
def post_process(self, retcode):
try:
self.data_and_stats_stream.seek(self.data_and_stats_stream.tell() - 2, os.SEEK_SET)
self.data_and_stats_stream.write(']')
except ValueError as e:
logger.error('Can\'t write offline report %s', e)
overall_times = calc_overall_times(self.overall, self.quantiles)
stepper_info = self.core.info.get_value(['stepper'])
try:
duration = str(timedelta(seconds=stepper_info['duration']))
except (KeyError, TypeError) as e:
logger.error('Can\'t get test duration %s', e)
duration = calc_duration(self.first_ts, self.last_ts)
try:
loadscheme = ' '.join(stepper_info['loadscheme'])
except (KeyError, TypeError) as e:
logger.error('Can\'t get test loadscheme %s', e)
loadscheme = None
generator_info = self.core.info.get_value(['generator'])
try:
time_start = datetime.fromtimestamp(generator_info['time_start']).strftime("%Y-%m-%d %H:%M:%S")
except (KeyError, TypeError) as e:
logger.error('Can\'t get test start time %s', e)
time_start = datetime.fromtimestamp(self.first_ts).strftime("%Y-%m-%d %H:%M:%S")
try:
autostop_info = self.core.info.get_value(['autostop'])
except (KeyError, TypeError) as e:
logger.error('Can\'t get autostop info %s', e)
autostop_info = None
resp_json = make_resp_json(
overall_times,
self.overall_proto_code,
self.overall_net_code,
duration,
loadscheme,
time_start,
autostop_info
)
if resp_json is not None:
self.overall_json_stream.write('%s' % resp_json)
resp_text = make_resp_text(
overall_times,
self.overall_proto_code,
self.overall_net_code,
duration,
loadscheme,
time_start,
autostop_info
)
self.overall_text_stream.write('%s' % resp_text)
if self.print_report:
sys.stdout.write(resp_text)
return retcode
|
from functools import partial
from ...utils import verbose, get_config
from ..utils import (has_dataset, _data_path, _data_path_doc,
_get_version, _version_doc)
has_testing_data = partial(has_dataset, name='testing')
@verbose
def data_path(path=None, force_update=False, update_path=True,
download=True, verbose=None): # noqa: D103
# Make sure we don't do something stupid
if download and \
get_config('MNE_SKIP_TESTING_DATASET_TESTS', 'false') == 'true':
raise RuntimeError('Cannot download data if skipping is forced')
return _data_path(path=path, force_update=force_update,
update_path=update_path, name='testing',
download=download)
data_path.__doc__ = _data_path_doc.format(name='testing',
conf='MNE_DATASETS_TESTING_PATH')
def get_version(): # noqa: D103
return _get_version('testing')
get_version.__doc__ = _version_doc.format(name='testing')
# Allow forcing of testing dataset skip (for Debian tests) using:
# `make test-no-testing-data`
def _skip_testing_data():
skip_testing = (get_config('MNE_SKIP_TESTING_DATASET_TESTS', 'false') ==
'true')
skip = skip_testing or not has_testing_data()
return skip
def requires_testing_data(func):
"""Skip testing data test."""
return _pytest_mark()(func)
def _pytest_param(*args, **kwargs):
if len(args) == len(kwargs) == 0:
args = ('testing_data',)
import pytest
# turn anything that uses testing data into an auto-skipper by
# setting params=[testing._pytest_param()], or by parametrizing functions
# with testing._pytest_param(whatever)
return pytest.param(*args, **kwargs, marks=_pytest_mark())
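# Illustrative example of the usage described above (hypothetical test code,
# assuming this module is imported as `testing`):
#
#     @pytest.mark.parametrize('dataset', [testing._pytest_param()])
#     def test_needs_testing_data(dataset):
#         ...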
def _pytest_mark():
import pytest
return pytest.mark.skipif(
_skip_testing_data(), reason='Requires testing dataset')
|
import unittest
from unit_tests.myStringIO import StringIO
class TestTrashPutIssueMessage(unittest.TestCase):
def setUp(self):
self.out = StringIO()
def test_trash_put_last_line(self):
from trashcli.put import TrashPutCmd
cmd = TrashPutCmd(self.out,
StringIO(),
None,
None,
None,
None,
None,
None,
None)
cmd.run(['', '--help'])
self.assert_last_line_of_output_is(
'Report bugs to https://github.com/andreafrancia/trash-cli/issues')
def test_trash_empty_last_line(self):
from trashcli.empty import EmptyCmd
from trashcli.fs import FileSystemReader
        cmd = EmptyCmd(self.out, StringIO(), [], lambda: [],
                       now=None,
                       file_reader=FileSystemReader(),
                       getuid=None,
                       file_remover=None,
                       version=None,
                       )
cmd.run('', '--help')
self.assert_last_line_of_output_is(
'Report bugs to https://github.com/andreafrancia/trash-cli/issues')
def test_trash_list_last_line(self):
from trashcli.list import ListCmd
cmd = ListCmd(self.out, None, None, None, None)
cmd.run('', '--help')
self.assert_last_line_of_output_is(
'Report bugs to https://github.com/andreafrancia/trash-cli/issues')
def assert_last_line_of_output_is(self, expected):
output = self.out.getvalue()
if len(output.splitlines()) > 0:
last_line = output.splitlines()[-1]
else:
last_line = ''
assert expected == last_line, ('Last line of output should be:\n\n%s\n\n' % expected +
'but the output is\n\n%s' % output)
|
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_GAS,
DEVICE_CLASS_MOISTURE,
DEVICE_CLASS_OPENING,
DEVICE_CLASS_PROBLEM,
DEVICE_CLASS_SMOKE,
DEVICE_CLASS_VIBRATION,
BinarySensorEntity,
)
from .entity import (
BlockAttributeDescription,
ShellyBlockAttributeEntity,
async_setup_entry_attribute_entities,
)
SENSORS = {
("device", "overtemp"): BlockAttributeDescription(
name="Overheating", device_class=DEVICE_CLASS_PROBLEM
),
("device", "overpower"): BlockAttributeDescription(
name="Overpowering", device_class=DEVICE_CLASS_PROBLEM
),
("light", "overpower"): BlockAttributeDescription(
name="Overpowering", device_class=DEVICE_CLASS_PROBLEM
),
("relay", "overpower"): BlockAttributeDescription(
name="Overpowering", device_class=DEVICE_CLASS_PROBLEM
),
("sensor", "dwIsOpened"): BlockAttributeDescription(
name="Door", device_class=DEVICE_CLASS_OPENING
),
("sensor", "flood"): BlockAttributeDescription(
name="Flood", device_class=DEVICE_CLASS_MOISTURE
),
("sensor", "gas"): BlockAttributeDescription(
name="Gas",
device_class=DEVICE_CLASS_GAS,
value=lambda value: value in ["mild", "heavy"],
device_state_attributes=lambda block: {"detected": block.gas},
),
("sensor", "smoke"): BlockAttributeDescription(
name="Smoke", device_class=DEVICE_CLASS_SMOKE
),
("sensor", "vibration"): BlockAttributeDescription(
name="Vibration", device_class=DEVICE_CLASS_VIBRATION
),
}
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up sensors for device."""
await async_setup_entry_attribute_entities(
hass, config_entry, async_add_entities, SENSORS, ShellyBinarySensor
)
class ShellyBinarySensor(ShellyBlockAttributeEntity, BinarySensorEntity):
"""Shelly binary sensor entity."""
@property
def is_on(self):
"""Return true if sensor state is on."""
return bool(self.attribute_value)
|
import logging
import os
import socket
from docopt import docopt
import tinychain as t
logging.basicConfig(
level=getattr(logging, os.environ.get('TC_LOG_LEVEL', 'INFO')),
format='[%(asctime)s][%(module)s:%(lineno)d] %(levelname)s %(message)s')
logger = logging.getLogger(__name__)
def main(args):
args['signing_key'], args['verifying_key'], args['my_addr'] = (
t.init_wallet(args.get('--wallet')))
if args['--port']:
        send_msg.port = int(args['--port'])
if args['--node']:
send_msg.node_hostname = args['--node']
if args['balance']:
get_balance(args)
elif args['send']:
send_value(args)
elif args['status']:
txn_status(args)
def get_balance(args):
"""
Get the balance of a given address.
"""
val = sum(i.value for i in find_utxos_for_address(args))
print(val if args['--raw'] else f"{val / t.Params.BELUSHIS_PER_COIN} ⛼ ")
def txn_status(args):
"""
Get the status of a transaction.
Prints [status],[containing block_id],[height mined]
"""
txid = args['<txid>']
as_csv = args['--csv']
mempool = send_msg(t.GetMempoolMsg())
if txid in mempool:
print(f'{txid}:in_mempool,,' if as_csv else 'Found in mempool')
return
chain = send_msg(t.GetActiveChainMsg())
for tx, block, height in t.txn_iterator(chain):
if tx.id == txid:
print(
f'{txid}:mined,{block.id},{height}' if as_csv else
f'Mined in {block.id} at height {height}')
return
print(f'{txid}:not_found,,' if as_csv else 'Not found')
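# Example --csv output lines for the three cases above (txid and block id
# values are hypothetical):
#
#   <txid>:in_mempool,,
#   <txid>:mined,<block_id>,42
#   <txid>:not_found,,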
def send_value(args: dict):
"""
Send value to some address.
"""
val, to_addr, sk = int(args['<val>']), args['<addr>'], args['signing_key']
selected = set()
my_coins = list(sorted(
find_utxos_for_address(args), key=lambda i: (i.value, i.height)))
for coin in my_coins:
selected.add(coin)
if sum(i.value for i in selected) > val:
break
txout = t.TxOut(value=val, to_address=to_addr)
txn = t.Transaction(
txins=[make_txin(sk, coin.outpoint, txout) for coin in selected],
txouts=[txout])
logger.info(f'built txn {txn}')
logger.info(f'broadcasting txn {txn.id}')
send_msg(txn)
def send_msg(data, node_hostname=None, port=None):
    # fall back to the values configured in main() (or the defaults) only when
    # the caller does not pass node_hostname/port explicitly
    node_hostname = node_hostname or getattr(send_msg, 'node_hostname', 'localhost')
    port = port or getattr(send_msg, 'port', 9999)
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.connect((node_hostname, port))
s.sendall(t.encode_socket_data(data))
return t.read_all_from_socket(s)
def find_utxos_for_address(args: dict):
utxo_set = dict(send_msg(t.GetUTXOsMsg()))
return [u for u in utxo_set.values() if u.to_address == args['my_addr']]
def make_txin(signing_key, outpoint: t.OutPoint, txout: t.TxOut) -> t.TxIn:
sequence = 0
pk = signing_key.verifying_key.to_string()
spend_msg = t.build_spend_message(outpoint, pk, sequence, [txout])
return t.TxIn(
to_spend=outpoint, unlock_pk=pk,
unlock_sig=signing_key.sign(spend_msg), sequence=sequence)
if __name__ == '__main__':
main(docopt(__doc__, version='tinychain client 0.1'))
|
import warnings
import numpy as np
import six
from .._externals.ppca import PPCA
from .._shared.helpers import get_type
def format_data(x, vectorizer='CountVectorizer',
semantic='LatentDirichletAllocation', corpus='wiki', ppca=True, text_align='hyper'):
"""
Formats data into a list of numpy arrays
This function is useful to identify rows of your array that contain missing
data or nans. The returned indices can be used to remove the rows with
missing data, or label the missing data points that are interpolated
using PPCA.
Parameters
----------
x : numpy array, dataframe, string or (mixed) list
The data to convert
vectorizer : str, dict, class or class instance
The vectorizer to use. Built-in options are 'CountVectorizer' or
'TfidfVectorizer'. To change default parameters, set to a dictionary
e.g. {'model' : 'CountVectorizer', 'params' : {'max_features' : 10}}. See
http://scikit-learn.org/stable/modules/classes.html#module-sklearn.feature_extraction.text
for details. You can also specify your own vectorizer model as a class,
or class instance. With either option, the class must have a
fit_transform method (see here: http://scikit-learn.org/stable/data_transforms.html).
If a class, pass any parameters as a dictionary to vectorizer_params. If
a class instance, no parameters can be passed.
semantic : str, dict, class or class instance
Text model to use to transform text data. Built-in options are
'LatentDirichletAllocation' or 'NMF' (default: LDA). To change default
parameters, set to a dictionary e.g. {'model' : 'NMF', 'params' :
{'n_components' : 10}}. See
http://scikit-learn.org/stable/modules/classes.html#module-sklearn.decomposition
for details on the two model options. You can also specify your own
text model as a class, or class instance. With either option, the class
must have a fit_transform method (see here:
http://scikit-learn.org/stable/data_transforms.html).
If a class, pass any parameters as a dictionary to text_params. If
a class instance, no parameters can be passed.
corpus : list (or list of lists) of text samples or 'wiki', 'nips', 'sotus'.
Text to use to fit the semantic model (optional). If set to 'wiki', 'nips'
or 'sotus' and the default semantic and vectorizer models are used, a
pretrained model will be loaded which can save a lot of time.
ppca : bool
Performs PPCA to fill in missing values (default: True)
text_align : str
Alignment algorithm to use when both text and numerical data are passed.
If numerical arrays have the same shape, and the text data contains the
same number of samples, the text and numerical data are automatically
aligned to a common space. Example use case: an array of movie frames
(frames by pixels) and text descriptions of the frame. In this case,
the movie and text will be automatically aligned to the same space
(default: hyperalignment).
Returns
----------
data : list of numpy arrays
A list of formatted arrays
"""
    # imported inside the function (rather than at module level) to avoid
    # circular imports
from .df2mat import df2mat
from .text2mat import text2mat
from ..datageometry import DataGeometry
# if x is not a list, make it one
if type(x) is not list:
x = [x]
if all([isinstance(xi, six.string_types) for xi in x]):
x = [x]
# check data type for each element in list
dtypes = list(map(get_type, x))
# handle text data:
if any(map(lambda x: x in ['list_str', 'str', 'arr_str'], dtypes)):
# default text args
text_args = {
'vectorizer' : vectorizer,
'semantic' : semantic,
'corpus' : corpus
}
# filter text data
text_data = []
for i,j in zip(x, dtypes):
if j in ['list_str', 'str', 'arr_str']:
text_data.append(np.array(i).reshape(-1, 1))
# convert text to numerical matrices
text_data = text2mat(text_data, **text_args)
# replace the text data with transformed data
processed_x = []
textidx=0
for i, dtype in enumerate(dtypes):
if dtype in ['list_str', 'str', 'arr_str']:
processed_x.append(text_data[textidx])
textidx+=1
elif dtype == 'df':
processed_x.append(df2mat(x[i]))
elif dtype == 'geo':
text_args = {
'vectorizer' : vectorizer,
'semantic' : semantic,
'corpus' : corpus
}
for j in format_data(x[i].get_data(), **text_args):
processed_x.append(j)
else:
processed_x.append(x[i])
# reshape anything that is 1d
if any([i.ndim<=1 for i in processed_x]):
processed_x = [np.reshape(i,(i.shape[0],1)) if i.ndim==1 else i for i in processed_x]
contains_text = any([dtype in ['list_str', 'str', 'arr_str'] for dtype in dtypes])
contains_num = any([dtype in ['list_num', 'array', 'df', 'arr_num'] for dtype in dtypes])
# if there are any nans in any of the lists, use ppca
if ppca is True:
if contains_num:
num_data = []
for i,j in zip(processed_x, dtypes):
if j in ['list_num', 'array', 'df', 'arr_num']:
num_data.append(i)
if np.isnan(np.vstack(num_data)).any():
warnings.warn('Missing data: Inexact solution computed with PPCA (see https://github.com/allentran/pca-magic for details)')
num_data = fill_missing(num_data)
x_temp = []
for dtype in dtypes:
if dtype in ['list_str', 'str', 'arr_str']:
x_temp.append(text_data.pop(0))
elif dtype in ['list_num', 'array', 'df', 'arr_num']:
x_temp.append(num_data.pop(0))
processed_x = x_temp
# if input data contains both text and numerical data
if contains_num and contains_text:
# and if they have the same number of samples
if np.unique(np.array([i.shape[0] for i, j in zip(processed_x, dtypes)])).shape[0] == 1:
from .align import align as aligner
# align the data
warnings.warn('Numerical and text data with same number of '
'samples detected. Aligning data to a common space.')
processed_x = aligner(processed_x, align=text_align, format_data=False)
return processed_x
def fill_missing(x):
# ppca if missing data
m = PPCA()
m.fit(data=np.vstack(x))
x_pca = m.transform()
# if the whole row is missing, return nans
    all_missing = [idx for idx, a in enumerate(np.vstack(x))
                   if all(np.isnan(b) for b in a)]
if len(all_missing)>0:
for i in all_missing:
x_pca[i, :] = np.nan
# get the original lists back
if len(x)>1:
x_split = np.cumsum([i.shape[0] for i in x][:-1])
return list(np.split(x_pca, x_split, axis=0))
else:
return [x_pca]
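# --- Illustrative usage sketch (not part of the library API) ----------------
# Assumes plain numpy arrays are classified as numeric by get_type; the shapes
# below are arbitrary examples.
def _example_format_data():  # pragma: no cover
    arrays = [np.random.rand(10, 3), np.random.rand(8, 3)]
    formatted = format_data(arrays)
    return [arr.shape for arr in formatted]  # expected: [(10, 3), (8, 3)]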
|
import numpy as np
import pytest
from tensornetwork.block_sparse.charge import (U1Charge, charge_equal,
BaseCharge)
from tensornetwork.block_sparse.index import Index
from tensornetwork.block_sparse.blocksparsetensor import BlockSparseTensor
from tensornetwork.block_sparse.initialization import (zeros, ones, randn,
random, ones_like,
zeros_like, empty_like,
randn_like, random_like)
np_dtypes = [np.float64, np.complex128]
@pytest.mark.parametrize('dtype', np_dtypes)
@pytest.mark.parametrize('num_charges', [1, 2, 3])
def test_tn_zeros(dtype, num_charges):
np.random.seed(10)
Ds = [8, 9, 10, 11]
rank = 4
flows = np.random.choice([True, False], size=rank, replace=True)
indices = [
Index(
BaseCharge(
np.random.randint(-5, 6, (Ds[n], num_charges)),
charge_types=[U1Charge] * num_charges), flows[n])
for n in range(rank)
]
arr = zeros(indices, dtype=dtype)
np.testing.assert_allclose(arr.data, 0)
np.testing.assert_allclose(Ds, arr.shape)
np.testing.assert_allclose(arr.flat_flows, flows)
for n in range(4):
assert charge_equal(arr.charges[n][0], indices[n].flat_charges[0])
@pytest.mark.parametrize('dtype', np_dtypes)
@pytest.mark.parametrize('num_charges', [1, 2, 3])
def test_tn_ones(dtype, num_charges):
np.random.seed(10)
Ds = [8, 9, 10, 11]
rank = 4
flows = np.random.choice([True, False], size=rank, replace=True)
indices = [
Index(
BaseCharge(
np.random.randint(-5, 6, (Ds[n], num_charges)),
charge_types=[U1Charge] * num_charges), flows[n])
for n in range(rank)
]
arr = ones(indices, dtype=dtype)
np.testing.assert_allclose(arr.data, 1)
np.testing.assert_allclose(Ds, arr.shape)
np.testing.assert_allclose(arr.flat_flows, flows)
for n in range(4):
assert charge_equal(arr.charges[n][0], indices[n].flat_charges[0])
@pytest.mark.parametrize('dtype', np_dtypes)
@pytest.mark.parametrize('num_charges', [1, 2, 3])
def test_tn_random(dtype, num_charges):
np.random.seed(10)
Ds = [8, 9, 10, 11]
rank = 4
flows = np.random.choice([True, False], size=rank, replace=True)
indices = [
Index(
BaseCharge(
np.random.randint(-5, 6, (Ds[n], num_charges)),
charge_types=[U1Charge] * num_charges), flows[n])
for n in range(rank)
]
arr = random(indices, dtype=dtype)
np.testing.assert_allclose(Ds, arr.shape)
np.testing.assert_allclose(arr.flat_flows, flows)
for n in range(4):
assert charge_equal(arr.charges[n][0], indices[n].flat_charges[0])
@pytest.mark.parametrize('dtype', np_dtypes)
@pytest.mark.parametrize('num_charges', [1, 2, 3])
def test_tn_randn(dtype, num_charges):
np.random.seed(10)
Ds = [8, 9, 10, 11]
rank = 4
flows = np.random.choice([True, False], size=rank, replace=True)
indices = [
Index(
BaseCharge(
np.random.randint(-5, 6, (Ds[n], num_charges)),
charge_types=[U1Charge] * num_charges), flows[n])
for n in range(rank)
]
arr = randn(indices, dtype=dtype)
np.testing.assert_allclose(Ds, arr.shape)
np.testing.assert_allclose(arr.flat_flows, flows)
for n in range(4):
assert charge_equal(arr.charges[n][0], indices[n].flat_charges[0])
@pytest.mark.parametrize('dtype', np_dtypes)
@pytest.mark.parametrize('num_charges', [1, 2, 3])
@pytest.mark.parametrize('fun, val', [(ones_like, 1), (zeros_like, 0),
(empty_like, None), (randn_like, None),
(random_like, None)])
def test_like_init(fun, val, dtype, num_charges):
np.random.seed(10)
Ds = [8, 9, 10, 11]
rank = 4
flows = np.random.choice([True, False], size=rank, replace=True)
indices = [
Index(
BaseCharge(
np.random.randint(-5, 6, (Ds[n], num_charges)),
charge_types=[U1Charge] * num_charges), flows[n])
for n in range(rank)
]
arr = randn(indices, dtype=dtype)
arr2 = fun(arr)
assert arr.dtype == arr2.dtype
np.testing.assert_allclose(arr.shape, arr2.shape)
np.testing.assert_allclose(arr.flat_flows, arr2.flat_flows)
for n in range(4):
assert charge_equal(arr.charges[n][0], arr2.charges[n][0])
if val is not None:
np.testing.assert_allclose(arr2.data, val)
|
import psutil
import time
import netifaces
class NetIOCounters(object):
def __init__(self, pernic=True):
self.last_req = None
self.last_req_time = None
self.pernic = pernic
def _get_net_io_counters(self):
"""
Fetch io counters from psutil and transform it to
dicts with the additional attributes defaulted
"""
counters = psutil.net_io_counters(pernic=self.pernic)
res = {}
for name, io in counters.iteritems():
res[name] = io._asdict()
res[name].update({'tx_per_sec': 0, 'rx_per_sec': 0})
return res
def _set_last_request(self, counters):
self.last_req = counters
self.last_req_time = time.time()
def get(self):
return self.last_req
def update(self):
counters = self._get_net_io_counters()
if not self.last_req:
self._set_last_request(counters)
return counters
time_delta = time.time() - self.last_req_time
if not time_delta:
return counters
for name, io in counters.iteritems():
last_io = self.last_req.get(name)
if not last_io:
continue
counters[name].update({
'rx_per_sec': (io['bytes_recv'] - last_io['bytes_recv']) / time_delta,
'tx_per_sec': (io['bytes_sent'] - last_io['bytes_sent']) / time_delta
})
self._set_last_request(counters)
return counters
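# Illustrative usage sketch (not called by the application): sample the
# counters twice so the second update() can compute per-second rates.
def _example_net_io_rates():
    counters = NetIOCounters()
    counters.update()
    time.sleep(1.0)
    return dict((name, (io['rx_per_sec'], io['tx_per_sec']))
                for name, io in counters.update().items())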
def get_interface_addresses():
"""
Get addresses of available network interfaces.
See netifaces on pypi for details.
Returns a list of dicts
"""
addresses = []
ifaces = netifaces.interfaces()
for iface in ifaces:
addrs = netifaces.ifaddresses(iface)
families = addrs.keys()
# put IPv4 to the end so it lists as the main iface address
if netifaces.AF_INET in families:
families.remove(netifaces.AF_INET)
families.append(netifaces.AF_INET)
for family in families:
for addr in addrs[family]:
address = {
'name': iface,
'family': family,
'ip': addr['addr'],
}
addresses.append(address)
return addresses
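# Example element of the list returned above (values are hypothetical):
#
#   {'name': 'eth0', 'family': netifaces.AF_INET, 'ip': '192.168.1.10'}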
|
import logging
import multiprocessing
import os
import signal
import sys
import time
try:
from setproctitle import getproctitle, setproctitle
except ImportError:
setproctitle = None
# Path Fix
sys.path.append(
os.path.abspath(
os.path.join(
os.path.dirname(__file__), "../")))
from diamond.utils.classes import initialize_collector
from diamond.utils.classes import load_collectors
from diamond.utils.classes import load_dynamic_class
from diamond.utils.classes import load_handlers
from diamond.utils.classes import load_include_path
from diamond.utils.config import load_config
from diamond.utils.config import str_to_bool
from diamond.utils.scheduler import collector_process
from diamond.utils.scheduler import handler_process
from diamond.handler.Handler import Handler
from diamond.utils.signals import signal_to_exception
from diamond.utils.signals import SIGHUPException
class Server(object):
"""
Server class loads and starts Handlers and Collectors
"""
def __init__(self, configfile):
# Initialize Logging
self.log = logging.getLogger('diamond')
# Initialize Members
self.configfile = configfile
self.config = None
self.handlers = []
self.handler_queue = []
self.modules = {}
self.metric_queue = None
        # Temporarily swap the process title so the SyncManager child process
        # started by multiprocessing.Manager() shows a meaningful name in ps
if setproctitle:
oldproctitle = getproctitle()
setproctitle('%s - SyncManager' % getproctitle())
self.manager = multiprocessing.Manager()
if setproctitle:
setproctitle(oldproctitle)
def run(self):
"""
Load handler and collector classes and then start collectors
"""
#######################################################################
# Config
#######################################################################
self.config = load_config(self.configfile)
collectors = load_collectors(self.config['server']['collectors_path'])
metric_queue_size = int(self.config['server'].get('metric_queue_size',
16384))
self.metric_queue = self.manager.Queue(maxsize=metric_queue_size)
self.log.debug('metric_queue_size: %d', metric_queue_size)
#######################################################################
# Handlers
#
        # TODO: Eventually move each handler to its own process space?
#######################################################################
if 'handlers_path' in self.config['server']:
handlers_path = self.config['server']['handlers_path']
            # Make a list if not one
if isinstance(handlers_path, basestring):
handlers_path = handlers_path.split(',')
handlers_path = map(str.strip, handlers_path)
self.config['server']['handlers_path'] = handlers_path
load_include_path(handlers_path)
if 'handlers' not in self.config['server']:
self.log.critical('handlers missing from server section in config')
sys.exit(1)
handlers = self.config['server'].get('handlers')
if isinstance(handlers, basestring):
handlers = [handlers]
# Prevent the Queue Handler from being a normal handler
if 'diamond.handler.queue.QueueHandler' in handlers:
handlers.remove('diamond.handler.queue.QueueHandler')
self.handlers = load_handlers(self.config, handlers)
QueueHandler = load_dynamic_class(
'diamond.handler.queue.QueueHandler',
Handler
)
self.handler_queue = QueueHandler(
config=self.config, queue=self.metric_queue, log=self.log)
handlers_process = multiprocessing.Process(
name="Handlers",
target=handler_process,
args=(self.handlers, self.metric_queue, self.log),
)
handlers_process.daemon = True
handlers_process.start()
#######################################################################
# Signals
#######################################################################
if hasattr(signal, 'SIGHUP'):
signal.signal(signal.SIGHUP, signal_to_exception)
#######################################################################
while True:
try:
active_children = multiprocessing.active_children()
running_processes = []
for process in active_children:
running_processes.append(process.name)
running_processes = set(running_processes)
##############################################################
# Collectors
##############################################################
running_collectors = []
for collector, config in self.config['collectors'].iteritems():
if config.get('enabled', False) is not True:
continue
running_collectors.append(collector)
running_collectors = set(running_collectors)
# Collectors that are running but shouldn't be
for process_name in running_processes - running_collectors:
if 'Collector' not in process_name:
continue
for process in active_children:
if process.name == process_name:
process.terminate()
collector_classes = dict(
(cls.__name__.split('.')[-1], cls)
for cls in collectors.values()
)
load_delay = self.config['server'].get('collectors_load_delay',
1.0)
for process_name in running_collectors - running_processes:
                    # To handle running multiple collectors concurrently, we
                    # split on whitespace and use the first word as the
                    # collector name to spin up
collector_name = process_name.split()[0]
if 'Collector' not in collector_name:
continue
if collector_name not in collector_classes:
self.log.error('Can not find collector %s',
collector_name)
continue
collector = initialize_collector(
collector_classes[collector_name],
name=process_name,
configfile=self.configfile,
handlers=[self.handler_queue])
if collector is None:
self.log.error('Failed to load collector %s',
process_name)
continue
# Splay the loads
time.sleep(float(load_delay))
process = multiprocessing.Process(
name=process_name,
target=collector_process,
args=(collector, self.metric_queue, self.log)
)
process.daemon = True
process.start()
if not handlers_process.is_alive():
self.log.error('Handlers process exited')
if (str_to_bool(self.config['server'].get(
'abort_on_handlers_process_exit', 'False'))):
raise Exception('Handlers process exited')
##############################################################
time.sleep(1)
except SIGHUPException:
# ignore further SIGHUPs for now
original_sighup_handler = signal.getsignal(signal.SIGHUP)
signal.signal(signal.SIGHUP, signal.SIG_IGN)
self.log.info('Reloading state due to HUP')
self.config = load_config(self.configfile)
collectors = load_collectors(
self.config['server']['collectors_path'])
# restore SIGHUP handler
signal.signal(signal.SIGHUP, original_sighup_handler)
|
import json
import logging
from absl import flags
from perfkitbenchmarker import errors
from perfkitbenchmarker import managed_memory_store
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.providers import gcp
from perfkitbenchmarker.providers.gcp import util
FLAGS = flags.FLAGS
STANDARD_TIER = 'STANDARD'
BASIC_TIER = 'BASIC'
COMMAND_TIMEOUT = 600 # 10 minutes
class CloudRedis(managed_memory_store.BaseManagedMemoryStore):
"""Object representing a GCP cloud redis instance."""
CLOUD = gcp.CLOUD
MEMORY_STORE = managed_memory_store.REDIS
def __init__(self, spec):
super(CloudRedis, self).__init__(spec)
self.project = FLAGS.project
self.size = FLAGS.gcp_redis_gb
self.redis_region = FLAGS.redis_region
self.redis_version = spec.config.cloud_redis.redis_version
self.failover_style = FLAGS.redis_failover_style
if self.failover_style == managed_memory_store.Failover.FAILOVER_NONE:
self.tier = BASIC_TIER
elif self.failover_style == managed_memory_store.Failover.FAILOVER_SAME_REGION:
self.tier = STANDARD_TIER
@staticmethod
def CheckPrerequisites(benchmark_config):
if FLAGS.redis_failover_style == managed_memory_store.Failover.FAILOVER_SAME_ZONE:
raise errors.Config.InvalidValue(
'GCP cloud redis does not support same zone failover')
if (FLAGS.managed_memory_store_version and
FLAGS.managed_memory_store_version not in
managed_memory_store.REDIS_VERSIONS):
raise errors.Config.InvalidValue('Invalid Redis version.')
def GetResourceMetadata(self):
"""Returns a dict containing metadata about the instance.
Returns:
dict mapping string property key to value.
"""
result = {
'cloud_redis_failover_style': self.failover_style,
'cloud_redis_size': self.size,
'cloud_redis_tier': self.tier,
'cloud_redis_region': self.redis_region,
'cloud_redis_version': self.redis_version,
}
return result
def _Create(self):
"""Creates the instance."""
cmd = util.GcloudCommand(self, 'redis', 'instances', 'create',
self.name)
cmd.flags['region'] = self.redis_region
cmd.flags['zone'] = FLAGS.zones[0]
cmd.flags['network'] = FLAGS.gce_network_name
cmd.flags['tier'] = self.tier
cmd.flags['size'] = self.size
cmd.flags['redis-version'] = self.redis_version
cmd.flags['labels'] = util.MakeFormattedDefaultTags()
cmd.Issue(timeout=COMMAND_TIMEOUT)
def _IsReady(self):
"""Returns whether cluster is ready."""
instance_details, _, _ = self.DescribeInstance()
return json.loads(instance_details).get('state') == 'READY'
def _Delete(self):
"""Deletes the instance."""
cmd = util.GcloudCommand(self, 'redis', 'instances', 'delete',
self.name)
cmd.flags['region'] = self.redis_region
cmd.Issue(timeout=COMMAND_TIMEOUT, raise_on_failure=False)
def _Exists(self):
"""Returns true if the instance exists."""
_, _, retcode = self.DescribeInstance()
return retcode == 0
def DescribeInstance(self):
"""Calls describe instance using the gcloud tool.
Returns:
stdout, stderr, and retcode.
"""
cmd = util.GcloudCommand(self, 'redis', 'instances', 'describe',
self.name)
cmd.flags['region'] = self.redis_region
stdout, stderr, retcode = cmd.Issue(
suppress_warning=True, raise_on_failure=False)
if retcode != 0:
logging.info('Could not find redis instance %s', self.name)
return stdout, stderr, retcode
@vm_util.Retry(max_retries=5)
def _PopulateEndpoint(self):
"""Populates endpoint information about the instance.
Raises:
errors.Resource.RetryableGetError:
Failed to retrieve information on instance
"""
stdout, _, retcode = self.DescribeInstance()
if retcode != 0:
raise errors.Resource.RetryableGetError(
'Failed to retrieve information on {}'.format(self.name))
    instance_details = json.loads(stdout)
    self._ip = instance_details['host']
    self._port = instance_details['port']
|
from datetime import timedelta
import pytest
from homeassistant.components import no_ip
from homeassistant.setup import async_setup_component
from homeassistant.util.dt import utcnow
from tests.common import async_fire_time_changed
DOMAIN = "test.example.com"
PASSWORD = "xyz789"
UPDATE_URL = no_ip.UPDATE_URL
USERNAME = "[email protected]"
@pytest.fixture
def setup_no_ip(hass, aioclient_mock):
"""Fixture that sets up NO-IP."""
aioclient_mock.get(UPDATE_URL, params={"hostname": DOMAIN}, text="good 0.0.0.0")
hass.loop.run_until_complete(
async_setup_component(
hass,
no_ip.DOMAIN,
{
no_ip.DOMAIN: {
"domain": DOMAIN,
"username": USERNAME,
"password": PASSWORD,
}
},
)
)
async def test_setup(hass, aioclient_mock):
"""Test setup works if update passes."""
aioclient_mock.get(UPDATE_URL, params={"hostname": DOMAIN}, text="nochg 0.0.0.0")
result = await async_setup_component(
hass,
no_ip.DOMAIN,
{no_ip.DOMAIN: {"domain": DOMAIN, "username": USERNAME, "password": PASSWORD}},
)
assert result
assert aioclient_mock.call_count == 1
async_fire_time_changed(hass, utcnow() + timedelta(minutes=5))
await hass.async_block_till_done()
assert aioclient_mock.call_count == 2
async def test_setup_fails_if_update_fails(hass, aioclient_mock):
"""Test setup fails if first update fails."""
aioclient_mock.get(UPDATE_URL, params={"hostname": DOMAIN}, text="nohost")
result = await async_setup_component(
hass,
no_ip.DOMAIN,
{no_ip.DOMAIN: {"domain": DOMAIN, "username": USERNAME, "password": PASSWORD}},
)
assert not result
assert aioclient_mock.call_count == 1
async def test_setup_fails_if_wrong_auth(hass, aioclient_mock):
"""Test setup fails if first update fails through wrong authentication."""
aioclient_mock.get(UPDATE_URL, params={"hostname": DOMAIN}, text="badauth")
result = await async_setup_component(
hass,
no_ip.DOMAIN,
{no_ip.DOMAIN: {"domain": DOMAIN, "username": USERNAME, "password": PASSWORD}},
)
assert not result
assert aioclient_mock.call_count == 1
|
from typing import Any, Dict
import aiohomekit
from aiohomekit.model import Accessory
from aiohomekit.model.characteristics import (
Characteristic,
CharacteristicPermissions,
CharacteristicsTypes,
)
from aiohomekit.model.services import Service, ServicesTypes
from homeassistant.components import zeroconf
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.entity import Entity
from .config_flow import normalize_hkid
from .connection import HKDevice
from .const import CONTROLLER, DOMAIN, ENTITY_MAP, KNOWN_DEVICES, TRIGGERS
from .storage import EntityMapStorage
def escape_characteristic_name(char_name):
"""Escape any dash or dots in a characteristics name."""
return char_name.replace("-", "_").replace(".", "_")
class HomeKitEntity(Entity):
"""Representation of a Home Assistant HomeKit device."""
def __init__(self, accessory, devinfo):
"""Initialise a generic HomeKit device."""
self._accessory = accessory
self._aid = devinfo["aid"]
self._iid = devinfo["iid"]
self._features = 0
self.setup()
self._signals = []
@property
def accessory(self) -> Accessory:
"""Return an Accessory model that this entity is attached to."""
return self._accessory.entity_map.aid(self._aid)
@property
def accessory_info(self) -> Service:
"""Information about the make and model of an accessory."""
return self.accessory.services.first(
service_type=ServicesTypes.ACCESSORY_INFORMATION
)
@property
def service(self) -> Service:
"""Return a Service model that this entity is attached to."""
return self.accessory.services.iid(self._iid)
async def async_added_to_hass(self):
"""Entity added to hass."""
self._signals.append(
self.hass.helpers.dispatcher.async_dispatcher_connect(
self._accessory.signal_state_updated, self.async_write_ha_state
)
)
self._accessory.add_pollable_characteristics(self.pollable_characteristics)
self._accessory.add_watchable_characteristics(self.watchable_characteristics)
async def async_will_remove_from_hass(self):
"""Prepare to be removed from hass."""
self._accessory.remove_pollable_characteristics(self._aid)
self._accessory.remove_watchable_characteristics(self._aid)
for signal_remove in self._signals:
signal_remove()
self._signals.clear()
async def async_put_characteristics(self, characteristics: Dict[str, Any]):
"""
Write characteristics to the device.
A characteristic type is unique within a service, but in order to write
to a named characteristic on a bridge we need to turn its type into
an aid and iid, and send it as a list of tuples, which is what this
helper does.
E.g. you can do:
await entity.async_put_characteristics({
CharacteristicsTypes.ON: True
})
"""
payload = self.service.build_update(characteristics)
return await self._accessory.put_characteristics(payload)
@property
def should_poll(self) -> bool:
"""Return False.
Data update is triggered from HKDevice.
"""
return False
def setup(self):
"""Configure an entity baed on its HomeKit characteristics metadata."""
self.pollable_characteristics = []
self.watchable_characteristics = []
char_types = self.get_characteristic_types()
        # Set up events and/or polling for characteristics directly attached to this entity
for char in self.service.characteristics.filter(char_types=char_types):
self._setup_characteristic(char)
        # Set up events and/or polling for characteristics attached to sub-services of this
        # entity (like an INPUT_SOURCE).
for service in self.accessory.services.filter(parent_service=self.service):
for char in service.characteristics.filter(char_types=char_types):
self._setup_characteristic(char)
def _setup_characteristic(self, char: Characteristic):
"""Configure an entity based on a HomeKit characteristics metadata."""
# Build up a list of (aid, iid) tuples to poll on update()
if CharacteristicPermissions.paired_read in char.perms:
self.pollable_characteristics.append((self._aid, char.iid))
# Build up a list of (aid, iid) tuples to subscribe to
if CharacteristicPermissions.events in char.perms:
self.watchable_characteristics.append((self._aid, char.iid))
@property
def unique_id(self) -> str:
"""Return the ID of this device."""
serial = self.accessory_info.value(CharacteristicsTypes.SERIAL_NUMBER)
return f"homekit-{serial}-{self._iid}"
@property
def name(self) -> str:
"""Return the name of the device if any."""
return self.accessory_info.value(CharacteristicsTypes.NAME)
@property
def available(self) -> bool:
"""Return True if entity is available."""
return self._accessory.available
@property
def device_info(self):
"""Return the device info."""
info = self.accessory_info
accessory_serial = info.value(CharacteristicsTypes.SERIAL_NUMBER)
device_info = {
"identifiers": {(DOMAIN, "serial-number", accessory_serial)},
"name": info.value(CharacteristicsTypes.NAME),
"manufacturer": info.value(CharacteristicsTypes.MANUFACTURER, ""),
"model": info.value(CharacteristicsTypes.MODEL, ""),
"sw_version": info.value(CharacteristicsTypes.FIRMWARE_REVISION, ""),
}
# Some devices only have a single accessory - we don't add a
# via_device otherwise it would be self referential.
bridge_serial = self._accessory.connection_info["serial-number"]
if accessory_serial != bridge_serial:
device_info["via_device"] = (DOMAIN, "serial-number", bridge_serial)
return device_info
def get_characteristic_types(self):
"""Define the homekit characteristics the entity cares about."""
raise NotImplementedError
async def async_setup_entry(hass, entry):
"""Set up a HomeKit connection on a config entry."""
conn = HKDevice(hass, entry, entry.data)
hass.data[KNOWN_DEVICES][conn.unique_id] = conn
# For backwards compat
if entry.unique_id is None:
hass.config_entries.async_update_entry(
entry, unique_id=normalize_hkid(conn.unique_id)
)
if not await conn.async_setup():
del hass.data[KNOWN_DEVICES][conn.unique_id]
raise ConfigEntryNotReady
return True
async def async_setup(hass, config):
"""Set up for Homekit devices."""
map_storage = hass.data[ENTITY_MAP] = EntityMapStorage(hass)
await map_storage.async_initialize()
zeroconf_instance = await zeroconf.async_get_instance(hass)
hass.data[CONTROLLER] = aiohomekit.Controller(zeroconf_instance=zeroconf_instance)
hass.data[KNOWN_DEVICES] = {}
hass.data[TRIGGERS] = {}
return True
async def async_unload_entry(hass, entry):
"""Disconnect from HomeKit devices before unloading entry."""
hkid = entry.data["AccessoryPairingID"]
if hkid in hass.data[KNOWN_DEVICES]:
connection = hass.data[KNOWN_DEVICES][hkid]
await connection.async_unload()
return True
async def async_remove_entry(hass, entry):
"""Cleanup caches before removing config entry."""
hkid = entry.data["AccessoryPairingID"]
hass.data[ENTITY_MAP].async_delete_map(hkid)
|
from django.db import migrations
def migrate_componentlist(apps, schema_editor):
Group = apps.get_model("weblate_auth", "Group")
db_alias = schema_editor.connection.alias
groups = Group.objects.using(db_alias).filter(componentlist__isnull=False)
for group in groups:
group.componentlists.add(group.componentlist)
class Migration(migrations.Migration):
dependencies = [
("weblate_auth", "0008_auto_20200611_1232"),
]
operations = [
migrations.RunPython(
migrate_componentlist, migrations.RunPython.noop, elidable=True
),
]
|
from typing import Any, Dict
import aiodns
from homeassistant.const import CONF_HOST, CONF_PORT
from homeassistant.helpers.typing import HomeAssistantType
from .const import SRV_RECORD_PREFIX
async def async_check_srv_record(hass: HomeAssistantType, host: str) -> Dict[str, Any]:
"""Check if the given host is a valid Minecraft SRV record."""
# Check if 'host' is a valid SRV record.
return_value = None
srv_records = None
try:
srv_records = await aiodns.DNSResolver().query(
host=f"{SRV_RECORD_PREFIX}.{host}", qtype="SRV"
)
    except aiodns.error.DNSError:
        # 'host' is not an SRV record.
pass
else:
# 'host' is a valid SRV record, extract the data.
return_value = {
CONF_HOST: srv_records[0].host,
CONF_PORT: srv_records[0].port,
}
return return_value
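# Illustrative example of the returned structure (host/port values are
# hypothetical):
#
#   await async_check_srv_record(hass, "example.com")
#   -> {CONF_HOST: "mc.example.com", CONF_PORT: 25565}   # SRV record found
#   -> None                                              # no SRV record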
|
from homeassistant.components.flo.const import DOMAIN as FLO_DOMAIN
from homeassistant.const import ATTR_ENTITY_ID, CONF_PASSWORD, CONF_USERNAME
from homeassistant.setup import async_setup_component
from .common import TEST_PASSWORD, TEST_USER_ID
async def test_sensors(hass, config_entry, aioclient_mock_fixture):
"""Test Flo by Moen sensors."""
config_entry.add_to_hass(hass)
assert await async_setup_component(
hass, FLO_DOMAIN, {CONF_USERNAME: TEST_USER_ID, CONF_PASSWORD: TEST_PASSWORD}
)
await hass.async_block_till_done()
assert len(hass.data[FLO_DOMAIN][config_entry.entry_id]["devices"]) == 1
# we should have 5 entities for the device
assert hass.states.get("sensor.current_system_mode").state == "home"
assert hass.states.get("sensor.today_s_water_usage").state == "3.7"
assert hass.states.get("sensor.water_flow_rate").state == "0"
assert hass.states.get("sensor.water_pressure").state == "54.2"
assert hass.states.get("sensor.water_temperature").state == "21.1"
async def test_manual_update_entity(
hass, config_entry, aioclient_mock_fixture, aioclient_mock
):
"""Test manual update entity via service homeasasistant/update_entity."""
config_entry.add_to_hass(hass)
assert await async_setup_component(
hass, FLO_DOMAIN, {CONF_USERNAME: TEST_USER_ID, CONF_PASSWORD: TEST_PASSWORD}
)
await hass.async_block_till_done()
assert len(hass.data[FLO_DOMAIN][config_entry.entry_id]["devices"]) == 1
await async_setup_component(hass, "homeassistant", {})
call_count = aioclient_mock.call_count
await hass.services.async_call(
"homeassistant",
"update_entity",
{ATTR_ENTITY_ID: ["sensor.current_system_mode"]},
blocking=True,
)
assert aioclient_mock.call_count == call_count + 2
|
import logging
import RFXtrx as rfxtrxmod
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_MOTION,
DEVICE_CLASS_SMOKE,
BinarySensorEntity,
)
from homeassistant.const import (
CONF_COMMAND_OFF,
CONF_COMMAND_ON,
CONF_DEVICE_CLASS,
CONF_DEVICES,
STATE_ON,
)
from homeassistant.core import callback
from homeassistant.helpers import event as evt
from . import (
CONF_AUTOMATIC_ADD,
CONF_DATA_BITS,
CONF_OFF_DELAY,
SIGNAL_EVENT,
RfxtrxEntity,
find_possible_pt2262_device,
get_device_id,
get_pt2262_cmd,
get_rfx_object,
)
from .const import COMMAND_OFF_LIST, COMMAND_ON_LIST, DEVICE_PACKET_TYPE_LIGHTING4
_LOGGER = logging.getLogger(__name__)
SENSOR_STATUS_ON = [
"Panic",
"Motion",
"Motion Tamper",
"Light Detected",
"Alarm",
"Alarm Tamper",
]
SENSOR_STATUS_OFF = [
"End Panic",
"No Motion",
"No Motion Tamper",
"Dark Detected",
"Normal",
"Normal Tamper",
]
DEVICE_TYPE_DEVICE_CLASS = {
"X10 Security Motion Detector": DEVICE_CLASS_MOTION,
"KD101 Smoke Detector": DEVICE_CLASS_SMOKE,
"Visonic Powercode Motion Detector": DEVICE_CLASS_MOTION,
"Alecto SA30 Smoke Detector": DEVICE_CLASS_SMOKE,
"RM174RF Smoke Detector": DEVICE_CLASS_SMOKE,
}
def supported(event):
"""Return whether an event supports binary_sensor."""
if isinstance(event, rfxtrxmod.ControlEvent):
return True
if isinstance(event, rfxtrxmod.SensorEvent):
return event.values.get("Sensor Status") in [
*SENSOR_STATUS_ON,
*SENSOR_STATUS_OFF,
]
return False
async def async_setup_entry(
hass,
config_entry,
async_add_entities,
):
"""Set up platform."""
sensors = []
device_ids = set()
pt2262_devices = []
discovery_info = config_entry.data
for packet_id, entity_info in discovery_info[CONF_DEVICES].items():
event = get_rfx_object(packet_id)
if event is None:
_LOGGER.error("Invalid device: %s", packet_id)
continue
if not supported(event):
continue
device_id = get_device_id(
event.device, data_bits=entity_info.get(CONF_DATA_BITS)
)
if device_id in device_ids:
continue
device_ids.add(device_id)
if event.device.packettype == DEVICE_PACKET_TYPE_LIGHTING4:
find_possible_pt2262_device(pt2262_devices, event.device.id_string)
pt2262_devices.append(event.device.id_string)
device = RfxtrxBinarySensor(
event.device,
device_id,
entity_info.get(
CONF_DEVICE_CLASS,
DEVICE_TYPE_DEVICE_CLASS.get(event.device.type_string),
),
entity_info.get(CONF_OFF_DELAY),
entity_info.get(CONF_DATA_BITS),
entity_info.get(CONF_COMMAND_ON),
entity_info.get(CONF_COMMAND_OFF),
)
sensors.append(device)
async_add_entities(sensors)
@callback
def binary_sensor_update(event, device_id):
"""Call for control updates from the RFXtrx gateway."""
if not supported(event):
return
if device_id in device_ids:
return
device_ids.add(device_id)
_LOGGER.info(
"Added binary sensor (Device ID: %s Class: %s Sub: %s Event: %s)",
event.device.id_string.lower(),
event.device.__class__.__name__,
event.device.subtype,
"".join(f"{x:02x}" for x in event.data),
)
sensor = RfxtrxBinarySensor(
event.device,
device_id,
event=event,
device_class=DEVICE_TYPE_DEVICE_CLASS.get(event.device.type_string),
)
async_add_entities([sensor])
# Subscribe to main RFXtrx events
if discovery_info[CONF_AUTOMATIC_ADD]:
hass.helpers.dispatcher.async_dispatcher_connect(
SIGNAL_EVENT, binary_sensor_update
)
class RfxtrxBinarySensor(RfxtrxEntity, BinarySensorEntity):
"""A representation of a RFXtrx binary sensor."""
def __init__(
self,
device,
device_id,
device_class=None,
off_delay=None,
data_bits=None,
cmd_on=None,
cmd_off=None,
event=None,
):
"""Initialize the RFXtrx sensor."""
super().__init__(device, device_id, event=event)
self._device_class = device_class
self._data_bits = data_bits
self._off_delay = off_delay
self._state = None
self._delay_listener = None
self._cmd_on = cmd_on
self._cmd_off = cmd_off
async def async_added_to_hass(self):
"""Restore device state."""
await super().async_added_to_hass()
if self._event is None:
old_state = await self.async_get_last_state()
if old_state is not None:
self._state = old_state.state == STATE_ON
if self._state and self._off_delay is not None:
self._state = False
@property
def force_update(self) -> bool:
"""We should force updates. Repeated states have meaning."""
return True
@property
def device_class(self):
"""Return the sensor class."""
return self._device_class
@property
def is_on(self):
"""Return true if the sensor state is True."""
return self._state
def _apply_event_lighting4(self, event):
"""Apply event for a lighting 4 device."""
if self._data_bits is not None:
cmd = get_pt2262_cmd(event.device.id_string, self._data_bits)
cmd = int(cmd, 16)
if cmd == self._cmd_on:
self._state = True
elif cmd == self._cmd_off:
self._state = False
else:
self._state = True
def _apply_event_standard(self, event):
if event.values.get("Command") in COMMAND_ON_LIST:
self._state = True
elif event.values.get("Command") in COMMAND_OFF_LIST:
self._state = False
elif event.values.get("Sensor Status") in SENSOR_STATUS_ON:
self._state = True
elif event.values.get("Sensor Status") in SENSOR_STATUS_OFF:
self._state = False
def _apply_event(self, event):
"""Apply command from rfxtrx."""
super()._apply_event(event)
if event.device.packettype == DEVICE_PACKET_TYPE_LIGHTING4:
self._apply_event_lighting4(event)
else:
self._apply_event_standard(event)
@callback
def _handle_event(self, event, device_id):
"""Check if event applies to me and update."""
if device_id != self._device_id:
return
_LOGGER.debug(
"Binary sensor update (Device ID: %s Class: %s Sub: %s)",
event.device.id_string,
event.device.__class__.__name__,
event.device.subtype,
)
self._apply_event(event)
self.async_write_ha_state()
if self._delay_listener:
self._delay_listener()
self._delay_listener = None
if self.is_on and self._off_delay is not None:
@callback
def off_delay_listener(now):
"""Switch device off after a delay."""
self._delay_listener = None
self._state = False
self.async_write_ha_state()
self._delay_listener = evt.async_call_later(
self.hass, self._off_delay, off_delay_listener
)
|
import os.path as op
import inspect
from mne.utils import run_tests_if_main
from mne.io import read_raw_nicolet
from mne.io.tests.test_raw import _test_raw_reader
FILE = inspect.getfile(inspect.currentframe())
base_dir = op.join(op.dirname(op.abspath(FILE)), 'data')
fname = op.join(base_dir, 'test_nicolet_raw.data')
def test_data():
"""Test reading raw nicolet files."""
_test_raw_reader(read_raw_nicolet, input_fname=fname, ch_type='eeg',
ecg='auto', eog='auto', emg='auto', misc=['PHO'])
run_tests_if_main()
|
import os
import os.path
import sys
import time
import shutil
import plistlib
import subprocess
import argparse
import tarfile
import tempfile
import collections
import re
try:
import winreg
except ImportError:
pass
sys.path.insert(0, os.path.join(os.path.dirname(__file__), os.pardir,
os.pardir))
import qutebrowser
from scripts import utils
from scripts.dev import update_3rdparty, misc_checks
def call_script(name, *args, python=sys.executable):
"""Call a given shell script.
Args:
name: The script to call.
*args: The arguments to pass.
python: The python interpreter to use.
"""
path = os.path.join(os.path.dirname(__file__), os.pardir, name)
subprocess.run([python, path] + list(args), check=True)
def call_tox(toxenv, *args, python=sys.executable):
"""Call tox.
Args:
toxenv: Which tox environment to use
*args: The arguments to pass.
python: The python interpreter to use.
"""
env = os.environ.copy()
env['PYTHON'] = python
env['PATH'] = os.environ['PATH'] + os.pathsep + os.path.dirname(python)
subprocess.run(
[sys.executable, '-m', 'tox', '-vv', '-e', toxenv] + list(args),
env=env, check=True)
def run_asciidoc2html(args):
"""Common buildsteps used for all OS'."""
utils.print_title("Running asciidoc2html.py")
a2h_args = []
if args.asciidoc is not None:
a2h_args += ['--asciidoc', args.asciidoc]
if args.asciidoc_python is not None:
a2h_args += ['--asciidoc-python', args.asciidoc_python]
call_script('asciidoc2html.py', *a2h_args)
def _maybe_remove(path):
"""Remove a path if it exists."""
try:
shutil.rmtree(path)
except FileNotFoundError:
pass
def _filter_whitelisted(output, patterns):
for line in output.decode('utf-8').splitlines():
if not any(re.fullmatch(pattern, line) for pattern in patterns):
yield line
def smoke_test(executable):
"""Try starting the given qutebrowser executable."""
stdout_whitelist = []
stderr_whitelist = [
# PyInstaller debug output
r'\[.*\] PyInstaller Bootloader .*',
r'\[.*\] LOADER: .*',
# https://github.com/qutebrowser/qutebrowser/issues/4919
(r'objc\[.*\]: .* One of the two will be used\. '
r'Which one is undefined\.'),
(r'QCoreApplication::applicationDirPath: Please instantiate the '
r'QApplication object first'),
(r'\[.*:ERROR:mach_port_broker.mm\(48\)\] bootstrap_look_up '
r'org\.chromium\.Chromium\.rohitfork\.1: Permission denied \(1100\)'),
(r'\[.*:ERROR:mach_port_broker.mm\(43\)\] bootstrap_look_up: '
r'Unknown service name \(1102\)')
]
proc = subprocess.run([executable, '--no-err-windows', '--nowindow',
'--temp-basedir', 'about:blank',
':later 500 quit'], check=True, capture_output=True)
stdout = '\n'.join(_filter_whitelisted(proc.stdout, stdout_whitelist))
stderr = '\n'.join(_filter_whitelisted(proc.stderr, stderr_whitelist))
if stdout:
raise Exception("Unexpected stdout:\n{}".format(stdout))
if stderr:
raise Exception("Unexpected stderr:\n{}".format(stderr))
def patch_mac_app():
"""Patch .app to use our Info.plist and save some space."""
app_path = os.path.join('dist', 'qutebrowser.app')
# Patch Info.plist - pyinstaller's options are too limiting
plist_path = os.path.join(app_path, 'Contents', 'Info.plist')
with open(plist_path, "rb") as f:
plist_data = plistlib.load(f)
plist_data.update(INFO_PLIST_UPDATES)
with open(plist_path, "wb") as f:
plistlib.dump(plist_data, f)
# Replace some duplicate files by symlinks
framework_path = os.path.join(app_path, 'Contents', 'MacOS', 'PyQt5',
'Qt', 'lib', 'QtWebEngineCore.framework')
core_lib = os.path.join(framework_path, 'Versions', '5', 'QtWebEngineCore')
os.remove(core_lib)
core_target = os.path.join(*[os.pardir] * 7, 'MacOS', 'QtWebEngineCore')
os.symlink(core_target, core_lib)
framework_resource_path = os.path.join(framework_path, 'Resources')
for name in os.listdir(framework_resource_path):
file_path = os.path.join(framework_resource_path, name)
target = os.path.join(*[os.pardir] * 5, name)
if os.path.isdir(file_path):
shutil.rmtree(file_path)
else:
os.remove(file_path)
os.symlink(target, file_path)
INFO_PLIST_UPDATES = {
'CFBundleVersion': qutebrowser.__version__,
'CFBundleShortVersionString': qutebrowser.__version__,
'NSSupportsAutomaticGraphicsSwitching': True,
'NSHighResolutionCapable': True,
'CFBundleURLTypes': [{
"CFBundleURLName": "http(s) URL",
"CFBundleURLSchemes": ["http", "https"]
}, {
"CFBundleURLName": "local file URL",
"CFBundleURLSchemes": ["file"]
}],
'CFBundleDocumentTypes': [{
"CFBundleTypeExtensions": ["html", "htm"],
"CFBundleTypeMIMETypes": ["text/html"],
"CFBundleTypeName": "HTML document",
"CFBundleTypeOSTypes": ["HTML"],
"CFBundleTypeRole": "Viewer",
}, {
"CFBundleTypeExtensions": ["xhtml"],
"CFBundleTypeMIMETypes": ["text/xhtml"],
"CFBundleTypeName": "XHTML document",
"CFBundleTypeRole": "Viewer",
}]
}
def build_mac():
"""Build macOS .dmg/.app."""
utils.print_title("Cleaning up...")
for f in ['wc.dmg', 'template.dmg']:
try:
os.remove(f)
except FileNotFoundError:
pass
for d in ['dist', 'build']:
shutil.rmtree(d, ignore_errors=True)
utils.print_title("Updating 3rdparty content")
update_3rdparty.run(ace=False, pdfjs=True, fancy_dmg=False)
utils.print_title("Building .app via pyinstaller")
call_tox('pyinstaller-64', '-r')
utils.print_title("Patching .app")
patch_mac_app()
utils.print_title("Building .dmg")
subprocess.run(['make', '-f', 'scripts/dev/Makefile-dmg'], check=True)
dmg_name = 'qutebrowser-{}.dmg'.format(qutebrowser.__version__)
os.rename('qutebrowser.dmg', dmg_name)
utils.print_title("Running smoke test")
try:
with tempfile.TemporaryDirectory() as tmpdir:
subprocess.run(['hdiutil', 'attach', dmg_name,
'-mountpoint', tmpdir], check=True)
try:
binary = os.path.join(tmpdir, 'qutebrowser.app', 'Contents',
'MacOS', 'qutebrowser')
smoke_test(binary)
finally:
time.sleep(5)
subprocess.run(['hdiutil', 'detach', tmpdir], check=False)
except PermissionError as e:
print("Failed to remove tempdir: {}".format(e))
return [(dmg_name, 'application/x-apple-diskimage', 'macOS .dmg')]
def _get_windows_python_path(x64):
"""Get the path to Python.exe on Windows."""
parts = str(sys.version_info.major), str(sys.version_info.minor)
ver = ''.join(parts)
dot_ver = '.'.join(parts)
if x64:
path = (r'SOFTWARE\Python\PythonCore\{}\InstallPath'
.format(dot_ver))
fallback = r'C:\Python{}\python.exe'.format(ver)
else:
path = (r'SOFTWARE\WOW6432Node\Python\PythonCore\{}-32\InstallPath'
.format(dot_ver))
fallback = r'C:\Python{}-32\python.exe'.format(ver)
try:
key = winreg.OpenKeyEx(winreg.HKEY_LOCAL_MACHINE, path)
return winreg.QueryValueEx(key, 'ExecutablePath')[0]
except FileNotFoundError:
return fallback
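# Illustrative example (not part of the build flow): on a 64-bit CPython 3.8 host this
# queries the ExecutablePath value under HKLM\SOFTWARE\Python\PythonCore\3.8\InstallPath
# and, if the registry key is missing, falls back to r'C:\Python38\python.exe'.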
def build_windows():
"""Build windows executables/setups."""
utils.print_title("Updating 3rdparty content")
update_3rdparty.run(nsis=True, ace=False, pdfjs=True, fancy_dmg=False)
utils.print_title("Building Windows binaries")
python_x64 = _get_windows_python_path(x64=True)
python_x86 = _get_windows_python_path(x64=False)
out_pyinstaller = os.path.join('dist', 'qutebrowser')
out_32 = os.path.join('dist',
'qutebrowser-{}-x86'.format(qutebrowser.__version__))
out_64 = os.path.join('dist',
'qutebrowser-{}-x64'.format(qutebrowser.__version__))
artifacts = []
from scripts.dev import gen_versioninfo
utils.print_title("Updating VersionInfo file")
gen_versioninfo.main()
utils.print_title("Running pyinstaller 32bit")
_maybe_remove(out_32)
call_tox('pyinstaller-32', '-r', python=python_x86)
shutil.move(out_pyinstaller, out_32)
utils.print_title("Running pyinstaller 64bit")
_maybe_remove(out_64)
call_tox('pyinstaller-64', '-r', python=python_x64)
shutil.move(out_pyinstaller, out_64)
utils.print_title("Running 32bit smoke test")
smoke_test(os.path.join(out_32, 'qutebrowser.exe'))
utils.print_title("Running 64bit smoke test")
smoke_test(os.path.join(out_64, 'qutebrowser.exe'))
utils.print_title("Building installers")
subprocess.run(['makensis.exe',
'/DVERSION={}'.format(qutebrowser.__version__),
'misc/nsis/qutebrowser.nsi'], check=True)
subprocess.run(['makensis.exe',
'/DX86',
'/DVERSION={}'.format(qutebrowser.__version__),
'misc/nsis/qutebrowser.nsi'], check=True)
name_32 = 'qutebrowser-{}-win32.exe'.format(qutebrowser.__version__)
name_64 = 'qutebrowser-{}-amd64.exe'.format(qutebrowser.__version__)
artifacts += [
(os.path.join('dist', name_32),
'application/vnd.microsoft.portable-executable',
'Windows 32bit installer'),
(os.path.join('dist', name_64),
'application/vnd.microsoft.portable-executable',
'Windows 64bit installer'),
]
utils.print_title("Zipping 32bit standalone...")
template = 'qutebrowser-{}-windows-standalone-{}'
name = os.path.join('dist',
template.format(qutebrowser.__version__, 'win32'))
shutil.make_archive(name, 'zip', 'dist', os.path.basename(out_32))
artifacts.append(('{}.zip'.format(name),
'application/zip',
'Windows 32bit standalone'))
utils.print_title("Zipping 64bit standalone...")
name = os.path.join('dist',
template.format(qutebrowser.__version__, 'amd64'))
shutil.make_archive(name, 'zip', 'dist', os.path.basename(out_64))
artifacts.append(('{}.zip'.format(name),
'application/zip',
'Windows 64bit standalone'))
return artifacts
def build_sdist():
"""Build an sdist and list the contents."""
utils.print_title("Building sdist")
_maybe_remove('dist')
subprocess.run([sys.executable, 'setup.py', 'sdist'], check=True)
dist_files = os.listdir(os.path.abspath('dist'))
assert len(dist_files) == 1
dist_file = os.path.join('dist', dist_files[0])
subprocess.run(['gpg', '--detach-sign', '-a', dist_file], check=True)
tar = tarfile.open(dist_file)
by_ext = collections.defaultdict(list)
for tarinfo in tar.getmembers():
if not tarinfo.isfile():
continue
name = os.sep.join(tarinfo.name.split(os.sep)[1:])
_base, ext = os.path.splitext(name)
by_ext[ext].append(name)
assert '.pyc' not in by_ext
utils.print_title("sdist contents")
for ext, files in sorted(by_ext.items()):
utils.print_subtitle(ext)
print('\n'.join(files))
filename = 'qutebrowser-{}.tar.gz'.format(qutebrowser.__version__)
artifacts = [
(os.path.join('dist', filename), 'application/gzip', 'Source release'),
(os.path.join('dist', filename + '.asc'), 'application/pgp-signature',
'Source release - PGP signature'),
]
return artifacts
def test_makefile():
"""Make sure the Makefile works correctly."""
utils.print_title("Testing makefile")
with tempfile.TemporaryDirectory() as tmpdir:
subprocess.run(['make', '-f', 'misc/Makefile',
'DESTDIR={}'.format(tmpdir), 'install'], check=True)
def read_github_token():
"""Read the GitHub API token from disk."""
token_file = os.path.join(os.path.expanduser('~'), '.gh_token')
with open(token_file, encoding='ascii') as f:
token = f.read().strip()
return token
def github_upload(artifacts, tag):
"""Upload the given artifacts to GitHub.
Args:
artifacts: A list of (filename, mimetype, description) tuples
tag: The name of the release tag
"""
import github3
import github3.exceptions
utils.print_title("Uploading to github...")
token = read_github_token()
gh = github3.login(token=token)
repo = gh.repository('qutebrowser', 'qutebrowser')
release = None # to satisfy pylint
for release in repo.releases():
if release.tag_name == tag:
break
else:
raise Exception("No release found for {!r}!".format(tag))
for filename, mimetype, description in artifacts:
while True:
print("Uploading {}".format(filename))
basename = os.path.basename(filename)
assets = [asset for asset in release.assets()
if asset.name == basename]
if assets:
print("Assets already exist: {}".format(assets))
print("Press enter to continue anyways or Ctrl-C to abort.")
input()
try:
with open(filename, 'rb') as f:
release.upload_asset(mimetype, basename, f, description)
except github3.exceptions.ConnectionError as e:
utils.print_error('Failed to upload: {}'.format(e))
print("Press Enter to retry...", file=sys.stderr)
input()
print("Retrying!")
assets = [asset for asset in release.assets()
if asset.name == basename]
if assets:
asset = assets[0]
print("Deleting stray asset {}".format(asset.name))
asset.delete()
else:
break
def pypi_upload(artifacts):
"""Upload the given artifacts to PyPI using twine."""
utils.print_title("Uploading to PyPI...")
filenames = [a[0] for a in artifacts]
subprocess.run([sys.executable, '-m', 'twine', 'upload'] + filenames,
check=True)
def upgrade_sdist_dependencies():
"""Make sure we have the latest tools for an sdist release."""
subprocess.run([sys.executable, '-m', 'pip', 'install', '-U', 'twine',
'pip', 'wheel', 'setuptools'], check=True)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--no-asciidoc', action='store_true',
help="Don't generate docs")
parser.add_argument('--asciidoc', help="Full path to asciidoc.py. "
"If not given, it's searched in PATH.",
nargs='?')
    parser.add_argument('--asciidoc-python', help="Python to use for asciidoc. "
                        "If not given, the current Python interpreter is used.",
nargs='?')
parser.add_argument('--upload', action='store_true', required=False,
help="Toggle to upload the release to GitHub")
args = parser.parse_args()
utils.change_cwd()
upload_to_pypi = False
if args.upload:
# Fail early when trying to upload without github3 installed
# or without API token
import github3 # pylint: disable=unused-import
read_github_token()
if not misc_checks.check_git():
utils.print_error("Refusing to do a release with a dirty git tree")
sys.exit(1)
if args.no_asciidoc:
os.makedirs(os.path.join('qutebrowser', 'html', 'doc'), exist_ok=True)
else:
run_asciidoc2html(args)
if os.name == 'nt':
artifacts = build_windows()
elif sys.platform == 'darwin':
artifacts = build_mac()
else:
upgrade_sdist_dependencies()
test_makefile()
artifacts = build_sdist()
upload_to_pypi = True
if args.upload:
version_tag = "v" + qutebrowser.__version__
utils.print_title("Press enter to release {}...".format(version_tag))
input()
github_upload(artifacts, version_tag)
if upload_to_pypi:
pypi_upload(artifacts)
else:
print()
utils.print_title("Artifacts")
for artifact in artifacts:
print(artifact)
if __name__ == '__main__':
main()
|
from homeassistant.components.homeassistant import (
DOMAIN as HA_DOMAIN,
SERVICE_UPDATE_ENTITY,
)
from homeassistant.components.smart_meter_texas.const import (
ELECTRIC_METER,
ESIID,
METER_NUMBER,
)
from homeassistant.const import ATTR_ENTITY_ID, CONF_ADDRESS
from homeassistant.setup import async_setup_component
from .conftest import TEST_ENTITY_ID, refresh_data, setup_integration
from tests.async_mock import patch
async def test_sensor(hass, config_entry, aioclient_mock):
"""Test that the sensor is setup."""
await setup_integration(hass, config_entry, aioclient_mock)
await refresh_data(hass, config_entry, aioclient_mock)
meter = hass.states.get(TEST_ENTITY_ID)
assert meter
assert meter.state == "9751.212"
async def test_name(hass, config_entry, aioclient_mock):
"""Test sensor name property."""
await setup_integration(hass, config_entry, aioclient_mock)
await refresh_data(hass, config_entry, aioclient_mock)
meter = hass.states.get(TEST_ENTITY_ID)
assert meter.name == f"{ELECTRIC_METER} 123456789"
async def test_attributes(hass, config_entry, aioclient_mock):
"""Test meter attributes."""
await setup_integration(hass, config_entry, aioclient_mock)
await refresh_data(hass, config_entry, aioclient_mock)
meter = hass.states.get(TEST_ENTITY_ID)
assert meter.attributes[METER_NUMBER] == "123456789"
assert meter.attributes[ESIID] == "12345678901234567"
assert meter.attributes[CONF_ADDRESS] == "123 MAIN ST"
async def test_generic_entity_update_service(hass, config_entry, aioclient_mock):
"""Test generic update entity service homeasasistant/update_entity."""
await setup_integration(hass, config_entry, aioclient_mock)
await async_setup_component(hass, HA_DOMAIN, {})
with patch("smart_meter_texas.Meter.read_meter") as updater:
await hass.services.async_call(
HA_DOMAIN,
SERVICE_UPDATE_ENTITY,
{ATTR_ENTITY_ID: TEST_ENTITY_ID},
blocking=True,
)
await hass.async_block_till_done()
updater.assert_called_once()
|
import unittest
import numpy as np
from chainer import testing
from chainercv.transforms import random_crop
class TestRandomCrop(unittest.TestCase):
def test_random_crop(self):
img = np.random.uniform(size=(3, 48, 32))
out, param = random_crop(img, (48, 32), return_param=True)
y_slice = param['y_slice']
x_slice = param['x_slice']
np.testing.assert_equal(out, img)
self.assertEqual(y_slice, slice(0, 48))
self.assertEqual(x_slice, slice(0, 32))
out = random_crop(img, (24, 12))
self.assertEqual(out.shape[1:], (24, 12))
testing.run_module(__name__, __file__)
|
import logging
from pyqvrpro.client import QVRResponseError
from homeassistant.components.camera import Camera
from .const import DOMAIN, SHORT_NAME
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the QVR Pro camera platform."""
if discovery_info is None:
return
client = hass.data[DOMAIN]["client"]
entities = []
for channel in hass.data[DOMAIN]["channels"]:
stream_source = get_stream_source(channel["guid"], client)
entities.append(
QVRProCamera(**channel, stream_source=stream_source, client=client)
)
add_entities(entities)
def get_stream_source(guid, client):
"""Get channel stream source."""
try:
resp = client.get_channel_live_stream(guid, protocol="rtsp")
full_url = resp["resourceUris"]
protocol = full_url[:7]
auth = f"{client.get_auth_string()}@"
url = full_url[7:]
return f"{protocol}{auth}{url}"
except QVRResponseError as ex:
_LOGGER.error(ex)
return None
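# Illustrative result (hypothetical values): for a resource URI of 'rtsp://host/stream' and an
# auth string of 'user:pass', the stream source returned above is 'rtsp://user:pass@host/stream'.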
class QVRProCamera(Camera):
"""Representation of a QVR Pro camera."""
def __init__(self, name, model, brand, channel_index, guid, stream_source, client):
"""Init QVR Pro camera."""
self._name = f"{SHORT_NAME} {name}"
self._model = model
self._brand = brand
self.index = channel_index
self.guid = guid
self._client = client
self._stream_source = stream_source
self._supported_features = 0
super().__init__()
@property
def name(self):
"""Return the name of the entity."""
return self._name
@property
def model(self):
"""Return the model of the entity."""
return self._model
@property
def brand(self):
"""Return the brand of the entity."""
return self._brand
@property
def device_state_attributes(self):
"""Get the state attributes."""
attrs = {"qvr_guid": self.guid}
return attrs
def camera_image(self):
"""Get image bytes from camera."""
try:
return self._client.get_snapshot(self.guid)
except QVRResponseError as ex:
_LOGGER.error("Error getting image: %s", ex)
self._client.connect()
return self._client.get_snapshot(self.guid)
async def stream_source(self):
"""Get stream source."""
return self._stream_source
@property
def supported_features(self):
"""Get supported features."""
return self._supported_features
|
from datetime import datetime, timezone
from typing import Optional, Union
import discord
from redbot.core import checks, commands, modlog
from redbot.core.bot import Red
from redbot.core.i18n import Translator, cog_i18n
from redbot.core.utils.chat_formatting import box
from redbot.core.utils.menus import DEFAULT_CONTROLS, menu
_ = Translator("ModLog", __file__)
@cog_i18n(_)
class ModLog(commands.Cog):
"""Manage log channels for moderation actions."""
def __init__(self, bot: Red):
super().__init__()
self.bot = bot
async def red_delete_data_for_user(self, **kwargs):
""" Nothing to delete """
return
@commands.group()
@checks.guildowner_or_permissions(administrator=True)
async def modlogset(self, ctx: commands.Context):
"""Manage modlog settings."""
pass
@checks.is_owner()
@modlogset.command(hidden=True, name="fixcasetypes")
async def reapply_audittype_migration(self, ctx: commands.Context):
"""Command to fix misbehaving casetypes."""
await modlog.handle_auditype_key()
await ctx.tick()
@modlogset.command(aliases=["channel"])
@commands.guild_only()
async def modlog(self, ctx: commands.Context, channel: discord.TextChannel = None):
"""Set a channel as the modlog.
Omit `<channel>` to disable the modlog.
"""
guild = ctx.guild
if channel:
if channel.permissions_for(guild.me).send_messages:
await modlog.set_modlog_channel(guild, channel)
await ctx.send(
_("Mod events will be sent to {channel}.").format(channel=channel.mention)
)
else:
await ctx.send(
_("I do not have permissions to send messages in {channel}!").format(
channel=channel.mention
)
)
else:
try:
await modlog.get_modlog_channel(guild)
except RuntimeError:
await ctx.send(_("Mod log is already disabled."))
else:
await modlog.set_modlog_channel(guild, None)
await ctx.send(_("Mod log deactivated."))
@modlogset.command(name="cases")
@commands.guild_only()
async def set_cases(self, ctx: commands.Context, action: str = None):
"""Enable or disable case creation for a mod action."""
guild = ctx.guild
if action is None: # No args given
casetypes = await modlog.get_all_casetypes(guild)
await ctx.send_help()
lines = []
for ct in casetypes:
enabled = _("enabled") if await ct.is_enabled() else _("disabled")
lines.append(f"{ct.name} : {enabled}")
await ctx.send(_("Current settings:\n") + box("\n".join(lines)))
return
casetype = await modlog.get_casetype(action, guild)
if not casetype:
await ctx.send(_("That action is not registered."))
else:
enabled = await casetype.is_enabled()
await casetype.set_enabled(not enabled)
await ctx.send(
_("Case creation for {action_name} actions is now {enabled}.").format(
action_name=action, enabled=_("enabled") if not enabled else _("disabled")
)
)
@modlogset.command()
@commands.guild_only()
async def resetcases(self, ctx: commands.Context):
"""Reset all modlog cases in this server."""
guild = ctx.guild
await modlog.reset_cases(guild)
await ctx.send(_("Cases have been reset."))
@commands.command()
@commands.guild_only()
async def case(self, ctx: commands.Context, number: int):
"""Show the specified case."""
try:
case = await modlog.get_case(number, ctx.guild, self.bot)
except RuntimeError:
await ctx.send(_("That case does not exist for that server."))
return
else:
if await ctx.embed_requested():
await ctx.send(embed=await case.message_content(embed=True))
else:
message = _("{case}\n**Timestamp:** {timestamp}").format(
case=await case.message_content(embed=False),
timestamp=datetime.utcfromtimestamp(case.created_at).strftime(
"%Y-%m-%d %H:%M:%S UTC"
),
)
await ctx.send(message)
@commands.command()
@commands.guild_only()
async def casesfor(self, ctx: commands.Context, *, member: Union[discord.Member, int]):
"""Display cases for the specified member."""
try:
if isinstance(member, int):
cases = await modlog.get_cases_for_member(
bot=ctx.bot, guild=ctx.guild, member_id=member
)
else:
cases = await modlog.get_cases_for_member(
bot=ctx.bot, guild=ctx.guild, member=member
)
except discord.NotFound:
return await ctx.send(_("That user does not exist."))
except discord.HTTPException:
return await ctx.send(
_("Something unexpected went wrong while fetching that user by ID.")
)
if not cases:
return await ctx.send(_("That user does not have any cases."))
embed_requested = await ctx.embed_requested()
if embed_requested:
rendered_cases = [await case.message_content(embed=True) for case in cases]
        else:
rendered_cases = []
for case in cases:
message = _("{case}\n**Timestamp:** {timestamp}").format(
case=await case.message_content(embed=False),
timestamp=datetime.utcfromtimestamp(case.created_at).strftime(
"%Y-%m-%d %H:%M:%S UTC"
),
)
rendered_cases.append(message)
await menu(ctx, rendered_cases, DEFAULT_CONTROLS)
@commands.command()
@commands.guild_only()
async def reason(self, ctx: commands.Context, case: Optional[int], *, reason: str):
"""Specify a reason for a modlog case.
Please note that you can only edit cases you are
the owner of unless you are a mod, admin or server owner.
If no case number is specified, the latest case will be used.
"""
author = ctx.author
guild = ctx.guild
if case is None:
# get the latest case
case_obj = await modlog.get_latest_case(guild, self.bot)
if case_obj is None:
await ctx.send(_("There are no modlog cases in this server."))
return
else:
try:
case_obj = await modlog.get_case(case, guild, self.bot)
except RuntimeError:
await ctx.send(_("That case does not exist!"))
return
is_guild_owner = author == guild.owner
is_case_author = author == case_obj.moderator
author_is_mod = await ctx.bot.is_mod(author)
if not (is_guild_owner or is_case_author or author_is_mod):
await ctx.send(_("You are not authorized to modify that case!"))
return
to_modify = {"reason": reason}
if case_obj.moderator != author:
to_modify["amended_by"] = author
to_modify["modified_at"] = ctx.message.created_at.replace(tzinfo=timezone.utc).timestamp()
await case_obj.edit(to_modify)
await ctx.send(
_("Reason for case #{num} has been updated.").format(num=case_obj.case_number)
)
|
import datetime
import logging
import os
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import DATA_MEGABYTES
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.reload import setup_reload_service
from . import DOMAIN, PLATFORMS
_LOGGER = logging.getLogger(__name__)
CONF_FILE_PATHS = "file_paths"
ICON = "mdi:file"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_FILE_PATHS): vol.All(cv.ensure_list, [cv.isfile])}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the file size sensor."""
setup_reload_service(hass, DOMAIN, PLATFORMS)
sensors = []
for path in config.get(CONF_FILE_PATHS):
if not hass.config.is_allowed_path(path):
_LOGGER.error("Filepath %s is not valid or allowed", path)
continue
sensors.append(Filesize(path))
if sensors:
add_entities(sensors, True)
class Filesize(Entity):
"""Encapsulates file size information."""
def __init__(self, path):
"""Initialize the data object."""
        self._path = path  # Need to check it's a valid path
self._size = None
self._last_updated = None
self._name = path.split("/")[-1]
self._unit_of_measurement = DATA_MEGABYTES
def update(self):
"""Update the sensor."""
statinfo = os.stat(self._path)
self._size = statinfo.st_size
last_updated = datetime.datetime.fromtimestamp(statinfo.st_mtime)
self._last_updated = last_updated.isoformat()
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the size of the file in MB."""
decimals = 2
state_mb = round(self._size / 1e6, decimals)
return state_mb
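    # Illustrative example: a 1_048_576-byte file is reported as round(1048576 / 1e6, 2) == 1.05.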
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return ICON
@property
def device_state_attributes(self):
"""Return other details about the sensor state."""
return {
"path": self._path,
"last_updated": self._last_updated,
"bytes": self._size,
}
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement
|
import errno
from functools import partial
import logging
import socket
import broadlink as blk
from broadlink.exceptions import (
AuthenticationError,
BroadlinkException,
NetworkTimeoutError,
)
import voluptuous as vol
from homeassistant import config_entries, data_entry_flow
from homeassistant.const import CONF_HOST, CONF_MAC, CONF_NAME, CONF_TIMEOUT, CONF_TYPE
from homeassistant.helpers import config_validation as cv
from .const import ( # pylint: disable=unused-import
DEFAULT_PORT,
DEFAULT_TIMEOUT,
DOMAIN,
DOMAINS_AND_TYPES,
)
from .helpers import format_mac
_LOGGER = logging.getLogger(__name__)
class BroadlinkFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a Broadlink config flow."""
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL
VERSION = 1
def __init__(self):
"""Initialize the Broadlink flow."""
self.device = None
async def async_set_device(self, device, raise_on_progress=True):
"""Define a device for the config flow."""
supported_types = {
device_type
for _, device_types in DOMAINS_AND_TYPES
for device_type in device_types
}
if device.type not in supported_types:
_LOGGER.error(
"Unsupported device: %s. If it worked before, please open "
"an issue at https://github.com/home-assistant/core/issues",
hex(device.devtype),
)
raise data_entry_flow.AbortFlow("not_supported")
await self.async_set_unique_id(
device.mac.hex(), raise_on_progress=raise_on_progress
)
self.device = device
# pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167
self.context["title_placeholders"] = {
"name": device.name,
"model": device.model,
"host": device.host[0],
}
async def async_step_user(self, user_input=None):
"""Handle a flow initiated by the user."""
errors = {}
if user_input is not None:
host = user_input[CONF_HOST]
timeout = user_input.get(CONF_TIMEOUT, DEFAULT_TIMEOUT)
try:
hello = partial(blk.discover, discover_ip_address=host, timeout=timeout)
device = (await self.hass.async_add_executor_job(hello))[0]
except IndexError:
errors["base"] = "cannot_connect"
err_msg = "Device not found"
except OSError as err:
if err.errno in {errno.EINVAL, socket.EAI_NONAME}:
errors["base"] = "invalid_host"
err_msg = "Invalid hostname or IP address"
elif err.errno == errno.ENETUNREACH:
errors["base"] = "cannot_connect"
err_msg = str(err)
else:
errors["base"] = "unknown"
err_msg = str(err)
else:
device.timeout = timeout
if self.unique_id is None:
await self.async_set_device(device)
self._abort_if_unique_id_configured(
updates={CONF_HOST: device.host[0], CONF_TIMEOUT: timeout}
)
return await self.async_step_auth()
# The user came from a factory reset.
# We need to check whether the host is correct.
if device.mac == self.device.mac:
await self.async_set_device(device, raise_on_progress=False)
return await self.async_step_auth()
errors["base"] = "invalid_host"
err_msg = (
"Invalid host for this configuration flow. The MAC address should be "
f"{format_mac(self.device.mac)}, but {format_mac(device.mac)} was given"
)
_LOGGER.error("Failed to connect to the device at %s: %s", host, err_msg)
if self.source == config_entries.SOURCE_IMPORT:
return self.async_abort(reason=errors["base"])
data_schema = {
vol.Required(CONF_HOST): str,
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
}
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(data_schema),
errors=errors,
)
async def async_step_auth(self):
"""Authenticate to the device."""
device = self.device
errors = {}
try:
await self.hass.async_add_executor_job(device.auth)
except AuthenticationError:
errors["base"] = "invalid_auth"
await self.async_set_unique_id(device.mac.hex())
return await self.async_step_reset(errors=errors)
except NetworkTimeoutError as err:
errors["base"] = "cannot_connect"
err_msg = str(err)
except BroadlinkException as err:
errors["base"] = "unknown"
err_msg = str(err)
except OSError as err:
if err.errno == errno.ENETUNREACH:
errors["base"] = "cannot_connect"
err_msg = str(err)
else:
errors["base"] = "unknown"
err_msg = str(err)
else:
await self.async_set_unique_id(device.mac.hex())
if self.source == config_entries.SOURCE_IMPORT:
_LOGGER.warning(
"The %s at %s is ready to be configured. Please "
"click Configuration in the sidebar and click "
"Integrations. Then find the device there and click "
"Configure to finish the setup",
device.model,
device.host[0],
)
if device.is_locked:
return await self.async_step_unlock()
return await self.async_step_finish()
await self.async_set_unique_id(device.mac.hex())
_LOGGER.error(
"Failed to authenticate to the device at %s: %s", device.host[0], err_msg
)
return self.async_show_form(step_id="auth", errors=errors)
async def async_step_reset(self, user_input=None, errors=None):
"""Guide the user to unlock the device manually.
We are unable to authenticate because the device is locked.
The user needs to factory reset the device to make it work
with Home Assistant.
"""
if user_input is None:
return self.async_show_form(step_id="reset", errors=errors)
return await self.async_step_user(
{CONF_HOST: self.device.host[0], CONF_TIMEOUT: self.device.timeout}
)
async def async_step_unlock(self, user_input=None):
"""Unlock the device.
The authentication succeeded, but the device is locked.
We can offer an unlock to prevent authorization errors.
"""
device = self.device
errors = {}
if user_input is None:
pass
elif user_input["unlock"]:
try:
await self.hass.async_add_executor_job(device.set_lock, False)
except NetworkTimeoutError as err:
errors["base"] = "cannot_connect"
err_msg = str(err)
except BroadlinkException as err:
errors["base"] = "unknown"
err_msg = str(err)
except OSError as err:
if err.errno == errno.ENETUNREACH:
errors["base"] = "cannot_connect"
err_msg = str(err)
else:
errors["base"] = "unknown"
err_msg = str(err)
else:
return await self.async_step_finish()
_LOGGER.error(
"Failed to unlock the device at %s: %s", device.host[0], err_msg
)
else:
return await self.async_step_finish()
data_schema = {vol.Required("unlock", default=False): bool}
return self.async_show_form(
step_id="unlock", data_schema=vol.Schema(data_schema), errors=errors
)
async def async_step_finish(self, user_input=None):
"""Choose a name for the device and create config entry."""
device = self.device
errors = {}
# Abort reauthentication flow.
self._abort_if_unique_id_configured(
updates={CONF_HOST: device.host[0], CONF_TIMEOUT: device.timeout}
)
if user_input is not None:
return self.async_create_entry(
title=user_input[CONF_NAME],
data={
CONF_HOST: device.host[0],
CONF_MAC: device.mac.hex(),
CONF_TYPE: device.devtype,
CONF_TIMEOUT: device.timeout,
},
)
data_schema = {vol.Required(CONF_NAME, default=device.name): str}
return self.async_show_form(
step_id="finish", data_schema=vol.Schema(data_schema), errors=errors
)
async def async_step_import(self, import_info):
"""Import a device."""
if any(
import_info[CONF_HOST] == entry.data[CONF_HOST]
for entry in self._async_current_entries()
):
return self.async_abort(reason="already_configured")
return await self.async_step_user(import_info)
async def async_step_reauth(self, data):
"""Reauthenticate to the device."""
device = blk.gendevice(
data[CONF_TYPE],
(data[CONF_HOST], DEFAULT_PORT),
bytes.fromhex(data[CONF_MAC]),
name=data[CONF_NAME],
)
device.timeout = data[CONF_TIMEOUT]
await self.async_set_device(device)
return await self.async_step_reset()
|
import diamond.collector
import time
import os
try:
import redis
except ImportError:
redis = None
SOCKET_PREFIX = 'unix:'
SOCKET_PREFIX_LEN = len(SOCKET_PREFIX)
class RedisCollector(diamond.collector.Collector):
_DATABASE_COUNT = 16
_DEFAULT_DB = 0
_DEFAULT_HOST = 'localhost'
_DEFAULT_PORT = 6379
_DEFAULT_SOCK_TIMEOUT = 5
_KEYS = {'clients.blocked': 'blocked_clients',
'clients.connected': 'connected_clients',
'clients.longest_output_list': 'client_longest_output_list',
'cpu.parent.sys': 'used_cpu_sys',
'cpu.children.sys': 'used_cpu_sys_children',
'cpu.parent.user': 'used_cpu_user',
'cpu.children.user': 'used_cpu_user_children',
'hash_max_zipmap.entries': 'hash_max_zipmap_entries',
'hash_max_zipmap.value': 'hash_max_zipmap_value',
'keys.evicted': 'evicted_keys',
'keys.expired': 'expired_keys',
'keyspace.hits': 'keyspace_hits',
'keyspace.misses': 'keyspace_misses',
'last_save.changes_since': 'changes_since_last_save',
'last_save.time': 'last_save_time',
'memory.internal_view': 'used_memory',
'memory.external_view': 'used_memory_rss',
'memory.fragmentation_ratio': 'mem_fragmentation_ratio',
'process.commands_processed': 'total_commands_processed',
'process.connections_received': 'total_connections_received',
'process.uptime': 'uptime_in_seconds',
'pubsub.channels': 'pubsub_channels',
'pubsub.patterns': 'pubsub_patterns',
'replication.master_sync_in_progress': 'master_sync_in_progress',
'slaves.connected': 'connected_slaves',
'slaves.last_io': 'master_last_io_seconds_ago'}
_RENAMED_KEYS = {'last_save.changes_since': 'rdb_changes_since_last_save',
'last_save.time': 'rdb_last_save_time'}
def process_config(self):
super(RedisCollector, self).process_config()
instance_list = self.config['instances']
        # configobj makes a str out of a single-element list, so convert it back
if isinstance(instance_list, basestring):
instance_list = [instance_list]
# process original single redis instance
if len(instance_list) == 0:
host = self.config['host']
port = int(self.config['port'])
auth = self.config['auth']
if auth is not None:
instance_list.append('%s:%d/%s' % (host, port, auth))
else:
instance_list.append('%s:%d' % (host, port))
self.instances = {}
for instance in instance_list:
if '@' in instance:
(nickname, hostport) = instance.split('@', 1)
else:
nickname = None
hostport = instance
if hostport.startswith(SOCKET_PREFIX):
unix_socket, __, port_auth = hostport[
SOCKET_PREFIX_LEN:].partition(':')
auth = port_auth.partition('/')[2] or None
if nickname is None:
nickname = os.path.splitext(
os.path.basename(unix_socket))[0]
self.instances[nickname] = (self._DEFAULT_HOST,
self._DEFAULT_PORT,
unix_socket,
auth)
else:
if '/' in hostport:
parts = hostport.split('/')
hostport = parts[0]
auth = '/'.join(parts[1:])
else:
auth = None
if ':' in hostport:
if hostport[0] == ':':
host = self._DEFAULT_HOST
port = int(hostport[1:])
else:
parts = hostport.split(':')
host = parts[0]
port = int(parts[1])
else:
host = hostport
port = self._DEFAULT_PORT
if nickname is None:
nickname = str(port)
self.instances[nickname] = (host, port, None, auth)
self.log.debug("Configured instances: %s" % self.instances.items())
def get_default_config_help(self):
config_help = super(RedisCollector, self).get_default_config_help()
config_help.update({
'host': 'Hostname to collect from',
'port': 'Port number to collect from',
'timeout': 'Socket timeout',
            'db': 'Database index to connect to',
            'auth': 'Password used to authenticate, if required',
'databases': 'how many database instances to collect',
'instances': "Redis addresses, comma separated, syntax:" +
" nick1@host:port, nick2@:port or nick3@host"
})
return config_help
def get_default_config(self):
"""
Return default config
:rtype: dict
"""
config = super(RedisCollector, self).get_default_config()
config.update({
'host': self._DEFAULT_HOST,
'port': self._DEFAULT_PORT,
'timeout': self._DEFAULT_SOCK_TIMEOUT,
'db': self._DEFAULT_DB,
'auth': None,
'databases': self._DATABASE_COUNT,
'path': 'redis',
'instances': [],
})
return config
def _client(self, host, port, unix_socket, auth):
"""Return a redis client for the configuration.
:param str host: redis host
:param int port: redis port
:rtype: redis.Redis
"""
db = int(self.config['db'])
timeout = int(self.config['timeout'])
try:
cli = redis.Redis(host=host, port=port,
db=db, socket_timeout=timeout, password=auth,
unix_socket_path=unix_socket)
cli.ping()
return cli
except Exception as ex:
self.log.error("RedisCollector: failed to connect to %s:%i. %s.",
unix_socket or host, port, ex)
def _precision(self, value):
"""Return the precision of the number
:param str value: The value to find the precision of
:rtype: int
"""
value = str(value)
decimal = value.rfind('.')
if decimal == -1:
return 0
return len(value) - decimal - 1
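        # Illustrative examples: _precision('3.14') -> 2, _precision(7) -> 0, so published
        # values keep the same number of decimal places as the raw value.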
def _publish_key(self, nick, key):
"""Return the full key for the partial key.
:param str nick: Nickname for Redis instance
:param str key: The key name
:rtype: str
"""
return '%s.%s' % (nick, key)
def _get_info(self, host, port, unix_socket, auth):
"""Return info dict from specified Redis instance
:param str host: redis host
:param int port: redis port
:rtype: dict
"""
client = self._client(host, port, unix_socket, auth)
if client is None:
return None
info = client.info()
del client
return info
def _get_config(self, host, port, unix_socket, auth, config_key):
"""Return config string from specified Redis instance and config key
:param str host: redis host
:param int port: redis port
        :param str config_key: redis config key to fetch
:rtype: str
"""
client = self._client(host, port, unix_socket, auth)
if client is None:
return None
config_value = client.config_get(config_key)
del client
return config_value
def collect_instance(self, nick, host, port, unix_socket, auth):
"""Collect metrics from a single Redis instance
:param str nick: nickname of redis instance
:param str host: redis host
:param int port: redis port
:param str unix_socket: unix socket, if applicable
:param str auth: authentication password
"""
# Connect to redis and get the info
info = self._get_info(host, port, unix_socket, auth)
if info is None:
return
# The structure should include the port for multiple instances per
# server
data = dict()
        # Role needs to be handled outside the _KEYS dict
        # since the value is a string, not an int / float
# Also, master_sync_in_progress is only available if the
# redis instance is a slave, so default it here so that
# the metric is cleared if the instance flips from slave
# to master
if 'role' in info:
if info['role'] == "master":
data['replication.master'] = 1
data['replication.master_sync_in_progress'] = 0
else:
data['replication.master'] = 0
# Connect to redis and get the maxmemory config value
# Then calculate the % maxmemory of memory used
maxmemory_config = self._get_config(host, port, unix_socket, auth,
'maxmemory')
if maxmemory_config and 'maxmemory' in maxmemory_config.keys():
maxmemory = float(maxmemory_config['maxmemory'])
            # Report 0% used when maxmemory is zero (unlimited)
if maxmemory == 0:
maxmemory_percent = 0.0
else:
maxmemory_percent = info['used_memory'] / maxmemory * 100
maxmemory_percent = round(maxmemory_percent, 2)
data['memory.used_percent'] = float("%.2f" % maxmemory_percent)
# Iterate over the top level keys
for key in self._KEYS:
if self._KEYS[key] in info:
data[key] = info[self._KEYS[key]]
# Iterate over renamed keys for 2.6 support
for key in self._RENAMED_KEYS:
if self._RENAMED_KEYS[key] in info:
data[key] = info[self._RENAMED_KEYS[key]]
        # Look for database-specific stats
for dbnum in range(0, int(self.config.get('databases',
self._DATABASE_COUNT))):
db = 'db%i' % dbnum
if db in info:
for key in info[db]:
data['%s.%s' % (db, key)] = info[db][key]
# Time since last save
for key in ['last_save_time', 'rdb_last_save_time']:
if key in info:
data['last_save.time_since'] = int(time.time()) - info[key]
# Publish the data to graphite
for key in data:
self.publish(self._publish_key(nick, key),
data[key],
precision=self._precision(data[key]),
metric_type='GAUGE')
def collect(self):
"""Collect the stats from the redis instance and publish them.
"""
if redis is None:
self.log.error('Unable to import module redis')
return {}
for nick in self.instances.keys():
(host, port, unix_socket, auth) = self.instances[nick]
self.collect_instance(nick, host, int(port), unix_socket, auth)
|
import sys
import textwrap
import cherrypy
from cherrypy.test import helper
class ParamsTest(helper.CPWebCase):
@staticmethod
def setup_server():
class Root:
@cherrypy.expose
@cherrypy.tools.json_out()
@cherrypy.tools.params()
def resource(self, limit=None, sort=None):
return type(limit).__name__
# for testing on Py 2
resource.__annotations__ = {'limit': int}
conf = {'/': {'tools.params.on': True}}
cherrypy.tree.mount(Root(), config=conf)
def test_pass(self):
self.getPage('/resource')
self.assertStatus(200)
self.assertBody('"NoneType"')
self.getPage('/resource?limit=0')
self.assertStatus(200)
self.assertBody('"int"')
def test_error(self):
self.getPage('/resource?limit=')
self.assertStatus(400)
self.assertInBody('invalid literal for int')
cherrypy.config['tools.params.error'] = 422
self.getPage('/resource?limit=')
self.assertStatus(422)
self.assertInBody('invalid literal for int')
cherrypy.config['tools.params.exception'] = TypeError
self.getPage('/resource?limit=')
self.assertStatus(500)
def test_syntax(self):
if sys.version_info < (3,):
return self.skip('skipped (Python 3 only)')
code = textwrap.dedent("""
class Root:
@cherrypy.expose
@cherrypy.tools.params()
def resource(self, limit: int):
return type(limit).__name__
conf = {'/': {'tools.params.on': True}}
cherrypy.tree.mount(Root(), config=conf)
""")
exec(code)
self.getPage('/resource?limit=0')
self.assertStatus(200)
self.assertBody('int')
|
import warnings
from importlib import import_module
from django.core.exceptions import ImproperlyConfigured
from zinnia.settings import URL_SHORTENER_BACKEND
from zinnia.url_shortener.backends.default import backend as default_backend
def get_url_shortener():
"""
Return the selected URL shortener backend.
"""
try:
backend_module = import_module(URL_SHORTENER_BACKEND)
backend = getattr(backend_module, 'backend')
except (ImportError, AttributeError):
warnings.warn('%s backend cannot be imported' % URL_SHORTENER_BACKEND,
RuntimeWarning)
backend = default_backend
except ImproperlyConfigured as e:
warnings.warn(str(e), RuntimeWarning)
backend = default_backend
return backend
|
import pandas as pd
import numpy as np
from scattertext.termranking import AbsoluteFrequencyRanker
from scattertext.termsignificance.LogOddsRatioInformativeDirichletPiror import LogOddsRatioInformativeDirichletPrior
class PriorFactory(object):
def __init__(self,
term_doc_mat,
category=None,
not_categories=None,
starting_count=0.0001,
term_ranker=AbsoluteFrequencyRanker):
'''
Parameters
----------
term_doc_mat : TermDocMatrix
Basis for scores
category : str
Category to score. Only important when finding neutral categories.
not_categories : list
            List of categories to score against. If None (the default), the
            remaining categories are used.
        starting_count : float
            Default 0.0001. Add this count to each term seen. If zero, terms
            not in background counts will be removed.
term_ranker : TermRanker
Function to get term frequency convention_df
'''
self.term_doc_mat = term_doc_mat
self.relevant_categories = []
if category:
self.category = category
assert category in term_doc_mat.get_categories()
self.relevant_categories += [category]
else:
self.category = None
        if not_categories is None:
not_categories = [c for c in term_doc_mat.get_categories()
if c != category]
else:
assert set(not_categories) - set(term_doc_mat.get_categories()) == set()
self.relevant_categories += not_categories
self.not_categories = not_categories
self.starting_count = starting_count
self.term_ranker = term_ranker(term_doc_mat)
self.use_preset_term_frequencies = False
self.priors = pd.Series(np.zeros(len(self.term_doc_mat.get_terms())),
index=term_doc_mat.get_terms())
def use_general_term_frequencies(self):
'''
Returns
-------
PriorFactory
'''
tdf = self._get_relevant_term_freq()
bg_df = self.term_doc_mat.get_term_and_background_counts()[['background']]
bg_df = pd.merge(tdf,
bg_df,
left_index=True,
right_index=True,
how='left').fillna(0.)
self._store_priors_from_background_dataframe(bg_df)
return self
def _get_relevant_term_freq(self):
return pd.DataFrame({
'corpus': self.term_ranker.get_ranks()
[[c + ' freq' for c in self.relevant_categories]]
.sum(axis=1)
})
def _store_priors_from_background_dataframe(self, bg_df):
self.priors += bg_df.reindex(self.priors.index).fillna(0)['background']
def use_custom_term_frequencies(self, custom_term_frequencies):
'''
Parameters
----------
        custom_term_frequencies : pd.Series
            term -> frequency
Returns
-------
PriorFactory
'''
self.priors += custom_term_frequencies.reindex(self.priors.index).fillna(0)
return self
def use_categories(self, categories):
self.priors += self.term_ranker.get_ranks()[
[c + ' freq' for c in categories]].sum(axis=1)
return self
def use_all_categories(self):
'''
Returns
-------
PriorFactory
'''
term_df = self.term_ranker.get_ranks()
self.priors += term_df.sum(axis=1).fillna(0.)
return self
def use_neutral_categories(self):
'''
Returns
-------
PriorFactory
'''
term_df = self.term_ranker.get_ranks()
self.priors += term_df[[c + ' freq' for c in self._get_neutral_categories()]].sum(axis=1)
return self
def drop_neutral_categories_from_corpus(self):
'''
Returns
-------
PriorFactory
'''
neutral_categories = self._get_neutral_categories()
self.term_doc_mat = self.term_doc_mat.remove_categories(neutral_categories)
self._reindex_priors()
return self
def _get_neutral_categories(self):
return [c for c in self.term_doc_mat.get_categories()
if c not in self.relevant_categories]
def _reindex_priors(self):
self.priors = self.priors.reindex(self.term_doc_mat.get_terms()).dropna()
def drop_unused_terms(self):
'''
Returns
-------
PriorFactory
'''
self.term_doc_mat = self.term_doc_mat.remove_terms(
set(self.term_doc_mat.get_terms()) - set(self.priors.index)
)
self._reindex_priors()
return self
def drop_zero_priors(self):
'''
Returns
-------
PriorFactory
'''
self.term_doc_mat = self.term_doc_mat.remove_terms(
self.priors[self.priors == 0].index
)
self._reindex_priors()
return self
def align_to_target(self, target_term_doc_mat):
'''
Parameters
----------
target_term_doc_mat : TermDocMatrix
Returns
-------
PriorFactory
'''
self.priors = self.priors[target_term_doc_mat.get_terms()].fillna(0)
return self
def build(self):
'''
Returns
-------
pd.Series, TermDocMatrix
'''
return self.get_priors(), self.term_doc_mat
def get_priors(self):
'''
Returns
-------
pd.Series
'''
priors = self.priors
priors[~np.isfinite(priors)] = 0
priors += self.starting_count
return priors
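# Illustrative usage (hypothetical term-document matrix): the factory methods chain, e.g.
#   priors, tdm = (PriorFactory(term_doc_mat, category='positive')
#                  .use_all_categories()
#                  .drop_zero_priors()
#                  .build())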
|
import tensorflow as tf
from keras import backend as K
from keras.engine import Layer
class AttentionLayer(Layer):
"""
Layer that compute attention for BiMPM model.
For detailed information, see Bilateral Multi-Perspective Matching for
Natural Language Sentences, section 3.2.
Reference:
https://github.com/zhiguowang/BiMPM/blob/master/src/layer_utils.py#L145-L196
Examples:
>>> import matchzoo as mz
>>> layer = mz.contrib.layers.AttentionLayer(att_dim=50)
>>> layer.compute_output_shape([(32, 10, 100), (32, 40, 100)])
(32, 10, 40)
"""
def __init__(self,
att_dim: int,
att_type: str = 'default',
dropout_rate: float = 0.0):
"""
class: `AttentionLayer` constructor.
        :param att_dim: int, dimension of the attention space
        :param att_type: str, attention type
        :param dropout_rate: float, dropout rate
"""
super(AttentionLayer, self).__init__()
self._att_dim = att_dim
self._att_type = att_type
self._dropout_rate = dropout_rate
@property
def att_dim(self):
"""Get the attention dimension."""
return self._att_dim
@property
def att_type(self):
"""Get the attention type."""
return self._att_type
def build(self, input_shapes):
"""
Build the layer.
:param input_shapes: input_shape_lt, input_shape_rt
"""
if not isinstance(input_shapes, list):
            raise ValueError('An attention layer should be called '
'on a list of inputs.')
hidden_dim_lt = input_shapes[0][2]
hidden_dim_rt = input_shapes[1][2]
self.attn_w1 = self.add_weight(name='attn_w1',
shape=(hidden_dim_lt,
self._att_dim),
initializer='uniform',
trainable=True)
if hidden_dim_lt == hidden_dim_rt:
self.attn_w2 = self.attn_w1
else:
self.attn_w2 = self.add_weight(name='attn_w2',
shape=(hidden_dim_rt,
self._att_dim),
initializer='uniform',
trainable=True)
# diagonal_W: (1, 1, a)
self.diagonal_W = self.add_weight(name='diagonal_W',
shape=(1,
1,
self._att_dim),
initializer='uniform',
trainable=True)
self.built = True
def call(self, x: list, **kwargs):
"""
Calculate attention.
:param x: [reps_lt, reps_rt]
:return attn_prob: [b, s_lt, s_rt]
"""
if not isinstance(x, list):
            raise ValueError('An attention layer should be called '
'on a list of inputs.')
reps_lt, reps_rt = x
attn_w1 = self.attn_w1
attn_w1 = tf.expand_dims(tf.expand_dims(attn_w1, axis=0), axis=0)
# => [1, 1, d, a]
reps_lt = tf.expand_dims(reps_lt, axis=-1)
attn_reps_lt = tf.reduce_sum(reps_lt * attn_w1, axis=2)
# => [b, s_lt, d, -1]
attn_w2 = self.attn_w2
attn_w2 = tf.expand_dims(tf.expand_dims(attn_w2, axis=0), axis=0)
# => [1, 1, d, a]
reps_rt = tf.expand_dims(reps_rt, axis=-1)
attn_reps_rt = tf.reduce_sum(reps_rt * attn_w2, axis=2) # [b, s_rt, d, -1]
attn_reps_lt = tf.tanh(attn_reps_lt) # [b, s_lt, a]
attn_reps_rt = tf.tanh(attn_reps_rt) # [b, s_rt, a]
# diagonal_W
attn_reps_lt = attn_reps_lt * self.diagonal_W # [b, s_lt, a]
attn_reps_rt = tf.transpose(attn_reps_rt, (0, 2, 1))
# => [b, a, s_rt]
attn_value = K.batch_dot(attn_reps_lt, attn_reps_rt) # [b, s_lt, s_rt]
# Softmax operation
attn_prob = tf.nn.softmax(attn_value) # [b, s_lt, s_rt]
# TODO(tjf) remove diagonal or not for normalization
# if remove_diagonal: attn_value = attn_value * diagonal
if len(x) == 4:
mask_lt, mask_rt = x[2], x[3]
attn_prob *= tf.expand_dims(mask_lt, axis=2)
attn_prob *= tf.expand_dims(mask_rt, axis=1)
return attn_prob
def compute_output_shape(self, input_shapes):
"""Calculate the layer output shape."""
if not isinstance(input_shapes, list):
            raise ValueError('An attention layer should be called '
'on a list of inputs.')
input_shape_lt, input_shape_rt = input_shapes[0], input_shapes[1]
return input_shape_lt[0], input_shape_lt[1], input_shape_rt[1]
|
import glob
import os
import sys
from nikola.plugin_categories import PostScanner
from nikola import utils
from nikola.post import Post
LOGGER = utils.get_logger('scan_posts')
class ScanPosts(PostScanner):
"""Scan posts in the site."""
name = "scan_posts"
def scan(self):
"""Create list of posts from POSTS and PAGES options."""
seen = set([])
if not self.site.quiet:
print("Scanning posts", end='', file=sys.stderr)
timeline = []
for wildcard, destination, template_name, use_in_feeds in \
self.site.config['post_pages']:
if not self.site.quiet:
print(".", end='', file=sys.stderr)
destination_translatable = utils.TranslatableSetting('destination', destination, self.site.config['TRANSLATIONS'])
dirname = os.path.dirname(wildcard)
for dirpath, _, _ in os.walk(dirname, followlinks=True):
rel_dest_dir = os.path.relpath(dirpath, dirname)
# Get all the untranslated paths
dir_glob = os.path.join(dirpath, os.path.basename(wildcard)) # posts/foo/*.rst
untranslated = glob.glob(dir_glob)
# And now get all the translated paths
translated = set([])
for lang in self.site.config['TRANSLATIONS'].keys():
if lang == self.site.config['DEFAULT_LANG']:
continue
lang_glob = utils.get_translation_candidate(self.site.config, dir_glob, lang) # posts/foo/*.LANG.rst
translated = translated.union(set(glob.glob(lang_glob)))
# untranslated globs like *.rst often match translated paths too, so remove them
# and ensure x.rst is not in the translated set
untranslated = set(untranslated) - translated
# also remove from translated paths that are translations of
# paths in untranslated_list, so x.es.rst is not in the untranslated set
for p in untranslated:
translated = translated - set([utils.get_translation_candidate(self.site.config, p, l) for l in self.site.config['TRANSLATIONS'].keys()])
full_list = list(translated) + list(untranslated)
                # We eliminate from the list the files inside any hidden folder (e.g. .ipynb_checkpoints)
full_list = [p for p in full_list
if not any([x.startswith('.')
for x in p.split(os.sep)])]
for base_path in sorted(full_list):
if base_path in seen:
continue
try:
post = Post(
base_path,
self.site.config,
rel_dest_dir,
use_in_feeds,
self.site.MESSAGES,
template_name,
self.site.get_compiler(base_path),
destination_base=destination_translatable,
metadata_extractors_by=self.site.metadata_extractors_by
)
for lang in post.translated_to:
seen.add(post.translated_source_path(lang))
timeline.append(post)
except Exception:
LOGGER.error('Error reading post {}'.format(base_path))
raise
return timeline
def supported_extensions(self):
"""Return a list of supported file extensions, or None if such a list isn't known beforehand."""
return list({os.path.splitext(x[0])[1] for x in self.site.config['post_pages']})
|
from binascii import Error as HexError, unhexlify
from typing import Dict, Union
from pyinsteon.address import Address
from pyinsteon.constants import HC_LOOKUP
import voluptuous as vol
from homeassistant.const import (
CONF_ADDRESS,
CONF_DEVICE,
CONF_ENTITY_ID,
CONF_HOST,
CONF_PASSWORD,
CONF_PLATFORM,
CONF_PORT,
CONF_USERNAME,
ENTITY_MATCH_ALL,
)
import homeassistant.helpers.config_validation as cv
from .const import (
CONF_CAT,
CONF_DIM_STEPS,
CONF_FIRMWARE,
CONF_HOUSECODE,
CONF_HUB_PASSWORD,
CONF_HUB_USERNAME,
CONF_HUB_VERSION,
CONF_IP_PORT,
CONF_OVERRIDE,
CONF_PLM_HUB_MSG,
CONF_PRODUCT_KEY,
CONF_SUBCAT,
CONF_UNITCODE,
CONF_X10,
CONF_X10_ALL_LIGHTS_OFF,
CONF_X10_ALL_LIGHTS_ON,
CONF_X10_ALL_UNITS_OFF,
DOMAIN,
HOUSECODES,
PORT_HUB_V1,
PORT_HUB_V2,
SRV_ALL_LINK_GROUP,
SRV_ALL_LINK_MODE,
SRV_CONTROLLER,
SRV_HOUSECODE,
SRV_LOAD_DB_RELOAD,
SRV_RESPONDER,
X10_PLATFORMS,
)
def set_default_port(schema: Dict) -> Dict:
"""Set the default port based on the Hub version."""
# If the ip_port is found do nothing
# If it is not found the set the default
ip_port = schema.get(CONF_IP_PORT)
if not ip_port:
hub_version = schema.get(CONF_HUB_VERSION)
# Found hub_version but not ip_port
schema[CONF_IP_PORT] = PORT_HUB_V1 if hub_version == 1 else PORT_HUB_V2
return schema
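# Illustrative example: set_default_port({CONF_HUB_VERSION: 1}) fills in CONF_IP_PORT with
# PORT_HUB_V1, while a schema that already carries CONF_IP_PORT is returned unchanged.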
CONF_DEVICE_OVERRIDE_SCHEMA = vol.All(
vol.Schema(
{
vol.Required(CONF_ADDRESS): cv.string,
vol.Optional(CONF_CAT): cv.byte,
vol.Optional(CONF_SUBCAT): cv.byte,
vol.Optional(CONF_FIRMWARE): cv.byte,
vol.Optional(CONF_PRODUCT_KEY): cv.byte,
vol.Optional(CONF_PLATFORM): cv.string,
}
),
)
CONF_X10_SCHEMA = vol.All(
vol.Schema(
{
vol.Required(CONF_HOUSECODE): cv.string,
vol.Required(CONF_UNITCODE): vol.Range(min=1, max=16),
vol.Required(CONF_PLATFORM): cv.string,
vol.Optional(CONF_DIM_STEPS): vol.Range(min=2, max=255),
}
)
)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.All(
cv.deprecated(CONF_X10_ALL_UNITS_OFF),
cv.deprecated(CONF_X10_ALL_LIGHTS_ON),
cv.deprecated(CONF_X10_ALL_LIGHTS_OFF),
vol.Schema(
{
vol.Exclusive(
CONF_PORT, "plm_or_hub", msg=CONF_PLM_HUB_MSG
): cv.string,
vol.Exclusive(
CONF_HOST, "plm_or_hub", msg=CONF_PLM_HUB_MSG
): cv.string,
vol.Optional(CONF_IP_PORT): cv.port,
vol.Optional(CONF_HUB_USERNAME): cv.string,
vol.Optional(CONF_HUB_PASSWORD): cv.string,
vol.Optional(CONF_HUB_VERSION, default=2): vol.In([1, 2]),
vol.Optional(CONF_OVERRIDE): vol.All(
cv.ensure_list_csv, [CONF_DEVICE_OVERRIDE_SCHEMA]
),
vol.Optional(CONF_X10): vol.All(
cv.ensure_list_csv, [CONF_X10_SCHEMA]
),
},
extra=vol.ALLOW_EXTRA,
required=True,
),
cv.has_at_least_one_key(CONF_PORT, CONF_HOST),
set_default_port,
)
},
extra=vol.ALLOW_EXTRA,
)
ADD_ALL_LINK_SCHEMA = vol.Schema(
{
vol.Required(SRV_ALL_LINK_GROUP): vol.Range(min=0, max=255),
vol.Required(SRV_ALL_LINK_MODE): vol.In([SRV_CONTROLLER, SRV_RESPONDER]),
}
)
DEL_ALL_LINK_SCHEMA = vol.Schema(
{vol.Required(SRV_ALL_LINK_GROUP): vol.Range(min=0, max=255)}
)
LOAD_ALDB_SCHEMA = vol.Schema(
{
vol.Required(CONF_ENTITY_ID): vol.Any(cv.entity_id, ENTITY_MATCH_ALL),
vol.Optional(SRV_LOAD_DB_RELOAD, default=False): cv.boolean,
}
)
PRINT_ALDB_SCHEMA = vol.Schema({vol.Required(CONF_ENTITY_ID): cv.entity_id})
X10_HOUSECODE_SCHEMA = vol.Schema({vol.Required(SRV_HOUSECODE): vol.In(HOUSECODES)})
TRIGGER_SCENE_SCHEMA = vol.Schema(
{vol.Required(SRV_ALL_LINK_GROUP): vol.Range(min=0, max=255)}
)
ADD_DEFAULT_LINKS_SCHEMA = vol.Schema({vol.Required(CONF_ENTITY_ID): cv.entity_id})
def normalize_byte_entry_to_int(entry: Union[int, bytes, str]):
"""Format a hex entry value."""
if isinstance(entry, int):
if entry in range(0, 256):
return entry
raise ValueError("Must be single byte")
if isinstance(entry, str):
if entry[0:2].lower() == "0x":
entry = entry[2:]
if len(entry) != 2:
raise ValueError("Not a valid hex code")
try:
entry = unhexlify(entry)
except HexError as err:
raise ValueError("Not a valid hex code") from err
return int.from_bytes(entry, byteorder="big")
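# Illustrative conversions: normalize_byte_entry_to_int(0x12) -> 18,
# normalize_byte_entry_to_int("0x12") -> 18, normalize_byte_entry_to_int("ff") -> 255;
# values outside a single byte raise ValueError.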
def add_device_override(config_data, new_override):
"""Add a new device override."""
try:
address = str(Address(new_override[CONF_ADDRESS]))
cat = normalize_byte_entry_to_int(new_override[CONF_CAT])
subcat = normalize_byte_entry_to_int(new_override[CONF_SUBCAT])
except ValueError as err:
raise ValueError("Incorrect values") from err
overrides = config_data.get(CONF_OVERRIDE, [])
curr_override = {}
# If this address has an override defined, remove it
for override in overrides:
if override[CONF_ADDRESS] == address:
curr_override = override
break
if curr_override:
overrides.remove(curr_override)
curr_override[CONF_ADDRESS] = address
curr_override[CONF_CAT] = cat
curr_override[CONF_SUBCAT] = subcat
overrides.append(curr_override)
config_data[CONF_OVERRIDE] = overrides
return config_data
def add_x10_device(config_data, new_x10):
"""Add a new X10 device to X10 device list."""
curr_device = {}
x10_devices = config_data.get(CONF_X10, [])
for x10_device in x10_devices:
if (
x10_device[CONF_HOUSECODE] == new_x10[CONF_HOUSECODE]
and x10_device[CONF_UNITCODE] == new_x10[CONF_UNITCODE]
):
curr_device = x10_device
break
if curr_device:
x10_devices.remove(curr_device)
curr_device[CONF_HOUSECODE] = new_x10[CONF_HOUSECODE]
curr_device[CONF_UNITCODE] = new_x10[CONF_UNITCODE]
curr_device[CONF_PLATFORM] = new_x10[CONF_PLATFORM]
curr_device[CONF_DIM_STEPS] = new_x10[CONF_DIM_STEPS]
x10_devices.append(curr_device)
config_data[CONF_X10] = x10_devices
return config_data
def build_device_override_schema(
address=vol.UNDEFINED,
cat=vol.UNDEFINED,
subcat=vol.UNDEFINED,
firmware=vol.UNDEFINED,
):
"""Build the device override schema for config flow."""
return vol.Schema(
{
vol.Required(CONF_ADDRESS, default=address): str,
vol.Optional(CONF_CAT, default=cat): str,
vol.Optional(CONF_SUBCAT, default=subcat): str,
}
)
def build_x10_schema(
housecode=vol.UNDEFINED,
unitcode=vol.UNDEFINED,
platform=vol.UNDEFINED,
dim_steps=22,
):
"""Build the X10 schema for config flow."""
return vol.Schema(
{
vol.Required(CONF_HOUSECODE, default=housecode): vol.In(HC_LOOKUP.keys()),
vol.Required(CONF_UNITCODE, default=unitcode): vol.In(range(1, 17)),
vol.Required(CONF_PLATFORM, default=platform): vol.In(X10_PLATFORMS),
vol.Optional(CONF_DIM_STEPS, default=dim_steps): vol.In(range(1, 255)),
}
)
def build_plm_schema(device=vol.UNDEFINED):
"""Build the PLM schema for config flow."""
return vol.Schema({vol.Required(CONF_DEVICE, default=device): str})
def build_hub_schema(
hub_version,
host=vol.UNDEFINED,
port=vol.UNDEFINED,
username=vol.UNDEFINED,
password=vol.UNDEFINED,
):
"""Build the Hub schema for config flow."""
if port == vol.UNDEFINED:
port = PORT_HUB_V2 if hub_version == 2 else PORT_HUB_V1
schema = {
vol.Required(CONF_HOST, default=host): str,
vol.Required(CONF_PORT, default=port): int,
}
if hub_version == 2:
schema[vol.Required(CONF_USERNAME, default=username)] = str
schema[vol.Required(CONF_PASSWORD, default=password)] = str
return vol.Schema(schema)
def build_remove_override_schema(data):
"""Build the schema to remove device overrides in config flow options."""
selection = []
for override in data:
selection.append(override[CONF_ADDRESS])
return vol.Schema({vol.Required(CONF_ADDRESS): vol.In(selection)})
def build_remove_x10_schema(data):
"""Build the schema to remove an X10 device in config flow options."""
selection = []
for device in data:
housecode = device[CONF_HOUSECODE].upper()
unitcode = device[CONF_UNITCODE]
selection.append(f"Housecode: {housecode}, Unitcode: {unitcode}")
return vol.Schema({vol.Required(CONF_DEVICE): vol.In(selection)})
def convert_yaml_to_config_flow(yaml_config):
"""Convert the YAML based configuration to a config flow configuration."""
config = {}
if yaml_config.get(CONF_HOST):
hub_version = yaml_config.get(CONF_HUB_VERSION, 2)
default_port = PORT_HUB_V2 if hub_version == 2 else PORT_HUB_V1
config[CONF_HOST] = yaml_config.get(CONF_HOST)
config[CONF_PORT] = yaml_config.get(CONF_PORT, default_port)
config[CONF_HUB_VERSION] = hub_version
if hub_version == 2:
config[CONF_USERNAME] = yaml_config[CONF_USERNAME]
config[CONF_PASSWORD] = yaml_config[CONF_PASSWORD]
else:
config[CONF_DEVICE] = yaml_config[CONF_PORT]
options = {}
for old_override in yaml_config.get(CONF_OVERRIDE, []):
override = {}
override[CONF_ADDRESS] = str(Address(old_override[CONF_ADDRESS]))
override[CONF_CAT] = normalize_byte_entry_to_int(old_override[CONF_CAT])
override[CONF_SUBCAT] = normalize_byte_entry_to_int(old_override[CONF_SUBCAT])
options = add_device_override(options, override)
for x10_device in yaml_config.get(CONF_X10, []):
options = add_x10_device(options, x10_device)
return config, options
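# Illustrative sketch (not part of the integration) of how a Hub v2 YAML
# configuration maps onto the config/options pair produced above; the literal
# values are placeholders:
#
#     yaml_config = {
#         CONF_HOST: "192.168.1.100",
#         CONF_USERNAME: "hub_user",
#         CONF_PASSWORD: "hub_password",
#         CONF_HUB_VERSION: 2,
#     }
#     config, options = convert_yaml_to_config_flow(yaml_config)
#     # config  -> host, port (PORT_HUB_V2 by default), hub version, credentials
#     # options -> device overrides and X10 devices, normalized through
#     #            add_device_override() and add_x10_device()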
|
import json
import logging
from perfkitbenchmarker import vm_util
# Default configuration for action status polling.
DEFAULT_ACTION_WAIT_SECONDS = 10
DEFAULT_ACTION_MAX_TRIES = 90
def DoctlAndParse(arg_list):
"""Run a doctl command and parse the output.
Args:
    arg_list: a list of arguments for doctl. Will be formatted with
str() before being sent to the process.
Returns:
A tuple of
- doctl's JSON output, pre-parsed, or None if output is empty.
- doctl's return code
  Note:
    The command is issued with raise_on_failure=False, so a failure is
    reported through the returned code rather than by raising an exception.
"""
stdout, _, retcode = vm_util.IssueCommand(
['doctl'] +
[str(arg) for arg in arg_list] +
['--output=json'],
raise_on_failure=False)
# In case of error, doctl sometimes prints "null" before printing a
# JSON error string to stdout. TODO(user): improve parsing of
# error messages.
if retcode and stdout.startswith('null'):
output = stdout[4:]
else:
output = stdout
if output:
return json.loads(output), retcode
else:
return None, retcode
class ActionInProgressException(Exception):
"""Exception to indicate that a DigitalOcean action is in-progress."""
pass
class ActionFailedError(Exception):
"""Exception raised when a DigitalOcean action fails."""
pass
class UnknownStatusError(Exception):
"""Exception raised for an unknown status message."""
pass
@vm_util.Retry(poll_interval=DEFAULT_ACTION_WAIT_SECONDS,
max_retries=DEFAULT_ACTION_MAX_TRIES,
retryable_exceptions=ActionInProgressException)
def WaitForAction(action_id):
"""Wait until a VM action completes."""
response, retcode = DoctlAndParse(
['compute', 'action', 'get', action_id])
if retcode:
    logging.warning('Unexpected action lookup failure.')
raise ActionFailedError('Failed to get action %s' % action_id)
status = response[0]['status']
logging.debug('action %d: status is "%s".', action_id, status)
if status == 'completed':
return
elif status == 'in-progress':
raise ActionInProgressException()
elif status == 'errored':
raise ActionFailedError('Action %s errored' % action_id)
else:
raise UnknownStatusError('Action %s had unknown status "%s"' %
(action_id, status))
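# Minimal usage sketch (illustrative only; assumes the doctl CLI is installed
# and authenticated, and that this module's imports resolve):
if __name__ == '__main__':
  logging.basicConfig(level=logging.INFO)
  droplets, retcode = DoctlAndParse(['compute', 'droplet', 'list'])
  if retcode:
    logging.error('doctl failed with code %s: %s', retcode, droplets)
  elif droplets:
    for droplet in droplets:
      logging.info('droplet %s: %s', droplet.get('id'), droplet.get('name'))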
|
import asyncio
import logging
import voluptuous as vol
import xs1_api_client
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_PORT,
CONF_SSL,
CONF_USERNAME,
)
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
DOMAIN = "xs1"
ACTUATORS = "actuators"
SENSORS = "sensors"
# define configuration parameters
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_PASSWORD): cv.string,
vol.Optional(CONF_PORT, default=80): cv.string,
vol.Optional(CONF_SSL, default=False): cv.boolean,
vol.Optional(CONF_USERNAME): cv.string,
}
)
},
extra=vol.ALLOW_EXTRA,
)
XS1_COMPONENTS = ["climate", "sensor", "switch"]
# Lock used to limit the amount of concurrent update requests
# as the XS1 Gateway can only handle a very
# small amount of concurrent requests
UPDATE_LOCK = asyncio.Lock()
def setup(hass, config):
"""Set up XS1 Component."""
_LOGGER.debug("Initializing XS1")
host = config[DOMAIN][CONF_HOST]
port = config[DOMAIN][CONF_PORT]
ssl = config[DOMAIN][CONF_SSL]
user = config[DOMAIN].get(CONF_USERNAME)
password = config[DOMAIN].get(CONF_PASSWORD)
# initialize XS1 API
try:
xs1 = xs1_api_client.XS1(
host=host, port=port, ssl=ssl, user=user, password=password
)
except ConnectionError as error:
_LOGGER.error(
"Failed to create XS1 API client because of a connection error: %s",
error,
)
return False
_LOGGER.debug("Establishing connection to XS1 gateway and retrieving data...")
hass.data[DOMAIN] = {}
actuators = xs1.get_all_actuators(enabled=True)
sensors = xs1.get_all_sensors(enabled=True)
hass.data[DOMAIN][ACTUATORS] = actuators
hass.data[DOMAIN][SENSORS] = sensors
_LOGGER.debug("Loading components for XS1 platform...")
# Load components for supported devices
for component in XS1_COMPONENTS:
discovery.load_platform(hass, component, DOMAIN, {}, config)
return True
class XS1DeviceEntity(Entity):
"""Representation of a base XS1 device."""
def __init__(self, device):
"""Initialize the XS1 device."""
self.device = device
async def async_update(self):
"""Retrieve latest device state."""
async with UPDATE_LOCK:
await self.hass.async_add_executor_job(self.device.update)
|
import unittest
import pandas as pd
import numpy as np
from pgmpy.estimators import HillClimbSearch, K2Score
from pgmpy.models import BayesianModel
class TestHillClimbEstimator(unittest.TestCase):
def setUp(self):
self.rand_data = pd.DataFrame(
np.random.randint(0, 5, size=(int(1e4), 2)), columns=list("AB")
)
self.rand_data["C"] = self.rand_data["B"]
self.est_rand = HillClimbSearch(self.rand_data)
self.score_rand = K2Score(self.rand_data).local_score
self.model1 = BayesianModel()
self.model1.add_nodes_from(["A", "B", "C"])
self.model1_possible_edges = set(
[(u, v) for u in self.model1.nodes() for v in self.model1.nodes()]
)
self.model2 = self.model1.copy()
self.model2.add_edge("A", "B")
self.model2_possible_edges = set(
[(u, v) for u in self.model2.nodes() for v in self.model2.nodes()]
)
# link to dataset: "https://www.kaggle.com/c/titanic/download/train.csv"
self.titanic_data = pd.read_csv(
"pgmpy/tests/test_estimators/testdata/titanic_train.csv"
)
self.titanic_data1 = self.titanic_data[
["Survived", "Sex", "Pclass", "Age", "Embarked"]
]
self.est_titanic1 = HillClimbSearch(self.titanic_data1)
self.score_titanic1 = K2Score(self.titanic_data1).local_score
self.titanic_data2 = self.titanic_data[["Survived", "Sex", "Pclass"]]
self.est_titanic2 = HillClimbSearch(self.titanic_data2)
self.score_titanic2 = K2Score(self.titanic_data2).local_score
def test_legal_operations(self):
model2_legal_ops = list(
self.est_rand._legal_operations(
model=self.model2,
score=self.score_rand,
tabu_list=set(),
max_indegree=float("inf"),
black_list=set(),
white_list=self.model2_possible_edges,
fixed_edges=set(),
)
)
model2_legal_ops_ref = [
(("+", ("C", "A")), -28.15602208305154),
(("+", ("A", "C")), -28.155467430966382),
(("+", ("C", "B")), 7636.947544933631),
(("+", ("B", "C")), 7937.805375579936),
(("-", ("A", "B")), 28.155467430966382),
(("flip", ("A", "B")), -0.0005546520851567038),
]
self.assertSetEqual(
set([op for op, score in model2_legal_ops]),
set([op for op, score in model2_legal_ops_ref]),
)
def test_legal_operations_blacklist_whitelist(self):
model2_legal_ops_bl = list(
self.est_rand._legal_operations(
model=self.model2,
score=self.score_rand,
tabu_list=set(),
max_indegree=float("inf"),
black_list=set([("A", "B"), ("A", "C"), ("C", "A"), ("C", "B")]),
white_list=self.model2_possible_edges,
fixed_edges=set(),
)
)
model2_legal_ops_bl_ref = [
("+", ("B", "C")),
("-", ("A", "B")),
("flip", ("A", "B")),
]
self.assertSetEqual(
set([op for op, score in model2_legal_ops_bl]), set(model2_legal_ops_bl_ref)
)
model2_legal_ops_wl = list(
self.est_rand._legal_operations(
model=self.model2,
score=self.score_rand,
tabu_list=set(),
max_indegree=float("inf"),
black_list=set(),
white_list=set([("A", "B"), ("A", "C"), ("C", "A"), ("A", "B")]),
fixed_edges=set(),
)
)
model2_legal_ops_wl_ref = [
("+", ("A", "C")),
("+", ("C", "A")),
("-", ("A", "B")),
]
self.assertSetEqual(
set([op for op, score in model2_legal_ops_wl]), set(model2_legal_ops_wl_ref)
)
def test_legal_operations_titanic(self):
start_model = BayesianModel(
[("Survived", "Sex"), ("Pclass", "Age"), ("Pclass", "Embarked")]
)
all_possible_edges = set(
[(u, v) for u in start_model.nodes() for v in start_model.nodes()]
)
legal_ops = self.est_titanic1._legal_operations(
model=start_model,
score=self.score_titanic1,
tabu_list=[],
max_indegree=float("inf"),
black_list=set(),
white_list=all_possible_edges,
fixed_edges=set(),
)
self.assertEqual(len(list(legal_ops)), 20)
tabu_list = [
("-", ("Survived", "Sex")),
("-", ("Survived", "Pclass")),
("flip", ("Age", "Pclass")),
]
legal_ops_tabu = self.est_titanic1._legal_operations(
model=start_model,
score=self.score_titanic1,
tabu_list=tabu_list,
max_indegree=float("inf"),
black_list=set(),
white_list=all_possible_edges,
fixed_edges=set(),
)
self.assertEqual(len(list(legal_ops_tabu)), 18)
legal_ops_indegree = self.est_titanic1._legal_operations(
model=start_model,
score=self.score_titanic1,
tabu_list=[],
max_indegree=1,
black_list=set(),
white_list=all_possible_edges,
fixed_edges=set(),
)
self.assertEqual(len(list(legal_ops_indegree)), 11)
legal_ops_both = self.est_titanic1._legal_operations(
model=start_model,
score=self.score_titanic1,
tabu_list=tabu_list,
max_indegree=1,
black_list=set(),
white_list=all_possible_edges,
fixed_edges=set(),
)
legal_ops_both_ref = {
("+", ("Embarked", "Survived")): 10.050632580087495,
("+", ("Survived", "Pclass")): 41.8886804654893,
("+", ("Age", "Survived")): -23.635716036430722,
("+", ("Pclass", "Survived")): 41.81314459373152,
("+", ("Sex", "Pclass")): 4.772261678791324,
("-", ("Pclass", "Age")): 11.546515590730905,
("-", ("Pclass", "Embarked")): -32.17148283253266,
("flip", ("Pclass", "Embarked")): 3.3563814191275583,
("flip", ("Survived", "Sex")): 0.0397370279797542,
}
self.assertSetEqual(
set([op for op, score in legal_ops_both]), set(legal_ops_both_ref)
)
for op, score in legal_ops_both:
self.assertAlmostEqual(score, legal_ops_both_ref[op])
def test_estimate_rand(self):
est1 = self.est_rand.estimate()
self.assertSetEqual(set(est1.nodes()), set(["A", "B", "C"]))
self.assertTrue(
list(est1.edges()) == [("B", "C")] or list(est1.edges()) == [("C", "B")]
)
est2 = self.est_rand.estimate(start_dag=BayesianModel([("A", "B"), ("A", "C")]))
self.assertTrue(
list(est2.edges()) == [("B", "C")] or list(est2.edges()) == [("C", "B")]
)
est3 = self.est_rand.estimate(fixed_edges=[("B", "C")])
self.assertTrue([("B", "C")] == list(est3.edges()))
def test_estimate_titanic(self):
self.assertSetEqual(
set(self.est_titanic2.estimate().edges()),
set([("Survived", "Pclass"), ("Sex", "Pclass"), ("Sex", "Survived")]),
)
self.assertTrue(
("Pclass", "Survived")
in self.est_titanic2.estimate(fixed_edges=[("Pclass", "Survived")]).edges()
)
def test_no_legal_operation(self):
data = pd.DataFrame(
[
[1, 0, 0, 1, 0, 0, 1, 1, 0],
[1, 0, 1, 0, 0, 1, 0, 1, 0],
[1, 0, 0, 0, 0, 1, 0, 1, 1],
[1, 1, 0, 1, 0, 1, 1, 0, 0],
[0, 0, 1, 0, 0, 1, 1, 0, 0],
],
columns=list("ABCDEFGHI"),
)
est = HillClimbSearch(data)
best_model = est.estimate(
fixed_edges=[("A", "B"), ("B", "C")], white_list=[("F", "C")]
)
self.assertEqual(
set(best_model.edges()), set([("A", "B"), ("B", "C"), ("F", "C")])
)
def tearDown(self):
del self.rand_data
del self.est_rand
del self.model1
del self.titanic_data
del self.titanic_data1
del self.titanic_data2
del self.est_titanic1
del self.est_titanic2
|
import asyncio
from collections import OrderedDict
import logging
from typing import Any, Dict, List, Optional
import attr
import voluptuous as vol
from homeassistant.const import CONF_EXCLUDE, CONF_INCLUDE
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ServiceNotFound
from homeassistant.helpers import config_validation as cv
from . import (
MULTI_FACTOR_AUTH_MODULE_SCHEMA,
MULTI_FACTOR_AUTH_MODULES,
MultiFactorAuthModule,
SetupFlow,
)
REQUIREMENTS = ["pyotp==2.3.0"]
CONF_MESSAGE = "message"
CONFIG_SCHEMA = MULTI_FACTOR_AUTH_MODULE_SCHEMA.extend(
{
vol.Optional(CONF_INCLUDE): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_EXCLUDE): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_MESSAGE, default="{} is your Home Assistant login code"): str,
},
extra=vol.PREVENT_EXTRA,
)
STORAGE_VERSION = 1
STORAGE_KEY = "auth_module.notify"
STORAGE_USERS = "users"
STORAGE_USER_ID = "user_id"
INPUT_FIELD_CODE = "code"
_LOGGER = logging.getLogger(__name__)
def _generate_secret() -> str:
"""Generate a secret."""
import pyotp # pylint: disable=import-outside-toplevel
return str(pyotp.random_base32())
def _generate_random() -> int:
"""Generate a 8 digit number."""
import pyotp # pylint: disable=import-outside-toplevel
return int(pyotp.random_base32(length=8, chars=list("1234567890")))
def _generate_otp(secret: str, count: int) -> str:
"""Generate one time password."""
import pyotp # pylint: disable=import-outside-toplevel
return str(pyotp.HOTP(secret).at(count))
def _verify_otp(secret: str, otp: str, count: int) -> bool:
"""Verify one time password."""
import pyotp # pylint: disable=import-outside-toplevel
return bool(pyotp.HOTP(secret).verify(otp, count))
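# Sketch of the HOTP round trip the helpers above implement (illustrative
# only; requires pyotp). A fresh secret/counter pair yields a code that
# verifies against that same counter value:
#
#     secret = _generate_secret()
#     count = _generate_random()
#     code = _generate_otp(secret, count)
#     assert _verify_otp(secret, code, count)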
@attr.s(slots=True)
class NotifySetting:
"""Store notify setting for one user."""
secret: str = attr.ib(factory=_generate_secret) # not persistent
counter: int = attr.ib(factory=_generate_random) # not persistent
notify_service: Optional[str] = attr.ib(default=None)
target: Optional[str] = attr.ib(default=None)
_UsersDict = Dict[str, NotifySetting]
@MULTI_FACTOR_AUTH_MODULES.register("notify")
class NotifyAuthModule(MultiFactorAuthModule):
"""Auth module send hmac-based one time password by notify service."""
DEFAULT_TITLE = "Notify One-Time Password"
def __init__(self, hass: HomeAssistant, config: Dict[str, Any]) -> None:
"""Initialize the user data store."""
super().__init__(hass, config)
self._user_settings: Optional[_UsersDict] = None
self._user_store = hass.helpers.storage.Store(
STORAGE_VERSION, STORAGE_KEY, private=True
)
self._include = config.get(CONF_INCLUDE, [])
self._exclude = config.get(CONF_EXCLUDE, [])
self._message_template = config[CONF_MESSAGE]
self._init_lock = asyncio.Lock()
@property
def input_schema(self) -> vol.Schema:
"""Validate login flow input data."""
return vol.Schema({INPUT_FIELD_CODE: str})
async def _async_load(self) -> None:
"""Load stored data."""
async with self._init_lock:
if self._user_settings is not None:
return
data = await self._user_store.async_load()
if data is None:
data = {STORAGE_USERS: {}}
self._user_settings = {
user_id: NotifySetting(**setting)
for user_id, setting in data.get(STORAGE_USERS, {}).items()
}
async def _async_save(self) -> None:
"""Save data."""
if self._user_settings is None:
return
await self._user_store.async_save(
{
STORAGE_USERS: {
user_id: attr.asdict(
notify_setting,
filter=attr.filters.exclude(
attr.fields(NotifySetting).secret,
attr.fields(NotifySetting).counter,
),
)
for user_id, notify_setting in self._user_settings.items()
}
}
)
@callback
    def async_get_available_notify_services(self) -> List[str]:
"""Return list of notify services."""
unordered_services = set()
for service in self.hass.services.async_services().get("notify", {}):
if service not in self._exclude:
unordered_services.add(service)
if self._include:
unordered_services &= set(self._include)
return sorted(unordered_services)
async def async_setup_flow(self, user_id: str) -> SetupFlow:
"""Return a data entry flow handler for setup module.
Mfa module should extend SetupFlow
"""
return NotifySetupFlow(
            self, self.input_schema, user_id, self.async_get_available_notify_services()
)
async def async_setup_user(self, user_id: str, setup_data: Any) -> Any:
"""Set up auth module for user."""
if self._user_settings is None:
await self._async_load()
assert self._user_settings is not None
self._user_settings[user_id] = NotifySetting(
notify_service=setup_data.get("notify_service"),
target=setup_data.get("target"),
)
await self._async_save()
async def async_depose_user(self, user_id: str) -> None:
"""Depose auth module for user."""
if self._user_settings is None:
await self._async_load()
assert self._user_settings is not None
if self._user_settings.pop(user_id, None):
await self._async_save()
async def async_is_user_setup(self, user_id: str) -> bool:
"""Return whether user is setup."""
if self._user_settings is None:
await self._async_load()
assert self._user_settings is not None
return user_id in self._user_settings
async def async_validate(self, user_id: str, user_input: Dict[str, Any]) -> bool:
"""Return True if validation passed."""
if self._user_settings is None:
await self._async_load()
assert self._user_settings is not None
notify_setting = self._user_settings.get(user_id)
if notify_setting is None:
return False
        # user_input has been validated in the caller
return await self.hass.async_add_executor_job(
_verify_otp,
notify_setting.secret,
user_input.get(INPUT_FIELD_CODE, ""),
notify_setting.counter,
)
async def async_initialize_login_mfa_step(self, user_id: str) -> None:
"""Generate code and notify user."""
if self._user_settings is None:
await self._async_load()
assert self._user_settings is not None
notify_setting = self._user_settings.get(user_id)
if notify_setting is None:
raise ValueError("Cannot find user_id")
def generate_secret_and_one_time_password() -> str:
"""Generate and send one time password."""
assert notify_setting
# secret and counter are not persistent
notify_setting.secret = _generate_secret()
notify_setting.counter = _generate_random()
return _generate_otp(notify_setting.secret, notify_setting.counter)
code = await self.hass.async_add_executor_job(
generate_secret_and_one_time_password
)
await self.async_notify_user(user_id, code)
async def async_notify_user(self, user_id: str, code: str) -> None:
"""Send code by user's notify service."""
if self._user_settings is None:
await self._async_load()
assert self._user_settings is not None
notify_setting = self._user_settings.get(user_id)
if notify_setting is None:
_LOGGER.error("Cannot find user %s", user_id)
return
await self.async_notify(
code,
notify_setting.notify_service, # type: ignore
notify_setting.target,
)
async def async_notify(
self, code: str, notify_service: str, target: Optional[str] = None
) -> None:
"""Send code by notify service."""
data = {"message": self._message_template.format(code)}
if target:
data["target"] = [target]
await self.hass.services.async_call("notify", notify_service, data)
class NotifySetupFlow(SetupFlow):
"""Handler for the setup flow."""
def __init__(
self,
auth_module: NotifyAuthModule,
setup_schema: vol.Schema,
user_id: str,
available_notify_services: List[str],
) -> None:
"""Initialize the setup flow."""
super().__init__(auth_module, setup_schema, user_id)
# to fix typing complaint
self._auth_module: NotifyAuthModule = auth_module
self._available_notify_services = available_notify_services
self._secret: Optional[str] = None
self._count: Optional[int] = None
self._notify_service: Optional[str] = None
self._target: Optional[str] = None
async def async_step_init(
self, user_input: Optional[Dict[str, str]] = None
) -> Dict[str, Any]:
"""Let user select available notify services."""
errors: Dict[str, str] = {}
hass = self._auth_module.hass
if user_input:
self._notify_service = user_input["notify_service"]
self._target = user_input.get("target")
self._secret = await hass.async_add_executor_job(_generate_secret)
self._count = await hass.async_add_executor_job(_generate_random)
return await self.async_step_setup()
if not self._available_notify_services:
return self.async_abort(reason="no_available_service")
schema: Dict[str, Any] = OrderedDict()
schema["notify_service"] = vol.In(self._available_notify_services)
schema["target"] = vol.Optional(str)
return self.async_show_form(
step_id="init", data_schema=vol.Schema(schema), errors=errors
)
async def async_step_setup(
self, user_input: Optional[Dict[str, str]] = None
) -> Dict[str, Any]:
"""Verify user can receive one-time password."""
errors: Dict[str, str] = {}
hass = self._auth_module.hass
if user_input:
verified = await hass.async_add_executor_job(
_verify_otp, self._secret, user_input["code"], self._count
)
if verified:
await self._auth_module.async_setup_user(
self._user_id,
{"notify_service": self._notify_service, "target": self._target},
)
return self.async_create_entry(title=self._auth_module.name, data={})
errors["base"] = "invalid_code"
# generate code every time, no retry logic
assert self._secret and self._count
code = await hass.async_add_executor_job(
_generate_otp, self._secret, self._count
)
assert self._notify_service
try:
await self._auth_module.async_notify(
code, self._notify_service, self._target
)
except ServiceNotFound:
return self.async_abort(reason="notify_service_not_exist")
return self.async_show_form(
step_id="setup",
data_schema=self._setup_schema,
description_placeholders={"notify_service": self._notify_service},
errors=errors,
)
|
from babelfish import Language
import os
import pytest
from subliminal.providers.argenteam import ArgenteamSubtitle, ArgenteamProvider
from vcr import VCR
vcr = VCR(path_transformer=lambda path: path + '.yaml',
record_mode=os.environ.get('VCR_RECORD_MODE', 'once'),
match_on=['method', 'scheme', 'host', 'port', 'path', 'query', 'body'],
cassette_library_dir=os.path.join('tests', 'cassettes', 'argenteam'))
def test_get_matches_episode(episodes):
subtitle = ArgenteamSubtitle(Language.fromalpha2('es'), None, 'Game of Thrones', 3, 10, 'EVOLVE', '720p')
matches = subtitle.get_matches(episodes['got_s03e10'])
assert matches == {'resolution', 'series', 'season', 'episode', 'year', 'country'}
def test_get_matches_no_match(episodes):
subtitle = ArgenteamSubtitle(Language.fromalpha2('es'),
None,
'Marvels Agents Of S.H.I.E.L.D.',
2, 6, 'KILLERS', '1080p')
matches = subtitle.get_matches(episodes['house_of_cards_us_s06e01'])
assert matches == set()
@pytest.mark.integration
@vcr.use_cassette
def test_download_subtitle(episodes):
video = episodes['bbt_s07e05']
languages = {Language.fromalpha2('es')}
with ArgenteamProvider() as provider:
subtitles = provider.list_subtitles(video, languages)
provider.download_subtitle(subtitles[0])
assert subtitles[0].content is not None
assert subtitles[0].is_valid() is True
@pytest.mark.integration
@vcr.use_cassette
def test_list_subtitles_episode_alternative_series(episodes):
video = episodes['turn_s04e03']
languages = {Language.fromalpha2('es')}
expected_subtitles = {
'http://www.argenteam.net/subtitles/67263/TURN.Washingtons.Spies.%282014%29.S04E03'
'-Blood.for.Blood.HDTV.x264-SVA',
'http://www.argenteam.net/subtitles/67264/TURN.Washingtons.Spies.%282014%29.S04E03'
'-Blood.for.Blood.HDTV.x264.720p-AVS'
}
with ArgenteamProvider() as provider:
subtitles = provider.list_subtitles(video, languages)
assert {s.id for s in subtitles} == expected_subtitles
|
from datetime import timedelta
import logging
from homeassistant.components.climate import (
DOMAIN as SENSOR_DOMAIN,
ENTITY_ID_FORMAT,
ClimateEntity,
)
from homeassistant.components.climate.const import (
FAN_HIGH,
FAN_LOW,
FAN_MEDIUM,
HVAC_MODE_AUTO,
HVAC_MODE_COOL,
HVAC_MODE_FAN_ONLY,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
SUPPORT_FAN_MODE,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.const import (
ATTR_TEMPERATURE,
CONF_PLATFORM,
CONF_UNIT_OF_MEASUREMENT,
ENTITY_MATCH_NONE,
PRECISION_TENTHS,
PRECISION_WHOLE,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
)
from homeassistant.core import callback, valid_entity_id
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from . import TuyaDevice
from .const import (
CONF_CURR_TEMP_DIVIDER,
CONF_EXT_TEMP_SENSOR,
CONF_MAX_TEMP,
CONF_MIN_TEMP,
CONF_TEMP_DIVIDER,
DOMAIN,
SIGNAL_CONFIG_ENTITY,
TUYA_DATA,
TUYA_DISCOVERY_NEW,
)
DEVICE_TYPE = "climate"
SCAN_INTERVAL = timedelta(seconds=15)
HA_STATE_TO_TUYA = {
HVAC_MODE_AUTO: "auto",
HVAC_MODE_COOL: "cold",
HVAC_MODE_FAN_ONLY: "wind",
HVAC_MODE_HEAT: "hot",
}
TUYA_STATE_TO_HA = {value: key for key, value in HA_STATE_TO_TUYA.items()}
FAN_MODES = {FAN_LOW, FAN_MEDIUM, FAN_HIGH}
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up tuya sensors dynamically through tuya discovery."""
platform = config_entry.data[CONF_PLATFORM]
async def async_discover_sensor(dev_ids):
"""Discover and add a discovered tuya sensor."""
if not dev_ids:
return
entities = await hass.async_add_executor_job(
_setup_entities,
hass,
dev_ids,
platform,
)
async_add_entities(entities)
async_dispatcher_connect(
hass, TUYA_DISCOVERY_NEW.format(SENSOR_DOMAIN), async_discover_sensor
)
devices_ids = hass.data[DOMAIN]["pending"].pop(SENSOR_DOMAIN)
await async_discover_sensor(devices_ids)
def _setup_entities(hass, dev_ids, platform):
"""Set up Tuya Climate device."""
tuya = hass.data[DOMAIN][TUYA_DATA]
entities = []
for dev_id in dev_ids:
device = tuya.get_device_by_id(dev_id)
if device is None:
continue
entities.append(TuyaClimateEntity(device, platform))
return entities
class TuyaClimateEntity(TuyaDevice, ClimateEntity):
"""Tuya climate devices,include air conditioner,heater."""
def __init__(self, tuya, platform):
"""Init climate device."""
super().__init__(tuya, platform)
self.entity_id = ENTITY_ID_FORMAT.format(tuya.object_id())
self.operations = [HVAC_MODE_OFF]
self._has_operation = False
self._def_hvac_mode = HVAC_MODE_AUTO
self._min_temp = None
self._max_temp = None
self._temp_entity = None
self._temp_entity_error = False
@callback
def _process_config(self):
"""Set device config parameter."""
config = self._get_device_config()
if not config:
return
unit = config.get(CONF_UNIT_OF_MEASUREMENT)
if unit:
self._tuya.set_unit("FAHRENHEIT" if unit == TEMP_FAHRENHEIT else "CELSIUS")
self._tuya.temp_divider = config.get(CONF_TEMP_DIVIDER, 0)
self._tuya.curr_temp_divider = config.get(CONF_CURR_TEMP_DIVIDER, 0)
min_temp = config.get(CONF_MIN_TEMP, 0)
max_temp = config.get(CONF_MAX_TEMP, 0)
if min_temp >= max_temp:
self._min_temp = self._max_temp = None
else:
self._min_temp = min_temp
self._max_temp = max_temp
self._temp_entity = config.get(CONF_EXT_TEMP_SENSOR)
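    # Illustrative customization dict consumed by _process_config above (the
    # key names come from .const; the values are placeholders, not defaults):
    #
    #     {
    #         CONF_UNIT_OF_MEASUREMENT: TEMP_FAHRENHEIT,
    #         CONF_TEMP_DIVIDER: 10,
    #         CONF_CURR_TEMP_DIVIDER: 10,
    #         CONF_MIN_TEMP: 15,
    #         CONF_MAX_TEMP: 30,
    #         CONF_EXT_TEMP_SENSOR: "sensor.living_room_temperature",
    #     }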
async def async_added_to_hass(self):
"""Create operation list when add to hass."""
await super().async_added_to_hass()
self._process_config()
self.async_on_remove(
async_dispatcher_connect(
self.hass, SIGNAL_CONFIG_ENTITY, self._process_config
)
)
modes = self._tuya.operation_list()
if modes is None:
if self._def_hvac_mode not in self.operations:
self.operations.append(self._def_hvac_mode)
return
for mode in modes:
if mode not in TUYA_STATE_TO_HA:
continue
ha_mode = TUYA_STATE_TO_HA[mode]
if ha_mode not in self.operations:
self.operations.append(ha_mode)
self._has_operation = True
@property
def precision(self):
"""Return the precision of the system."""
if self._tuya.has_decimal():
return PRECISION_TENTHS
return PRECISION_WHOLE
@property
def temperature_unit(self):
"""Return the unit of measurement used by the platform."""
unit = self._tuya.temperature_unit()
if unit == "FAHRENHEIT":
return TEMP_FAHRENHEIT
return TEMP_CELSIUS
@property
def hvac_mode(self):
"""Return current operation ie. heat, cool, idle."""
if not self._tuya.state():
return HVAC_MODE_OFF
if not self._has_operation:
return self._def_hvac_mode
mode = self._tuya.current_operation()
if mode is None:
return None
return TUYA_STATE_TO_HA.get(mode)
@property
def hvac_modes(self):
"""Return the list of available operation modes."""
return self.operations
@property
def current_temperature(self):
"""Return the current temperature."""
curr_temp = self._tuya.current_temperature()
if curr_temp is None:
return self._get_ext_temperature()
return curr_temp
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
return self._tuya.target_temperature()
@property
def target_temperature_step(self):
"""Return the supported step of target temperature."""
return self._tuya.target_temperature_step()
@property
def fan_mode(self):
"""Return the fan setting."""
return self._tuya.current_fan_mode()
@property
def fan_modes(self):
"""Return the list of available fan modes."""
return self._tuya.fan_list()
def set_temperature(self, **kwargs):
"""Set new target temperature."""
if ATTR_TEMPERATURE in kwargs:
self._tuya.set_temperature(kwargs[ATTR_TEMPERATURE])
def set_fan_mode(self, fan_mode):
"""Set new target fan mode."""
self._tuya.set_fan_mode(fan_mode)
def set_hvac_mode(self, hvac_mode):
"""Set new target operation mode."""
if hvac_mode == HVAC_MODE_OFF:
self._tuya.turn_off()
return
if not self._tuya.state():
self._tuya.turn_on()
if self._has_operation:
self._tuya.set_operation_mode(HA_STATE_TO_TUYA.get(hvac_mode))
@property
def supported_features(self):
"""Return the list of supported features."""
supports = 0
if self._tuya.support_target_temperature():
supports = supports | SUPPORT_TARGET_TEMPERATURE
if self._tuya.support_wind_speed():
supports = supports | SUPPORT_FAN_MODE
return supports
@property
def min_temp(self):
"""Return the minimum temperature."""
min_temp = (
self._min_temp if self._min_temp is not None else self._tuya.min_temp()
)
if min_temp is not None:
return min_temp
return super().min_temp
@property
def max_temp(self):
"""Return the maximum temperature."""
max_temp = (
self._max_temp if self._max_temp is not None else self._tuya.max_temp()
)
if max_temp is not None:
return max_temp
return super().max_temp
def _set_and_log_temp_error(self, error_msg):
if not self._temp_entity_error:
_LOGGER.warning(
"Error on Tuya external temperature sensor %s: %s",
self._temp_entity,
error_msg,
)
self._temp_entity_error = True
def _get_ext_temperature(self):
"""Get external temperature entity current state."""
if not self._temp_entity or self._temp_entity == ENTITY_MATCH_NONE:
return None
entity_name = self._temp_entity
if not valid_entity_id(entity_name):
self._set_and_log_temp_error("entity name is invalid")
return None
state_obj = self.hass.states.get(entity_name)
if state_obj:
temperature = state_obj.state
try:
float(temperature)
except (TypeError, ValueError):
self._set_and_log_temp_error(
"entity state is not available or is not a number"
)
return None
self._temp_entity_error = False
return temperature
self._set_and_log_temp_error("entity not found")
return None
|
from typing import Dict, List
import voluptuous as vol
from homeassistant.components.device_automation.exceptions import (
InvalidDeviceAutomationConfig,
)
from homeassistant.const import (
ATTR_DEVICE_CLASS,
ATTR_UNIT_OF_MEASUREMENT,
CONF_ABOVE,
CONF_BELOW,
CONF_ENTITY_ID,
CONF_TYPE,
DEVICE_CLASS_BATTERY,
DEVICE_CLASS_CURRENT,
DEVICE_CLASS_ENERGY,
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_ILLUMINANCE,
DEVICE_CLASS_POWER,
DEVICE_CLASS_POWER_FACTOR,
DEVICE_CLASS_PRESSURE,
DEVICE_CLASS_SIGNAL_STRENGTH,
DEVICE_CLASS_TEMPERATURE,
DEVICE_CLASS_TIMESTAMP,
DEVICE_CLASS_VOLTAGE,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import condition, config_validation as cv
from homeassistant.helpers.entity_registry import (
async_entries_for_device,
async_get_registry,
)
from homeassistant.helpers.typing import ConfigType
from . import DOMAIN
# mypy: allow-untyped-defs, no-check-untyped-defs
DEVICE_CLASS_NONE = "none"
CONF_IS_BATTERY_LEVEL = "is_battery_level"
CONF_IS_CURRENT = "is_current"
CONF_IS_ENERGY = "is_energy"
CONF_IS_HUMIDITY = "is_humidity"
CONF_IS_ILLUMINANCE = "is_illuminance"
CONF_IS_POWER = "is_power"
CONF_IS_POWER_FACTOR = "is_power_factor"
CONF_IS_PRESSURE = "is_pressure"
CONF_IS_SIGNAL_STRENGTH = "is_signal_strength"
CONF_IS_TEMPERATURE = "is_temperature"
CONF_IS_TIMESTAMP = "is_timestamp"
CONF_IS_VOLTAGE = "is_voltage"
CONF_IS_VALUE = "is_value"
ENTITY_CONDITIONS = {
DEVICE_CLASS_BATTERY: [{CONF_TYPE: CONF_IS_BATTERY_LEVEL}],
DEVICE_CLASS_CURRENT: [{CONF_TYPE: CONF_IS_CURRENT}],
DEVICE_CLASS_ENERGY: [{CONF_TYPE: CONF_IS_ENERGY}],
DEVICE_CLASS_HUMIDITY: [{CONF_TYPE: CONF_IS_HUMIDITY}],
DEVICE_CLASS_ILLUMINANCE: [{CONF_TYPE: CONF_IS_ILLUMINANCE}],
DEVICE_CLASS_POWER: [{CONF_TYPE: CONF_IS_POWER}],
DEVICE_CLASS_POWER_FACTOR: [{CONF_TYPE: CONF_IS_POWER_FACTOR}],
DEVICE_CLASS_PRESSURE: [{CONF_TYPE: CONF_IS_PRESSURE}],
DEVICE_CLASS_SIGNAL_STRENGTH: [{CONF_TYPE: CONF_IS_SIGNAL_STRENGTH}],
DEVICE_CLASS_TEMPERATURE: [{CONF_TYPE: CONF_IS_TEMPERATURE}],
DEVICE_CLASS_TIMESTAMP: [{CONF_TYPE: CONF_IS_TIMESTAMP}],
DEVICE_CLASS_VOLTAGE: [{CONF_TYPE: CONF_IS_VOLTAGE}],
DEVICE_CLASS_NONE: [{CONF_TYPE: CONF_IS_VALUE}],
}
CONDITION_SCHEMA = vol.All(
cv.DEVICE_CONDITION_BASE_SCHEMA.extend(
{
vol.Required(CONF_ENTITY_ID): cv.entity_id,
vol.Required(CONF_TYPE): vol.In(
[
CONF_IS_BATTERY_LEVEL,
CONF_IS_CURRENT,
CONF_IS_ENERGY,
CONF_IS_HUMIDITY,
CONF_IS_ILLUMINANCE,
CONF_IS_POWER,
CONF_IS_POWER_FACTOR,
CONF_IS_PRESSURE,
CONF_IS_SIGNAL_STRENGTH,
CONF_IS_TEMPERATURE,
CONF_IS_TIMESTAMP,
CONF_IS_VOLTAGE,
CONF_IS_VALUE,
]
),
vol.Optional(CONF_BELOW): vol.Any(vol.Coerce(float)),
vol.Optional(CONF_ABOVE): vol.Any(vol.Coerce(float)),
}
),
cv.has_at_least_one_key(CONF_BELOW, CONF_ABOVE),
)
async def async_get_conditions(
hass: HomeAssistant, device_id: str
) -> List[Dict[str, str]]:
"""List device conditions."""
conditions: List[Dict[str, str]] = []
entity_registry = await async_get_registry(hass)
entries = [
entry
for entry in async_entries_for_device(entity_registry, device_id)
if entry.domain == DOMAIN
]
for entry in entries:
device_class = DEVICE_CLASS_NONE
state = hass.states.get(entry.entity_id)
unit_of_measurement = (
state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) if state else None
)
if not state or not unit_of_measurement:
continue
if ATTR_DEVICE_CLASS in state.attributes:
device_class = state.attributes[ATTR_DEVICE_CLASS]
templates = ENTITY_CONDITIONS.get(
device_class, ENTITY_CONDITIONS[DEVICE_CLASS_NONE]
)
conditions.extend(
{
**template,
"condition": "device",
"device_id": device_id,
"entity_id": entry.entity_id,
"domain": DOMAIN,
}
for template in templates
)
return conditions
@callback
def async_condition_from_config(
config: ConfigType, config_validation: bool
) -> condition.ConditionCheckerType:
"""Evaluate state based on configuration."""
if config_validation:
config = CONDITION_SCHEMA(config)
numeric_state_config = {
condition.CONF_CONDITION: "numeric_state",
condition.CONF_ENTITY_ID: config[CONF_ENTITY_ID],
}
if CONF_ABOVE in config:
numeric_state_config[condition.CONF_ABOVE] = config[CONF_ABOVE]
if CONF_BELOW in config:
numeric_state_config[condition.CONF_BELOW] = config[CONF_BELOW]
return condition.async_numeric_state_from_config(numeric_state_config)
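# Illustrative example (hypothetical entity/device ids): a device condition of
# type "is_temperature" with an "above" threshold, e.g.
#
#     {
#         "condition": "device",
#         "device_id": "abc123",
#         "domain": DOMAIN,
#         "entity_id": "sensor.outside_temperature",
#         "type": CONF_IS_TEMPERATURE,
#         "above": 20,
#     }
#
# is evaluated by rewriting it into the equivalent numeric_state condition
# ({"condition": "numeric_state", "entity_id": ..., "above": 20}) as above.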
async def async_get_condition_capabilities(hass, config):
"""List condition capabilities."""
state = hass.states.get(config[CONF_ENTITY_ID])
unit_of_measurement = (
state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) if state else None
)
if not state or not unit_of_measurement:
raise InvalidDeviceAutomationConfig
return {
"extra_fields": vol.Schema(
{
vol.Optional(
CONF_ABOVE, description={"suffix": unit_of_measurement}
): vol.Coerce(float),
vol.Optional(
CONF_BELOW, description={"suffix": unit_of_measurement}
): vol.Coerce(float),
}
)
}
|
from datetime import datetime as dt, timedelta as dtd
import bson
import numpy as np
import pytest
from mock import patch
from pymongo.server_type import SERVER_TYPE
from arctic._config import FwPointersCfg, FW_POINTERS_REFS_KEY
from arctic._util import mongo_count
from arctic.store._ndarray_store import NdarrayStore
from arctic.store.version_store import register_versioned_storage
from tests.integration.store.test_version_store import _query, FwPointersCtx
register_versioned_storage(NdarrayStore)
def test_write_new_column_name_to_arctic_1_40_data(ndarray_store_with_uncompressed_write):
store = ndarray_store_with_uncompressed_write['store']
symbol = ndarray_store_with_uncompressed_write['symbol']
arr = store.read(symbol).data
new_arr = np.array(list(arr) + [(2,)], dtype=[('fgh', '<i8')])
store.write(symbol, new_arr)
assert np.all(store.read(symbol).data == new_arr)
def test_save_read_simple_ndarray(library):
ndarr = np.ones(1000)
library.write('MYARR', ndarr)
saved_arr = library.read('MYARR').data
assert np.all(ndarr == saved_arr)
@pytest.mark.xfail(reason="code paths in mongo/pymongo have changed and query no longer called")
def test_read_simple_ndarray_from_secondary(library_secondary, library_name):
ndarr = np.ones(1000)
library_secondary.write('MYARR', ndarr)
with patch('pymongo.message.query', side_effect=_query(True, library_name)) as query, \
patch('pymongo.server_description.ServerDescription.server_type', SERVER_TYPE.Mongos):
saved_arr = library_secondary.read('MYARR').data
assert query.call_count > 0
assert np.all(ndarr == saved_arr)
@pytest.mark.parametrize('fw_pointers_cfg', [FwPointersCfg.DISABLED, FwPointersCfg.HYBRID, FwPointersCfg.ENABLED])
def test_save_read_big_1darray(library, fw_pointers_cfg):
with FwPointersCtx(fw_pointers_cfg):
ndarr = np.random.rand(5326, 6020).ravel()
library.write('MYARR', ndarr)
saved_arr = library.read('MYARR').data
assert np.all(ndarr == saved_arr)
@pytest.mark.parametrize('fw_pointers_cfg', [FwPointersCfg.DISABLED, FwPointersCfg.HYBRID, FwPointersCfg.ENABLED])
def test_save_and_resave_reuses_chunks(library, fw_pointers_cfg):
with FwPointersCtx(fw_pointers_cfg):
with patch('arctic.store._ndarray_store._CHUNK_SIZE', 1000):
ndarr = np.random.rand(1024)
library.write('MYARR', ndarr)
saved_arr = library.read('MYARR').data
assert np.all(ndarr == saved_arr)
orig_chunks = mongo_count(library._collection)
assert orig_chunks == 9
# Concatenate more values
ndarr = np.concatenate([ndarr, np.random.rand(10)])
# And change the original values - we're not a simple append
ndarr[0] = ndarr[1] = ndarr[2] = 0
library.write('MYARR', ndarr)
saved_arr = library.read('MYARR').data
assert np.all(ndarr == saved_arr)
# Should contain the original chunks, but not double the number
# of chunks
new_chunks = mongo_count(library._collection)
assert new_chunks == 11
if fw_pointers_cfg in (FwPointersCfg.DISABLED, FwPointersCfg.HYBRID):
# We hit the update (rather than upsert) code path
assert mongo_count(library._collection, filter={'parent': {'$size': 2}}) == 7
if fw_pointers_cfg in (FwPointersCfg.HYBRID, FwPointersCfg.ENABLED):
assert len(library._versions.find_one({'symbol': 'MYARR', 'version': 2})[FW_POINTERS_REFS_KEY]) == 9
@pytest.mark.parametrize('fw_pointers_cfg', [FwPointersCfg.DISABLED, FwPointersCfg.HYBRID, FwPointersCfg.ENABLED])
def test_save_read_big_2darray(library, fw_pointers_cfg):
with FwPointersCtx(fw_pointers_cfg):
ndarr = np.random.rand(5326, 6020)
library.write('MYARR', ndarr)
saved_arr = library.read('MYARR').data
assert np.all(ndarr == saved_arr)
def test_get_info_bson_object(library):
ndarr = np.ones(1000)
library.write('MYARR', ndarr)
assert library.get_info('MYARR')['handler'] == 'NdarrayStore'
def test_save_read_ndarray_with_array_field(library):
ndarr = np.empty(10, dtype=[('A', 'int64'), ('B', 'float64', (2,))])
ndarr['A'] = 1
ndarr['B'] = 2
library.write('MYARR', ndarr)
saved_arr = library.read('MYARR').data
assert np.all(ndarr == saved_arr)
def test_save_read_ndarray(library):
ndarr = np.empty(1000, dtype=[('abc', 'int64')])
library.write('MYARR', ndarr)
saved_arr = library.read('MYARR').data
assert np.all(ndarr == saved_arr)
@pytest.mark.parametrize('fw_pointers_cfg', [FwPointersCfg.DISABLED, FwPointersCfg.HYBRID, FwPointersCfg.ENABLED])
def test_multiple_write(library, fw_pointers_cfg):
with FwPointersCtx(fw_pointers_cfg):
ndarr = np.empty(1000, dtype=[('abc', 'int64')])
foo = np.empty(900, dtype=[('abc', 'int64')])
library.write('MYARR', foo)
v1 = library.read('MYARR').version
library.write('MYARR', ndarr[:900])
v2 = library.read('MYARR').version
library.append('MYARR', ndarr[-100:])
v3 = library.read('MYARR').version
assert np.all(ndarr == library.read('MYARR').data)
assert np.all(ndarr == library.read('MYARR', as_of=v3).data)
assert np.all(foo == library.read('MYARR', as_of=v1).data)
assert np.all(ndarr[:900] == library.read('MYARR', as_of=v2).data)
def test_cant_write_objects():
store = NdarrayStore()
assert not store.can_write(None, None, np.array([object()]))
def test_save_read_large_ndarray(library):
dtype = np.dtype([('abc', 'int64')])
ndarr = np.arange(30 * 1024 * 1024 / dtype.itemsize).view(dtype=dtype)
    assert len(ndarr.tobytes()) > 16 * 1024 * 1024
library.write('MYARR', ndarr)
saved_arr = library.read('MYARR').data
assert np.all(ndarr == saved_arr)
def test_mutable_ndarray(library):
dtype = np.dtype([('abc', 'int64')])
ndarr = np.arange(32).view(dtype=dtype)
ndarr.setflags(write=True)
library.write('MYARR', ndarr)
saved_arr = library.read('MYARR').data
assert saved_arr.flags['WRITEABLE']
@pytest.mark.xfail(reason="delete_version not safe with append...")
def test_delete_version_shouldnt_break_read(library):
data = np.arange(30)
yesterday = dt.utcnow() - dtd(days=1, seconds=1)
_id = bson.ObjectId.from_datetime(yesterday)
with patch("bson.ObjectId", return_value=_id):
library.write('symbol', data, prune_previous_version=False)
# Re-Write the data again
library.write('symbol', data, prune_previous_version=False)
library._delete_version('symbol', 1)
assert repr(library.read('symbol').data) == repr(data)
|
import unittest
import numpy as np
from chainercv.transforms import random_expand
from chainercv.utils import testing
@testing.parameterize(
{'max_ratio': 1},
{'max_ratio': 4},
)
class TestRandomExpand(unittest.TestCase):
def test_random_expand(self):
img = np.random.uniform(-1, 1, size=(3, 64, 32))
out = random_expand(img)
out = random_expand(img, max_ratio=1)
np.testing.assert_equal(out, img)
out, param = random_expand(
img, max_ratio=self.max_ratio, return_param=True)
ratio = param['ratio']
y_offset = param['y_offset']
x_offset = param['x_offset']
np.testing.assert_equal(
out[:, y_offset:y_offset + 64, x_offset:x_offset + 32], img)
self.assertGreaterEqual(ratio, 1)
self.assertLessEqual(ratio, self.max_ratio)
self.assertEqual(out.shape[1], int(64 * ratio))
self.assertEqual(out.shape[2], int(32 * ratio))
out = random_expand(img, max_ratio=2)
@testing.parameterize(
{'fill': 128},
{'fill': (104, 117, 123)},
{'fill': np.random.uniform(255, size=(3, 1, 1))},
)
class TestRandomExpandFill(unittest.TestCase):
def test_random_expand_fill(self):
img = np.random.uniform(-1, 1, size=(3, 64, 32))
while True:
out, param = random_expand(img, fill=self.fill, return_param=True)
y_offset = param['y_offset']
x_offset = param['x_offset']
if y_offset > 0 or x_offset > 0:
break
if isinstance(self.fill, int):
fill = (self.fill,) * 3
else:
fill = self.fill
np.testing.assert_equal(
out[:, 0, 0], np.array(fill).flatten())
testing.run_module(__name__, __file__)
|
import os.path as op
import pytest
import numpy as np
from numpy.testing import assert_array_almost_equal, assert_allclose
import mne
from mne.datasets import testing
from mne import (read_cov, read_forward_solution, read_evokeds,
convert_forward_solution, VectorSourceEstimate)
from mne.cov import regularize
from mne.inverse_sparse import gamma_map
from mne.inverse_sparse.mxne_inverse import make_stc_from_dipoles
from mne.minimum_norm.tests.test_inverse import (assert_stc_res,
assert_var_exp_log)
from mne import pick_types_forward
from mne.utils import assert_stcs_equal, run_tests_if_main, catch_logging
from mne.dipole import Dipole
data_path = testing.data_path(download=False)
fname_evoked = op.join(data_path, 'MEG', 'sample', 'sample_audvis-ave.fif')
fname_cov = op.join(data_path, 'MEG', 'sample', 'sample_audvis-cov.fif')
fname_fwd = op.join(data_path, 'MEG', 'sample',
'sample_audvis_trunc-meg-eeg-oct-6-fwd.fif')
subjects_dir = op.join(data_path, 'subjects')
def _check_stc(stc, evoked, idx, hemi, fwd, dist_limit=0., ratio=50.,
res=None, atol=1e-20):
"""Check correctness."""
assert_array_almost_equal(stc.times, evoked.times, 5)
stc_orig = stc
if isinstance(stc, VectorSourceEstimate):
assert stc.data.any(1).any(1).all() # all dipoles should have some
stc = stc.magnitude()
amps = np.sum(stc.data ** 2, axis=1)
order = np.argsort(amps)[::-1]
amps = amps[order]
verts = np.concatenate(stc.vertices)[order]
hemi_idx = int(order[0] >= len(stc.vertices[1]))
hemis = ['lh', 'rh']
assert hemis[hemi_idx] == hemi
dist = np.linalg.norm(np.diff(fwd['src'][hemi_idx]['rr'][[idx, verts[0]]],
axis=0)[0]) * 1000.
assert dist <= dist_limit
assert amps[0] > ratio * amps[1]
if res is not None:
assert_stc_res(evoked, stc_orig, fwd, res, atol=atol)
@pytest.mark.slowtest
@testing.requires_testing_data
def test_gamma_map_standard():
"""Test Gamma MAP inverse."""
forward = read_forward_solution(fname_fwd)
forward = convert_forward_solution(forward, surf_ori=True)
forward = pick_types_forward(forward, meg=False, eeg=True)
evoked = read_evokeds(fname_evoked, condition=0, baseline=(None, 0),
proj=False)
evoked.resample(50, npad=100)
evoked.crop(tmin=0.1, tmax=0.14) # crop to window around peak
cov = read_cov(fname_cov)
cov = regularize(cov, evoked.info, rank=None)
alpha = 0.5
with catch_logging() as log:
stc = gamma_map(evoked, forward, cov, alpha, tol=1e-4,
xyz_same_gamma=True, update_mode=1, verbose=True)
_check_stc(stc, evoked, 68477, 'lh', fwd=forward)
assert_var_exp_log(log.getvalue(), 20, 22)
with catch_logging() as log:
stc_vec, res = gamma_map(
evoked, forward, cov, alpha, tol=1e-4, xyz_same_gamma=True,
update_mode=1, pick_ori='vector', return_residual=True,
verbose=True)
assert_var_exp_log(log.getvalue(), 20, 22)
assert_stcs_equal(stc_vec.magnitude(), stc)
_check_stc(stc_vec, evoked, 68477, 'lh', fwd=forward, res=res)
stc, res = gamma_map(
evoked, forward, cov, alpha, tol=1e-4, xyz_same_gamma=False,
update_mode=1, pick_ori='vector', return_residual=True)
_check_stc(stc, evoked, 82010, 'lh', fwd=forward, dist_limit=6., ratio=2.,
res=res)
with catch_logging() as log:
dips = gamma_map(evoked, forward, cov, alpha, tol=1e-4,
xyz_same_gamma=False, update_mode=1,
return_as_dipoles=True, verbose=True)
exp_var = assert_var_exp_log(log.getvalue(), 58, 60)
dip_exp_var = np.mean(sum(dip.gof for dip in dips))
assert_allclose(exp_var, dip_exp_var, atol=10) # not really equiv, close
assert (isinstance(dips[0], Dipole))
stc_dip = make_stc_from_dipoles(dips, forward['src'])
assert_stcs_equal(stc.magnitude(), stc_dip)
# force fixed orientation
stc, res = gamma_map(evoked, forward, cov, alpha, tol=1e-4,
xyz_same_gamma=False, update_mode=2,
loose=0, return_residual=True)
_check_stc(stc, evoked, 85739, 'lh', fwd=forward, ratio=20., res=res)
@pytest.mark.slowtest
@testing.requires_testing_data
def test_gamma_map_vol_sphere():
"""Gamma MAP with a sphere forward and volumic source space."""
evoked = read_evokeds(fname_evoked, condition=0, baseline=(None, 0),
proj=False)
evoked.resample(50, npad=100)
evoked.crop(tmin=0.1, tmax=0.16) # crop to window around peak
cov = read_cov(fname_cov)
cov = regularize(cov, evoked.info, rank=None)
info = evoked.info
sphere = mne.make_sphere_model(r0=(0., 0., 0.), head_radius=0.080)
src = mne.setup_volume_source_space(subject=None, pos=30., mri=None,
sphere=(0.0, 0.0, 0.0, 0.08),
bem=None, mindist=5.0,
exclude=2.0, sphere_units='m')
fwd = mne.make_forward_solution(info, trans=None, src=src, bem=sphere,
eeg=False, meg=True)
alpha = 0.5
pytest.raises(ValueError, gamma_map, evoked, fwd, cov, alpha,
loose=0, return_residual=False)
pytest.raises(ValueError, gamma_map, evoked, fwd, cov, alpha,
loose=0.2, return_residual=False)
stc = gamma_map(evoked, fwd, cov, alpha, tol=1e-4,
xyz_same_gamma=False, update_mode=2,
return_residual=False)
assert_array_almost_equal(stc.times, evoked.times, 5)
# Compare orientation obtained using fit_dipole and gamma_map
# for a simulated evoked containing a single dipole
stc = mne.VolSourceEstimate(50e-9 * np.random.RandomState(42).randn(1, 4),
vertices=[stc.vertices[0][:1]],
tmin=stc.tmin,
tstep=stc.tstep)
evoked_dip = mne.simulation.simulate_evoked(fwd, stc, info, cov, nave=1e9,
use_cps=True)
dip_gmap = gamma_map(evoked_dip, fwd, cov, 0.1, return_as_dipoles=True)
amp_max = [np.max(d.amplitude) for d in dip_gmap]
dip_gmap = dip_gmap[np.argmax(amp_max)]
assert (dip_gmap[0].pos[0] in src[0]['rr'][stc.vertices[0]])
dip_fit = mne.fit_dipole(evoked_dip, cov, sphere)[0]
assert (np.abs(np.dot(dip_fit.ori[0], dip_gmap.ori[0])) > 0.99)
run_tests_if_main()
|
import logging
import unittest
import numpy as np
from gensim.models.keyedvectors import KeyedVectors, REAL, pseudorandom_weak_vector
from gensim.test.utils import datapath
import gensim.models.keyedvectors
logger = logging.getLogger(__name__)
class TestKeyedVectors(unittest.TestCase):
def setUp(self):
self.vectors = KeyedVectors.load_word2vec_format(datapath('euclidean_vectors.bin'), binary=True)
self.model_path = datapath("w2v_keyedvectors_load_test.modeldata")
self.vocab_path = datapath("w2v_keyedvectors_load_test.vocab")
def test_most_similar(self):
"""Test most_similar returns expected results."""
expected = [
'conflict',
'administration',
'terrorism',
'call',
'israel'
]
predicted = [result[0] for result in self.vectors.most_similar('war', topn=5)]
self.assertEqual(expected, predicted)
def test_most_similar_topn(self):
"""Test most_similar returns correct results when `topn` is specified."""
self.assertEqual(len(self.vectors.most_similar('war', topn=5)), 5)
self.assertEqual(len(self.vectors.most_similar('war', topn=10)), 10)
predicted = self.vectors.most_similar('war', topn=None)
self.assertEqual(len(predicted), len(self.vectors))
predicted = self.vectors.most_similar('war', topn=0)
self.assertEqual(len(predicted), 0)
predicted = self.vectors.most_similar('war', topn=np.uint8(0))
self.assertEqual(len(predicted), 0)
def test_relative_cosine_similarity(self):
"""Test relative_cosine_similarity returns expected results with an input of a word pair and topn"""
wordnet_syn = [
'good', 'goodness', 'commodity', 'trade_good', 'full', 'estimable', 'honorable',
'respectable', 'beneficial', 'just', 'upright', 'adept', 'expert', 'practiced', 'proficient',
'skillful', 'skilful', 'dear', 'near', 'dependable', 'safe', 'secure', 'right', 'ripe', 'well',
'effective', 'in_effect', 'in_force', 'serious', 'sound', 'salutary', 'honest', 'undecomposed',
'unspoiled', 'unspoilt', 'thoroughly', 'soundly',
] # synonyms for "good" as per wordnet
cos_sim = [self.vectors.similarity("good", syn) for syn in wordnet_syn if syn in self.vectors]
cos_sim = sorted(cos_sim, reverse=True) # cosine_similarity of "good" with wordnet_syn in decreasing order
# computing relative_cosine_similarity of two similar words
rcs_wordnet = self.vectors.similarity("good", "nice") / sum(cos_sim[i] for i in range(10))
rcs = self.vectors.relative_cosine_similarity("good", "nice", 10)
self.assertTrue(rcs_wordnet >= rcs)
self.assertTrue(np.allclose(rcs_wordnet, rcs, 0, 0.125))
# computing relative_cosine_similarity for two non-similar words
rcs = self.vectors.relative_cosine_similarity("good", "worst", 10)
self.assertTrue(rcs < 0.10)
def test_most_similar_raises_keyerror(self):
"""Test most_similar raises KeyError when input is out of vocab."""
with self.assertRaises(KeyError):
self.vectors.most_similar('not_in_vocab')
def test_most_similar_restrict_vocab(self):
"""Test most_similar returns handles restrict_vocab correctly."""
expected = set(self.vectors.index_to_key[:5])
predicted = set(result[0] for result in self.vectors.most_similar('war', topn=5, restrict_vocab=5))
self.assertEqual(expected, predicted)
def test_most_similar_with_vector_input(self):
"""Test most_similar returns expected results with an input vector instead of an input word."""
expected = [
'war',
'conflict',
'administration',
'terrorism',
'call',
]
input_vector = self.vectors['war']
predicted = [result[0] for result in self.vectors.most_similar([input_vector], topn=5)]
self.assertEqual(expected, predicted)
def test_most_similar_to_given(self):
"""Test most_similar_to_given returns correct results."""
predicted = self.vectors.most_similar_to_given('war', ['terrorism', 'call', 'waging'])
self.assertEqual(predicted, 'terrorism')
def test_similar_by_word(self):
"""Test similar_by_word returns expected results."""
expected = [
'conflict',
'administration',
'terrorism',
'call',
'israel',
]
predicted = [result[0] for result in self.vectors.similar_by_word('war', topn=5)]
self.assertEqual(expected, predicted)
def test_similar_by_vector(self):
"""Test similar_by_word returns expected results."""
expected = [
'war',
'conflict',
'administration',
'terrorism',
'call',
]
input_vector = self.vectors['war']
predicted = [result[0] for result in self.vectors.similar_by_vector(input_vector, topn=5)]
self.assertEqual(expected, predicted)
def test_distance(self):
"""Test that distance returns expected values."""
self.assertTrue(np.allclose(self.vectors.distance('war', 'conflict'), 0.06694602))
self.assertEqual(self.vectors.distance('war', 'war'), 0)
def test_similarity(self):
"""Test similarity returns expected value for two words, and for identical words."""
self.assertTrue(np.allclose(self.vectors.similarity('war', 'war'), 1))
self.assertTrue(np.allclose(self.vectors.similarity('war', 'conflict'), 0.93305397))
def test_closer_than(self):
"""Test words_closer_than returns expected value for distinct and identical nodes."""
self.assertEqual(self.vectors.closer_than('war', 'war'), [])
expected = set(['conflict', 'administration'])
self.assertEqual(set(self.vectors.closer_than('war', 'terrorism')), expected)
def test_rank(self):
"""Test rank returns expected value for distinct and identical nodes."""
self.assertEqual(self.vectors.rank('war', 'war'), 1)
self.assertEqual(self.vectors.rank('war', 'terrorism'), 3)
def test_add_single(self):
"""Test that adding entity in a manual way works correctly."""
entities = [f'___some_entity{i}_not_present_in_keyed_vectors___' for i in range(5)]
vectors = [np.random.randn(self.vectors.vector_size) for _ in range(5)]
# Test `add` on already filled kv.
for ent, vector in zip(entities, vectors):
self.vectors.add_vectors(ent, vector)
for ent, vector in zip(entities, vectors):
self.assertTrue(np.allclose(self.vectors[ent], vector))
# Test `add` on empty kv.
kv = KeyedVectors(self.vectors.vector_size)
for ent, vector in zip(entities, vectors):
kv.add_vectors(ent, vector)
for ent, vector in zip(entities, vectors):
self.assertTrue(np.allclose(kv[ent], vector))
def test_add_multiple(self):
"""Test that adding a bulk of entities in a manual way works correctly."""
entities = ['___some_entity{}_not_present_in_keyed_vectors___'.format(i) for i in range(5)]
vectors = [np.random.randn(self.vectors.vector_size) for _ in range(5)]
# Test `add` on already filled kv.
vocab_size = len(self.vectors)
self.vectors.add_vectors(entities, vectors, replace=False)
self.assertEqual(vocab_size + len(entities), len(self.vectors))
for ent, vector in zip(entities, vectors):
self.assertTrue(np.allclose(self.vectors[ent], vector))
# Test `add` on empty kv.
kv = KeyedVectors(self.vectors.vector_size)
kv[entities] = vectors
self.assertEqual(len(kv), len(entities))
for ent, vector in zip(entities, vectors):
self.assertTrue(np.allclose(kv[ent], vector))
def test_add_type(self):
kv = KeyedVectors(2)
assert kv.vectors.dtype == REAL
words, vectors = ["a"], np.array([1., 1.], dtype=np.float64).reshape(1, -1)
kv.add_vectors(words, vectors)
assert kv.vectors.dtype == REAL
def test_set_item(self):
"""Test that __setitem__ works correctly."""
vocab_size = len(self.vectors)
# Add new entity.
entity = '___some_new_entity___'
vector = np.random.randn(self.vectors.vector_size)
self.vectors[entity] = vector
self.assertEqual(len(self.vectors), vocab_size + 1)
self.assertTrue(np.allclose(self.vectors[entity], vector))
# Replace vector for entity in vocab.
vocab_size = len(self.vectors)
vector = np.random.randn(self.vectors.vector_size)
self.vectors['war'] = vector
self.assertEqual(len(self.vectors), vocab_size)
self.assertTrue(np.allclose(self.vectors['war'], vector))
# __setitem__ on several entities.
vocab_size = len(self.vectors)
entities = ['war', '___some_new_entity1___', '___some_new_entity2___', 'terrorism', 'conflict']
vectors = [np.random.randn(self.vectors.vector_size) for _ in range(len(entities))]
self.vectors[entities] = vectors
self.assertEqual(len(self.vectors), vocab_size + 2)
for ent, vector in zip(entities, vectors):
self.assertTrue(np.allclose(self.vectors[ent], vector))
def test_load_model_and_vocab_file_strict(self):
"""Test loading model and voacab files which have decoding errors: strict mode"""
with self.assertRaises(UnicodeDecodeError):
gensim.models.KeyedVectors.load_word2vec_format(
self.model_path, fvocab=self.vocab_path, binary=False, unicode_errors="strict")
def test_load_model_and_vocab_file_replace(self):
"""Test loading model and voacab files which have decoding errors: replace mode"""
model = gensim.models.KeyedVectors.load_word2vec_format(
self.model_path, fvocab=self.vocab_path, binary=False, unicode_errors="replace")
self.assertEqual(model.get_vecattr(u'ありがとう�', 'count'), 123)
self.assertEqual(model.get_vecattr(u'どういたしまして�', 'count'), 789)
self.assertEqual(model.key_to_index[u'ありがとう�'], 0)
self.assertEqual(model.key_to_index[u'どういたしまして�'], 1)
self.assertTrue(np.array_equal(
model.get_vector(u'ありがとう�'), np.array([.6, .6, .6], dtype=np.float32)))
self.assertTrue(np.array_equal(
model.get_vector(u'どういたしまして�'), np.array([.1, .2, .3], dtype=np.float32)))
def test_load_model_and_vocab_file_ignore(self):
"""Test loading model and voacab files which have decoding errors: ignore mode"""
model = gensim.models.KeyedVectors.load_word2vec_format(
self.model_path, fvocab=self.vocab_path, binary=False, unicode_errors="ignore")
self.assertEqual(model.get_vecattr(u'ありがとう', 'count'), 123)
self.assertEqual(model.get_vecattr(u'どういたしまして', 'count'), 789)
self.assertEqual(model.key_to_index[u'ありがとう'], 0)
self.assertEqual(model.key_to_index[u'どういたしまして'], 1)
self.assertTrue(np.array_equal(
model.get_vector(u'ありがとう'), np.array([.6, .6, .6], dtype=np.float32)))
self.assertTrue(np.array_equal(
model.get_vector(u'どういたしまして'), np.array([.1, .2, .3], dtype=np.float32)))
def test_save_reload(self):
randkv = KeyedVectors(vector_size=100)
count = 20
keys = [str(i) for i in range(count)]
weights = [pseudorandom_weak_vector(randkv.vector_size) for _ in range(count)]
randkv.add_vectors(keys, weights)
tmpfiletxt = gensim.test.utils.get_tmpfile("tmp_kv.txt")
randkv.save_word2vec_format(tmpfiletxt, binary=False)
reloadtxtkv = KeyedVectors.load_word2vec_format(tmpfiletxt, binary=False)
self.assertEqual(randkv.index_to_key, reloadtxtkv.index_to_key)
self.assertTrue((randkv.vectors == reloadtxtkv.vectors).all())
tmpfilebin = gensim.test.utils.get_tmpfile("tmp_kv.bin")
randkv.save_word2vec_format(tmpfilebin, binary=True)
reloadbinkv = KeyedVectors.load_word2vec_format(tmpfilebin, binary=True)
self.assertEqual(randkv.index_to_key, reloadbinkv.index_to_key)
self.assertTrue((randkv.vectors == reloadbinkv.vectors).all())
def test_no_header(self):
randkv = KeyedVectors(vector_size=100)
count = 20
keys = [str(i) for i in range(count)]
weights = [pseudorandom_weak_vector(randkv.vector_size) for _ in range(count)]
randkv.add_vectors(keys, weights)
tmpfiletxt = gensim.test.utils.get_tmpfile("tmp_kv.txt")
randkv.save_word2vec_format(tmpfiletxt, binary=False, write_header=False)
reloadtxtkv = KeyedVectors.load_word2vec_format(tmpfiletxt, binary=False, no_header=True)
self.assertEqual(randkv.index_to_key, reloadtxtkv.index_to_key)
self.assertTrue((randkv.vectors == reloadtxtkv.vectors).all())
class Gensim320Test(unittest.TestCase):
def test(self):
path = datapath('old_keyedvectors_320.dat')
vectors = gensim.models.keyedvectors.KeyedVectors.load(path)
self.assertTrue(vectors.get_vector('computer') is not None)
def save_dict_to_word2vec_formated_file(fname, word2vec_dict):
with gensim.utils.open(fname, "wb") as f:
num_words = len(word2vec_dict)
vector_length = len(list(word2vec_dict.values())[0])
header = "%d %d\n" % (num_words, vector_length)
f.write(header.encode(encoding="ascii"))
for word, vector in word2vec_dict.items():
f.write(word.encode())
f.write(' '.encode())
f.write(np.array(vector).astype(np.float32).tobytes())
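# The helper above writes a minimal word2vec-style binary file: an ASCII header
# "<num_words> <vector_length>\n", then, for each word, the word's bytes, a single
# space, and the raw float32 vector bytes (native byte order, as produced by
# numpy's tobytes()), with no separator between records.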
class LoadWord2VecFormatTest(unittest.TestCase):
def assert_dict_equal_to_model(self, d, m):
self.assertEqual(len(d), len(m))
for word in d.keys():
self.assertSequenceEqual(list(d[word]), list(m[word]))
def verify_load2vec_binary_result(self, w2v_dict, binary_chunk_size, limit):
tmpfile = gensim.test.utils.get_tmpfile("tmp_w2v")
save_dict_to_word2vec_formated_file(tmpfile, w2v_dict)
w2v_model = \
gensim.models.keyedvectors._load_word2vec_format(
cls=gensim.models.KeyedVectors,
fname=tmpfile,
binary=True,
limit=limit,
binary_chunk_size=binary_chunk_size)
if limit is None:
limit = len(w2v_dict)
w2v_keys_postprocessed = list(w2v_dict.keys())[:limit]
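        # Keys are compared after lstrip() because the loader strips leading
        # whitespace from words (exercised by test_load_word2vec_format_space_stripping below).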
w2v_dict_postprocessed = {k.lstrip(): w2v_dict[k] for k in w2v_keys_postprocessed}
self.assert_dict_equal_to_model(w2v_dict_postprocessed, w2v_model)
def test_load_word2vec_format_basic(self):
w2v_dict = {"abc": [1, 2, 3],
"cde": [4, 5, 6],
"def": [7, 8, 9]}
self.verify_load2vec_binary_result(w2v_dict, binary_chunk_size=5, limit=None)
self.verify_load2vec_binary_result(w2v_dict, binary_chunk_size=16, limit=None)
self.verify_load2vec_binary_result(w2v_dict, binary_chunk_size=1024, limit=None)
w2v_dict = {"abc": [1, 2, 3],
"cdefg": [4, 5, 6],
"d": [7, 8, 9]}
self.verify_load2vec_binary_result(w2v_dict, binary_chunk_size=5, limit=None)
self.verify_load2vec_binary_result(w2v_dict, binary_chunk_size=16, limit=None)
self.verify_load2vec_binary_result(w2v_dict, binary_chunk_size=1024, limit=None)
def test_load_word2vec_format_limit(self):
w2v_dict = {"abc": [1, 2, 3],
"cde": [4, 5, 6],
"def": [7, 8, 9]}
self.verify_load2vec_binary_result(w2v_dict, binary_chunk_size=5, limit=1)
self.verify_load2vec_binary_result(w2v_dict, binary_chunk_size=16, limit=1)
self.verify_load2vec_binary_result(w2v_dict, binary_chunk_size=1024, limit=1)
w2v_dict = {"abc": [1, 2, 3],
"cde": [4, 5, 6],
"def": [7, 8, 9]}
self.verify_load2vec_binary_result(w2v_dict, binary_chunk_size=5, limit=2)
self.verify_load2vec_binary_result(w2v_dict, binary_chunk_size=16, limit=2)
self.verify_load2vec_binary_result(w2v_dict, binary_chunk_size=1024, limit=2)
w2v_dict = {"abc": [1, 2, 3],
"cdefg": [4, 5, 6],
"d": [7, 8, 9]}
self.verify_load2vec_binary_result(w2v_dict, binary_chunk_size=5, limit=1)
self.verify_load2vec_binary_result(w2v_dict, binary_chunk_size=16, limit=1)
self.verify_load2vec_binary_result(w2v_dict, binary_chunk_size=1024, limit=1)
w2v_dict = {"abc": [1, 2, 3],
"cdefg": [4, 5, 6],
"d": [7, 8, 9]}
self.verify_load2vec_binary_result(w2v_dict, binary_chunk_size=5, limit=2)
self.verify_load2vec_binary_result(w2v_dict, binary_chunk_size=16, limit=2)
self.verify_load2vec_binary_result(w2v_dict, binary_chunk_size=1024, limit=2)
def test_load_word2vec_format_space_stripping(self):
w2v_dict = {"\nabc": [1, 2, 3],
"cdefdg": [4, 5, 6],
"\n\ndef": [7, 8, 9]}
self.verify_load2vec_binary_result(w2v_dict, binary_chunk_size=5, limit=None)
self.verify_load2vec_binary_result(w2v_dict, binary_chunk_size=5, limit=1)
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.DEBUG)
unittest.main()
|
import asyncio
import logging
from types import MappingProxyType
from typing import Any, Dict, Iterable, Optional
from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.core import Context, State
from homeassistant.helpers.typing import HomeAssistantType
from . import (
ATTR_OPTION,
ATTR_OPTIONS,
DOMAIN,
SERVICE_SELECT_OPTION,
SERVICE_SET_OPTIONS,
)
ATTR_GROUP = [ATTR_OPTION, ATTR_OPTIONS]
_LOGGER = logging.getLogger(__name__)
async def _async_reproduce_state(
hass: HomeAssistantType,
state: State,
*,
context: Optional[Context] = None,
reproduce_options: Optional[Dict[str, Any]] = None,
) -> None:
"""Reproduce a single state."""
cur_state = hass.states.get(state.entity_id)
# Return if we can't find entity
if cur_state is None:
_LOGGER.warning("Unable to find entity %s", state.entity_id)
return
# Return if we are already at the right state.
if cur_state.state == state.state and all(
check_attr_equal(cur_state.attributes, state.attributes, attr)
for attr in ATTR_GROUP
):
return
# Set service data
service_data = {ATTR_ENTITY_ID: state.entity_id}
# If options are specified, call SERVICE_SET_OPTIONS
if ATTR_OPTIONS in state.attributes:
service = SERVICE_SET_OPTIONS
service_data[ATTR_OPTIONS] = state.attributes[ATTR_OPTIONS]
await hass.services.async_call(
DOMAIN, service, service_data, context=context, blocking=True
)
# Remove ATTR_OPTIONS from service_data so we can reuse service_data in next call
del service_data[ATTR_OPTIONS]
# Call SERVICE_SELECT_OPTION
service = SERVICE_SELECT_OPTION
service_data[ATTR_OPTION] = state.state
await hass.services.async_call(
DOMAIN, service, service_data, context=context, blocking=True
)
async def async_reproduce_states(
hass: HomeAssistantType,
states: Iterable[State],
*,
context: Optional[Context] = None,
reproduce_options: Optional[Dict[str, Any]] = None,
) -> None:
"""Reproduce Input select states."""
# Reproduce states in parallel.
await asyncio.gather(
*(
_async_reproduce_state(
hass, state, context=context, reproduce_options=reproduce_options
)
for state in states
)
)
def check_attr_equal(
attr1: MappingProxyType, attr2: MappingProxyType, attr_str: str
) -> bool:
"""Return true if the given attributes are equal."""
return attr1.get(attr_str) == attr2.get(attr_str)
|
import re
from reparser import Parser, Token, MatchGroup
from hangups import hangouts_pb2
# Common regex patterns
BOUNDARY_CHARS = r'\s`!()\[\]{{}};:\'".,<>?«»“”‘’*_~='
B_LEFT = r'(?:(?<=[' + BOUNDARY_CHARS + r'])|(?<=^))' # Lookbehind
B_RIGHT = r'(?:(?=[' + BOUNDARY_CHARS + r'])|(?=$))' # Lookahead
# Regex patterns used by token definitions
MARKDOWN_END = r'(?<![\s\\]){tag}' + B_RIGHT
MARKDOWN_START = B_LEFT + r'(?<!\\){tag}(?!\s)(?!{tag})(?=.+%s)' % MARKDOWN_END
markdown_link = r'(?<!\\)\[(?P<link>.+?)\]\((?P<url>.+?)\)'
HTML_END = r'(?i)</{tag}>'
HTML_START = r'(?i)<{tag}>(?=.+%s)' % HTML_END
html_link = r'(?i)<a\s+href=[\'"](?P<url>.+?)[\'"]\s*>(?P<link>.+?)</a>'
html_img = r'(?i)<img\s+src=[\'"](?P<url>.+?)[\'"]\s*/?>'
html_newline = r'(?i)<br\s*/?>'
newline = r'\n|\r\n'
# supported minimal url pattern:
# - http://domain.tld
# - https://domain.tld
# - sub.domain.tld
# - domain.tld/
# custom ports are supported, however there is no port range check
# parens in the path are matched balanced only:
# - me.you/(yeah) is matched as me.you/(yeah)
# - me.you/(nope)) is matched as me.you/(nope)
# this is useful when parsing a wrapped url: (inner.link/path_with_(parens))
auto_link = r"""
\b
(
(?:
(?:
https?://
|
(?<!@)[a-zA-Z0-9\-]{1,63}\.
|
(?=\S+/)
)
(?:[a-zA-Z0-9\-]{1,63}\.)+
[a-zA-Z\-]{2,63}
|
(?:
https?://
|
(?=[\d.]+/)
)
\d{1,3}(?:\.\d{1,3}){3}
)
(?::\d+)?
\b(?!@)
(?:
/
(?:
\(
[^\s/()]*
\(
[^\s/()]+
\)
[^\s/()]*
\)
|
\(
[^\s/()]+
\)
|
[^\s/(){};:!<>«»“”"'‘’`´]*
)*
)*
)
""".replace(' ', '').replace('\n', '')
# Precompiled regex for matching protocol part of URL
url_proto_regex = re.compile(r'(?i)^[a-z][\w-]+:/{1,3}')
# Precompiled regex for removing backslash before escaped Markdown tags
markdown_unescape_regex = re.compile(r'\\([*_~=`\[])')
def markdown(tag):
"""Return start and end regex pattern sequences for simple Markdown tag."""
return (MARKDOWN_START.format(tag=tag), MARKDOWN_END.format(tag=tag))
def html(tag):
"""Return sequence of start and end regex patterns for simple HTML tag"""
return (HTML_START.format(tag=tag), HTML_END.format(tag=tag))
def url_complete(url):
"""If URL doesn't start with protocol, prepend it with http://"""
return url if url_proto_regex.search(url) else 'http://' + url
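# Illustrative behaviour of url_complete (derived from url_proto_regex above):
#   url_complete('example.com/a')       -> 'http://example.com/a'
#   url_complete('https://example.com') -> 'https://example.com'  (unchanged)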
class Tokens:
"""Groups of tokens to be used by ChatMessageParser"""
basic = [
Token('link', auto_link, link_target=MatchGroup('start',
func=url_complete)),
Token('br', newline, text='\n',
segment_type=hangouts_pb2.SEGMENT_TYPE_LINE_BREAK)
]
markdown = [
Token('md_bi1', *markdown(r'\*\*\*'), is_bold=True, is_italic=True),
Token('md_bi2', *markdown(r'___'), is_bold=True, is_italic=True),
Token('md_b1', *markdown(r'\*\*'), is_bold=True),
Token('md_b2', *markdown(r'__'), is_bold=True),
Token('md_i1', *markdown(r'\*'), is_italic=True),
Token('md_i2', *markdown(r'_'), is_italic=True),
Token('md_pre3', *markdown(r'```'), skip=True),
Token('md_pre2', *markdown(r'``'), skip=True),
Token('md_pre1', *markdown(r'`'), skip=True),
Token('md_s', *markdown(r'~~'), is_strikethrough=True),
Token('md_u', *markdown(r'=='), is_underline=True),
Token('md_link', markdown_link, text=MatchGroup('link'),
link_target=MatchGroup('url', func=url_complete))
]
html = [
Token('html_b1', *html(r'b'), is_bold=True),
Token('html_b2', *html(r'strong'), is_bold=True),
Token('html_i1', *html(r'i'), is_italic=True),
Token('html_i2', *html(r'em'), is_italic=True),
Token('html_s1', *html(r's'), is_strikethrough=True),
Token('html_s2', *html(r'strike'), is_strikethrough=True),
Token('html_s3', *html(r'del'), is_strikethrough=True),
Token('html_u1', *html(r'u'), is_underline=True),
Token('html_u2', *html(r'ins'), is_underline=True),
Token('html_u3', *html(r'mark'), is_underline=True),
Token('html_pre', *html(r'pre'), skip=True),
Token('html_link', html_link, text=MatchGroup('link'),
link_target=MatchGroup('url', func=url_complete)),
Token('html_img', html_img, text=MatchGroup('url'),
link_target=MatchGroup('url', func=url_complete)),
Token('html_br', html_newline, text='\n',
segment_type=hangouts_pb2.SEGMENT_TYPE_LINE_BREAK)
]
class ChatMessageParser(Parser):
"""Chat message parser"""
def __init__(self, tokens=None):
# we add default tokens here.
if not tokens:
tokens = Tokens.markdown + Tokens.html + Tokens.basic
# pylint:disable=useless-super-delegation
super().__init__(tokens)
def preprocess(self, text):
"""Preprocess text before parsing"""
# Replace two consecutive spaces with space and non-breakable space
# (this is how original Hangouts client does it to preserve multiple
# spaces)
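        # e.g. 'a  b' -> 'a \xa0b' (a space followed by a non-breaking space)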
        return text.replace('  ', ' \xa0')
def postprocess(self, text):
"""Postprocess text after parsing"""
# Remove backslash before escaped Markdown tags
return markdown_unescape_regex.sub(r'\1', text)
|
import numpy as np
import spacy
from sklearn.linear_model import LogisticRegression
from scattertext import SampleCorpora, produce_scattertext_explorer
from scattertext.CorpusFromPandas import CorpusFromPandas
nlp = spacy.load('en')
convention_df = SampleCorpora.ConventionData2012.get_data()
corpus = CorpusFromPandas(convention_df,
category_col='party',
text_col='text',
nlp=nlp).build()
term_freq_df = corpus.get_term_freq_df()
def scale(ar):
return (ar - ar.min()) / (ar.max() - ar.min())
def zero_centered_scale(ar):
ar[ar > 0] = scale(ar[ar > 0])
ar[ar < 0] = -scale(-ar[ar < 0])
return (ar + 1) / 2.
frequencies_scaled = scale(np.log(term_freq_df.sum(axis=1).values))
scores = corpus.get_logreg_coefs('democrat',
LogisticRegression(penalty='l2', C=10, max_iter=10000, n_jobs=-1))
scores_scaled = zero_centered_scale(scores)
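# scores_scaled now lies in [0, 1]: positive coefficients (favouring the
# 'democrat' category here) map above 0.5, negative ones below, and 0 maps to
# exactly 0.5, so the two parties separate vertically in the plot.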
html = produce_scattertext_explorer(corpus,
category='democrat',
category_name='Democratic',
not_category_name='Republican',
minimum_term_frequency=5,
width_in_pixels=1000,
x_coords=frequencies_scaled,
y_coords=scores_scaled,
scores=scores,
sort_by_dist=False,
metadata=convention_df['speaker'],
x_label='Log frequency',
y_label='L2-penalized logistic regression coef')
fn = 'demo_expected_vs_actual.html'
open(fn, 'wb').write(html.encode('utf-8'))
print('Open %s in Chrome or Firefox.' % fn)
|
import voluptuous as vol
from homeassistant.components import mysensors
from homeassistant.components.switch import DOMAIN, SwitchEntity
from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON
import homeassistant.helpers.config_validation as cv
from .const import DOMAIN as MYSENSORS_DOMAIN, SERVICE_SEND_IR_CODE
ATTR_IR_CODE = "V_IR_SEND"
SEND_IR_CODE_SERVICE_SCHEMA = vol.Schema(
{vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, vol.Required(ATTR_IR_CODE): cv.string}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the mysensors platform for switches."""
device_class_map = {
"S_DOOR": MySensorsSwitch,
"S_MOTION": MySensorsSwitch,
"S_SMOKE": MySensorsSwitch,
"S_LIGHT": MySensorsSwitch,
"S_LOCK": MySensorsSwitch,
"S_IR": MySensorsIRSwitch,
"S_BINARY": MySensorsSwitch,
"S_SPRINKLER": MySensorsSwitch,
"S_WATER_LEAK": MySensorsSwitch,
"S_SOUND": MySensorsSwitch,
"S_VIBRATION": MySensorsSwitch,
"S_MOISTURE": MySensorsSwitch,
"S_WATER_QUALITY": MySensorsSwitch,
}
mysensors.setup_mysensors_platform(
hass,
DOMAIN,
discovery_info,
device_class_map,
async_add_entities=async_add_entities,
)
async def async_send_ir_code_service(service):
"""Set IR code as device state attribute."""
entity_ids = service.data.get(ATTR_ENTITY_ID)
ir_code = service.data.get(ATTR_IR_CODE)
devices = mysensors.get_mysensors_devices(hass, DOMAIN)
if entity_ids:
_devices = [
device
for device in devices.values()
if isinstance(device, MySensorsIRSwitch)
and device.entity_id in entity_ids
]
else:
_devices = [
device
for device in devices.values()
if isinstance(device, MySensorsIRSwitch)
]
kwargs = {ATTR_IR_CODE: ir_code}
for device in _devices:
await device.async_turn_on(**kwargs)
hass.services.async_register(
MYSENSORS_DOMAIN,
SERVICE_SEND_IR_CODE,
async_send_ir_code_service,
schema=SEND_IR_CODE_SERVICE_SCHEMA,
)
class MySensorsSwitch(mysensors.device.MySensorsEntity, SwitchEntity):
"""Representation of the value of a MySensors Switch child node."""
@property
def assumed_state(self):
"""Return True if unable to access real state of entity."""
return self.gateway.optimistic
@property
def current_power_w(self):
"""Return the current power usage in W."""
set_req = self.gateway.const.SetReq
return self._values.get(set_req.V_WATT)
@property
def is_on(self):
"""Return True if switch is on."""
return self._values.get(self.value_type) == STATE_ON
async def async_turn_on(self, **kwargs):
"""Turn the switch on."""
self.gateway.set_child_value(
self.node_id, self.child_id, self.value_type, 1, ack=1
)
if self.gateway.optimistic:
# Optimistically assume that switch has changed state
self._values[self.value_type] = STATE_ON
self.async_write_ha_state()
async def async_turn_off(self, **kwargs):
"""Turn the switch off."""
self.gateway.set_child_value(
self.node_id, self.child_id, self.value_type, 0, ack=1
)
if self.gateway.optimistic:
# Optimistically assume that switch has changed state
self._values[self.value_type] = STATE_OFF
self.async_write_ha_state()
class MySensorsIRSwitch(MySensorsSwitch):
"""IR switch child class to MySensorsSwitch."""
def __init__(self, *args):
"""Set up instance attributes."""
super().__init__(*args)
self._ir_code = None
@property
def is_on(self):
"""Return True if switch is on."""
set_req = self.gateway.const.SetReq
return self._values.get(set_req.V_LIGHT) == STATE_ON
async def async_turn_on(self, **kwargs):
"""Turn the IR switch on."""
set_req = self.gateway.const.SetReq
if ATTR_IR_CODE in kwargs:
self._ir_code = kwargs[ATTR_IR_CODE]
self.gateway.set_child_value(
self.node_id, self.child_id, self.value_type, self._ir_code
)
self.gateway.set_child_value(
self.node_id, self.child_id, set_req.V_LIGHT, 1, ack=1
)
if self.gateway.optimistic:
# Optimistically assume that switch has changed state
self._values[self.value_type] = self._ir_code
self._values[set_req.V_LIGHT] = STATE_ON
self.async_write_ha_state()
# Turn off switch after switch was turned on
await self.async_turn_off()
async def async_turn_off(self, **kwargs):
"""Turn the IR switch off."""
set_req = self.gateway.const.SetReq
self.gateway.set_child_value(
self.node_id, self.child_id, set_req.V_LIGHT, 0, ack=1
)
if self.gateway.optimistic:
# Optimistically assume that switch has changed state
self._values[set_req.V_LIGHT] = STATE_OFF
self.async_write_ha_state()
async def async_update(self):
"""Update the controller with the latest value from a sensor."""
await super().async_update()
self._ir_code = self._values.get(self.value_type)
|
from datetime import datetime as dt
import numpy as np
import pandas as pd
import pytest
from mock import create_autospec, sentinel, call
from pymongo import ReadPreference
from pymongo.collection import Collection
from arctic._compression import decompress
from arctic.date import CLOSED_OPEN
from arctic.date._daterange import DateRange
from arctic.date._mktz import mktz
from arctic.exceptions import UnorderedDataException
from arctic.tickstore.tickstore import TickStore, IMAGE_DOC, IMAGE, START, \
DTYPE, END, COUNT, SYMBOL, COLUMNS, ROWMASK, DATA, INDEX, IMAGE_TIME
def test_mongo_date_range_query():
self = create_autospec(TickStore)
self._collection = create_autospec(Collection)
self._symbol_query.return_value = {"sy": {"$in" : ["s1" , "s2"]}}
self._collection.aggregate.return_value = iter([{"_id": "s1", "start": dt(2014, 1, 1, 0, 0, tzinfo=mktz())},
{"_id": "s2", "start": dt(2014, 1, 1, 12, 0, tzinfo=mktz())}])
self._collection.find_one.side_effect = [
{'e': dt(2014, 1, 1, 15, 0, tzinfo=mktz())},
{'e': dt(2014, 1, 2, 12, 0, tzinfo=mktz())}]
query = TickStore._mongo_date_range_query(self, 'sym', DateRange(dt(2014, 1, 2, 0, 0, tzinfo=mktz()),
dt(2014, 1, 3, 0, 0, tzinfo=mktz())))
assert self._collection.aggregate.call_args_list == [call([
{"$match": {"s": {"$lte": dt(2014, 1, 2, 0, 0, tzinfo=mktz())}, "sy": {"$in" : ["s1" , "s2"]}}},
{"$project": {"_id": 0, "s": 1, "sy": 1}},
{"$group": {"_id": "$sy", "start": {"$max": "$s"}}},
{"$sort": {"start": 1}}])]
assert self._collection.find_one.call_args_list == [
call({'sy': 's1', 's': dt(2014, 1, 1, 0, 0, tzinfo=mktz())}, {'e': 1}),
call({'sy': 's2', 's': dt(2014, 1, 1, 12, 0, tzinfo=mktz())}, {'e': 1})]
assert query == {'s': {'$gte': dt(2014, 1, 1, 12, 0, tzinfo=mktz()), '$lte': dt(2014, 1, 3, 0, 0, tzinfo=mktz())}}
def test_mongo_date_range_query_asserts():
self = create_autospec(TickStore)
self._collection = create_autospec(Collection)
self._collection.find_one.return_value = {'s': sentinel.start}
with pytest.raises(AssertionError):
TickStore._mongo_date_range_query(self, 'sym', DateRange(None, None, CLOSED_OPEN))
with pytest.raises(AssertionError):
TickStore._mongo_date_range_query(self, 'sym', DateRange(dt(2014, 1, 1), None))
with pytest.raises(AssertionError):
TickStore._mongo_date_range_query(self, 'sym', DateRange(None, dt(2014, 1, 1)))
def test_strify_tickstore():
# Fix GH issue 49 - str(tick library) fails in IPython
self = create_autospec(TickStore)
self._arctic_lib = sentinel.library
assert 'sentinel.library' in TickStore.__str__(self)
def test_tickstore_to_bucket_no_image():
symbol = 'SYM'
data = [{'index': dt(2014, 1, 1, 0, 1, tzinfo=mktz()), 'A': 124, 'D': 0},
{'index': dt(2014, 1, 1, 0, 2, tzinfo=mktz()), 'A': 125, 'B': 27.2}]
bucket, final_image = TickStore._to_bucket(data, symbol, None)
assert bucket[COUNT] == 2
assert bucket[END] == dt(2014, 1, 1, 0, 2, tzinfo=mktz())
assert bucket[SYMBOL] == symbol
assert bucket[START] == dt(2014, 1, 1, 0, 1, tzinfo=mktz())
assert 'A' in bucket[COLUMNS]
assert IMAGE_DOC not in bucket
assert not final_image
def test_tickstore_to_bucket_with_image():
symbol = 'SYM'
tz = 'UTC'
initial_image = {'index': dt(2014, 1, 1, 0, 0, tzinfo=mktz(tz)), 'A': 123, 'B': 54.4, 'C': 'DESC'}
data = [{'index': dt(2014, 1, 1, 0, 1, tzinfo=mktz(tz)), 'A': 124, 'D': 0},
{'index': dt(2014, 1, 1, 0, 2, tzinfo=mktz(tz)), 'A': 125, 'B': 27.2}]
bucket, final_image = TickStore._to_bucket(data, symbol, initial_image)
assert bucket[COUNT] == 2
assert bucket[END] == dt(2014, 1, 1, 0, 2, tzinfo=mktz(tz))
assert set(bucket[COLUMNS]) == set(('A', 'B', 'D'))
assert set(bucket[COLUMNS]['A']) == set((ROWMASK, DTYPE, DATA))
assert get_coldata(bucket[COLUMNS]['A']) == ([124, 125], [1, 1, 0, 0, 0, 0, 0, 0])
assert get_coldata(bucket[COLUMNS]['B']) == ([27.2], [0, 1, 0, 0, 0, 0, 0, 0])
assert get_coldata(bucket[COLUMNS]['D']) == ([0], [1, 0, 0, 0, 0, 0, 0, 0])
index = [dt.fromtimestamp(int(i/1000)).replace(tzinfo=mktz(tz)) for i in
list(np.cumsum(np.frombuffer(decompress(bucket[INDEX]), dtype='uint64')))]
assert index == [i['index'] for i in data]
assert bucket[COLUMNS]['A'][DTYPE] == 'int64'
assert bucket[COLUMNS]['B'][DTYPE] == 'float64'
assert bucket[SYMBOL] == symbol
assert bucket[START] == initial_image['index']
assert bucket[IMAGE_DOC][IMAGE] == initial_image
assert bucket[IMAGE_DOC] == {IMAGE: initial_image,
IMAGE_TIME: initial_image['index']}
assert final_image == {'index': data[-1]['index'], 'A': 125, 'B': 27.2, 'C': 'DESC', 'D': 0}
def test_tickstore_to_bucket_always_forwards():
symbol = 'SYM'
tz = 'UTC'
initial_image = {'index': dt(2014, 1, 1, 0, 0, tzinfo=mktz(tz)), 'A': 123, 'B': 54.4, 'C': 'DESC'}
data = [{'index': dt(2014, 2, 1, 0, 1, tzinfo=mktz(tz)), 'A': 124, 'D': 0},
{'index': dt(2014, 1, 1, 0, 1, tzinfo=mktz(tz)), 'A': 125, 'B': 27.2}]
with pytest.raises(UnorderedDataException):
TickStore._to_bucket(data, symbol, initial_image)
def test_tickstore_to_bucket_always_forwards_image():
symbol = 'SYM'
tz = 'UTC'
initial_image = {'index': dt(2014, 2, 1, 0, 0, tzinfo=mktz(tz)), 'A': 123, 'B': 54.4, 'C': 'DESC'}
data = [{'index': dt(2014, 1, 1, 0, 1, tzinfo=mktz(tz)), 'A': 124, 'D': 0}]
with pytest.raises(UnorderedDataException) as e:
TickStore._to_bucket(data, symbol, initial_image)
def get_coldata(coldata):
""" return values and rowmask """
dtype = np.dtype(coldata[DTYPE])
values = np.frombuffer(decompress(coldata[DATA]), dtype=dtype)
rowmask = np.unpackbits(np.frombuffer(decompress(coldata[ROWMASK]), dtype='uint8'))
return list(values), list(rowmask)
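# Column data in a bucket is stored compressed: DATA holds the raw values and
# ROWMASK a bit-packed presence mask, which is why the expected masks in these
# tests are padded to a multiple of 8 bits (e.g. [1, 1, 0, 0, 0, 0, 0, 0]).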
def test_tickstore_pandas_to_bucket_image():
symbol = 'SYM'
tz = 'UTC'
initial_image = {'index': dt(2014, 1, 1, 0, 0, tzinfo=mktz(tz)), 'A': 123, 'B': 54.4, 'C': 'DESC'}
data = [{'A': 120, 'D': 1}, {'A': 122, 'B': 2.0}, {'A': 3, 'B': 3.0, 'D': 1}]
tick_index = [dt(2014, 1, 2, 0, 0, tzinfo=mktz(tz)),
dt(2014, 1, 3, 0, 0, tzinfo=mktz(tz)),
dt(2014, 1, 4, 0, 0, tzinfo=mktz(tz))]
data = pd.DataFrame(data, index=tick_index)
bucket, final_image = TickStore._pandas_to_bucket(data, symbol, initial_image)
assert final_image == {'index': dt(2014, 1, 4, 0, 0, tzinfo=mktz(tz)), 'A': 3, 'B': 3.0, 'C': 'DESC', 'D': 1}
assert IMAGE_DOC in bucket
assert bucket[COUNT] == 3
assert bucket[START] == dt(2014, 1, 1, 0, 0, tzinfo=mktz(tz))
assert bucket[END] == dt(2014, 1, 4, 0, 0, tzinfo=mktz(tz))
assert set(bucket[COLUMNS]) == set(('A', 'B', 'D'))
assert set(bucket[COLUMNS]['A']) == set((ROWMASK, DTYPE, DATA))
assert get_coldata(bucket[COLUMNS]['A']) == ([120, 122, 3], [1, 1, 1, 0, 0, 0, 0, 0])
values, rowmask = get_coldata(bucket[COLUMNS]['B'])
assert np.isnan(values[0]) and values[1:] == [2.0, 3.0]
assert rowmask == [1, 1, 1, 0, 0, 0, 0, 0]
values, rowmask = get_coldata(bucket[COLUMNS]['D'])
assert np.isnan(values[1])
assert values[0] == 1 and values[2] == 1
assert rowmask == [1, 1, 1, 0, 0, 0, 0, 0]
index = [dt.fromtimestamp(int(i/1000)).replace(tzinfo=mktz(tz)) for i in
list(np.cumsum(np.frombuffer(decompress(bucket[INDEX]), dtype='uint64')))]
assert index == tick_index
assert bucket[COLUMNS]['A'][DTYPE] == 'int64'
assert bucket[COLUMNS]['B'][DTYPE] == 'float64'
assert bucket[SYMBOL] == symbol
assert bucket[IMAGE_DOC] == {IMAGE: initial_image,
IMAGE_TIME: initial_image['index']}
def test__read_preference__allow_secondary_true():
self = create_autospec(TickStore)
assert TickStore._read_preference(self, True) == ReadPreference.NEAREST
def test__read_preference__allow_secondary_false():
self = create_autospec(TickStore)
assert TickStore._read_preference(self, False) == ReadPreference.PRIMARY
def test__read_preference__default_true():
self = create_autospec(TickStore, _allow_secondary=True)
assert TickStore._read_preference(self, None) == ReadPreference.NEAREST
def test__read_preference__default_false():
self = create_autospec(TickStore, _allow_secondary=False)
assert TickStore._read_preference(self, None) == ReadPreference.PRIMARY
|
from argparse import ArgumentTypeError
import mock
from pytest import raises
from paasta_tools.cli.cmds import secret
def test_add_subparser():
mock_subparsers = mock.Mock()
secret.add_subparser(mock_subparsers)
assert mock_subparsers.add_parser.called
mock_subparsers.add_parser.return_value.set_defaults.assert_called_with(
command=secret.paasta_secret
)
def test_secret_name_for_env():
assert secret.secret_name_for_env("test-secret2") == "TEST_SECRET2"
assert secret.secret_name_for_env("test.secret.foo") == "TEST_SECRET_FOO"
def test_print_paasta_helper():
secret.print_paasta_helper("/blah/what", "keepithidden", False)
secret.print_paasta_helper("/blah/what", "keepithidden", True)
def test_get_plaintext_input():
with mock.patch("sys.stdin", autospec=True) as mock_stdin, mock.patch(
"paasta_tools.cli.cmds.secret.input", autospec=False
) as mock_input:
mock_args = mock.Mock(plain_text=False, stdin=True)
mock_stdin.buffer.read.return_value = b"SECRET_SQUIRREL"
assert secret.get_plaintext_input(mock_args) == b"SECRET_SQUIRREL"
mock_args = mock.Mock(plain_text="SECRET_CAT", stdin=False)
assert secret.get_plaintext_input(mock_args) == b"SECRET_CAT"
mock_args = mock.Mock(plain_text=False, stdin=False)
mock_input.side_effect = ["DANGER_DOG", EOFError]
assert secret.get_plaintext_input(mock_args) == b"DANGER_DOG"
def test_is_service_folder():
with mock.patch("os.path.isfile", autospec=True) as mock_is_file:
mock_is_file.return_value = True
assert secret.is_service_folder(soa_dir="/nail", service_name="universe")
mock_is_file.assert_called_with("/nail/universe/service.yaml")
mock_is_file.return_value = False
assert not secret.is_service_folder(soa_dir="/nail", service_name="universe")
def test__get_secret_provider_for_service():
with mock.patch("os.getcwd", autospec=True) as mock_getcwd, mock.patch(
"paasta_tools.cli.cmds.secret.is_service_folder", autospec=True
) as mock_is_service_folder, mock.patch(
"paasta_tools.cli.cmds.secret.load_system_paasta_config", autospec=True
) as mock_load_system_paasta_config, mock.patch(
"paasta_tools.cli.cmds.secret.list_clusters", autospec=True
) as mock_list_clusters, mock.patch(
"paasta_tools.cli.cmds.secret.get_secret_provider", autospec=True
) as mock_get_secret_provider:
mock_config = mock.Mock()
mock_load_system_paasta_config.return_value = mock_config
mock_is_service_folder.return_value = False
with raises(SystemExit):
secret._get_secret_provider_for_service("universe")
mock_is_service_folder.return_value = True
ret = secret._get_secret_provider_for_service(
"universe", cluster_names="mesosstage,norcal-devc"
)
assert ret == mock_get_secret_provider.return_value
mock_get_secret_provider.assert_called_with(
secret_provider_name=mock_config.get_secret_provider_name.return_value,
soa_dir=mock_getcwd.return_value,
service_name="universe",
cluster_names=["mesosstage", "norcal-devc"],
secret_provider_kwargs={
"vault_cluster_config": mock_config.get_vault_cluster_config.return_value
},
)
ret = secret._get_secret_provider_for_service("universe", cluster_names=None)
assert ret == mock_get_secret_provider.return_value
mock_get_secret_provider.assert_called_with(
secret_provider_name=mock_config.get_secret_provider_name.return_value,
soa_dir=mock_getcwd.return_value,
service_name="universe",
cluster_names=mock_list_clusters.return_value,
secret_provider_kwargs={
"vault_cluster_config": mock_config.get_vault_cluster_config.return_value
},
)
def test_check_secret_name():
assert secret.check_secret_name("Yelp-Yay-2019_20_08") == "Yelp-Yay-2019_20_08"
assert secret.check_secret_name("KGZ-2019_20_08") == "KGZ-2019_20_08"
with raises(ArgumentTypeError):
secret.check_secret_name("_HOPE_THIS_IS_OK")
with raises(ArgumentTypeError):
secret.check_secret_name("--OK_OR_NOT")
with raises(ArgumentTypeError):
secret.check_secret_name("-Is-This_OK")
with raises(ArgumentTypeError):
secret.check_secret_name("That's a bad name...")
with raises(ArgumentTypeError):
secret.check_secret_name("still.bad.name!")
with raises(ArgumentTypeError):
secret.check_secret_name("youdidntconvincethisfunction&me")
with raises(ArgumentTypeError):
secret.check_secret_name("are_you_kidding?")
def test_paasta_secret():
with mock.patch(
"paasta_tools.cli.cmds.secret._get_secret_provider_for_service", autospec=True
) as mock_get_secret_provider_for_service, mock.patch(
"paasta_tools.cli.cmds.secret.decrypt_secret", autospec=True
) as mock_decrypt_secret, mock.patch(
"paasta_tools.cli.cmds.secret.get_plaintext_input", autospec=True
) as mock_get_plaintext_input, mock.patch(
"paasta_tools.cli.cmds.secret._log_audit", autospec=True
) as mock_log_audit:
mock_secret_provider = mock.Mock(secret_dir="/nail/blah")
mock_get_secret_provider_for_service.return_value = mock_secret_provider
mock_args = mock.Mock(
action="add",
secret_name="theonering",
service="middleearth",
clusters="mesosstage",
shared=False,
cross_env_motivation="because ...",
)
secret.paasta_secret(mock_args)
mock_get_secret_provider_for_service.assert_called_with(
"middleearth", cluster_names="mesosstage"
)
mock_secret_provider.write_secret.assert_called_with(
action="add",
secret_name="theonering",
plaintext=mock_get_plaintext_input.return_value,
cross_environment_motivation="because ...",
)
mock_log_audit.assert_called_with(
action="add-secret",
action_details={"secret_name": "theonering", "clusters": "mesosstage"},
service="middleearth",
)
mock_args = mock.Mock(
action="update",
secret_name="theonering",
service="middleearth",
clusters="mesosstage",
shared=False,
cross_env_motivation=None,
)
secret.paasta_secret(mock_args)
mock_get_secret_provider_for_service.assert_called_with(
"middleearth", cluster_names="mesosstage"
)
mock_secret_provider.write_secret.assert_called_with(
action="update",
secret_name="theonering",
plaintext=mock_get_plaintext_input.return_value,
cross_environment_motivation=None,
)
mock_log_audit.assert_called_with(
action="update-secret",
action_details={"secret_name": "theonering", "clusters": "mesosstage"},
service="middleearth",
)
mock_args = mock.Mock(
action="decrypt",
secret_name="theonering",
service="middleearth",
clusters="mesosstage",
shared=False,
)
secret.paasta_secret(mock_args)
mock_get_secret_provider_for_service.assert_called_with(
"middleearth", cluster_names="mesosstage"
)
mock_decrypt_secret.assert_called_with(
secret_provider=mock_secret_provider, secret_name="theonering"
)
mock_args = mock.Mock(
action="add",
secret_name="theonering",
service=None,
clusters="mesosstage",
shared=True,
)
secret.paasta_secret(mock_args)
mock_get_secret_provider_for_service.assert_called_with(
secret.SHARED_SECRET_SERVICE, cluster_names="mesosstage"
)
mock_decrypt_secret.assert_called_with(
secret_provider=mock_secret_provider, secret_name="theonering"
)
mock_log_audit.assert_called_with(
action="add-secret",
action_details={"secret_name": "theonering", "clusters": "mesosstage"},
service="_shared",
)
mock_args = mock.Mock(
action="add",
secret_name="theonering",
service=None,
clusters=None,
shared=True,
)
with raises(SystemExit):
secret.paasta_secret(mock_args)
def test_decrypt_secret():
mock_secret_provider = mock.Mock(cluster_names=["mesosstage", "devc"])
with raises(SystemExit):
secret.decrypt_secret(mock_secret_provider, "theonering")
mock_secret_provider = mock.Mock(cluster_names=["mesosstage"])
assert (
secret.decrypt_secret(mock_secret_provider, "theonering")
== mock_secret_provider.decrypt_secret.return_value
)
mock_secret_provider.decrypt_secret.assert_called_with("theonering")
|
import pytest
import sh
from molecule import config
from molecule.verifier.lint import flake8
@pytest.fixture
def _patched_get_tests(mocker):
m = mocker.patch('molecule.verifier.lint.flake8.Flake8._get_tests')
m.return_value = ['test1', 'test2', 'test3']
return m
@pytest.fixture
def _verifier_lint_section_data():
return {
'verifier': {
'name': 'testinfra',
'lint': {
'name': 'flake8',
'options': {
'foo': 'bar',
},
'env': {
'FOO': 'bar',
},
}
}
}
# NOTE(retr0h): The use of the `patched_config_validate` fixture, disables
# config.Config._validate from executing. Thus preventing odd side-effects
# throughout patched.assert_called unit tests.
@pytest.fixture
def _instance(patched_config_validate, config_instance):
return flake8.Flake8(config_instance)
def test_config_private_member(_instance):
assert isinstance(_instance._config, config.Config)
def test_default_options_property(_instance):
assert {} == _instance.default_options
def test_default_env_property(_instance):
assert 'MOLECULE_FILE' in _instance.default_env
assert 'MOLECULE_INVENTORY_FILE' in _instance.default_env
assert 'MOLECULE_SCENARIO_DIRECTORY' in _instance.default_env
assert 'MOLECULE_INSTANCE_CONFIG' in _instance.default_env
@pytest.mark.parametrize(
'config_instance', ['_verifier_lint_section_data'], indirect=True)
def test_env_property(_instance):
assert 'bar' == _instance.env['FOO']
@pytest.mark.parametrize(
'config_instance', ['_verifier_lint_section_data'], indirect=True)
def test_name_property(_instance):
assert 'flake8' == _instance.name
def test_enabled_property(_instance):
assert _instance.enabled
@pytest.mark.parametrize(
'config_instance', ['_verifier_lint_section_data'], indirect=True)
def test_options_property(_instance):
x = {
'foo': 'bar',
}
assert x == _instance.options
@pytest.mark.parametrize(
'config_instance', ['_verifier_lint_section_data'], indirect=True)
def test_options_property_handles_cli_args(_instance):
_instance._config.args = {'debug': True}
x = {
'foo': 'bar',
}
# Does nothing. The `flake8` command does not support
# a `debug` flag.
assert x == _instance.options
@pytest.mark.parametrize(
'config_instance', ['_verifier_lint_section_data'], indirect=True)
def test_bake(_instance):
_instance._tests = ['test1', 'test2', 'test3']
_instance.bake()
x = '{} --foo=bar test1 test2 test3'.format(str(sh.flake8))
assert x == _instance._flake8_command
def test_execute(patched_logger_info, patched_logger_success,
patched_run_command, _instance):
_instance._tests = ['test1', 'test2', 'test3']
_instance._flake8_command = 'patched-command'
_instance.execute()
patched_run_command.assert_called_once_with('patched-command', debug=False)
msg = 'Executing Flake8 on files found in {}/...'.format(
_instance._config.verifier.directory)
patched_logger_info.assert_called_once_with(msg)
msg = 'Lint completed successfully.'
patched_logger_success.assert_called_once_with(msg)
def test_execute_does_not_execute(patched_run_command, patched_logger_warn,
_instance):
_instance._config.config['verifier']['lint']['enabled'] = False
_instance.execute()
assert not patched_run_command.called
msg = 'Skipping, verifier_lint is disabled.'
patched_logger_warn.assert_called_once_with(msg)
def test_does_not_execute_without_tests(patched_run_command,
patched_logger_warn, _instance):
_instance.execute()
assert not patched_run_command.called
msg = 'Skipping, no tests found.'
patched_logger_warn.assert_called_once_with(msg)
@pytest.mark.parametrize(
'config_instance', ['_verifier_lint_section_data'], indirect=True)
def test_execute_bakes(patched_run_command, _instance):
_instance._tests = ['test1', 'test2', 'test3']
_instance.execute()
assert _instance._flake8_command is not None
cmd = '{} --foo=bar test1 test2 test3'.format(str(sh.flake8))
patched_run_command.assert_called_once_with(cmd, debug=False)
def test_executes_catches_and_exits_return_code(patched_run_command,
_patched_get_tests, _instance):
patched_run_command.side_effect = sh.ErrorReturnCode_1(sh.flake8, b'', b'')
with pytest.raises(SystemExit) as e:
_instance.execute()
assert 1 == e.value.code
|
import json
import time
from homeassistant import config_entries, setup
from homeassistant.components.flo.const import DOMAIN
from homeassistant.const import CONTENT_TYPE_JSON
from .common import TEST_EMAIL_ADDRESS, TEST_PASSWORD, TEST_TOKEN, TEST_USER_ID
from tests.async_mock import patch
async def test_form(hass, aioclient_mock_fixture):
"""Test we get the form."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
with patch(
"homeassistant.components.flo.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.flo.async_setup_entry", return_value=True
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"], {"username": TEST_USER_ID, "password": TEST_PASSWORD}
)
assert result2["type"] == "create_entry"
assert result2["title"] == "Home"
assert result2["data"] == {"username": TEST_USER_ID, "password": TEST_PASSWORD}
await hass.async_block_till_done()
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_cannot_connect(hass, aioclient_mock):
"""Test we handle cannot connect error."""
now = round(time.time())
# Mocks a failed login response for flo.
aioclient_mock.post(
"https://api.meetflo.com/api/v1/users/auth",
json=json.dumps(
{
"token": TEST_TOKEN,
"tokenPayload": {
"user": {"user_id": TEST_USER_ID, "email": TEST_EMAIL_ADDRESS},
"timestamp": now,
},
"tokenExpiration": 86400,
"timeNow": now,
}
),
headers={"Content-Type": CONTENT_TYPE_JSON},
status=400,
)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"], {"username": "test-username", "password": "test-password"}
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "cannot_connect"}
|
from http import client
from radicale.log import logger
NOT_ALLOWED = (
client.FORBIDDEN, (("Content-Type", "text/plain"),),
"Access to the requested resource forbidden.")
FORBIDDEN = (
client.FORBIDDEN, (("Content-Type", "text/plain"),),
"Action on the requested resource refused.")
BAD_REQUEST = (
client.BAD_REQUEST, (("Content-Type", "text/plain"),), "Bad Request")
NOT_FOUND = (
client.NOT_FOUND, (("Content-Type", "text/plain"),),
"The requested resource could not be found.")
CONFLICT = (
client.CONFLICT, (("Content-Type", "text/plain"),),
"Conflict in the request.")
METHOD_NOT_ALLOWED = (
client.METHOD_NOT_ALLOWED, (("Content-Type", "text/plain"),),
"The method is not allowed on the requested resource.")
PRECONDITION_FAILED = (
client.PRECONDITION_FAILED,
(("Content-Type", "text/plain"),), "Precondition failed.")
REQUEST_TIMEOUT = (
client.REQUEST_TIMEOUT, (("Content-Type", "text/plain"),),
"Connection timed out.")
REQUEST_ENTITY_TOO_LARGE = (
client.REQUEST_ENTITY_TOO_LARGE, (("Content-Type", "text/plain"),),
"Request body too large.")
REMOTE_DESTINATION = (
client.BAD_GATEWAY, (("Content-Type", "text/plain"),),
"Remote destination not supported.")
DIRECTORY_LISTING = (
client.FORBIDDEN, (("Content-Type", "text/plain"),),
"Directory listings are not supported.")
INTERNAL_SERVER_ERROR = (
client.INTERNAL_SERVER_ERROR, (("Content-Type", "text/plain"),),
"A server error occurred. Please contact the administrator.")
DAV_HEADERS = "1, 2, 3, calendar-access, addressbook, extended-mkcol"
def decode_request(configuration, environ, text):
"""Try to magically decode ``text`` according to given ``environ``."""
# List of charsets to try
charsets = []
# First append content charset given in the request
content_type = environ.get("CONTENT_TYPE")
if content_type and "charset=" in content_type:
charsets.append(
content_type.split("charset=")[1].split(";")[0].strip())
# Then append default Radicale charset
charsets.append(configuration.get("encoding", "request"))
# Then append various fallbacks
charsets.append("utf-8")
charsets.append("iso8859-1")
# Remove duplicates
for i, s in reversed(list(enumerate(charsets))):
if s in charsets[:i]:
del charsets[i]
# Try to decode
for charset in charsets:
try:
return text.decode(charset)
except UnicodeDecodeError:
pass
raise UnicodeDecodeError("decode_request", text, 0, len(text),
"all codecs failed [%s]" % ", ".join(charsets))
def read_raw_request_body(configuration, environ):
content_length = int(environ.get("CONTENT_LENGTH") or 0)
if not content_length:
return b""
content = environ["wsgi.input"].read(content_length)
if len(content) < content_length:
raise RuntimeError("Request body too short: %d" % len(content))
return content
def read_request_body(configuration, environ):
content = decode_request(
configuration, environ, read_raw_request_body(configuration, environ))
logger.debug("Request content:\n%s", content)
return content
|
import json
import unittest
import mock
from httpretty import httpretty
from kalliope.core.NeuronModule import NeuronModule, InvalidParameterException
from kalliope.neurons.uri.uri import Uri
class TestUri(unittest.TestCase):
def setUp(self):
self.test_url = "http://kalliope.fr/voices/"
def testGet(self):
expected_content = '{"voice": "nico"}'
httpretty.enable()
httpretty.register_uri(httpretty.GET, self.test_url, body=expected_content)
parameters = {
"url": self.test_url
}
with mock.patch.object(NeuronModule, 'say', return_value=None) as mock_method:
uri_neuron = Uri(**parameters)
self.assertEqual(uri_neuron.text, expected_content)
def testGetRaw(self):
expected_content = b'raw line'
httpretty.enable()
httpretty.register_uri(httpretty.GET, self.test_url, body=expected_content)
parameters = {
"url": self.test_url
}
with mock.patch.object(NeuronModule, 'say', return_value=None) as mock_method:
uri_neuron = Uri(**parameters)
self.assertEqual(uri_neuron.content, expected_content)
def testPost(self):
expected_content = '{"voice": "nico"}'
httpretty.enable()
httpretty.register_uri(httpretty.POST, self.test_url, body=expected_content)
parameters = {
"url": self.test_url,
"method": "POST"
}
with mock.patch.object(NeuronModule, 'say', return_value=None) as mock_method:
uri_neuron = Uri(**parameters)
self.assertEqual(uri_neuron.text, expected_content)
def testPut(self):
expected_content = '{"voice": "nico"}'
httpretty.enable()
httpretty.register_uri(httpretty.PUT, self.test_url, body=expected_content)
parameters = {
"url": self.test_url,
"method": "PUT"
}
with mock.patch.object(NeuronModule, 'say', return_value=None) as mock_method:
uri_neuron = Uri(**parameters)
self.assertEqual(uri_neuron.text, expected_content)
def testDelete(self):
expected_content = '{"voice": "nico"}'
httpretty.enable()
httpretty.register_uri(httpretty.DELETE, self.test_url, body=expected_content)
parameters = {
"url": self.test_url,
"method": "DELETE"
}
with mock.patch.object(NeuronModule, 'say', return_value=None) as mock_method:
uri_neuron = Uri(**parameters)
self.assertEqual(uri_neuron.text, expected_content)
def testOptions(self):
expected_content = '{"voice": "nico"}'
httpretty.enable()
httpretty.register_uri(httpretty.OPTIONS, self.test_url, body=expected_content)
parameters = {
"url": self.test_url,
"method": "OPTIONS"
}
with mock.patch.object(NeuronModule, 'say', return_value=None) as mock_method:
uri_neuron = Uri(**parameters)
self.assertEqual(uri_neuron.text, expected_content)
def testHead(self):
expected_content = '{"voice": "nico"}'
httpretty.enable()
httpretty.register_uri(httpretty.HEAD, self.test_url, body=expected_content)
parameters = {
"url": self.test_url,
"method": "HEAD"
}
with mock.patch.object(NeuronModule, 'say', return_value=None) as mock_method:
uri_neuron = Uri(**parameters)
self.assertEqual(uri_neuron.status_code, 200)
def testPatch(self):
expected_content = '{"voice": "nico"}'
httpretty.enable()
httpretty.register_uri(httpretty.PATCH, self.test_url, body=expected_content)
parameters = {
"url": self.test_url,
"method": "PATCH"
}
with mock.patch.object(NeuronModule, 'say', return_value=None) as mock_method:
uri_neuron = Uri(**parameters)
self.assertEqual(uri_neuron.text, expected_content)
def testParameters(self):
def run_test(parameters_to_test):
with self.assertRaises(InvalidParameterException):
Uri(**parameters_to_test)
parameters = dict()
run_test(parameters)
parameters = {
"url": self.test_url,
"headers": 1
}
run_test(parameters)
parameters = {
"url": self.test_url,
"timeout": "string"
}
run_test(parameters)
parameters = {
"url": self.test_url,
"data": "this is a data",
"data_from_file": "this is another data"
}
run_test(parameters)
parameters = {
"url": self.test_url,
"method": "NONEXIST"
}
run_test(parameters)
def testPostJsonFromFile(self):
"""
Test that we are able to send json data through a file
:return:
"""
def request_callback(request, url, headers):
data = json.loads(request.body.decode())
if "title" in data and "body" in data and "userId" in data:
return 200, headers, "all key received from URL %s" % url
return 400, headers, "server did not receive all keys from URL %s" % url
httpretty.enable()
httpretty.register_uri(httpretty.POST, self.test_url, body=request_callback)
parameters = {
"url": self.test_url,
"method": "POST",
"data_from_file": "kalliope/neurons/uri/tests/data_post_test.json",
"headers": {
"Content-Type": 'application/json'
}
}
with mock.patch.object(NeuronModule, 'say', return_value=None) as mock_method:
uri_neuron = Uri(**parameters)
self.assertEqual(uri_neuron.status_code, 200)
def testPostJson(self):
"""
Test that we are able to send json data directly from the data variable
:return:
"""
def request_callback(request, url, headers):
data = json.loads(request.body.decode())
if "title" in data and "body" in data and "userId" in data:
return 200, headers, "all key received from URL %s" % url
return 400, headers, "server did not receive all keys from URL %s" % url
httpretty.enable()
httpretty.register_uri(httpretty.POST, self.test_url, body=request_callback)
parameters = {
"url": self.test_url,
"method": "POST",
"data": "{\"id\": 1,\"title\": \"foo\", \"body\": \"bar\", \"userId\": 1}",
"headers": {
"Content-Type": 'application/json'
}
}
with mock.patch.object(NeuronModule, 'say', return_value=None) as mock_method:
uri_neuron = Uri(**parameters)
self.assertEqual(uri_neuron.status_code, 200)
if __name__ == '__main__':
unittest.main()
|
from datetime import timedelta
import logging
from homeassistant.components.weather import (
ATTR_CONDITION_CLEAR_NIGHT,
ATTR_CONDITION_SUNNY,
ATTR_FORECAST_CONDITION,
ATTR_FORECAST_PRECIPITATION_PROBABILITY,
ATTR_FORECAST_TEMP,
ATTR_FORECAST_TIME,
ATTR_FORECAST_WIND_BEARING,
ATTR_FORECAST_WIND_SPEED,
WeatherEntity,
)
from homeassistant.const import (
CONF_LATITUDE,
CONF_LONGITUDE,
LENGTH_KILOMETERS,
LENGTH_METERS,
LENGTH_MILES,
PRESSURE_HPA,
PRESSURE_INHG,
PRESSURE_PA,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
)
from homeassistant.core import callback
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from homeassistant.util.distance import convert as convert_distance
from homeassistant.util.dt import utcnow
from homeassistant.util.pressure import convert as convert_pressure
from homeassistant.util.temperature import convert as convert_temperature
from . import base_unique_id
from .const import (
ATTR_FORECAST_DAYTIME,
ATTR_FORECAST_DETAILED_DESCRIPTION,
ATTRIBUTION,
CONDITION_CLASSES,
COORDINATOR_FORECAST,
COORDINATOR_FORECAST_HOURLY,
COORDINATOR_OBSERVATION,
DAYNIGHT,
DOMAIN,
HOURLY,
NWS_DATA,
)
_LOGGER = logging.getLogger(__name__)
PARALLEL_UPDATES = 0
OBSERVATION_VALID_TIME = timedelta(minutes=20)
FORECAST_VALID_TIME = timedelta(minutes=45)
def convert_condition(time, weather):
"""
Convert NWS codes to HA condition.
Choose first condition in CONDITION_CLASSES that exists in weather code.
If no match is found, return first condition from NWS
"""
conditions = [w[0] for w in weather]
prec_probs = [w[1] or 0 for w in weather]
# Choose condition with highest priority.
cond = next(
(
key
for key, value in CONDITION_CLASSES.items()
if any(condition in value for condition in conditions)
),
conditions[0],
)
if cond == "clear":
if time == "day":
return ATTR_CONDITION_SUNNY, max(prec_probs)
if time == "night":
return ATTR_CONDITION_CLEAR_NIGHT, max(prec_probs)
return cond, max(prec_probs)
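# Example (with illustrative NWS codes): weather=[("skc", None), ("rain", 30)]
# and time="day" yields the highest-priority matching key in CONDITION_CLASSES
# together with the maximum precipitation probability, here 30.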
async def async_setup_entry(
hass: HomeAssistantType, entry: ConfigType, async_add_entities
) -> None:
"""Set up the NWS weather platform."""
hass_data = hass.data[DOMAIN][entry.entry_id]
async_add_entities(
[
NWSWeather(entry.data, hass_data, DAYNIGHT, hass.config.units),
NWSWeather(entry.data, hass_data, HOURLY, hass.config.units),
],
False,
)
class NWSWeather(WeatherEntity):
"""Representation of a weather condition."""
def __init__(self, entry_data, hass_data, mode, units):
"""Initialise the platform with a data instance and station name."""
self.nws = hass_data[NWS_DATA]
self.latitude = entry_data[CONF_LATITUDE]
self.longitude = entry_data[CONF_LONGITUDE]
self.coordinator_observation = hass_data[COORDINATOR_OBSERVATION]
if mode == DAYNIGHT:
self.coordinator_forecast = hass_data[COORDINATOR_FORECAST]
else:
self.coordinator_forecast = hass_data[COORDINATOR_FORECAST_HOURLY]
self.station = self.nws.station
self.is_metric = units.is_metric
self.mode = mode
self.observation = None
self._forecast = None
async def async_added_to_hass(self) -> None:
"""Set up a listener and load data."""
self.async_on_remove(
self.coordinator_observation.async_add_listener(self._update_callback)
)
self.async_on_remove(
self.coordinator_forecast.async_add_listener(self._update_callback)
)
self._update_callback()
@callback
def _update_callback(self) -> None:
"""Load data from integration."""
self.observation = self.nws.observation
if self.mode == DAYNIGHT:
self._forecast = self.nws.forecast
else:
self._forecast = self.nws.forecast_hourly
self.async_write_ha_state()
@property
def should_poll(self) -> bool:
"""Entities do not individually poll."""
return False
@property
def attribution(self):
"""Return the attribution."""
return ATTRIBUTION
@property
def name(self):
"""Return the name of the station."""
return f"{self.station} {self.mode.title()}"
@property
def temperature(self):
"""Return the current temperature."""
temp_c = None
if self.observation:
temp_c = self.observation.get("temperature")
if temp_c:
return convert_temperature(temp_c, TEMP_CELSIUS, TEMP_FAHRENHEIT)
return None
@property
def pressure(self):
"""Return the current pressure."""
pressure_pa = None
if self.observation:
pressure_pa = self.observation.get("seaLevelPressure")
if pressure_pa is None:
return None
if self.is_metric:
pressure = convert_pressure(pressure_pa, PRESSURE_PA, PRESSURE_HPA)
pressure = round(pressure)
else:
pressure = convert_pressure(pressure_pa, PRESSURE_PA, PRESSURE_INHG)
pressure = round(pressure, 2)
return pressure
@property
def humidity(self):
"""Return the name of the sensor."""
humidity = None
if self.observation:
humidity = self.observation.get("relativeHumidity")
return humidity
@property
def wind_speed(self):
"""Return the current windspeed."""
wind_km_hr = None
if self.observation:
wind_km_hr = self.observation.get("windSpeed")
if wind_km_hr is None:
return None
if self.is_metric:
wind = wind_km_hr
else:
wind = convert_distance(wind_km_hr, LENGTH_KILOMETERS, LENGTH_MILES)
return round(wind)
@property
def wind_bearing(self):
"""Return the current wind bearing (degrees)."""
wind_bearing = None
if self.observation:
wind_bearing = self.observation.get("windDirection")
return wind_bearing
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return TEMP_FAHRENHEIT
@property
def condition(self):
"""Return current condition."""
weather = None
if self.observation:
weather = self.observation.get("iconWeather")
time = self.observation.get("iconTime")
if weather:
cond, _ = convert_condition(time, weather)
return cond
return None
@property
def visibility(self):
"""Return visibility."""
vis_m = None
if self.observation:
vis_m = self.observation.get("visibility")
if vis_m is None:
return None
if self.is_metric:
vis = convert_distance(vis_m, LENGTH_METERS, LENGTH_KILOMETERS)
else:
vis = convert_distance(vis_m, LENGTH_METERS, LENGTH_MILES)
return round(vis, 0)
@property
def forecast(self):
"""Return forecast."""
if self._forecast is None:
return None
forecast = []
for forecast_entry in self._forecast:
data = {
ATTR_FORECAST_DETAILED_DESCRIPTION: forecast_entry.get(
"detailedForecast"
),
ATTR_FORECAST_TEMP: forecast_entry.get("temperature"),
ATTR_FORECAST_TIME: forecast_entry.get("startTime"),
}
if self.mode == DAYNIGHT:
data[ATTR_FORECAST_DAYTIME] = forecast_entry.get("isDaytime")
time = forecast_entry.get("iconTime")
weather = forecast_entry.get("iconWeather")
if time and weather:
cond, precip = convert_condition(time, weather)
else:
cond, precip = None, None
data[ATTR_FORECAST_CONDITION] = cond
data[ATTR_FORECAST_PRECIPITATION_PROBABILITY] = precip
data[ATTR_FORECAST_WIND_BEARING] = forecast_entry.get("windBearing")
wind_speed = forecast_entry.get("windSpeedAvg")
if wind_speed:
if self.is_metric:
data[ATTR_FORECAST_WIND_SPEED] = round(
convert_distance(wind_speed, LENGTH_MILES, LENGTH_KILOMETERS)
)
else:
data[ATTR_FORECAST_WIND_SPEED] = round(wind_speed)
else:
data[ATTR_FORECAST_WIND_SPEED] = None
forecast.append(data)
return forecast
@property
def unique_id(self):
"""Return a unique_id for this entity."""
return f"{base_unique_id(self.latitude, self.longitude)}_{self.mode}"
@property
def available(self):
"""Return if state is available."""
last_success = (
self.coordinator_observation.last_update_success
and self.coordinator_forecast.last_update_success
)
if (
self.coordinator_observation.last_update_success_time
and self.coordinator_forecast.last_update_success_time
):
last_success_time = (
utcnow() - self.coordinator_observation.last_update_success_time
< OBSERVATION_VALID_TIME
and utcnow() - self.coordinator_forecast.last_update_success_time
< FORECAST_VALID_TIME
)
else:
last_success_time = False
return last_success or last_success_time
async def async_update(self):
"""Update the entity.
Only used by the generic entity update service.
"""
await self.coordinator_observation.async_request_refresh()
await self.coordinator_forecast.async_request_refresh()
@property
def entity_registry_enabled_default(self) -> bool:
"""Return if the entity should be enabled when first added to the entity registry."""
return self.mode == DAYNIGHT
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import matplotlib.pyplot as plt
from numpy.random import randn
import numpy as np
from filterpy.memory import FadingMemoryFilter
from filterpy.gh import GHKFilter
def dotest_2d_data():
""" tests having multidimensional data for x"""
fm = FadingMemoryFilter(x0=np.array([[0.,2.],[0.,0.]]), dt=1, order=1, beta=.6)
xs = [x for x in range(0,50)]
for x in xs:
data = [x+randn()*3, x+2+randn()*3]
fm.update(data)
plt.scatter(fm.x[0,0], fm.x[0,1], c = 'r')
plt.scatter(data[0], data[1], c='b')
def dotest_1d(order, beta):
fm = FadingMemoryFilter(x0=0, dt=1, order=order, beta=beta)
xs = list(range(50))
fxs = []
for x in xs:
data = x+randn()*3
fm.update(data)
plt.scatter(x, fm.x[0], c = 'r')
fxs.append(fm.x[0])
plt.scatter(x,data,c='b')
plt.plot(fxs, c='r')
def test_ghk_formulation():
beta = .6
g = 1-beta**3
h = 1.5*(1+beta)*(1-beta)**2
k = 0.5*(1-beta)**3
f1 = GHKFilter(0,0,0,1, g, h, k)
f2 = FadingMemoryFilter(x0=0, dt=1, order=2, beta=beta)
def fx(x):
return .02*x**2 + 2*x - 3
for i in range(1,100):
z = fx(i)
f1.update(z)
f2.update(z)
assert abs(f1.x-f2.x[0]) < 1.e-80
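# Hedged aside (not part of the original tests): the beta -> (g, h, k)
# conversion used in test_ghk_formulation can be factored into a helper so
# other tests can reuse it. Only the formulas already shown above are assumed.
def ghk_from_beta(beta):
    """Return the (g, h, k) gains equivalent to an order-2 fading memory filter."""
    g = 1. - beta**3
    h = 1.5 * (1. + beta) * (1. - beta)**2
    k = 0.5 * (1. - beta)**3
    return g, h, k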
if __name__ == "__main__":
test_ghk_formulation()
'''dotest_1d(0, .7)
dotest_1d(1, .7)
dotest_1d(2, .7)
plt.figure(2)
dotest_2d_data()'''
|
from __future__ import print_function
import os
import sys
import fileinput
import argparse
def main(args):
ap = argparse.ArgumentParser()
ap.add_argument('files', nargs='*', help='files to unique (must be sorted first)')
ns = ap.parse_args(args)
def _print(lines):
if lines is not None:
print(''.join(lines))
fileinput.close() # in case it is not closed
try:
prev_line = None
lines = None
for line in fileinput.input(ns.files, openhook=fileinput.hook_encoded("utf-8")):
if fileinput.isfirstline():
_print(lines)
lines = []
prev_line = None
if prev_line is None or line != prev_line:
lines.append(line)
prev_line = line
_print(lines)
finally:
fileinput.close()
if __name__ == '__main__':
main(sys.argv[1:])
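# Usage sketch (hypothetical file name, not part of the tool): like POSIX
# uniq, only adjacent duplicate lines are collapsed, so the input should be
# sorted first, e.g.
#
#   main(['sorted_names.txt'])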
|
import logging
from typing import Optional, Tuple
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
CURRENT_HVAC_COOL,
CURRENT_HVAC_FAN,
CURRENT_HVAC_HEAT,
CURRENT_HVAC_IDLE,
CURRENT_HVAC_OFF,
DOMAIN,
HVAC_MODE_AUTO,
HVAC_MODE_COOL,
HVAC_MODE_DRY,
HVAC_MODE_FAN_ONLY,
HVAC_MODE_HEAT,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_OFF,
PRESET_AWAY,
PRESET_BOOST,
PRESET_NONE,
SUPPORT_AUX_HEAT,
SUPPORT_FAN_MODE,
SUPPORT_PRESET_MODE,
SUPPORT_SWING_MODE,
SUPPORT_TARGET_TEMPERATURE,
SUPPORT_TARGET_TEMPERATURE_RANGE,
)
from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS, TEMP_FAHRENHEIT
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from . import ZWaveDeviceEntity, const
_LOGGER = logging.getLogger(__name__)
CONF_NAME = "name"
DEFAULT_NAME = "Z-Wave Climate"
REMOTEC = 0x5254
REMOTEC_ZXT_120 = 0x8377
REMOTEC_ZXT_120_THERMOSTAT = (REMOTEC, REMOTEC_ZXT_120)
ATTR_OPERATING_STATE = "operating_state"
ATTR_FAN_STATE = "fan_state"
ATTR_FAN_ACTION = "fan_action"
AUX_HEAT_ZWAVE_MODE = "Aux Heat"
# Device is in manufacturer specific mode (e.g. setting the valve manually)
PRESET_MANUFACTURER_SPECIFIC = "Manufacturer Specific"
WORKAROUND_ZXT_120 = "zxt_120"
DEVICE_MAPPINGS = {REMOTEC_ZXT_120_THERMOSTAT: WORKAROUND_ZXT_120}
HVAC_STATE_MAPPINGS = {
"off": HVAC_MODE_OFF,
"heat": HVAC_MODE_HEAT,
"heat mode": HVAC_MODE_HEAT,
"heat (default)": HVAC_MODE_HEAT,
"furnace": HVAC_MODE_HEAT,
"fan only": HVAC_MODE_FAN_ONLY,
"dry air": HVAC_MODE_DRY,
"moist air": HVAC_MODE_DRY,
"cool": HVAC_MODE_COOL,
"heat_cool": HVAC_MODE_HEAT_COOL,
"auto": HVAC_MODE_HEAT_COOL,
"auto changeover": HVAC_MODE_HEAT_COOL,
}
MODE_SETPOINT_MAPPINGS = {
"off": (),
"heat": ("setpoint_heating",),
"cool": ("setpoint_cooling",),
"auto": ("setpoint_heating", "setpoint_cooling"),
"aux heat": ("setpoint_heating",),
"furnace": ("setpoint_furnace",),
"dry air": ("setpoint_dry_air",),
"moist air": ("setpoint_moist_air",),
"auto changeover": ("setpoint_auto_changeover",),
"heat econ": ("setpoint_eco_heating",),
"cool econ": ("setpoint_eco_cooling",),
"away": ("setpoint_away_heating", "setpoint_away_cooling"),
"full power": ("setpoint_full_power",),
# aliases found in xml configs
"comfort": ("setpoint_heating",),
"heat mode": ("setpoint_heating",),
"heat (default)": ("setpoint_heating",),
"dry floor": ("setpoint_dry_air",),
"heat eco": ("setpoint_eco_heating",),
"energy saving": ("setpoint_eco_heating",),
"energy heat": ("setpoint_eco_heating",),
"vacation": ("setpoint_away_heating", "setpoint_away_cooling"),
# for tests
"heat_cool": ("setpoint_heating", "setpoint_cooling"),
}
HVAC_CURRENT_MAPPINGS = {
"idle": CURRENT_HVAC_IDLE,
"heat": CURRENT_HVAC_HEAT,
"pending heat": CURRENT_HVAC_IDLE,
"heating": CURRENT_HVAC_HEAT,
"cool": CURRENT_HVAC_COOL,
"pending cool": CURRENT_HVAC_IDLE,
"cooling": CURRENT_HVAC_COOL,
"fan only": CURRENT_HVAC_FAN,
"vent / economiser": CURRENT_HVAC_FAN,
"off": CURRENT_HVAC_OFF,
}
PRESET_MAPPINGS = {
"away": PRESET_AWAY,
"full power": PRESET_BOOST,
"manufacturer specific": PRESET_MANUFACTURER_SPECIFIC,
}
DEFAULT_HVAC_MODES = [
HVAC_MODE_HEAT_COOL,
HVAC_MODE_HEAT,
HVAC_MODE_COOL,
HVAC_MODE_FAN_ONLY,
HVAC_MODE_DRY,
HVAC_MODE_OFF,
HVAC_MODE_AUTO,
]
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Z-Wave Climate device from Config Entry."""
@callback
def async_add_climate(climate):
"""Add Z-Wave Climate Device."""
async_add_entities([climate])
async_dispatcher_connect(hass, "zwave_new_climate", async_add_climate)
def get_device(hass, values, **kwargs):
"""Create Z-Wave entity device."""
temp_unit = hass.config.units.temperature_unit
if values.primary.command_class == const.COMMAND_CLASS_THERMOSTAT_SETPOINT:
return ZWaveClimateSingleSetpoint(values, temp_unit)
if values.primary.command_class == const.COMMAND_CLASS_THERMOSTAT_MODE:
return ZWaveClimateMultipleSetpoint(values, temp_unit)
return None
class ZWaveClimateBase(ZWaveDeviceEntity, ClimateEntity):
"""Representation of a Z-Wave Climate device."""
def __init__(self, values, temp_unit):
"""Initialize the Z-Wave climate device."""
ZWaveDeviceEntity.__init__(self, values, DOMAIN)
self._target_temperature = None
self._target_temperature_range = (None, None)
self._current_temperature = None
self._hvac_action = None
self._hvac_list = None # [zwave_mode]
self._hvac_mapping = None # {ha_mode:zwave_mode}
self._hvac_mode = None # ha_mode
self._aux_heat = None
self._default_hvac_mode = None # ha_mode
self._preset_mapping = None # {ha_mode:zwave_mode}
self._preset_list = None # [zwave_mode]
self._preset_mode = None # ha_mode if exists, else zwave_mode
self._current_fan_mode = None
self._fan_modes = None
self._fan_action = None
self._current_swing_mode = None
self._swing_modes = None
self._unit = temp_unit
_LOGGER.debug("temp_unit is %s", self._unit)
self._zxt_120 = None
# Make sure that we have values for the key before converting to int
if self.node.manufacturer_id.strip() and self.node.product_id.strip():
specific_sensor_key = (
int(self.node.manufacturer_id, 16),
int(self.node.product_id, 16),
)
if specific_sensor_key in DEVICE_MAPPINGS:
if DEVICE_MAPPINGS[specific_sensor_key] == WORKAROUND_ZXT_120:
_LOGGER.debug("Remotec ZXT-120 Zwave Thermostat workaround")
self._zxt_120 = 1
self.update_properties()
def _mode(self):
"""Return thermostat mode Z-Wave value."""
raise NotImplementedError()
def _current_mode_setpoints(self) -> Tuple:
"""Return a tuple of current setpoint Z-Wave value(s)."""
raise NotImplementedError()
@property
def supported_features(self):
"""Return the list of supported features."""
support = SUPPORT_TARGET_TEMPERATURE
if self._hvac_list and HVAC_MODE_HEAT_COOL in self._hvac_list:
support |= SUPPORT_TARGET_TEMPERATURE_RANGE
if self._preset_list and PRESET_AWAY in self._preset_list:
support |= SUPPORT_TARGET_TEMPERATURE_RANGE
if self.values.fan_mode:
support |= SUPPORT_FAN_MODE
if self._zxt_120 == 1 and self.values.zxt_120_swing_mode:
support |= SUPPORT_SWING_MODE
if self._aux_heat:
support |= SUPPORT_AUX_HEAT
if self._preset_list:
support |= SUPPORT_PRESET_MODE
return support
def update_properties(self):
"""Handle the data changes for node values."""
# Operation Mode
self._update_operation_mode()
# Current Temp
self._update_current_temp()
# Fan Mode
self._update_fan_mode()
# Swing mode
self._update_swing_mode()
# Set point
self._update_target_temp()
# Operating state
self._update_operating_state()
# Fan operating state
self._update_fan_state()
def _update_operation_mode(self):
"""Update hvac and preset modes."""
if self._mode():
self._hvac_list = []
self._hvac_mapping = {}
self._preset_list = []
self._preset_mapping = {}
mode_list = self._mode().data_items
if mode_list:
for mode in mode_list:
ha_mode = HVAC_STATE_MAPPINGS.get(str(mode).lower())
ha_preset = PRESET_MAPPINGS.get(str(mode).lower())
if mode == AUX_HEAT_ZWAVE_MODE:
# Aux Heat should not be included in any mapping
self._aux_heat = True
elif ha_mode and ha_mode not in self._hvac_mapping:
self._hvac_mapping[ha_mode] = mode
self._hvac_list.append(ha_mode)
elif ha_preset and ha_preset not in self._preset_mapping:
self._preset_mapping[ha_preset] = mode
self._preset_list.append(ha_preset)
else:
# If nothing matches
self._preset_list.append(mode)
# Default operation mode
for mode in DEFAULT_HVAC_MODES:
if mode in self._hvac_mapping.keys():
self._default_hvac_mode = mode
break
if self._preset_list:
# Presets are supported
self._preset_list.append(PRESET_NONE)
current_mode = self._mode().data
_LOGGER.debug("current_mode=%s", current_mode)
_hvac_temp = next(
(
key
for key, value in self._hvac_mapping.items()
if value == current_mode
),
None,
)
if _hvac_temp is None:
# The current mode is not a hvac mode
if (
"heat" in current_mode.lower()
and HVAC_MODE_HEAT in self._hvac_mapping.keys()
):
# The current preset mode maps to HVAC_MODE_HEAT
_LOGGER.debug("Mapped to HEAT")
self._hvac_mode = HVAC_MODE_HEAT
elif (
"cool" in current_mode.lower()
and HVAC_MODE_COOL in self._hvac_mapping.keys()
):
# The current preset mode maps to HVAC_MODE_COOL
_LOGGER.debug("Mapped to COOL")
self._hvac_mode = HVAC_MODE_COOL
else:
# The current preset mode maps to self._default_hvac_mode
_LOGGER.debug("Mapped to DEFAULT")
self._hvac_mode = self._default_hvac_mode
self._preset_mode = next(
(
key
for key, value in self._preset_mapping.items()
if value == current_mode
),
current_mode,
)
else:
# The current mode is a hvac mode
self._hvac_mode = _hvac_temp
self._preset_mode = PRESET_NONE
_LOGGER.debug("self._hvac_mapping=%s", self._hvac_mapping)
_LOGGER.debug("self._hvac_list=%s", self._hvac_list)
_LOGGER.debug("self._hvac_mode=%s", self._hvac_mode)
_LOGGER.debug("self._default_hvac_mode=%s", self._default_hvac_mode)
_LOGGER.debug("self._hvac_action=%s", self._hvac_action)
_LOGGER.debug("self._aux_heat=%s", self._aux_heat)
_LOGGER.debug("self._preset_mapping=%s", self._preset_mapping)
_LOGGER.debug("self._preset_list=%s", self._preset_list)
_LOGGER.debug("self._preset_mode=%s", self._preset_mode)
def _update_current_temp(self):
"""Update current temperature."""
if self.values.temperature:
self._current_temperature = self.values.temperature.data
device_unit = self.values.temperature.units
if device_unit is not None:
self._unit = device_unit
def _update_fan_mode(self):
"""Update fan mode."""
if self.values.fan_mode:
self._current_fan_mode = self.values.fan_mode.data
fan_modes = self.values.fan_mode.data_items
if fan_modes:
self._fan_modes = list(fan_modes)
_LOGGER.debug("self._fan_modes=%s", self._fan_modes)
_LOGGER.debug("self._current_fan_mode=%s", self._current_fan_mode)
def _update_swing_mode(self):
"""Update swing mode."""
if self._zxt_120 == 1:
if self.values.zxt_120_swing_mode:
self._current_swing_mode = self.values.zxt_120_swing_mode.data
swing_modes = self.values.zxt_120_swing_mode.data_items
if swing_modes:
self._swing_modes = list(swing_modes)
_LOGGER.debug("self._swing_modes=%s", self._swing_modes)
_LOGGER.debug("self._current_swing_mode=%s", self._current_swing_mode)
def _update_target_temp(self):
"""Update target temperature."""
current_setpoints = self._current_mode_setpoints()
self._target_temperature = None
self._target_temperature_range = (None, None)
if len(current_setpoints) == 1:
(setpoint,) = current_setpoints
if setpoint is not None:
self._target_temperature = round((float(setpoint.data)), 1)
elif len(current_setpoints) == 2:
(setpoint_low, setpoint_high) = current_setpoints
target_low, target_high = None, None
if setpoint_low is not None:
target_low = round((float(setpoint_low.data)), 1)
if setpoint_high is not None:
target_high = round((float(setpoint_high.data)), 1)
self._target_temperature_range = (target_low, target_high)
def _update_operating_state(self):
"""Update operating state."""
if self.values.operating_state:
mode = self.values.operating_state.data
self._hvac_action = HVAC_CURRENT_MAPPINGS.get(str(mode).lower(), mode)
def _update_fan_state(self):
"""Update fan state."""
if self.values.fan_action:
self._fan_action = self.values.fan_action.data
@property
def fan_mode(self):
"""Return the fan speed set."""
return self._current_fan_mode
@property
def fan_modes(self):
"""Return a list of available fan modes."""
return self._fan_modes
@property
def swing_mode(self):
"""Return the swing mode set."""
return self._current_swing_mode
@property
def swing_modes(self):
"""Return a list of available swing modes."""
return self._swing_modes
@property
def temperature_unit(self):
"""Return the unit of measurement."""
if self._unit == "C":
return TEMP_CELSIUS
if self._unit == "F":
return TEMP_FAHRENHEIT
return self._unit
@property
def current_temperature(self):
"""Return the current temperature."""
return self._current_temperature
@property
def hvac_mode(self):
"""Return hvac operation ie. heat, cool mode.
Need to be one of HVAC_MODE_*.
"""
if self._mode():
return self._hvac_mode
return self._default_hvac_mode
@property
def hvac_modes(self):
"""Return the list of available hvac operation modes.
Need to be a subset of HVAC_MODES.
"""
if self._mode():
return self._hvac_list
return []
@property
def hvac_action(self):
"""Return the current running hvac operation if supported.
Need to be one of CURRENT_HVAC_*.
"""
return self._hvac_action
@property
def is_aux_heat(self):
"""Return true if aux heater."""
if not self._aux_heat:
return None
if self._mode().data == AUX_HEAT_ZWAVE_MODE:
return True
return False
@property
def preset_mode(self):
"""Return preset operation ie. eco, away.
Need to be one of PRESET_*.
"""
if self._mode():
return self._preset_mode
return PRESET_NONE
@property
def preset_modes(self):
"""Return the list of available preset operation modes.
Need to be a subset of PRESET_MODES.
"""
if self._mode():
return self._preset_list
return []
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
return self._target_temperature
@property
def target_temperature_low(self) -> Optional[float]:
"""Return the lowbound target temperature we try to reach."""
return self._target_temperature_range[0]
@property
def target_temperature_high(self) -> Optional[float]:
"""Return the highbound target temperature we try to reach."""
return self._target_temperature_range[1]
def set_temperature(self, **kwargs):
"""Set new target temperature."""
current_setpoints = self._current_mode_setpoints()
if len(current_setpoints) == 1:
(setpoint,) = current_setpoints
target_temp = kwargs.get(ATTR_TEMPERATURE)
if setpoint is not None and target_temp is not None:
_LOGGER.debug("Set temperature to %s", target_temp)
setpoint.data = target_temp
elif len(current_setpoints) == 2:
(setpoint_low, setpoint_high) = current_setpoints
target_temp_low = kwargs.get(ATTR_TARGET_TEMP_LOW)
target_temp_high = kwargs.get(ATTR_TARGET_TEMP_HIGH)
if setpoint_low is not None and target_temp_low is not None:
_LOGGER.debug("Set low temperature to %s", target_temp_low)
setpoint_low.data = target_temp_low
if setpoint_high is not None and target_temp_high is not None:
_LOGGER.debug("Set high temperature to %s", target_temp_high)
setpoint_high.data = target_temp_high
def set_fan_mode(self, fan_mode):
"""Set new target fan mode."""
_LOGGER.debug("Set fan mode to %s", fan_mode)
if not self.values.fan_mode:
return
self.values.fan_mode.data = fan_mode
def set_hvac_mode(self, hvac_mode):
"""Set new target hvac mode."""
_LOGGER.debug("Set hvac_mode to %s", hvac_mode)
if not self._mode():
return
operation_mode = self._hvac_mapping.get(hvac_mode)
_LOGGER.debug("Set operation_mode to %s", operation_mode)
self._mode().data = operation_mode
def turn_aux_heat_on(self):
"""Turn auxiliary heater on."""
if not self._aux_heat:
return
operation_mode = AUX_HEAT_ZWAVE_MODE
_LOGGER.debug("Aux heat on. Set operation mode to %s", operation_mode)
self._mode().data = operation_mode
def turn_aux_heat_off(self):
"""Turn auxiliary heater off."""
if not self._aux_heat:
return
if HVAC_MODE_HEAT in self._hvac_mapping:
operation_mode = self._hvac_mapping.get(HVAC_MODE_HEAT)
else:
operation_mode = self._hvac_mapping.get(HVAC_MODE_OFF)
_LOGGER.debug("Aux heat off. Set operation mode to %s", operation_mode)
self._mode().data = operation_mode
def set_preset_mode(self, preset_mode):
"""Set new target preset mode."""
_LOGGER.debug("Set preset_mode to %s", preset_mode)
if not self._mode():
return
if preset_mode == PRESET_NONE:
# Activate the current hvac mode
self._update_operation_mode()
operation_mode = self._hvac_mapping.get(self.hvac_mode)
_LOGGER.debug("Set operation_mode to %s", operation_mode)
self._mode().data = operation_mode
else:
operation_mode = self._preset_mapping.get(preset_mode, preset_mode)
_LOGGER.debug("Set operation_mode to %s", operation_mode)
self._mode().data = operation_mode
def set_swing_mode(self, swing_mode):
"""Set new target swing mode."""
_LOGGER.debug("Set swing_mode to %s", swing_mode)
if self._zxt_120 == 1:
if self.values.zxt_120_swing_mode:
self.values.zxt_120_swing_mode.data = swing_mode
@property
def device_state_attributes(self):
"""Return the optional state attributes."""
data = super().device_state_attributes
if self._fan_action:
data[ATTR_FAN_ACTION] = self._fan_action
return data
class ZWaveClimateSingleSetpoint(ZWaveClimateBase):
"""Representation of a single setpoint Z-Wave thermostat device."""
def __init__(self, values, temp_unit):
"""Initialize the Z-Wave climate device."""
ZWaveClimateBase.__init__(self, values, temp_unit)
def _mode(self):
"""Return thermostat mode Z-Wave value."""
return self.values.mode
def _current_mode_setpoints(self) -> Tuple:
"""Return a tuple of current setpoint Z-Wave value(s)."""
return (self.values.primary,)
class ZWaveClimateMultipleSetpoint(ZWaveClimateBase):
"""Representation of a multiple setpoint Z-Wave thermostat device."""
def __init__(self, values, temp_unit):
"""Initialize the Z-Wave climate device."""
ZWaveClimateBase.__init__(self, values, temp_unit)
def _mode(self):
"""Return thermostat mode Z-Wave value."""
return self.values.primary
def _current_mode_setpoints(self) -> Tuple:
"""Return a tuple of current setpoint Z-Wave value(s)."""
current_mode = str(self.values.primary.data).lower()
setpoints_names = MODE_SETPOINT_MAPPINGS.get(current_mode, ())
return tuple(getattr(self.values, name, None) for name in setpoints_names)
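# Illustrative sketch (not part of the integration): how a device-reported
# thermostat mode string resolves to setpoint value names through
# MODE_SETPOINT_MAPPINGS, mirroring ZWaveClimateMultipleSetpoint's
# _current_mode_setpoints. The mode string below is a hypothetical device report.
def _example_setpoint_names(mode_str):
    """Return the setpoint value names mapped to a reported mode string."""
    return MODE_SETPOINT_MAPPINGS.get(str(mode_str).lower(), ())
# _example_setpoint_names("Heat (Default)") -> ("setpoint_heating",)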
|
import time
from test import unittest
from mock import Mock
from mock import patch
from mock import call
import configobj
import diamond.handler.graphite as mod
from diamond.metric import Metric
# These two methods are used for overriding the GraphiteHandler._connect method.
# Please check the Test class' setUp and tearDown methods
def fake_connect(self):
# used for 'we can connect' tests
m = Mock()
self.socket = m
if '__sockets_created' not in self.config:
self.config['__sockets_created'] = [m]
else:
self.config['__sockets_created'].append(m)
def fake_bad_connect(self):
# used for 'we can not connect' tests
self.socket = None
class TestGraphiteHandler(unittest.TestCase):
def setUp(self):
self.__connect_method = mod.GraphiteHandler._connect
mod.GraphiteHandler._connect = fake_connect
def tearDown(self):
# restore the override
mod.GraphiteHandler._connect = self.__connect_method
def test_single_metric(self):
config = configobj.ConfigObj()
config['batch'] = 1
metric = Metric('servers.com.example.www.cpu.total.idle',
0, timestamp=1234567, host='will-be-ignored')
expected_data = [
call("servers.com.example.www.cpu.total.idle 0 1234567\n"),
]
handler = mod.GraphiteHandler(config)
patch_sock = patch.object(handler, 'socket', True)
sendmock = Mock()
patch_send = patch.object(handler, '_send_data', sendmock)
patch_sock.start()
patch_send.start()
handler.process(metric)
patch_send.stop()
patch_sock.stop()
self.assertEqual(sendmock.call_count, len(expected_data))
self.assertEqual(sendmock.call_args_list, expected_data)
def test_multi_no_batching(self):
config = configobj.ConfigObj()
config['batch'] = 1
metrics = [
Metric('metricname1', 0, timestamp=123),
Metric('metricname2', 0, timestamp=123),
Metric('metricname3', 0, timestamp=123),
Metric('metricname4', 0, timestamp=123),
]
expected_data = [
call("metricname1 0 123\n"),
call("metricname2 0 123\n"),
call("metricname3 0 123\n"),
call("metricname4 0 123\n"),
]
handler = mod.GraphiteHandler(config)
patch_sock = patch.object(handler, 'socket', True)
sendmock = Mock()
patch_send = patch.object(handler, '_send_data', sendmock)
patch_sock.start()
patch_send.start()
for m in metrics:
handler.process(m)
patch_send.stop()
patch_sock.stop()
self.assertEqual(sendmock.call_count, len(expected_data))
self.assertEqual(sendmock.call_args_list, expected_data)
def test_multi_with_batching(self):
config = configobj.ConfigObj()
config['batch'] = 2
metrics = [
Metric('metricname1', 0, timestamp=123),
Metric('metricname2', 0, timestamp=123),
Metric('metricname3', 0, timestamp=123),
Metric('metricname4', 0, timestamp=123),
]
expected_data = [
call("metricname1 0 123\nmetricname2 0 123\n"),
call("metricname3 0 123\nmetricname4 0 123\n"),
]
handler = mod.GraphiteHandler(config)
patch_sock = patch.object(handler, 'socket', True)
sendmock = Mock()
patch_send = patch.object(handler, '_send_data', sendmock)
patch_sock.start()
patch_send.start()
for m in metrics:
handler.process(m)
patch_send.stop()
patch_sock.stop()
self.assertEqual(sendmock.call_count, len(expected_data))
self.assertEqual(sendmock.call_args_list, expected_data)
def test_backlog(self):
config = configobj.ConfigObj()
config['batch'] = 1
# start trimming after X batchsizes in buffer
config['max_backlog_multiplier'] = 4
# when trimming: keep last X batchsizes
config['trim_backlog_multiplier'] = 3
metrics = [
Metric('metricname1', 0, timestamp=123),
Metric('metricname2', 0, timestamp=123),
Metric('metricname3', 0, timestamp=123),
Metric('metricname4', 0, timestamp=123),
Metric('metricname5', 0, timestamp=123),
Metric('metricname6', 0, timestamp=123),
Metric('metricname7', 0, timestamp=123),
Metric('metricname8', 0, timestamp=123),
]
expected_data = [
"metricname6 0 123\n",
"metricname7 0 123\n",
"metricname8 0 123\n",
]
# simulate an unreachable graphite host
# thus force backlog functionality
mod.GraphiteHandler._connect = fake_bad_connect
handler = mod.GraphiteHandler(config)
send_mock = Mock()
patch_send = patch.object(handler, '_send_data', send_mock)
patch_send.start()
for m in metrics:
handler.process(m)
patch_send.stop()
# self.assertEqual(connect_mock.call_count, len(metrics))
self.assertEqual(send_mock.call_count, 0)
self.assertEqual(handler.metrics, expected_data)
def test_error_throttling(self):
"""
This is more of a generic test checking that the _throttle_error method
works as expected
TODO: test that the graphite handler calls _throttle_error in the right
circumstances.
"""
config = configobj.ConfigObj()
config['server_error_interval'] = '0.1'
handler = mod.GraphiteHandler(config)
debug_mock = Mock()
patch_debug = patch.object(handler.log, 'debug', debug_mock)
error_mock = Mock()
patch_error = patch.object(handler.log, 'error', error_mock)
patch_debug.start()
patch_error.start()
calls = 5
for _ in range(calls):
handler._throttle_error('Error Message')
# .error should have been called only once
self.assertEqual(error_mock.call_count, 1)
self.assertEqual(debug_mock.call_count, calls - 1)
handler._reset_errors()
debug_mock.reset_mock()
error_mock.reset_mock()
for _ in range(calls):
handler._throttle_error('Error Message')
time.sleep(0.065)
# with a 0.1s interval and ~0.065s between calls, .error fires on calls 1, 3 and 5 = 3 times
self.assertEqual(error_mock.call_count, 3)
self.assertEqual(debug_mock.call_count, 2)
patch_debug.stop()
patch_error.stop()
def test_disconnect_after_flush_disabled__default(self):
config = configobj.ConfigObj()
handler = mod.GraphiteHandler(config)
socket_mock = Mock()
patch_sock = patch.object(handler, 'socket', socket_mock)
send_mock = Mock()
patch_send = patch.object(handler, '_send_data', send_mock)
check_mock = Mock()
patch.object(handler, '_time_to_reconnect', check_mock)
patch_sock.start()
patch_send.start()
handler.process(Metric('foo.bar', 42, timestamp=123))
handler.process(Metric('foo.bar', 42, timestamp=124))
handler.process(Metric('foo.bar', 42, timestamp=125))
patch_send.stop()
patch_sock.stop()
self.assertEqual(send_mock.call_count, 3)
self.assertEqual(check_mock.call_count, 0)
self.assertEqual(len(handler.config['__sockets_created']), 1)
def test_disconnect_after_flush_enabled(self):
config = configobj.ConfigObj()
handler = mod.GraphiteHandler(config)
socket_mock = Mock()
patch_sock = patch.object(handler, 'socket', socket_mock)
send_mock = Mock()
patch_send = patch.object(handler, '_send_data', send_mock)
# have the reconnect check always return true
# so a reconnect is done after every batch send
check_mock = Mock(return_value=True)
patch_check = patch.object(handler, '_time_to_reconnect', check_mock)
patch_sock.start()
patch_send.start()
patch_check.start()
handler.process(Metric('foo.bar', 42, timestamp=123))
handler.process(Metric('foo.bar', 42, timestamp=124))
handler.process(Metric('foo.bar', 42, timestamp=125))
patch_check.stop()
patch_send.stop()
patch_sock.stop()
self.assertEqual(send_mock.call_count, 3)
self.assertEqual(check_mock.call_count, 3)
self.assertEqual(len(handler.config['__sockets_created']), 3)
if __name__ == "__main__":
unittest.main()
|
import asyncio
import logging
import aiohttp
import async_timeout
import voluptuous as vol
from homeassistant.components.tts import CONF_LANG, PLATFORM_SCHEMA, Provider
from homeassistant.const import CONF_API_KEY, HTTP_OK
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
VOICERSS_API_URL = "https://api.voicerss.org/"
ERROR_MSG = [
b"Error description",
b"The subscription is expired or requests count limitation is exceeded!",
b"The request content length is too large!",
b"The language does not support!",
b"The language is not specified!",
b"The text is not specified!",
b"The API key is not available!",
b"The API key is not specified!",
b"The subscription does not support SSML!",
]
SUPPORT_LANGUAGES = [
"ar-eg",
"ar-sa",
"bg-bg",
"ca-es",
"zh-cn",
"zh-hk",
"zh-tw",
"hr-hr",
"cs-cz",
"da-dk",
"nl-be",
"nl-nl",
"en-au",
"en-ca",
"en-gb",
"en-in",
"en-ie",
"en-us",
"fi-fi",
"fr-ca",
"fr-fr",
"fr-ch",
"de-at",
"de-de",
"de-ch",
"el-gr",
"he-il",
"hi-in",
"hu-hu",
"id-id",
"it-it",
"ja-jp",
"ko-kr",
"ms-my",
"nb-no",
"pl-pl",
"pt-br",
"pt-pt",
"ro-ro",
"ru-ru",
"sk-sk",
"sl-si",
"es-mx",
"es-es",
"sv-se",
"ta-in",
"th-th",
"tr-tr",
"vi-vn",
]
SUPPORT_CODECS = ["mp3", "wav", "aac", "ogg", "caf"]
SUPPORT_FORMATS = [
"8khz_8bit_mono",
"8khz_8bit_stereo",
"8khz_16bit_mono",
"8khz_16bit_stereo",
"11khz_8bit_mono",
"11khz_8bit_stereo",
"11khz_16bit_mono",
"11khz_16bit_stereo",
"12khz_8bit_mono",
"12khz_8bit_stereo",
"12khz_16bit_mono",
"12khz_16bit_stereo",
"16khz_8bit_mono",
"16khz_8bit_stereo",
"16khz_16bit_mono",
"16khz_16bit_stereo",
"22khz_8bit_mono",
"22khz_8bit_stereo",
"22khz_16bit_mono",
"22khz_16bit_stereo",
"24khz_8bit_mono",
"24khz_8bit_stereo",
"24khz_16bit_mono",
"24khz_16bit_stereo",
"32khz_8bit_mono",
"32khz_8bit_stereo",
"32khz_16bit_mono",
"32khz_16bit_stereo",
"44khz_8bit_mono",
"44khz_8bit_stereo",
"44khz_16bit_mono",
"44khz_16bit_stereo",
"48khz_8bit_mono",
"48khz_8bit_stereo",
"48khz_16bit_mono",
"48khz_16bit_stereo",
"alaw_8khz_mono",
"alaw_8khz_stereo",
"alaw_11khz_mono",
"alaw_11khz_stereo",
"alaw_22khz_mono",
"alaw_22khz_stereo",
"alaw_44khz_mono",
"alaw_44khz_stereo",
"ulaw_8khz_mono",
"ulaw_8khz_stereo",
"ulaw_11khz_mono",
"ulaw_11khz_stereo",
"ulaw_22khz_mono",
"ulaw_22khz_stereo",
"ulaw_44khz_mono",
"ulaw_44khz_stereo",
]
CONF_CODEC = "codec"
CONF_FORMAT = "format"
DEFAULT_LANG = "en-us"
DEFAULT_CODEC = "mp3"
DEFAULT_FORMAT = "8khz_8bit_mono"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Optional(CONF_LANG, default=DEFAULT_LANG): vol.In(SUPPORT_LANGUAGES),
vol.Optional(CONF_CODEC, default=DEFAULT_CODEC): vol.In(SUPPORT_CODECS),
vol.Optional(CONF_FORMAT, default=DEFAULT_FORMAT): vol.In(SUPPORT_FORMATS),
}
)
async def async_get_engine(hass, config, discovery_info=None):
"""Set up VoiceRSS TTS component."""
return VoiceRSSProvider(hass, config)
class VoiceRSSProvider(Provider):
"""The VoiceRSS speech API provider."""
def __init__(self, hass, conf):
"""Init VoiceRSS TTS service."""
self.hass = hass
self._extension = conf[CONF_CODEC]
self._lang = conf[CONF_LANG]
self.name = "VoiceRSS"
self._form_data = {
"key": conf[CONF_API_KEY],
"hl": conf[CONF_LANG],
"c": (conf[CONF_CODEC]).upper(),
"f": conf[CONF_FORMAT],
}
@property
def default_language(self):
"""Return the default language."""
return self._lang
@property
def supported_languages(self):
"""Return list of supported languages."""
return SUPPORT_LANGUAGES
async def async_get_tts_audio(self, message, language, options=None):
"""Load TTS from VoiceRSS."""
websession = async_get_clientsession(self.hass)
form_data = self._form_data.copy()
form_data["src"] = message
form_data["hl"] = language
try:
with async_timeout.timeout(10):
request = await websession.post(VOICERSS_API_URL, data=form_data)
if request.status != HTTP_OK:
_LOGGER.error(
"Error %d on load url %s", request.status, request.url
)
return (None, None)
data = await request.read()
if data in ERROR_MSG:
_LOGGER.error("Error receive %s from VoiceRSS", str(data, "utf-8"))
return (None, None)
except (asyncio.TimeoutError, aiohttp.ClientError):
_LOGGER.error("Timeout for VoiceRSS API")
return (None, None)
return (self._extension, data)
|
INLINESTYLES = False
try:
import locale
locale.setlocale(locale.LC_ALL, '')
except Exception:
pass
# set up Pygments
from pygments.formatters import LatexFormatter
# The default formatter
DEFAULT = LatexFormatter()
# Add name -> formatter pairs for every variant you want to use
VARIANTS = {
# 'linenos': HtmlFormatter(noclasses=INLINESTYLES, linenos=True),
}
from docutils import nodes
from docutils.parsers.rst import directives
from pygments import highlight
from pygments.lexers import get_lexer_by_name, TextLexer
def pygments_directive(name, arguments, options, content, lineno,
content_offset, block_text, state, state_machine):
try:
lexer = get_lexer_by_name(arguments[0])
except ValueError:
# no lexer found - use the text one instead of an exception
lexer = TextLexer()
# take an arbitrary option if more than one is given
formatter = VARIANTS[list(options)[0]] if options else DEFAULT
parsed = highlight(u'\n'.join(content), lexer, formatter)
return [nodes.raw('', parsed, format='latex')]
pygments_directive.arguments = (1, 0, 1)
pygments_directive.content = 1
pygments_directive.options = {key: directives.flag for key in VARIANTS}
directives.register_directive('sourcecode', pygments_directive)
# run the generation
from docutils.core import publish_cmdline, default_description
description = ('Generates LaTeX documents from standalone reStructuredText '
'sources. ' + default_description)
publish_cmdline(writer_name='latex2e', description=description)
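# Usage sketch (illustrative; the file names are hypothetical): with the
# directive registered above, a standalone reST document can embed
# highlighted code as
#
#   .. sourcecode:: python
#
#      print("hello")
#
# and is converted by running this script like any docutils rst2latex front end:
#
#   python rst2latex-highlight.py input.rst output.tex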
|
from typing import Sequence
from PyQt5.QtNetwork import QNetworkCookie, QNetworkCookieJar
from PyQt5.QtCore import pyqtSignal, QDateTime
from qutebrowser.config import config
from qutebrowser.utils import utils, standarddir, objreg, log
from qutebrowser.misc import lineparser, objects
cookie_jar = None
ram_cookie_jar = None
class RAMCookieJar(QNetworkCookieJar):
"""An in-RAM cookie jar.
Signals:
changed: Emitted when the cookie store was changed.
"""
changed = pyqtSignal()
def __repr__(self):
return utils.get_repr(self, count=len(self.allCookies()))
def setCookiesFromUrl(self, cookies, url):
"""Add the cookies in the cookies list to this cookie jar.
Args:
cookies: A list of QNetworkCookies.
url: The URL to set the cookies for.
Return:
True if one or more cookies are set for 'url', otherwise False.
"""
accept = config.instance.get('content.cookies.accept', url=url)
if 'log-cookies' in objects.debug_flags:
log.network.debug('Cookie on {} -> applying setting {}'
.format(url.toDisplayString(), accept))
if accept == 'never':
return False
else:
self.changed.emit()
return super().setCookiesFromUrl(cookies, url)
class CookieJar(RAMCookieJar):
"""A cookie jar saving cookies to disk.
Attributes:
_lineparser: The LineParser managing the cookies file.
"""
def __init__(self, parent=None, *, line_parser=None):
super().__init__(parent)
if line_parser:
self._lineparser = line_parser
else:
self._lineparser = lineparser.LineParser(
standarddir.data(), 'cookies', binary=True, parent=self)
self.parse_cookies()
config.instance.changed.connect(self._on_cookies_store_changed)
objreg.get('save-manager').add_saveable(
'cookies', self.save, self.changed,
config_opt='content.cookies.store')
def parse_cookies(self):
"""Parse cookies from lineparser and store them."""
cookies: Sequence[QNetworkCookie] = []
for line in self._lineparser:
line_cookies = QNetworkCookie.parseCookies(line)
cookies += line_cookies # type: ignore[operator]
self.setAllCookies(cookies)
def purge_old_cookies(self):
"""Purge expired cookies from the cookie jar."""
# Based on:
# http://doc.qt.io/qt-5/qtwebkitexamples-webkitwidgets-browser-cookiejar-cpp.html
now = QDateTime.currentDateTime()
cookies = [c for c in self.allCookies()
if c.isSessionCookie() or
c.expirationDate() >= now] # type: ignore[operator]
self.setAllCookies(cookies)
def save(self):
"""Save cookies to disk."""
self.purge_old_cookies()
lines = []
for cookie in self.allCookies():
if not cookie.isSessionCookie():
lines.append(cookie.toRawForm())
self._lineparser.data = lines
self._lineparser.save()
@config.change_filter('content.cookies.store')
def _on_cookies_store_changed(self):
"""Delete stored cookies if cookies.store changed."""
if not config.val.content.cookies.store:
self._lineparser.data = []
self._lineparser.save()
self.changed.emit()
def init(qapp):
"""Initialize the global cookie jars."""
global cookie_jar, ram_cookie_jar
cookie_jar = CookieJar(qapp)
ram_cookie_jar = RAMCookieJar(qapp)
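# Round-trip sketch (illustrative, requires PyQt5): CookieJar.save() stores
# each non-session cookie in its raw Set-Cookie form, and parse_cookies()
# restores it with QNetworkCookie.parseCookies, e.g.
#
#   raw = cookie.toRawForm()                  # e.g. b"name=value; domain=..."
#   restored = QNetworkCookie.parseCookies(raw)[0]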
|
from ast import literal_eval
from itertools import chain
from itertools import islice
from . import nodes
from .compiler import CodeGenerator
from .compiler import has_safe_repr
from .environment import Environment
from .environment import Template
def native_concat(nodes):
"""Return a native Python type from the list of compiled nodes. If
the result is a single node, its value is returned. Otherwise, the
nodes are concatenated as strings. If the result can be parsed with
:func:`ast.literal_eval`, the parsed value is returned. Otherwise,
the string is returned.
:param nodes: Iterable of nodes to concatenate.
"""
head = list(islice(nodes, 2))
if not head:
return None
if len(head) == 1:
raw = head[0]
else:
raw = "".join([str(v) for v in chain(head, nodes)])
try:
return literal_eval(raw)
except (ValueError, SyntaxError, MemoryError):
return raw
class NativeCodeGenerator(CodeGenerator):
"""A code generator which renders Python types by not adding
``str()`` around output nodes.
"""
@staticmethod
def _default_finalize(value):
return value
def _output_const_repr(self, group):
return repr("".join([str(v) for v in group]))
def _output_child_to_const(self, node, frame, finalize):
const = node.as_const(frame.eval_ctx)
if not has_safe_repr(const):
raise nodes.Impossible()
if isinstance(node, nodes.TemplateData):
return const
return finalize.const(const)
def _output_child_pre(self, node, frame, finalize):
if finalize.src is not None:
self.write(finalize.src)
def _output_child_post(self, node, frame, finalize):
if finalize.src is not None:
self.write(")")
class NativeEnvironment(Environment):
"""An environment that renders templates to native Python types."""
code_generator_class = NativeCodeGenerator
class NativeTemplate(Template):
environment_class = NativeEnvironment
def render(self, *args, **kwargs):
"""Render the template to produce a native Python type. If the
result is a single node, its value is returned. Otherwise, the
nodes are concatenated as strings. If the result can be parsed
with :func:`ast.literal_eval`, the parsed value is returned.
Otherwise, the string is returned.
"""
vars = dict(*args, **kwargs)
try:
return native_concat(self.root_render_func(self.new_context(vars)))
except Exception:
return self.environment.handle_exception()
NativeEnvironment.template_class = NativeTemplate
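# Hedged usage sketch (not part of the module): rendering with
# NativeEnvironment yields native Python types instead of strings, so an
# arithmetic expression comes back as an int. The template source below is
# just an example.
if __name__ == "__main__":
    env = NativeEnvironment()
    result = env.from_string("{{ x + y }}").render(x=4, y=2)
    assert result == 6 and isinstance(result, int)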
|
from unittest.mock import Mock, patch
from kombu import transport
class test_supports_librabbitmq:
def test_eventlet(self):
with patch('kombu.transport._detect_environment') as de:
de.return_value = 'eventlet'
assert not transport.supports_librabbitmq()
class test_transport:
def test_resolve_transport(self):
from kombu.transport.memory import Transport
assert transport.resolve_transport(
'kombu.transport.memory:Transport') is Transport
assert transport.resolve_transport(Transport) is Transport
def test_resolve_transport_alias_callable(self):
m = transport.TRANSPORT_ALIASES['George'] = Mock(name='lazyalias')
try:
transport.resolve_transport('George')
m.assert_called_with()
finally:
transport.TRANSPORT_ALIASES.pop('George')
def test_resolve_transport_alias(self):
assert transport.resolve_transport('pyamqp')
|
from __future__ import division
import numpy as np
import chainer
from chainer.backends import cuda
import chainer.functions as F
from chainer import initializers
import chainer.links as L
from chainercv.links.model.fpn.misc import argsort
from chainercv.links.model.fpn.misc import choice
from chainercv.links.model.fpn.misc import exp_clip
from chainercv.links.model.fpn.misc import smooth_l1
from chainercv import utils
class RPN(chainer.Chain):
"""Region Proposal Network of Feature Pyramid Networks.
Args:
scales (tuple of floats): The scales of feature maps.
"""
_anchor_size = 32
_anchor_ratios = (0.5, 1, 2)
_nms_thresh = 0.7
_train_nms_limit_pre = 2000
_train_nms_limit_post = 2000
_test_nms_limit_pre = 1000
_test_nms_limit_post = 1000
def __init__(self, scales):
super(RPN, self).__init__()
init = {'initialW': initializers.Normal(0.01)}
with self.init_scope():
self.conv = L.Convolution2D(256, 3, pad=1, **init)
self.loc = L.Convolution2D(len(self._anchor_ratios) * 4, 1, **init)
self.conf = L.Convolution2D(len(self._anchor_ratios), 1, **init)
self._scales = scales
def forward(self, hs):
"""Calculates RoIs.
Args:
hs (iterable of array): An iterable of feature maps.
Returns:
tuple of two arrays:
:obj:`locs` and :obj:`confs`.
* **locs**: A list of arrays whose shape is \
:math:`(N, K_l, 4)`, where :math:`N` is the size of batch and \
:math:`K_l` is the number of the anchor boxes \
of the :math:`l`-th level.
" **confs**: A list of array whose shape is :math:`(N, K_l)`.
"""
locs = []
confs = []
for h in hs:
h = F.relu(self.conv(h))
loc = self.loc(h)
loc = F.transpose(loc, (0, 2, 3, 1))
loc = F.reshape(loc, (loc.shape[0], -1, 4))
locs.append(loc)
conf = self.conf(h)
conf = F.transpose(conf, (0, 2, 3, 1))
conf = F.reshape(conf, (conf.shape[0], -1))
confs.append(conf)
return locs, confs
def anchors(self, sizes):
"""Calculates anchor boxes.
Args:
sizes (iterable of tuples of two ints): An iterable of
:math:`(H_l, W_l)`, where :math:`H_l` and :math:`W_l`
are height and width of the :math:`l`-th feature map.
Returns:
list of arrays:
The shape of the :math:`l`-th array is :math:`(H_l * W_l * A, 4)`,
where :math:`A` is the number of anchor ratios.
"""
anchors = []
for l, (H, W) in enumerate(sizes):
v, u, ar = np.meshgrid(
np.arange(W), np.arange(H), self._anchor_ratios)
w = np.round(1 / np.sqrt(ar) / self._scales[l])
h = np.round(w * ar)
anchor = np.stack((u, v, h, w)).reshape((4, -1)).transpose()
anchor[:, :2] = (anchor[:, :2] + 0.5) / self._scales[l]
anchor[:, 2:] *= (self._anchor_size << l) * self._scales[l]
# yxhw -> tlbr
anchor[:, :2] -= anchor[:, 2:] / 2
anchor[:, 2:] += anchor[:, :2]
anchors.append(self.xp.array(anchor, dtype=np.float32))
return anchors
def decode(self, locs, confs, anchors, in_shape):
"""Decodes back to coordinates of RoIs.
This method decodes :obj:`locs` and :obj:`confs` returned
by a FPN network back to :obj:`rois` and :obj:`roi_indices`.
Args:
locs (list of arrays): A list of arrays whose shape is
:math:`(N, K_l, 4)`, where :math:`N` is the size of batch and
:math:`K_l` is the number of the anchor boxes
of the :math:`l`-th level.
confs (list of arrays): A list of arrays whose shape is
:math:`(N, K_l)`.
anchors (list of arrays): Anchor boxes returned by :meth:`anchors`.
in_shape (tuple of ints): The shape of the input array to
the feature extractor.
Returns:
tuple of two arrays:
:obj:`rois` and :obj:`roi_indices`.
* **rois**: An array of shape :math:`(R, 4)`, \
where :math:`R` is the total number of RoIs in the given batch.
* **roi_indices** : An array of shape :math:`(R,)`.
"""
if chainer.config.train:
nms_limit_pre = self._train_nms_limit_pre
nms_limit_post = self._train_nms_limit_post
else:
nms_limit_pre = self._test_nms_limit_pre
nms_limit_post = self._test_nms_limit_post
rois = []
roi_indices = []
for i in range(in_shape[0]):
roi = []
conf = []
for l in range(len(self._scales)):
loc_l = locs[l].array[i]
conf_l = confs[l].array[i]
roi_l = anchors[l].copy()
# tlbr -> yxhw
roi_l[:, 2:] -= roi_l[:, :2]
roi_l[:, :2] += roi_l[:, 2:] / 2
# offset
roi_l[:, :2] += loc_l[:, :2] * roi_l[:, 2:]
roi_l[:, 2:] *= self.xp.exp(
self.xp.minimum(loc_l[:, 2:], exp_clip))
# yxhw -> tlbr
roi_l[:, :2] -= roi_l[:, 2:] / 2
roi_l[:, 2:] += roi_l[:, :2]
# clip
roi_l[:, :2] = self.xp.maximum(roi_l[:, :2], 0)
roi_l[:, 2:] = self.xp.minimum(
roi_l[:, 2:], self.xp.array(in_shape[2:]))
order = argsort(-conf_l)[:nms_limit_pre]
roi_l = roi_l[order]
conf_l = conf_l[order]
mask = (roi_l[:, 2:] - roi_l[:, :2] > 0).all(axis=1)
roi_l = roi_l[mask]
conf_l = conf_l[mask]
indices = utils.non_maximum_suppression(
roi_l, self._nms_thresh, limit=nms_limit_post)
roi_l = roi_l[indices]
conf_l = conf_l[indices]
roi.append(roi_l)
conf.append(conf_l)
roi = self.xp.vstack(roi).astype(np.float32)
conf = self.xp.hstack(conf).astype(np.float32)
order = argsort(-conf)[:nms_limit_post]
roi = roi[order]
rois.append(roi)
roi_indices.append(self.xp.array((i,) * len(roi)))
rois = self.xp.vstack(rois).astype(np.float32)
roi_indices = self.xp.hstack(roi_indices).astype(np.int32)
return rois, roi_indices
def rpn_loss(locs, confs, anchors, sizes, bboxes):
"""Loss function for RPN.
Args:
locs (iterable of arrays): An iterable of arrays whose shape is
:math:`(N, K_l, 4)`, where :math:`K_l` is the number of
the anchor boxes of the :math:`l`-th level.
confs (iterable of arrays): An iterable of arrays whose shape is
:math:`(N, K_l)`.
anchors (list of arrays): A list of arrays returned by
:meth:`anchors`.
sizes (list of tuples of two ints): A list of
:math:`(H_n, W_n)`, where :math:`H_n` and :math:`W_n`
are height and width of the :math:`n`-th image.
bboxes (list of arrays): A list of arrays whose shape is
:math:`(R_n, 4)`, where :math:`R_n` is the number of
ground truth bounding boxes.
Returns:
tuple of two variables:
:obj:`loc_loss` and :obj:`conf_loss`.
"""
fg_thresh = 0.7
bg_thresh = 0.3
batchsize_per_image = 256
fg_ratio = 0.25
locs = F.concat(locs)
confs = F.concat(confs)
xp = cuda.get_array_module(locs.array, confs.array)
anchors = xp.vstack(anchors)
anchors_yx = (anchors[:, 2:] + anchors[:, :2]) / 2
anchors_hw = anchors[:, 2:] - anchors[:, :2]
loc_loss = 0
conf_loss = 0
for i in range(len(sizes)):
if len(bboxes[i]) > 0:
iou = utils.bbox_iou(anchors, bboxes[i])
gt_loc = bboxes[i][iou.argmax(axis=1)].copy()
# tlbr -> yxhw
gt_loc[:, 2:] -= gt_loc[:, :2]
gt_loc[:, :2] += gt_loc[:, 2:] / 2
# offset
gt_loc[:, :2] = (gt_loc[:, :2] - anchors_yx) / anchors_hw
gt_loc[:, 2:] = xp.log(gt_loc[:, 2:] / anchors_hw)
else:
gt_loc = xp.empty_like(anchors)
gt_label = xp.empty(len(anchors), dtype=np.int32)
gt_label[:] = -1
mask = xp.logical_and(
anchors[:, :2] >= 0,
anchors[:, 2:] < xp.array(sizes[i])).all(axis=1)
if len(bboxes[i]) > 0:
gt_label[xp.where(mask)[0]
[(iou[mask] == iou[mask].max(axis=0)).any(axis=1)]] = 1
gt_label[xp.logical_and(mask, iou.max(axis=1) >= fg_thresh)] = 1
fg_index = xp.where(gt_label == 1)[0]
n_fg = int(batchsize_per_image * fg_ratio)
if len(fg_index) > n_fg:
gt_label[choice(fg_index, size=len(fg_index) - n_fg)] = -1
if len(bboxes[i]) > 0:
bg_index = xp.where(xp.logical_and(
mask, iou.max(axis=1) < bg_thresh))[0]
else:
bg_index = xp.where(mask)[0]
n_bg = batchsize_per_image - int((gt_label == 1).sum())
if len(bg_index) > n_bg:
gt_label[bg_index[
xp.random.randint(len(bg_index), size=n_bg)]] = 0
n_sample = (gt_label >= 0).sum()
loc_loss += F.sum(smooth_l1(
locs[i][gt_label == 1], gt_loc[gt_label == 1], 1 / 9)) / n_sample
conf_loss += F.sum(F.sigmoid_cross_entropy(
confs[i][gt_label >= 0], gt_label[gt_label >= 0], reduce='no')) \
/ n_sample
loc_loss /= len(sizes)
conf_loss /= len(sizes)
return loc_loss, conf_loss
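# Hedged helper sketch (not used by the code above): the inline tlbr <-> yxhw
# box conversions in decode() and rpn_loss() can be written as standalone
# functions for clarity; only NumPy-style array slicing is assumed.
def _tlbr_to_yxhw(bbox):
    """Convert (t, l, b, r) boxes to (center_y, center_x, h, w) boxes."""
    bbox = bbox.copy()
    bbox[:, 2:] -= bbox[:, :2]      # height, width
    bbox[:, :2] += bbox[:, 2:] / 2  # center y, center x
    return bbox
def _yxhw_to_tlbr(bbox):
    """Convert (center_y, center_x, h, w) boxes back to (t, l, b, r) boxes."""
    bbox = bbox.copy()
    bbox[:, :2] -= bbox[:, 2:] / 2  # top, left
    bbox[:, 2:] += bbox[:, :2]      # bottom, right
    return bbox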
|
from urllib.request import urlopen
import socket
from contextlib import contextmanager
import vcr
@contextmanager
def overridden_dns(overrides):
"""
Monkeypatch socket.getaddrinfo() to override DNS lookups (name will resolve
to address)
"""
real_getaddrinfo = socket.getaddrinfo
def fake_getaddrinfo(*args, **kwargs):
if args[0] in overrides:
address = overrides[args[0]]
return [(2, 1, 6, "", (address, args[1]))]
return real_getaddrinfo(*args, **kwargs)
socket.getaddrinfo = fake_getaddrinfo
try:
    yield
finally:
    socket.getaddrinfo = real_getaddrinfo
def test_ignore_localhost(tmpdir, httpbin):
with overridden_dns({"httpbin.org": "127.0.0.1"}):
cass_file = str(tmpdir.join("filter_qs.yaml"))
with vcr.use_cassette(cass_file, ignore_localhost=True) as cass:
urlopen("http://localhost:{}/".format(httpbin.port))
assert len(cass) == 0
urlopen("http://httpbin.org:{}/".format(httpbin.port))
assert len(cass) == 1
def test_ignore_httpbin(tmpdir, httpbin):
with overridden_dns({"httpbin.org": "127.0.0.1"}):
cass_file = str(tmpdir.join("filter_qs.yaml"))
with vcr.use_cassette(cass_file, ignore_hosts=["httpbin.org"]) as cass:
urlopen("http://httpbin.org:{}/".format(httpbin.port))
assert len(cass) == 0
urlopen("http://localhost:{}/".format(httpbin.port))
assert len(cass) == 1
def test_ignore_localhost_and_httpbin(tmpdir, httpbin):
with overridden_dns({"httpbin.org": "127.0.0.1"}):
cass_file = str(tmpdir.join("filter_qs.yaml"))
with vcr.use_cassette(cass_file, ignore_hosts=["httpbin.org"], ignore_localhost=True) as cass:
urlopen("http://httpbin.org:{}".format(httpbin.port))
urlopen("http://localhost:{}".format(httpbin.port))
assert len(cass) == 0
def test_ignore_localhost_twice(tmpdir, httpbin):
with overridden_dns({"httpbin.org": "127.0.0.1"}):
cass_file = str(tmpdir.join("filter_qs.yaml"))
with vcr.use_cassette(cass_file, ignore_localhost=True) as cass:
urlopen("http://localhost:{}".format(httpbin.port))
assert len(cass) == 0
urlopen("http://httpbin.org:{}".format(httpbin.port))
assert len(cass) == 1
with vcr.use_cassette(cass_file, ignore_localhost=True) as cass:
assert len(cass) == 1
urlopen("http://localhost:{}".format(httpbin.port))
urlopen("http://httpbin.org:{}".format(httpbin.port))
assert len(cass) == 1
|
import logging
import vlc
import voluptuous as vol
from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity
from homeassistant.components.media_player.const import (
MEDIA_TYPE_MUSIC,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PLAY_MEDIA,
SUPPORT_STOP,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
)
from homeassistant.const import CONF_NAME, STATE_IDLE, STATE_PAUSED, STATE_PLAYING
import homeassistant.helpers.config_validation as cv
import homeassistant.util.dt as dt_util
_LOGGER = logging.getLogger(__name__)
CONF_ARGUMENTS = "arguments"
DEFAULT_NAME = "Vlc"
SUPPORT_VLC = (
SUPPORT_PAUSE
| SUPPORT_VOLUME_SET
| SUPPORT_VOLUME_MUTE
| SUPPORT_PLAY_MEDIA
| SUPPORT_PLAY
| SUPPORT_STOP
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_ARGUMENTS, default=""): cv.string,
vol.Optional(CONF_NAME): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the vlc platform."""
add_entities(
[VlcDevice(config.get(CONF_NAME, DEFAULT_NAME), config.get(CONF_ARGUMENTS))]
)
class VlcDevice(MediaPlayerEntity):
"""Representation of a vlc player."""
def __init__(self, name, arguments):
"""Initialize the vlc device."""
self._instance = vlc.Instance(arguments)
self._vlc = self._instance.media_player_new()
self._name = name
self._volume = None
self._muted = None
self._state = None
self._media_position_updated_at = None
self._media_position = None
self._media_duration = None
def update(self):
"""Get the latest details from the device."""
status = self._vlc.get_state()
if status == vlc.State.Playing:
self._state = STATE_PLAYING
elif status == vlc.State.Paused:
self._state = STATE_PAUSED
else:
self._state = STATE_IDLE
self._media_duration = self._vlc.get_length() / 1000
position = self._vlc.get_position() * self._media_duration
if position != self._media_position:
self._media_position_updated_at = dt_util.utcnow()
self._media_position = position
self._volume = self._vlc.audio_get_volume() / 100
self._muted = self._vlc.audio_get_mute() == 1
return True
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
return self._volume
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self._muted
@property
def supported_features(self):
"""Flag media player features that are supported."""
return SUPPORT_VLC
@property
def media_content_type(self):
"""Content type of current playing media."""
return MEDIA_TYPE_MUSIC
@property
def media_duration(self):
"""Duration of current playing media in seconds."""
return self._media_duration
@property
def media_position(self):
"""Position of current playing media in seconds."""
return self._media_position
@property
def media_position_updated_at(self):
"""When was the position of the current playing media valid."""
return self._media_position_updated_at
def media_seek(self, position):
"""Seek the media to a specific location."""
track_length = self._vlc.get_length() / 1000
self._vlc.set_position(position / track_length)
def mute_volume(self, mute):
"""Mute the volume."""
self._vlc.audio_set_mute(mute)
self._muted = mute
def set_volume_level(self, volume):
"""Set volume level, range 0..1."""
self._vlc.audio_set_volume(int(volume * 100))
self._volume = volume
def media_play(self):
"""Send play command."""
self._vlc.play()
self._state = STATE_PLAYING
def media_pause(self):
"""Send pause command."""
self._vlc.pause()
self._state = STATE_PAUSED
def media_stop(self):
"""Send stop command."""
self._vlc.stop()
self._state = STATE_IDLE
def play_media(self, media_type, media_id, **kwargs):
"""Play media from a URL or file."""
if media_type != MEDIA_TYPE_MUSIC:
_LOGGER.error(
"Invalid media type %s. Only %s is supported",
media_type,
MEDIA_TYPE_MUSIC,
)
return
self._vlc.set_media(self._instance.media_new(media_id))
self._vlc.play()
self._state = STATE_PLAYING
|
import logging
import sys
from setuptools import find_packages
from setuptools import setup
from setuptools.command.test import test as TestCommand
long_description = open('README.md').read()
changelog = open('CHANGES.md').read()
class PyTest(TestCommand):
user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")]
def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = []
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
logging.basicConfig(format='%(asctime)s %(levelname)s %(name)s %(message)s', level='DEBUG')
# import here, cause outside the eggs aren't loaded
import pytest
import six
args = [self.pytest_args] if isinstance(self.pytest_args, six.string_types) else list(self.pytest_args)
args.extend(['--cov', 'arctic',
'--cov-report', 'xml',
'--cov-report', 'html',
'--junitxml', 'junit.xml',
])
errno = pytest.main(args)
sys.exit(errno)
setup(
name="arctic",
version="1.80.0",
author="Man AHL Technology",
author_email="[email protected]",
description=("AHL Research Versioned TimeSeries and Tick store"),
license="GPL",
keywords=["ahl", "keyvalue", "tickstore", "mongo", "timeseries", ],
url="https://github.com/manahl/arctic",
packages=find_packages(exclude=['tests', 'tests.*', 'benchmarks']),
long_description='\n'.join((long_description, changelog)),
long_description_content_type="text/markdown",
cmdclass={'test': PyTest},
setup_requires=["six",
"numpy",
"setuptools-git",
],
install_requires=["decorator",
"enum-compat",
"futures; python_version == '2.7'",
"mockextras",
"pandas",
"pymongo>=3.6.0",
"python-dateutil",
"pytz",
"tzlocal",
"lz4"
],
# Note: pytest >= 4.1.0 is not compatible with pytest-cov < 2.6.1.
tests_require=["mock",
"mockextras",
"pytest",
"pytest-cov",
"pytest-server-fixtures",
"pytest-timeout",
"pytest-xdist<=1.26.1",
"lz4"
],
entry_points={'console_scripts': [
'arctic_init_library = arctic.scripts.arctic_init_library:main',
'arctic_list_libraries = arctic.scripts.arctic_list_libraries:main',
'arctic_delete_library = arctic.scripts.arctic_delete_library:main',
'arctic_enable_sharding = arctic.scripts.arctic_enable_sharding:main',
'arctic_copy_data = arctic.scripts.arctic_copy_data:main',
'arctic_create_user = arctic.scripts.arctic_create_user:main',
'arctic_prune_versions = arctic.scripts.arctic_prune_versions:main',
'arctic_fsck = arctic.scripts.arctic_fsck:main',
]
},
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: Implementation :: CPython",
"Operating System :: POSIX",
"Operating System :: MacOS",
"Operating System :: Microsoft :: Windows",
"Topic :: Database",
"Topic :: Database :: Front-Ends",
"Topic :: Software Development :: Libraries",
],
)
|
import pytest
def pytest_addoption(parser):
parser.addoption("-E", action="append", metavar="NAME",
help="only run tests matching the environment NAME.")
def pytest_configure(config):
# register an additional marker
config.addinivalue_line("markers",
"env(name): mark test to run only on named environment")
def pytest_runtest_setup(item):
envnames = [mark.args[0] for mark in item.iter_markers(name='env')]
if envnames:
if item.config.getoption("-E") is None or len(set(item.config.getoption("-E")) & set(envnames)) == 0:
# Skip the test if it is not selected via the -E option
pytest.skip("test requires env in %r" % envnames)
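# Usage sketch (illustrative): a test opts into an environment with the
# marker registered above and is skipped unless that environment is passed
# on the command line, e.g. `pytest -E stage1`:
#
#   @pytest.mark.env("stage1")
#   def test_basic_db_operation():
#       ...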
|
from datetime import timedelta
from io import BytesIO
import av
import pytest
from homeassistant.components.stream.core import Segment
from homeassistant.components.stream.recorder import recorder_save_worker
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.async_mock import patch
from tests.common import async_fire_time_changed
from tests.components.stream.common import generate_h264_video, preload_stream
@pytest.mark.skip("Flaky in CI")
async def test_record_stream(hass, hass_client):
"""
Test record stream.
Purposefully not mocking anything here to test full
integration with the stream component.
"""
await async_setup_component(hass, "stream", {"stream": {}})
with patch("homeassistant.components.stream.recorder.recorder_save_worker"):
# Setup demo track
source = generate_h264_video()
stream = preload_stream(hass, source)
recorder = stream.add_provider("recorder")
stream.start()
while True:
segment = await recorder.recv()
if not segment:
break
segments = segment.sequence
stream.stop()
assert segments > 1
@pytest.mark.skip("Flaky in CI")
async def test_recorder_timeout(hass, hass_client):
"""Test recorder timeout."""
await async_setup_component(hass, "stream", {"stream": {}})
with patch(
"homeassistant.components.stream.recorder.RecorderOutput.cleanup"
) as mock_cleanup:
# Setup demo track
source = generate_h264_video()
stream = preload_stream(hass, source)
recorder = stream.add_provider("recorder")
stream.start()
await recorder.recv()
# Wait a minute
future = dt_util.utcnow() + timedelta(minutes=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
assert mock_cleanup.called
@pytest.mark.skip("Flaky in CI")
async def test_recorder_save():
"""Test recorder save."""
# Setup
source = generate_h264_video()
output = BytesIO()
output.name = "test.mp4"
# Run
recorder_save_worker(output, [Segment(1, source, 4)], "mp4")
# Assert
assert output.getvalue()
@pytest.mark.skip("Flaky in CI")
async def test_record_stream_audio(hass, hass_client):
"""
Test treatment of different audio inputs.
Record stream output should have an audio channel when input has
a valid codec and audio packets and no audio channel otherwise.
"""
await async_setup_component(hass, "stream", {"stream": {}})
for a_codec, expected_audio_streams in (
("aac", 1), # aac is a valid mp4 codec
("pcm_mulaw", 0), # G.711 is not a valid mp4 codec
("empty", 0), # audio stream with no packets
(None, 0), # no audio stream
):
with patch("homeassistant.components.stream.recorder.recorder_save_worker"):
# Setup demo track
source = generate_h264_video(
container_format="mov", audio_codec=a_codec
) # mov can store PCM
stream = preload_stream(hass, source)
recorder = stream.add_provider("recorder")
stream.start()
while True:
segment = await recorder.recv()
if not segment:
break
last_segment = segment
result = av.open(last_segment.segment, "r", format="mp4")
assert len(result.streams.audio) == expected_audio_streams
result.close()
stream.stop()
|
from typing import Dict, List
import voluptuous as vol
from homeassistant.components.device_automation import toggle_entity
from homeassistant.const import CONF_DOMAIN
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.condition import ConditionCheckerType
from homeassistant.helpers.typing import ConfigType
from . import DOMAIN
CONDITION_SCHEMA = toggle_entity.CONDITION_SCHEMA.extend(
{vol.Required(CONF_DOMAIN): DOMAIN}
)
@callback
def async_condition_from_config(
config: ConfigType, config_validation: bool
) -> ConditionCheckerType:
"""Evaluate state based on configuration."""
if config_validation:
config = CONDITION_SCHEMA(config)
return toggle_entity.async_condition_from_config(config)
async def async_get_conditions(
hass: HomeAssistant, device_id: str
) -> List[Dict[str, str]]:
"""List device conditions."""
return await toggle_entity.async_get_conditions(hass, device_id, DOMAIN)
async def async_get_condition_capabilities(hass: HomeAssistant, config: dict) -> dict:
"""List condition capabilities."""
return await toggle_entity.async_get_condition_capabilities(hass, config)
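# A minimal sketch (the device_id and entity_id values are hypothetical) showing what
# a validated device condition config looks like and how it becomes a checker;
# toggle_entity supports the "is_on" and "is_off" condition types.
def _example_condition_usage(hass: HomeAssistant) -> bool:
    config = CONDITION_SCHEMA(
        {
            "condition": "device",
            "domain": DOMAIN,
            "device_id": "abcdef0123456789",
            "entity_id": "switch.kitchen",
            "type": "is_on",
        }
    )
    checker = async_condition_from_config(config, config_validation=False)
    # The checker returns True while the referenced entity is currently on.
    return checker(hass, {})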
|
from django.http import Http404
from django.template.defaultfilters import slugify
from django.utils.translation import gettext as _
from django.views.generic.list import BaseListView
from django.views.generic.list import ListView
from tagging.models import Tag
from tagging.models import TaggedItem
from tagging.utils import get_tag
from zinnia.models.entry import Entry
from zinnia.settings import PAGINATION
from zinnia.views.mixins.prefetch_related import PrefetchCategoriesAuthorsMixin
from zinnia.views.mixins.templates import EntryQuerysetTemplateResponseMixin
class TagList(ListView):
"""
    View returning a list of all published tags.
"""
template_name = 'zinnia/tag_list.html'
context_object_name = 'tag_list'
def get_queryset(self):
"""
        Return a queryset of published tags,
        annotated with the count of their published entries.
"""
return Tag.objects.usage_for_queryset(
Entry.published.all(), counts=True)
class BaseTagDetail(object):
"""
Mixin providing the behavior of the tag detail view,
by returning in the context the current tag and a
queryset containing the entries published with the tag.
"""
def get_queryset(self):
"""
        Retrieve the tag by its name and
        build a queryset of its published entries.
"""
self.tag = get_tag(self.kwargs['tag'])
if self.tag is None:
raise Http404(_('No Tag found matching "%s".') %
self.kwargs['tag'])
return TaggedItem.objects.get_by_model(
Entry.published.all(), self.tag)
def get_context_data(self, **kwargs):
"""
Add the current tag in context.
"""
context = super(BaseTagDetail, self).get_context_data(**kwargs)
context['tag'] = self.tag
return context
class TagDetail(EntryQuerysetTemplateResponseMixin,
PrefetchCategoriesAuthorsMixin,
BaseTagDetail,
BaseListView):
"""
    Detailed view for a Tag, combining these mixins:
- EntryQuerysetTemplateResponseMixin to provide custom templates
for the tag display page.
- PrefetchCategoriesAuthorsMixin to prefetch related Categories
      and Authors belonging to the entry list.
- BaseTagDetail to provide the behavior of the view.
- BaseListView to implement the ListView.
"""
model_type = 'tag'
paginate_by = PAGINATION
def get_model_name(self):
"""
The model name is the tag slugified.
"""
return slugify(self.tag)
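# A minimal sketch (URL patterns and names are hypothetical, and Django >= 2.0 is
# assumed for django.urls.path) of wiring these views into a urls.py; BaseTagDetail
# expects the tag name in the "tag" URL keyword argument.
from django.urls import path

urlpatterns = [
    path('tags/', TagList.as_view(), name='tag_list'),
    path('tags/<str:tag>/', TagDetail.as_view(), name='tag_detail'),
]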
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
_PROVIDER_INFO_REGISTRY = {}
def GetProviderInfoClass(cloud):
"""Returns the provider info class corresponding to the cloud."""
return _PROVIDER_INFO_REGISTRY.get(cloud, BaseProviderInfo)
class AutoRegisterProviderInfoMeta(type):
"""Metaclass which allows ProviderInfos to automatically be registered."""
def __init__(cls, name, bases, dct):
super(AutoRegisterProviderInfoMeta, cls).__init__(name, bases, dct)
if cls.CLOUD is not None:
_PROVIDER_INFO_REGISTRY[cls.CLOUD] = cls
class BaseProviderInfo(six.with_metaclass(AutoRegisterProviderInfoMeta)):
"""Class that holds provider-related data."""
CLOUD = None
UNSUPPORTED_BENCHMARKS = []
@classmethod
def IsBenchmarkSupported(cls, benchmark):
    return benchmark not in cls.UNSUPPORTED_BENCHMARKS
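# A minimal sketch (the 'ExampleCloud' name and the 'iperf' benchmark are
# hypothetical) showing that merely declaring a subclass registers it, so
# GetProviderInfoClass resolves clouds by name and falls back to the base class.
def _example_registration():
  class ExampleCloudProviderInfo(BaseProviderInfo):
    CLOUD = 'ExampleCloud'
    UNSUPPORTED_BENCHMARKS = ['iperf']
  assert GetProviderInfoClass('ExampleCloud') is ExampleCloudProviderInfo
  assert GetProviderInfoClass('SomeOtherCloud') is BaseProviderInfo
  assert not ExampleCloudProviderInfo.IsBenchmarkSupported('iperf')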
|
from datetime import datetime, timedelta
import logging
from pathlib import Path
from typing import Any, Dict, List, Optional
from aiofreepybox import Freepybox
from aiofreepybox.api.wifi import Wifi
from aiofreepybox.exceptions import HttpRequestError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, CONF_PORT
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.util import slugify
from .const import (
API_VERSION,
APP_DESC,
CONNECTION_SENSORS,
DOMAIN,
STORAGE_KEY,
STORAGE_VERSION,
)
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(seconds=30)
class FreeboxRouter:
"""Representation of a Freebox router."""
def __init__(self, hass: HomeAssistantType, entry: ConfigEntry) -> None:
"""Initialize a Freebox router."""
self.hass = hass
self._entry = entry
self._host = entry.data[CONF_HOST]
self._port = entry.data[CONF_PORT]
self._api: Freepybox = None
self._name = None
self.mac = None
self._sw_v = None
self._attrs = {}
self.devices: Dict[str, Any] = {}
self.sensors_temperature: Dict[str, int] = {}
self.sensors_connection: Dict[str, float] = {}
self.call_list: List[Dict[str, Any]] = []
self._unsub_dispatcher = None
self.listeners = []
async def setup(self) -> None:
"""Set up a Freebox router."""
self._api = await get_api(self.hass, self._host)
try:
await self._api.open(self._host, self._port)
except HttpRequestError:
_LOGGER.exception("Failed to connect to Freebox")
            raise ConfigEntryNotReady
# System
fbx_config = await self._api.system.get_config()
self.mac = fbx_config["mac"]
self._name = fbx_config["model_info"]["pretty_name"]
self._sw_v = fbx_config["firmware_version"]
# Devices & sensors
await self.update_all()
self._unsub_dispatcher = async_track_time_interval(
self.hass, self.update_all, SCAN_INTERVAL
)
async def update_all(self, now: Optional[datetime] = None) -> None:
"""Update all Freebox platforms."""
await self.update_sensors()
await self.update_devices()
async def update_devices(self) -> None:
"""Update Freebox devices."""
new_device = False
fbx_devices: Dict[str, Any] = await self._api.lan.get_hosts_list()
# Adds the Freebox itself
fbx_devices.append(
{
"primary_name": self._name,
"l2ident": {"id": self.mac},
"vendor_name": "Freebox SAS",
"host_type": "router",
"active": True,
"attrs": self._attrs,
}
)
for fbx_device in fbx_devices:
device_mac = fbx_device["l2ident"]["id"]
if self.devices.get(device_mac) is None:
new_device = True
self.devices[device_mac] = fbx_device
async_dispatcher_send(self.hass, self.signal_device_update)
if new_device:
async_dispatcher_send(self.hass, self.signal_device_new)
async def update_sensors(self) -> None:
"""Update Freebox sensors."""
# System sensors
syst_datas: Dict[str, Any] = await self._api.system.get_config()
        # According to the docs, `syst_datas["sensors"]` lists temperature sensors in degrees Celsius.
        # Sensor names and ids may vary between Freebox models.
for sensor in syst_datas["sensors"]:
self.sensors_temperature[sensor["name"]] = sensor["value"]
# Connection sensors
connection_datas: Dict[str, Any] = await self._api.connection.get_status()
for sensor_key in CONNECTION_SENSORS:
self.sensors_connection[sensor_key] = connection_datas[sensor_key]
self._attrs = {
"IPv4": connection_datas.get("ipv4"),
"IPv6": connection_datas.get("ipv6"),
"connection_type": connection_datas["media"],
"uptime": datetime.fromtimestamp(
round(datetime.now().timestamp()) - syst_datas["uptime_val"]
),
"firmware_version": self._sw_v,
"serial": syst_datas["serial"],
}
self.call_list = await self._api.call.get_call_list()
async_dispatcher_send(self.hass, self.signal_sensor_update)
async def reboot(self) -> None:
"""Reboot the Freebox."""
await self._api.system.reboot()
async def close(self) -> None:
"""Close the connection."""
if self._api is not None:
await self._api.close()
self._unsub_dispatcher()
self._api = None
@property
def device_info(self) -> Dict[str, Any]:
"""Return the device information."""
return {
"connections": {(CONNECTION_NETWORK_MAC, self.mac)},
"identifiers": {(DOMAIN, self.mac)},
"name": self._name,
"manufacturer": "Freebox SAS",
"sw_version": self._sw_v,
}
@property
def signal_device_new(self) -> str:
"""Event specific per Freebox entry to signal new device."""
return f"{DOMAIN}-{self._host}-device-new"
@property
def signal_device_update(self) -> str:
"""Event specific per Freebox entry to signal updates in devices."""
return f"{DOMAIN}-{self._host}-device-update"
@property
def signal_sensor_update(self) -> str:
"""Event specific per Freebox entry to signal updates in sensors."""
return f"{DOMAIN}-{self._host}-sensor-update"
@property
def sensors(self) -> Dict[str, Any]:
"""Return sensors."""
return {**self.sensors_temperature, **self.sensors_connection}
@property
def wifi(self) -> Wifi:
"""Return the wifi."""
return self._api.wifi
async def get_api(hass: HomeAssistantType, host: str) -> Freepybox:
"""Get the Freebox API."""
freebox_path = Path(hass.helpers.storage.Store(STORAGE_VERSION, STORAGE_KEY).path)
freebox_path.mkdir(exist_ok=True)
token_file = Path(f"{freebox_path}/{slugify(host)}.conf")
return Freepybox(APP_DESC, token_file, API_VERSION)
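# A minimal sketch (the platform setup function and the async_add_entities callback
# are hypothetical) of how a platform could subscribe to the router's dispatcher
# signals to pick up devices discovered after the initial setup.
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect

async def async_watch_new_devices(hass, router: FreeboxRouter, async_add_entities) -> None:
    """Subscribe to the router's new-device signal."""
    @callback
    def _add_new_devices() -> None:
        # Inspect router.devices here and create entities for MACs not seen before.
        async_add_entities([])
    router.listeners.append(
        async_dispatcher_connect(hass, router.signal_device_new, _add_new_devices)
    )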
|
from datetime import timedelta
import importlib
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_DISPLAY_OPTIONS,
CONF_NAME,
PRESSURE_HPA,
TEMP_CELSIUS,
VOLT,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "envirophat"
CONF_USE_LEDS = "use_leds"
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60)
SENSOR_TYPES = {
"light": ["light", " ", "mdi:weather-sunny"],
"light_red": ["light_red", " ", "mdi:invert-colors"],
"light_green": ["light_green", " ", "mdi:invert-colors"],
"light_blue": ["light_blue", " ", "mdi:invert-colors"],
"accelerometer_x": ["accelerometer_x", "G", "mdi:earth"],
"accelerometer_y": ["accelerometer_y", "G", "mdi:earth"],
"accelerometer_z": ["accelerometer_z", "G", "mdi:earth"],
"magnetometer_x": ["magnetometer_x", " ", "mdi:magnet"],
"magnetometer_y": ["magnetometer_y", " ", "mdi:magnet"],
"magnetometer_z": ["magnetometer_z", " ", "mdi:magnet"],
"temperature": ["temperature", TEMP_CELSIUS, "mdi:thermometer"],
"pressure": ["pressure", PRESSURE_HPA, "mdi:gauge"],
"voltage_0": ["voltage_0", VOLT, "mdi:flash"],
"voltage_1": ["voltage_1", VOLT, "mdi:flash"],
"voltage_2": ["voltage_2", VOLT, "mdi:flash"],
"voltage_3": ["voltage_3", VOLT, "mdi:flash"],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_DISPLAY_OPTIONS, default=list(SENSOR_TYPES)): [
vol.In(SENSOR_TYPES)
],
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_USE_LEDS, default=False): cv.boolean,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Sense HAT sensor platform."""
try:
envirophat = importlib.import_module("envirophat")
except OSError:
_LOGGER.error("No Enviro pHAT was found")
return False
data = EnvirophatData(envirophat, config.get(CONF_USE_LEDS))
dev = []
for variable in config[CONF_DISPLAY_OPTIONS]:
dev.append(EnvirophatSensor(data, variable))
add_entities(dev, True)
class EnvirophatSensor(Entity):
"""Representation of an Enviro pHAT sensor."""
def __init__(self, data, sensor_types):
"""Initialize the sensor."""
self.data = data
self._name = SENSOR_TYPES[sensor_types][0]
self._unit_of_measurement = SENSOR_TYPES[sensor_types][1]
self.type = sensor_types
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return SENSOR_TYPES[self.type][2]
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit_of_measurement
def update(self):
"""Get the latest data and updates the states."""
self.data.update()
if self.type == "light":
self._state = self.data.light
if self.type == "light_red":
self._state = self.data.light_red
if self.type == "light_green":
self._state = self.data.light_green
if self.type == "light_blue":
self._state = self.data.light_blue
if self.type == "accelerometer_x":
self._state = self.data.accelerometer_x
if self.type == "accelerometer_y":
self._state = self.data.accelerometer_y
if self.type == "accelerometer_z":
self._state = self.data.accelerometer_z
if self.type == "magnetometer_x":
self._state = self.data.magnetometer_x
if self.type == "magnetometer_y":
self._state = self.data.magnetometer_y
if self.type == "magnetometer_z":
self._state = self.data.magnetometer_z
if self.type == "temperature":
self._state = self.data.temperature
if self.type == "pressure":
self._state = self.data.pressure
if self.type == "voltage_0":
self._state = self.data.voltage_0
if self.type == "voltage_1":
self._state = self.data.voltage_1
if self.type == "voltage_2":
self._state = self.data.voltage_2
if self.type == "voltage_3":
self._state = self.data.voltage_3
class EnvirophatData:
"""Get the latest data and update."""
def __init__(self, envirophat, use_leds):
"""Initialize the data object."""
self.envirophat = envirophat
self.use_leds = use_leds
        # sensor readings
self.light = None
self.light_red = None
self.light_green = None
self.light_blue = None
self.accelerometer_x = None
self.accelerometer_y = None
self.accelerometer_z = None
self.magnetometer_x = None
self.magnetometer_y = None
self.magnetometer_z = None
self.temperature = None
self.pressure = None
self.voltage_0 = None
self.voltage_1 = None
self.voltage_2 = None
self.voltage_3 = None
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Get the latest data from Enviro pHAT."""
# Light sensor reading: 16-bit integer
self.light = self.envirophat.light.light()
if self.use_leds:
self.envirophat.leds.on()
# the three color values scaled against the overall light, 0-255
self.light_red, self.light_green, self.light_blue = self.envirophat.light.rgb()
if self.use_leds:
self.envirophat.leds.off()
# accelerometer readings in G
(
self.accelerometer_x,
self.accelerometer_y,
self.accelerometer_z,
) = self.envirophat.motion.accelerometer()
# raw magnetometer reading
(
self.magnetometer_x,
self.magnetometer_y,
self.magnetometer_z,
) = self.envirophat.motion.magnetometer()
# temperature resolution of BMP280 sensor: 0.01°C
self.temperature = round(self.envirophat.weather.temperature(), 2)
# pressure resolution of BMP280 sensor: 0.16 Pa, rounding to 0.1 Pa
# with conversion to 100 Pa = 1 hPa
self.pressure = round(self.envirophat.weather.pressure() / 100.0, 3)
# Voltage sensor, reading between 0-3.3V
(
self.voltage_0,
self.voltage_1,
self.voltage_2,
self.voltage_3,
) = self.envirophat.analog.read_all()
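# A minimal sketch (running it outside Home Assistant is an assumption) showing how
# PLATFORM_SCHEMA validates a configuration entry and fills in the defaults for the
# optional name and use_leds options.
def _example_schema_validation():
    validated = PLATFORM_SCHEMA(
        {
            "platform": "envirophat",
            "display_options": ["temperature", "pressure"],
        }
    )
    assert validated[CONF_NAME] == DEFAULT_NAME
    assert validated[CONF_USE_LEDS] is False
    return validated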
|
import unittest
import math
import numpy as np
import pandas as pd
from numpy import testing as np_test
from pgmpy.estimators.CITests import *
np.random.seed(42)
class TestPearsonr(unittest.TestCase):
def setUp(self):
self.df_ind = pd.DataFrame(np.random.randn(10000, 3), columns=["X", "Y", "Z"])
Z = np.random.randn(10000)
X = 3 * Z + np.random.normal(loc=0, scale=0.1, size=10000)
Y = 2 * Z + np.random.normal(loc=0, scale=0.1, size=10000)
self.df_cind = pd.DataFrame({"X": X, "Y": Y, "Z": Z})
Z1 = np.random.randn(10000)
Z2 = np.random.randn(10000)
X = 3 * Z1 + 2 * Z2 + np.random.normal(loc=0, scale=0.1, size=10000)
Y = 2 * Z1 + 3 * Z2 + np.random.normal(loc=0, scale=0.1, size=10000)
self.df_cind_mul = pd.DataFrame({"X": X, "Y": Y, "Z1": Z1, "Z2": Z2})
X = np.random.rand(10000)
Y = np.random.rand(10000)
Z = 2 * X + 2 * Y + np.random.normal(loc=0, scale=0.1, size=10000)
self.df_vstruct = pd.DataFrame({"X": X, "Y": Y, "Z": Z})
def test_pearsonr(self):
coef, p_value = pearsonr(X="X", Y="Y", Z=[], data=self.df_ind, boolean=False)
self.assertTrue(coef < 0.1)
self.assertTrue(p_value > 0.05)
coef, p_value = pearsonr(
X="X", Y="Y", Z=["Z"], data=self.df_cind, boolean=False
)
self.assertTrue(coef < 0.1)
self.assertTrue(p_value > 0.05)
coef, p_value = pearsonr(
X="X", Y="Y", Z=["Z1", "Z2"], data=self.df_cind_mul, boolean=False
)
self.assertTrue(coef < 0.1)
self.assertTrue(p_value > 0.05)
coef, p_value = pearsonr(
X="X", Y="Y", Z=["Z"], data=self.df_vstruct, boolean=False
)
self.assertTrue(abs(coef) > 0.9)
self.assertTrue(p_value < 0.05)
# Tests for when boolean=True
self.assertTrue(
pearsonr(X="X", Y="Y", Z=[], data=self.df_ind, significance_level=0.05)
)
self.assertTrue(
pearsonr(X="X", Y="Y", Z=["Z"], data=self.df_cind, significance_level=0.05)
)
self.assertTrue(
pearsonr(
X="X",
Y="Y",
Z=["Z1", "Z2"],
data=self.df_cind_mul,
significance_level=0.05,
)
)
self.assertFalse(
pearsonr(
X="X", Y="Y", Z=["Z"], data=self.df_vstruct, significance_level=0.05
)
)
class TestChiSquare(unittest.TestCase):
def setUp(self):
self.df_adult = pd.read_csv("pgmpy/tests/test_estimators/testdata/adult.csv")
def test_chisquare_adult_dataset(self):
        # Comparison values taken from dagitty (DAGitty)
coef, dof, p_value = chi_square(
X="Age", Y="Immigrant", Z=[], data=self.df_adult, boolean=False
)
np_test.assert_almost_equal(coef, 57.75, decimal=1)
np_test.assert_almost_equal(np.log(p_value), -25.47, decimal=1)
self.assertEqual(dof, 4)
coef, dof, p_value = chi_square(
X="Age", Y="Race", Z=[], data=self.df_adult, boolean=False
)
np_test.assert_almost_equal(coef, 56.25, decimal=1)
np_test.assert_almost_equal(np.log(p_value), -24.75, decimal=1)
self.assertEqual(dof, 4)
coef, dof, p_value = chi_square(
X="Age", Y="Sex", Z=[], data=self.df_adult, boolean=False
)
np_test.assert_almost_equal(coef, 289.62, decimal=1)
np_test.assert_almost_equal(np.log(p_value), -139.82, decimal=1)
self.assertEqual(dof, 4)
coef, dof, p_value = chi_square(
X="Education",
Y="HoursPerWeek",
Z=["Age", "Immigrant", "Race", "Sex"],
data=self.df_adult,
boolean=False,
)
np_test.assert_almost_equal(coef, 1460.11, decimal=1)
np_test.assert_almost_equal(p_value, 0, decimal=1)
self.assertEqual(dof, 316)
coef, dof, p_value = chi_square(
X="Immigrant", Y="Sex", Z=[], data=self.df_adult, boolean=False
)
np_test.assert_almost_equal(coef, 0.2724, decimal=1)
np_test.assert_almost_equal(np.log(p_value), -0.50, decimal=1)
self.assertEqual(dof, 1)
coef, dof, p_value = chi_square(
X="Education",
Y="MaritalStatus",
Z=["Age", "Sex"],
data=self.df_adult,
boolean=False,
)
np_test.assert_almost_equal(coef, 481.96, decimal=1)
np_test.assert_almost_equal(p_value, 0, decimal=1)
self.assertEqual(dof, 58)
        # The values for the next two tests differ from dagitty because dagitty
        # ignores grouped dataframes with very few samples
coef, dof, p_value = chi_square(
X="Income",
Y="Race",
Z=["Age", "Education", "HoursPerWeek", "MaritalStatus"],
data=self.df_adult,
boolean=False,
)
np_test.assert_almost_equal(coef, 99.25, decimal=1)
np_test.assert_almost_equal(p_value, 0.99, decimal=1)
self.assertEqual(dof, 136)
coef, dof, p_value = chi_square(
X="Immigrant",
Y="Income",
Z=["Age", "Education", "HoursPerWeek", "MaritalStatus"],
data=self.df_adult,
boolean=False,
)
np_test.assert_almost_equal(coef, 107.79, decimal=1)
np_test.assert_almost_equal(p_value, 0.931, decimal=2)
self.assertEqual(dof, 131)
def test_discrete_tests(self):
for t in [
chi_square,
g_sq,
log_likelihood,
freeman_tuckey,
modified_log_likelihood,
neyman,
cressie_read,
]:
self.assertFalse(
t(
X="Age",
Y="Immigrant",
Z=[],
data=self.df_adult,
boolean=True,
significance_level=0.05,
)
)
self.assertFalse(
t(
X="Age",
Y="Race",
Z=[],
data=self.df_adult,
boolean=True,
significance_level=0.05,
)
)
self.assertFalse(
t(
X="Age",
Y="Sex",
Z=[],
data=self.df_adult,
boolean=True,
significance_level=0.05,
)
)
self.assertFalse(
t(
X="Education",
Y="HoursPerWeek",
Z=["Age", "Immigrant", "Race", "Sex"],
data=self.df_adult,
boolean=True,
significance_level=0.05,
)
)
self.assertTrue(
t(
X="Immigrant",
Y="Sex",
Z=[],
data=self.df_adult,
boolean=True,
significance_level=0.05,
)
)
self.assertFalse(
t(
X="Education",
Y="MaritalStatus",
Z=["Age", "Sex"],
data=self.df_adult,
boolean=True,
significance_level=0.05,
)
)
def test_exactly_same_vars(self):
x = np.random.choice([0, 1], size=1000)
y = x.copy()
df = pd.DataFrame({"x": x, "y": y})
for t in [
chi_square,
g_sq,
log_likelihood,
freeman_tuckey,
modified_log_likelihood,
neyman,
cressie_read,
]:
stat, dof, p_value = t(X="x", Y="y", Z=[], data=df, boolean=False)
self.assertEqual(dof, 1)
np_test.assert_almost_equal(p_value, 0, decimal=5)
|