import aiohttp
from homeassistant import config_entries, data_entry_flow
from homeassistant.components.ovo_energy.const import DOMAIN
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant
from tests.async_mock import patch
FIXTURE_USER_INPUT = {CONF_USERNAME: "[email protected]", CONF_PASSWORD: "something"}
async def test_show_form(hass: HomeAssistant) -> None:
"""Test that the setup form is served."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
async def test_authorization_error(hass: HomeAssistant) -> None:
"""Test we show user form on connection error."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
with patch(
"homeassistant.components.ovo_energy.config_flow.OVOEnergy.authenticate",
return_value=False,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
FIXTURE_USER_INPUT,
)
assert result2["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result2["step_id"] == "user"
assert result2["errors"] == {"base": "invalid_auth"}
async def test_connection_error(hass: HomeAssistant) -> None:
"""Test we show user form on connection error."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
with patch(
"homeassistant.components.ovo_energy.config_flow.OVOEnergy.authenticate",
side_effect=aiohttp.ClientError,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
FIXTURE_USER_INPUT,
)
assert result2["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result2["step_id"] == "user"
assert result2["errors"] == {"base": "cannot_connect"}
async def test_full_flow_implementation(hass: HomeAssistant) -> None:
"""Test registering an integration and finishing flow works."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
with patch(
"homeassistant.components.ovo_energy.config_flow.OVOEnergy.authenticate",
return_value=True,
), patch(
"homeassistant.components.ovo_energy.async_setup",
return_value=True,
), patch(
"homeassistant.components.ovo_energy.async_setup_entry",
return_value=True,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
FIXTURE_USER_INPUT,
)
assert result2["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result2["data"][CONF_USERNAME] == FIXTURE_USER_INPUT[CONF_USERNAME]
assert result2["data"][CONF_PASSWORD] == FIXTURE_USER_INPUT[CONF_PASSWORD]
# ---
from typing import Optional
import voluptuous as vol
from homeassistant.components.sensor import (
DEVICE_CLASSES_SCHEMA,
ENTITY_ID_FORMAT,
PLATFORM_SCHEMA,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_FRIENDLY_NAME,
ATTR_UNIT_OF_MEASUREMENT,
CONF_DEVICE_CLASS,
CONF_ENTITY_PICTURE_TEMPLATE,
CONF_FRIENDLY_NAME_TEMPLATE,
CONF_ICON_TEMPLATE,
CONF_SENSORS,
CONF_UNIQUE_ID,
CONF_VALUE_TEMPLATE,
)
from homeassistant.core import callback
from homeassistant.exceptions import TemplateError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity, async_generate_entity_id
from homeassistant.helpers.reload import async_setup_reload_service
from .const import CONF_AVAILABILITY_TEMPLATE, DOMAIN, PLATFORMS
from .template_entity import TemplateEntity
CONF_ATTRIBUTE_TEMPLATES = "attribute_templates"
SENSOR_SCHEMA = vol.All(
cv.deprecated(ATTR_ENTITY_ID),
vol.Schema(
{
vol.Required(CONF_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_ICON_TEMPLATE): cv.template,
vol.Optional(CONF_ENTITY_PICTURE_TEMPLATE): cv.template,
vol.Optional(CONF_FRIENDLY_NAME_TEMPLATE): cv.template,
vol.Optional(CONF_AVAILABILITY_TEMPLATE): cv.template,
vol.Optional(CONF_ATTRIBUTE_TEMPLATES, default={}): vol.Schema(
{cv.string: cv.template}
),
vol.Optional(ATTR_FRIENDLY_NAME): cv.string,
vol.Optional(ATTR_UNIT_OF_MEASUREMENT): cv.string,
vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
vol.Optional(CONF_UNIQUE_ID): cv.string,
}
),
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_SENSORS): cv.schema_with_slug_keys(SENSOR_SCHEMA)}
)
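# A hedged illustration of the YAML this schema accepts (the entity name,
# friendly name, and template below are made up for the example):
#
#   sensor:
#     - platform: template
#       sensors:
#         sun_elevation:
#           friendly_name: "Sun elevation"
#           unit_of_measurement: "degrees"
#           value_template: "{{ state_attr('sun.sun', 'elevation') }}"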
async def _async_create_entities(hass, config):
"""Create the template sensors."""
sensors = []
for device, device_config in config[CONF_SENSORS].items():
state_template = device_config[CONF_VALUE_TEMPLATE]
icon_template = device_config.get(CONF_ICON_TEMPLATE)
entity_picture_template = device_config.get(CONF_ENTITY_PICTURE_TEMPLATE)
availability_template = device_config.get(CONF_AVAILABILITY_TEMPLATE)
friendly_name = device_config.get(ATTR_FRIENDLY_NAME, device)
friendly_name_template = device_config.get(CONF_FRIENDLY_NAME_TEMPLATE)
unit_of_measurement = device_config.get(ATTR_UNIT_OF_MEASUREMENT)
device_class = device_config.get(CONF_DEVICE_CLASS)
attribute_templates = device_config[CONF_ATTRIBUTE_TEMPLATES]
unique_id = device_config.get(CONF_UNIQUE_ID)
sensors.append(
SensorTemplate(
hass,
device,
friendly_name,
friendly_name_template,
unit_of_measurement,
state_template,
icon_template,
entity_picture_template,
availability_template,
device_class,
attribute_templates,
unique_id,
)
)
return sensors
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the template sensors."""
await async_setup_reload_service(hass, DOMAIN, PLATFORMS)
async_add_entities(await _async_create_entities(hass, config))
class SensorTemplate(TemplateEntity, Entity):
"""Representation of a Template Sensor."""
def __init__(
self,
hass,
device_id,
friendly_name,
friendly_name_template,
unit_of_measurement,
state_template,
icon_template,
entity_picture_template,
availability_template,
device_class,
attribute_templates,
unique_id,
):
"""Initialize the sensor."""
super().__init__(
attribute_templates=attribute_templates,
availability_template=availability_template,
icon_template=icon_template,
entity_picture_template=entity_picture_template,
)
self.entity_id = async_generate_entity_id(
ENTITY_ID_FORMAT, device_id, hass=hass
)
self._name = friendly_name
self._friendly_name_template = friendly_name_template
self._unit_of_measurement = unit_of_measurement
self._template = state_template
self._state = None
self._device_class = device_class
self._unique_id = unique_id
async def async_added_to_hass(self):
"""Register callbacks."""
self.add_template_attribute("_state", self._template, None, self._update_state)
if self._friendly_name_template is not None:
self.add_template_attribute("_name", self._friendly_name_template)
await super().async_added_to_hass()
@callback
def _update_state(self, result):
super()._update_state(result)
self._state = None if isinstance(result, TemplateError) else result
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unique_id(self):
"""Return the unique id of this sensor."""
return self._unique_id
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def device_class(self) -> Optional[str]:
"""Return the device class of the sensor."""
return self._device_class
@property
def unit_of_measurement(self):
"""Return the unit_of_measurement of the device."""
return self._unit_of_measurement
# ---
from flexx.util.testing import run_tests_if_main, raises, skip
import gc
import sys
import weakref
import asyncio
from flexx import app
from flexx.app import Session
from flexx.app._assetstore import assets, AssetStore as _AssetStore
class AssetStore(_AssetStore):
_test_mode = True
class Fooo1(app.PyComponent):
x = 3
def test_session_basics():
s = Session('xx')
assert s.app_name == 'xx'
assert 'xx' in repr(s)
def test_get_component_instance_by_id():
    # This is really a test for the session, but historically it is done here
# This test needs a default session
session = app.manager.get_default_session()
if session is None:
session = app.manager.create_default_session()
m1 = Fooo1()
m2 = Fooo1()
assert m1 is not m2
assert session.get_component_instance(m1.id) is m1
assert session.get_component_instance(m2.id) is m2
assert session.get_component_instance('blaaaa') is None
def test_session_assets_data():
store = AssetStore()
store.add_shared_data('ww', b'wwww')
s = Session('', store)
s._send_command = lambda x: None
assert s.id
# Add data
s.add_data('xx', b'xxxx')
s.add_data('yy', b'yyyy')
assert len(s.get_data_names()) == 2
assert 'xx' in s.get_data_names()
assert 'yy' in s.get_data_names()
# get_data()
assert s.get_data('xx') == b'xxxx'
assert s.get_data('zz') is None
    assert s.get_data('ww') == b'wwww'
# # Add url data
# s.add_data('readme', 'https://github.com/flexxui/flexx/blob/master/README.md')
# #assert 'Flexx is' in s.get_data('readme').decode()
# assert s.get_data('readme').startswith('https://github')
# Add data with same name
with raises(ValueError):
s.add_data('xx', b'zzzz')
# Add BS data
with raises(TypeError):
s.add_data('dd') # no data
with raises(TypeError):
s.add_data('dd', 4) # not an asset
if sys.version_info > (3, ):
with raises(TypeError):
s.add_data('dd', 'not bytes')
with raises(TypeError):
s.add_data(b'dd', b'yes, bytes') # name not str
with raises(TypeError):
s.add_data(4, b'zzzz') # name not a str
# get_data()
    assert s.get_data('xx') == b'xxxx'
assert s.get_data('ww') is store.get_data('ww')
assert s.get_data('ww') == b'wwww'
assert s.get_data('bla') is None
def test_session_registering_component_classes():
try:
from flexx import ui
except ImportError:
skip('no flexx.ui')
store = AssetStore()
store.update_modules()
s = Session('', store)
commands = []
s._send_command = lambda x: commands.append(x)
assert not s.present_modules
s._register_component_class(ui.Button)
assert len(s.present_modules) == 2
assert 'flexx.ui._widget' in s.present_modules
assert 'flexx.ui.widgets._button' in s.present_modules
assert len(s._present_classes) == 6 # Because a module was loaded that has more widgets
assert ui.Button in s._present_classes
assert ui.RadioButton in s._present_classes
assert ui.CheckBox in s._present_classes
assert ui.ToggleButton in s._present_classes
assert ui.BaseButton in s._present_classes
assert ui.Widget in s._present_classes
with raises(TypeError):
s._register_component_class(3)
## Prepare module loading tests
from flexx.event._component import new_type
PKG_NAME = 'flxtest2'
def add_prefix(n):
if isinstance(n, list):
return [add_prefix(i) for i in n]
elif n.startswith('foo.'):
return PKG_NAME + '.' + n
else:
return n
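# For example, add_prefix('foo.m1') -> 'flxtest2.foo.m1'; names that do not
# start with 'foo.' pass through unchanged, and lists are mapped element-wise.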
def teardown_module():
clear_test_classes()
def clear_test_classes():
classes = app._component2.AppComponentMeta.CLASSES
for cls in list(classes):
if cls.__jsmodule__.startswith(PKG_NAME + '.'):
classes.remove(cls)
def fakecomponent_init(self, s):
self._session = s
self._id = 'FakeComponent'
def fakecomponent_setattr(self, s, v):
return object.__setattr__(self, s, v)
def fakecomponent_del(self):
pass
Component_overload = dict(__linenr__=0,
__init__=fakecomponent_init,
__setattr__=fakecomponent_setattr,
__del__=fakecomponent_del,
)
class SessionTester(Session):
""" A session subclass that keeps track of DEFINE commands.
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.assets_js = []
self.assets_css = []
def send_command(self, *command):
if command[0] == 'DEFINE':
if 'JS' in command[1]:
_, _, name, _ = command
self.assets_js.append(name)
elif 'CSS' in command[1]:
_, _, name, _ = command
self.assets_css.append(name)
class FakeModule:
""" An object that looks and walks like a JSModule. Enough to fool
Flexx' internals.
"""
def __init__(self, store, name):
self.name = add_prefix(name)
self.deps = set()
self.component_classes = set()
store._modules[self.name] = self
b1 = app.Bundle(self.name + '.js')
b2 = app.Bundle(self.name + '.css')
b1.add_module(self)
b2.add_module(self)
store._assets[b1.name] = b1
store._assets[b2.name] = b2
def make_component_class(self, name, base=app.JsComponent):
cls = new_type(name, (base, ), Component_overload.copy())
self.component_classes.add(cls)
cls.__module__ = self.name
cls.__jsmodule__ = self.name
self.deps.add(base.__jsmodule__)
return cls
def add_variable(self, name):
assert name in [m.__name__ for m in self.component_classes]
def get_js(self):
return self.name + '-JS'
def get_css(self):
return self.name + '-CSS'
## Test module loading
def test_module_loading1():
""" Simple case. """
clear_test_classes()
store = AssetStore()
s = SessionTester('', store)
m1 = FakeModule(store, 'foo.m1')
m2 = FakeModule(store, 'foo.m2')
Ma = m1.make_component_class('Maa')
Mb = m1.make_component_class('Mbb')
Mc = m2.make_component_class('Mcc')
s._register_component(Ma(flx_session=s))
s._register_component(Mb(flx_session=s))
s._register_component(Mc(flx_session=s))
assert s.assets_js == add_prefix(['foo.m1.js', 'foo.m2.js'])
assert s.assets_css == add_prefix(['foo.m1.css', 'foo.m2.css'])
def test_module_loading2():
""" No deps """
clear_test_classes()
store = AssetStore()
s = SessionTester('', store)
m1 = FakeModule(store, 'foo.m1')
m2 = FakeModule(store, 'foo.m2')
m3 = FakeModule(store, 'foo.m3')
Ma = m2.make_component_class('Ma')
# m2.deps = add_prefix(['foo.m3'])
# m3.deps = add_prefix(['foo.m1'])
s._register_component(Ma(flx_session=s))
assert s.assets_js == add_prefix(['foo.m2.js'])
def test_module_loading3():
""" Dependencies get defined too (and before) """
clear_test_classes()
store = AssetStore()
s = SessionTester('', store)
m1 = FakeModule(store, 'foo.m1')
m2 = FakeModule(store, 'foo.m2')
m3 = FakeModule(store, 'foo.m3')
Ma = m2.make_component_class('Ma')
m2.deps = add_prefix(['foo.m3'])
m3.deps = add_prefix(['foo.m1'])
s._register_component(Ma(flx_session=s))
assert s.assets_js == add_prefix(['foo.m1.js', 'foo.m3.js', 'foo.m2.js'])
def test_module_loading4():
""" Dependencies by inheritance """
# A bit silly; the JSModule (and our FakeModule) handles this dependency
clear_test_classes()
store = AssetStore()
s = SessionTester('', store)
m1 = FakeModule(store, 'foo.m1')
m2 = FakeModule(store, 'foo.m2')
m3 = FakeModule(store, 'foo.m3')
Ma = m2.make_component_class('Ma')
Mb = m3.make_component_class('Mb', Ma)
Mc = m1.make_component_class('Mc', Mb)
s._register_component(Mc(flx_session=s))
assert s.assets_js == add_prefix(['foo.m2.js', 'foo.m3.js', 'foo.m1.js'])
def test_module_loading5():
""" Associated assets """
# A bit silly; the JSModule (and our FakeModule) handles this dependency
clear_test_classes()
store = AssetStore()
s = SessionTester('', store)
m1 = FakeModule(store, 'foo.m1')
m2 = FakeModule(store, 'foo.m2')
m3 = FakeModule(store, 'foo.m3')
store.add_shared_asset('spam.js', 'XX')
store.associate_asset(add_prefix('foo.m1'), 'spam.js')
store.associate_asset(add_prefix('foo.m2'), 'eggs.js', 'YY')
store.associate_asset(add_prefix('foo.m2'), 'spam.js')
store.associate_asset(add_prefix('foo.m2'), 'bla.css', 'ZZ')
store.associate_asset(add_prefix('foo.m3'), 'bla.css')
Ma = m1.make_component_class('Ma')
Mb = m2.make_component_class('Mb')
Mc = m3.make_component_class('Mc')
s._register_component(Ma(flx_session=s))
s._register_component(Mb(flx_session=s))
s._register_component(Mc(flx_session=s))
assert s.assets_js == add_prefix(['spam.js', 'foo.m1.js', 'eggs.js', 'foo.m2.js', 'foo.m3.js'])
assert s.assets_css == add_prefix(['foo.m1.css', 'bla.css', 'foo.m2.css', 'foo.m3.css'])
# clear_test_classes()
# test_module_loading5()
# clear_test_classes()
##
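# pongit() advances the session's keep-alive clock: it feeds successive
# counter values to _receive_pong() and lets the event loop spin briefly so
# the session can release objects whose keep-alive timeout has passed.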
def pongit(session, n):
for i in range(n):
c = session._ping_counter
session._ping_counter += 1
session._receive_pong(c)
loop = asyncio.get_event_loop()
for j in range(2):
loop.call_soon(loop.stop)
loop.run_forever()
session._ping_counter = c + 1
# def pongit(session, n):
# max_timeout = session._ping_counter + n
# loop = asyncio.get_event_loop()
# def check():
# if session._ping_counter >= max_timeout:
# loop.stop()
# else:
# loop.call_soon(check)
# loop.run_forever()
def test_keep_alive():
# Avoid Pyzo hijack
asyncio.set_event_loop(asyncio.new_event_loop())
session = app.manager.get_default_session()
if session is None:
session = app.manager.create_default_session()
class Foo:
pass
foo1, foo2, foo3 = Foo(), Foo(), Foo()
foo1_ref = weakref.ref(foo1)
foo2_ref = weakref.ref(foo2)
foo3_ref = weakref.ref(foo3)
session.keep_alive(foo1, 10)
session.keep_alive(foo1, 5) # should do nothing, longest time counts
session.keep_alive(foo2, 5)
session.keep_alive(foo2, 11) # longest timeout counts
session.keep_alive(foo3, 15)
# Delete objects, session keeps them alive
del foo1, foo2, foo3
gc.collect()
assert foo1_ref() is not None
assert foo2_ref() is not None
assert foo3_ref() is not None
# Pong 4, too soon for the session to release the objects
pongit(session, 4)
gc.collect()
assert foo1_ref() is not None
assert foo2_ref() is not None
assert foo3_ref() is not None
# Pong 7, still too soon
pongit(session, 3)
gc.collect()
assert foo1_ref() is not None
assert foo2_ref() is not None
assert foo3_ref() is not None
# Pong 10, should remove foo1
pongit(session, 4)
gc.collect()
assert foo1_ref() is None
assert foo2_ref() is not None
assert foo3_ref() is not None
# Pong 11, should remove foo2
pongit(session, 1)
gc.collect()
assert foo1_ref() is None
assert foo2_ref() is None
assert foo3_ref() is not None
# Pong 20, should remove foo3
pongit(session, 10)
gc.collect()
assert foo1_ref() is None
assert foo2_ref() is None
assert foo3_ref() is None
def test_keep_alive_noleak1():
class Foo:
pass
# Create a session and an object that has a reference to it (like Component)
session = app.Session('test')
foo = Foo()
foo.session = session
# Let the session keep the object alive, so it keeps its reference
session.keep_alive(foo)
session_ref = weakref.ref(session)
foo_ref = weakref.ref(foo)
    # Removing object won't delete it
del foo
gc.collect()
assert foo_ref() is not None
# But closing the session will; session clears up after itself
session.close()
gc.collect()
assert foo_ref() is None
def test_keep_alive_noleak2():
# Even if the above would not work ...
class Foo:
pass
# Create a session and an object that has a reference to it (like Component)
session = app.Session('test')
foo = Foo()
foo.session = session
# Let the session keep the object alive, so it keeps its reference
session.keep_alive(foo)
session_ref = weakref.ref(session)
foo_ref = weakref.ref(foo)
    # Removing object alone won't delete it
del foo
gc.collect()
assert foo_ref() is not None
# But removing both will; gc is able to clear circular ref
del session
gc.collect()
assert session_ref() is None
assert foo_ref() is None
run_tests_if_main()
# ---
import logging
import voluptuous as vol
from yalesmartalarmclient.client import (
YALE_STATE_ARM_FULL,
YALE_STATE_ARM_PARTIAL,
YALE_STATE_DISARM,
AuthenticationError,
YaleSmartAlarmClient,
)
from homeassistant.components.alarm_control_panel import (
PLATFORM_SCHEMA,
AlarmControlPanelEntity,
)
from homeassistant.components.alarm_control_panel.const import (
SUPPORT_ALARM_ARM_AWAY,
SUPPORT_ALARM_ARM_HOME,
)
from homeassistant.const import (
CONF_NAME,
CONF_PASSWORD,
CONF_USERNAME,
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_DISARMED,
)
import homeassistant.helpers.config_validation as cv
CONF_AREA_ID = "area_id"
DEFAULT_NAME = "Yale Smart Alarm"
DEFAULT_AREA_ID = "1"
_LOGGER = logging.getLogger(__name__)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_AREA_ID, default=DEFAULT_AREA_ID): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the alarm platform."""
name = config[CONF_NAME]
username = config[CONF_USERNAME]
password = config[CONF_PASSWORD]
area_id = config[CONF_AREA_ID]
try:
client = YaleSmartAlarmClient(username, password, area_id)
except AuthenticationError:
_LOGGER.error("Authentication failed. Check credentials")
return
add_entities([YaleAlarmDevice(name, client)], True)
class YaleAlarmDevice(AlarmControlPanelEntity):
"""Represent a Yale Smart Alarm."""
def __init__(self, name, client):
"""Initialize the Yale Alarm Device."""
self._name = name
self._client = client
self._state = None
self._state_map = {
YALE_STATE_DISARM: STATE_ALARM_DISARMED,
YALE_STATE_ARM_PARTIAL: STATE_ALARM_ARMED_HOME,
YALE_STATE_ARM_FULL: STATE_ALARM_ARMED_AWAY,
}
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def supported_features(self) -> int:
"""Return the list of supported features."""
return SUPPORT_ALARM_ARM_HOME | SUPPORT_ALARM_ARM_AWAY
def update(self):
"""Return the state of the device."""
armed_status = self._client.get_armed_status()
self._state = self._state_map.get(armed_status)
def alarm_disarm(self, code=None):
"""Send disarm command."""
self._client.disarm()
def alarm_arm_home(self, code=None):
"""Send arm home command."""
self._client.arm_partial()
def alarm_arm_away(self, code=None):
"""Send arm away command."""
self._client.arm_full()
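# A hedged example of YAML this platform schema accepts (the platform key is
# assumed from the integration name; credentials are placeholders):
#
#   alarm_control_panel:
#     - platform: yale_smart_alarm
#       username: you@example.com
#       password: secret
#       area_id: "1"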
# ---
import unittest
from absl.testing import parameterized
import mock
from perfkitbenchmarker import os_types
from perfkitbenchmarker.linux_packages import epel_release
from tests import pkb_common_test_case
_REPOLIST_NO_EPEL = """
repo id repo name status
base/7/x86_64 CentOS-7 - Base 10070
updates/7/x86_64 CentOS-7 - Updates 900
repolist: 11382
"""
_REPOLIST_WITH_EPEL = """
repo id repo name status
base/7/x86_64 CentOS-7 - Base 10070
epel/x86_64 Extra Packages for Enterprise Linux 7 - x86_64 13421
updates/7/x86_64 CentOS-7 - Updates 900
repolist: 24803
"""
_REPOLIST_WITH_EPEL_REMOTE = """
repo id repo name status
*epel/x86_64 Extra Packages for Enterprise Linux 7 - x86_64 13421
"""
EMPTY_RES = ''
def Vm(os_type, responses):
vm_spec = pkb_common_test_case.CreateTestVmSpec()
vm = pkb_common_test_case.TestLinuxVirtualMachine(vm_spec=vm_spec)
# pylint: disable=invalid-name
vm.OS_TYPE = os_type
vm.RemoteCommand = mock.Mock()
vm.InstallPackages = mock.Mock()
vm.RemoteCommand.side_effect = [(text, '') for text in responses]
# pylint: enable=invalid-name
return vm
class EpelReleaseTest(pkb_common_test_case.PkbCommonTestCase):
def testHappyPathCentos8(self):
responses = [
_REPOLIST_NO_EPEL, # initial call for repo list
EMPTY_RES, # centos8 PowerTools
EMPTY_RES, # config manager
_REPOLIST_WITH_EPEL, # call to confirm epel repo present
]
vm = Vm(os_types.CENTOS8, responses)
epel_release.YumInstall(vm)
self.assertEqual(
vm.InstallPackages.call_args_list,
[mock.call(epel_release._EPEL_URL.format(8)),
mock.call('yum-utils')])
  def testRepoAlreadyInstalled(self):
vm = Vm(os_types.CENTOS7, [_REPOLIST_WITH_EPEL])
epel_release.YumInstall(vm)
vm.RemoteCommand.assert_called_once()
@parameterized.named_parameters(
('NoEpelRepo', _REPOLIST_NO_EPEL,
('base/7/x86_64', 'updates/7/x86_64'), False),
('HasEpelRepo', _REPOLIST_WITH_EPEL,
('epel/x86_64', 'base/7/x86_64', 'updates/7/x86_64'), True),
('HasRemoteEpelRepo', _REPOLIST_WITH_EPEL_REMOTE, ('epel/x86_64',), True),
)
def testRepoList(self, repo_response, repo_ids, repo_enabled):
vm = Vm(os_types.CENTOS7, [repo_response, repo_response])
self.assertEqual(epel_release.Repolist(vm), frozenset(repo_ids))
self.assertEqual(epel_release.IsEpelRepoInstalled(vm), repo_enabled)
if __name__ == '__main__':
unittest.main()
# ---
import glob
import os
import numpy as np
from chainer.dataset import download
from chainercv.chainer_experimental.datasets.sliceable import GetterDataset
from chainercv.datasets.cityscapes.cityscapes_utils import cityscapes_labels
from chainercv.utils import read_image
from chainercv.utils import read_label
class CityscapesSemanticSegmentationDataset(GetterDataset):
"""Semantic segmentation dataset for `Cityscapes dataset`_.
.. _`Cityscapes dataset`: https://www.cityscapes-dataset.com
.. note::
        Please download the data manually, since it is not permitted to
        redistribute the Cityscapes dataset.
Args:
data_dir (string): Path to the dataset directory. The directory should
contain at least two directories, :obj:`leftImg8bit` and either
:obj:`gtFine` or :obj:`gtCoarse`. If :obj:`auto` is given, it uses
            :obj:`$CHAINER_DATASET_ROOT/pfnet/chainercv/cityscapes` by default.
label_resolution ({'fine', 'coarse'}): The resolution of the labels. It
should be either :obj:`fine` or :obj:`coarse`.
split ({'train', 'val'}): Select from dataset splits used in
Cityscapes dataset.
ignore_labels (bool): If :obj:`True`, the labels marked
:obj:`ignoreInEval` defined in the original `cityscapesScripts`_
will be replaced with :obj:`-1` in the :meth:`get_example` method.
The default value is :obj:`True`.
.. _`cityscapesScripts`: https://github.com/mcordts/cityscapesScripts
This dataset returns the following data.
.. csv-table::
:header: name, shape, dtype, format
:obj:`img`, ":math:`(3, H, W)`", :obj:`float32`, \
"RGB, :math:`[0, 255]`"
:obj:`label`, ":math:`(H, W)`", :obj:`int32`, \
":math:`[-1, \#class - 1]`"
"""
def __init__(self, data_dir='auto', label_resolution=None, split='train',
ignore_labels=True):
super(CityscapesSemanticSegmentationDataset, self).__init__()
if data_dir == 'auto':
data_dir = download.get_dataset_directory(
'pfnet/chainercv/cityscapes')
if label_resolution not in ['fine', 'coarse']:
            raise ValueError('\'label_resolution\' argument should be either '
                             '\'fine\' or \'coarse\'.')
img_dir = os.path.join(data_dir, os.path.join('leftImg8bit', split))
resol = 'gtFine' if label_resolution == 'fine' else 'gtCoarse'
label_dir = os.path.join(data_dir, resol)
if not os.path.exists(img_dir) or not os.path.exists(label_dir):
            raise ValueError(
                'Cityscapes dataset does not exist at the expected location. '
                'Please download it from https://www.cityscapes-dataset.com/. '
                'Then place directory leftImg8bit at {} and {} at {}.'.format(
                    os.path.join(data_dir, 'leftImg8bit'), resol, label_dir))
self.ignore_labels = ignore_labels
self.label_paths = []
self.img_paths = []
city_dnames = []
for dname in glob.glob(os.path.join(label_dir, '*')):
if split in dname:
for city_dname in glob.glob(os.path.join(dname, '*')):
for label_path in glob.glob(
os.path.join(city_dname, '*_labelIds.png')):
self.label_paths.append(label_path)
city_dnames.append(os.path.basename(city_dname))
for city_dname, label_path in zip(city_dnames, self.label_paths):
label_path = os.path.basename(label_path)
img_path = label_path.replace(
'{}_labelIds'.format(resol), 'leftImg8bit')
img_path = os.path.join(img_dir, city_dname, img_path)
self.img_paths.append(img_path)
self.add_getter('img', self._get_image)
self.add_getter('label', self._get_label)
def __len__(self):
return len(self.img_paths)
def _get_image(self, i):
return read_image(self.img_paths[i])
def _get_label(self, i):
label_orig = read_label(self.label_paths[i], dtype=np.int32)
if self.ignore_labels:
label_out = np.ones(label_orig.shape, dtype=np.int32) * -1
for label in cityscapes_labels:
if not label.ignoreInEval:
label_out[label_orig == label.id] = label.trainId
else:
label_out = label_orig
return label_out
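# A minimal usage sketch (hedged: assumes the dataset has already been
# downloaded manually to the default location):
#
#   dataset = CityscapesSemanticSegmentationDataset(
#       data_dir='auto', label_resolution='fine', split='val')
#   img, label = dataset[0]  # img: (3, H, W) float32, label: (H, W) int32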
# ---
import collections
import numpy as np
from chainercv.chainer_experimental.datasets.sliceable import GetterDataset
def _construct_label_to_key(labels):
d = collections.defaultdict(list)
for i, label in enumerate(labels):
d[label].append(i)
return d
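# For example, _construct_label_to_key([3, 5, 3]) returns a defaultdict
# mapping each label to the indices at which it occurs: {3: [0, 2], 5: [1]}.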
class SiameseDataset(GetterDataset):
"""A dataset that returns samples fetched from two datasets.
The dataset returns samples from the two base datasets.
If :obj:`pos_ratio` is not :obj:`None`,
:class:`SiameseDataset` can be configured to return positive
pairs at the ratio of :obj:`pos_ratio` and negative pairs at the ratio
of :obj:`1 - pos_ratio`.
In this mode, the base datasets are assumed to be label datasets that
return an image and a label as a sample.
Example:
We construct a siamese dataset from MNIST.
.. code::
>>> from chainer.datasets import get_mnist
>>> from chainercv.datasets import SiameseDataset
>>> mnist, _ = get_mnist()
>>> dataset = SiameseDataset(mnist, mnist, pos_ratio=0.3)
# The probability of the two samples having the same label
# is 0.3 as specified by pos_ratio.
>>> img_0, label_0, img_1, label_1 = dataset[0]
# The returned examples may change in the next
# call even if the index is the same as before
# because SiameseDataset picks examples randomly
# (e.g., img_0_new may differ from img_0).
>>> img_0_new, label_0_new, img_1_new, label_1_new = dataset[0]
Args:
dataset_0: The first base dataset.
dataset_1: The second base dataset.
pos_ratio (float): If this is not :obj:`None`,
this dataset tries to construct positive pairs at the
given rate. If :obj:`None`,
this dataset randomly samples examples from the base
datasets. The default value is :obj:`None`.
length (int): The length of this dataset. If :obj:`None`,
the length of the first base dataset is the length of this
dataset.
labels_0 (numpy.ndarray): The labels associated to
the first base dataset. The length should be the same as
the length of the first dataset. If this is :obj:`None`,
the labels are automatically fetched using the following
line of code: :obj:`[ex[1] for ex in dataset_0]`.
By setting :obj:`labels_0` and skipping the fetching
iteration, the computation cost can be reduced.
Also, if :obj:`pos_ratio` is :obj:`None`, this value
is ignored. The default value is :obj:`None`.
            If :obj:`labels_1` is specified and
:obj:`dataset_0` and :obj:`dataset_1` are the same,
:obj:`labels_0` can be skipped.
labels_1 (numpy.ndarray): The labels associated to
            the second base dataset. If :obj:`labels_0` is specified and
:obj:`dataset_0` and :obj:`dataset_1` are the same,
:obj:`labels_1` can be skipped.
Please consult the explanation for :obj:`labels_0`.
This dataset returns the following data.
.. csv-table::
:header: name, shape, dtype, format
:obj:`img_0`, [#siamese_1]_, [#siamese_1]_, [#siamese_1]_
:obj:`label_0`, scalar, :obj:`int32`, ":math:`[0, \#class - 1]`"
:obj:`img_1`, [#siamese_2]_, [#siamese_2]_, [#siamese_2]_
:obj:`label_1`, scalar, :obj:`int32`, ":math:`[0, \#class - 1]`"
.. [#siamese_1] Same as :obj:`dataset_0`.
.. [#siamese_2] Same as :obj:`dataset_1`.
"""
def __init__(self, dataset_0, dataset_1,
pos_ratio=None, length=None, labels_0=None, labels_1=None):
super(SiameseDataset, self).__init__()
self._dataset_0 = dataset_0
self._dataset_1 = dataset_1
self._pos_ratio = pos_ratio
if length is None:
length = len(self._dataset_0)
self._length = length
if pos_ratio is not None:
# handle cases when labels_0 and labels_1 are not set
if dataset_0 is dataset_1:
if labels_0 is None and labels_1 is None:
labels_0 = np.array([example[1] for example in dataset_0])
labels_1 = labels_0
elif labels_0 is None:
labels_0 = labels_1
elif labels_1 is None:
labels_1 = labels_0
else:
if labels_0 is None:
labels_0 = np.array([example[1] for example in dataset_0])
if labels_1 is None:
labels_1 = np.array([example[1] for example in dataset_1])
if not (labels_0.dtype == np.int32 and labels_0.ndim == 1 and
len(labels_0) == len(dataset_0) and
labels_1.dtype == np.int32 and labels_1.ndim == 1 and
len(labels_1) == len(dataset_1)):
raise ValueError('the labels are invalid.')
# Construct mapping label->idx
self._label_to_index_0 = _construct_label_to_key(labels_0)
if dataset_0 is dataset_1:
self._label_to_index_1 = self._label_to_index_0
else:
self._label_to_index_1 = _construct_label_to_key(labels_1)
# select labels with positive pairs
unique_0 = np.array(list(self._label_to_index_0.keys()))
self._exist_pos_pair_labels_0 =\
np.array([l for l in unique_0 if np.any(labels_1 == l)])
if len(self._exist_pos_pair_labels_0) == 0 and pos_ratio > 0:
raise ValueError(
                    'There are no positive pairs. For the given pair of '
'datasets, please set pos_ratio to None.')
# select labels in dataset_0 with negative pairs
self._exist_neg_pair_labels_0 = \
np.array([l for l in unique_0 if np.any(labels_1 != l)])
if len(self._exist_neg_pair_labels_0) == 0 and pos_ratio < 1:
raise ValueError(
                    'There are no negative pairs. For the given pair of '
'datasets, please set pos_ratio to None.')
self._labels_0 = labels_0
self._labels_1 = labels_1
self.add_getter(
('img_0', 'label_0', 'img_1', 'label_1'), self._get_example)
def __len__(self):
return self._length
def _get_example(self, i):
if self._pos_ratio is None:
idx0 = np.random.choice(np.arange(len(self._dataset_0)))
idx1 = np.random.choice(np.arange(len(self._dataset_1)))
else:
# get pos-pair
if np.random.binomial(1, self._pos_ratio):
label = np.random.choice(self._exist_pos_pair_labels_0)
idx0 = np.random.choice(self._label_to_index_0[label])
idx1 = np.random.choice(self._label_to_index_1[label])
# get neg-pair
else:
label_0 = np.random.choice(self._exist_neg_pair_labels_0)
keys = list(self._label_to_index_1.keys())
if label_0 in keys:
keys.remove(label_0)
label_1 = np.random.choice(keys)
idx0 = np.random.choice(self._label_to_index_0[label_0])
idx1 = np.random.choice(self._label_to_index_1[label_1])
example_0 = self._dataset_0[idx0]
example_1 = self._dataset_1[idx1]
return tuple(example_0) + tuple(example_1)
# ---
from pprint import pformat
from kombu import Connection, Exchange, Queue, Consumer, eventloop
#: By default messages sent to exchanges are persistent (delivery_mode=2),
#: and queues and exchanges are durable.
exchange = Exchange('kombu_demo', type='direct')
queue = Queue('kombu_demo', exchange, routing_key='kombu_demo')
def pretty(obj):
return pformat(obj, indent=4)
#: This is the callback applied when a message is received.
def handle_message(body, message):
print(f'Received message: {body!r}')
    print(f' properties:\n{pretty(message.properties)}')
    print(f' delivery_info:\n{pretty(message.delivery_info)}')
message.ack()
#: Create a connection and a channel.
#: If hostname, userid, password and virtual_host are not specified,
#: the values below are the defaults, but they are listed here so they
#: can be easily changed.
with Connection('amqp://guest:guest@localhost:5672//') as connection:
#: Create consumer using our callback and queue.
#: Second argument can also be a list to consume from
#: any number of queues.
with Consumer(connection, queue, callbacks=[handle_message]):
#: Each iteration waits for a single event. Note that this
#: event may not be a message, or a message that is to be
        #: delivered to the consumer's channel, but any event received
#: on the connection.
for _ in eventloop(connection):
pass
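#: A minimal companion producer sketch (hedged: not part of the original
#: demo) showing how a message could be published to the same exchange so
#: the consumer above has something to receive:
#
#   with Connection('amqp://guest:guest@localhost:5672//') as conn:
#       producer = conn.Producer(serializer='json')
#       producer.publish({'hello': 'world'},
#                        exchange=exchange, routing_key='kombu_demo',
#                        declare=[queue])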
# ---
from typing import List, Optional, Tuple
from aioesphomeapi import LightInfo, LightState
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_COLOR_TEMP,
ATTR_EFFECT,
ATTR_FLASH,
ATTR_HS_COLOR,
ATTR_TRANSITION,
ATTR_WHITE_VALUE,
FLASH_LONG,
FLASH_SHORT,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_COLOR_TEMP,
SUPPORT_EFFECT,
SUPPORT_FLASH,
SUPPORT_TRANSITION,
SUPPORT_WHITE_VALUE,
LightEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.helpers.typing import HomeAssistantType
import homeassistant.util.color as color_util
from . import EsphomeEntity, esphome_state_property, platform_async_setup_entry
FLASH_LENGTHS = {FLASH_SHORT: 2, FLASH_LONG: 10}
async def async_setup_entry(
hass: HomeAssistantType, entry: ConfigEntry, async_add_entities
) -> None:
"""Set up ESPHome lights based on a config entry."""
await platform_async_setup_entry(
hass,
entry,
async_add_entities,
component_key="light",
info_type=LightInfo,
entity_type=EsphomeLight,
state_type=LightState,
)
class EsphomeLight(EsphomeEntity, LightEntity):
"""A switch implementation for ESPHome."""
@property
def _static_info(self) -> LightInfo:
return super()._static_info
@property
def _state(self) -> Optional[LightState]:
return super()._state
# https://github.com/PyCQA/pylint/issues/3150 for all @esphome_state_property
# pylint: disable=invalid-overridden-method
@esphome_state_property
def is_on(self) -> Optional[bool]:
"""Return true if the switch is on."""
return self._state.state
async def async_turn_on(self, **kwargs) -> None:
"""Turn the entity on."""
data = {"key": self._static_info.key, "state": True}
if ATTR_HS_COLOR in kwargs:
hue, sat = kwargs[ATTR_HS_COLOR]
red, green, blue = color_util.color_hsv_to_RGB(hue, sat, 100)
data["rgb"] = (red / 255, green / 255, blue / 255)
if ATTR_FLASH in kwargs:
data["flash_length"] = FLASH_LENGTHS[kwargs[ATTR_FLASH]]
if ATTR_TRANSITION in kwargs:
data["transition_length"] = kwargs[ATTR_TRANSITION]
if ATTR_BRIGHTNESS in kwargs:
data["brightness"] = kwargs[ATTR_BRIGHTNESS] / 255
if ATTR_COLOR_TEMP in kwargs:
data["color_temperature"] = kwargs[ATTR_COLOR_TEMP]
if ATTR_EFFECT in kwargs:
data["effect"] = kwargs[ATTR_EFFECT]
if ATTR_WHITE_VALUE in kwargs:
data["white"] = kwargs[ATTR_WHITE_VALUE] / 255
await self._client.light_command(**data)
async def async_turn_off(self, **kwargs) -> None:
"""Turn the entity off."""
data = {"key": self._static_info.key, "state": False}
if ATTR_FLASH in kwargs:
data["flash_length"] = FLASH_LENGTHS[kwargs[ATTR_FLASH]]
if ATTR_TRANSITION in kwargs:
data["transition_length"] = kwargs[ATTR_TRANSITION]
await self._client.light_command(**data)
@esphome_state_property
def brightness(self) -> Optional[int]:
"""Return the brightness of this light between 0..255."""
return round(self._state.brightness * 255)
@esphome_state_property
def hs_color(self) -> Optional[Tuple[float, float]]:
"""Return the hue and saturation color value [float, float]."""
return color_util.color_RGB_to_hs(
self._state.red * 255, self._state.green * 255, self._state.blue * 255
)
@esphome_state_property
def color_temp(self) -> Optional[float]:
"""Return the CT color value in mireds."""
return self._state.color_temperature
@esphome_state_property
def white_value(self) -> Optional[int]:
"""Return the white value of this light between 0..255."""
return round(self._state.white * 255)
@esphome_state_property
def effect(self) -> Optional[str]:
"""Return the current effect."""
return self._state.effect
@property
def supported_features(self) -> int:
"""Flag supported features."""
flags = SUPPORT_FLASH
if self._static_info.supports_brightness:
flags |= SUPPORT_BRIGHTNESS
flags |= SUPPORT_TRANSITION
if self._static_info.supports_rgb:
flags |= SUPPORT_COLOR
if self._static_info.supports_white_value:
flags |= SUPPORT_WHITE_VALUE
if self._static_info.supports_color_temperature:
flags |= SUPPORT_COLOR_TEMP
if self._static_info.effects:
flags |= SUPPORT_EFFECT
return flags
@property
def effect_list(self) -> List[str]:
"""Return the list of supported effects."""
return self._static_info.effects
@property
def min_mireds(self) -> float:
"""Return the coldest color_temp that this light supports."""
return self._static_info.min_mireds
@property
def max_mireds(self) -> float:
"""Return the warmest color_temp that this light supports."""
return self._static_info.max_mireds
# ---
from . import *
class TestPlugin(Plugin):
MIN_VERSION = '0.3.1'
@classmethod
def attach(cls, subparsers: argparse.ArgumentParser, log: list, **kwargs):
""" Attaches this plugin to the top-level argparse subparser group
:param subparsers the action subparser group
:param log a list to put initialization log messages in
"""
p = subparsers.add_parser('test', add_help=False)
p.add_argument('--silent', action='store_true', default=False)
p.set_defaults(func=cls.action)
log.append(str(cls) + ' attached.')
@classmethod
def action(cls, args: argparse.Namespace) -> int:
""" This is where the magic happens.
Return a zero for success, a non-zero int for failure. """
if not args.silent:
print('This plugin works.')
# args.cache.do_something()
# args.acd_client.do_something()
return 0
# ---
import diamond.collector
try:
import vms
except ImportError:
vms = None
class VMSDomsCollector(diamond.collector.Collector):
PLUGIN_STATS = {
'nominal': ('pages', 4096),
'current': ('memory.current', 4096),
'clean': ('memory.clean', 4096),
'dirty': ('memory.dirty', 4096),
'limit': ('memory.limit', 4096),
'target': ('memory.target', 4096),
'evicted': ('eviction.dropped', 4096),
'pagedout': ('eviction.pagedout', 4096),
'pagedin': ('eviction.pagedin', 4096),
}
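    # Each entry maps a published metric name to the VMS control key it is
    # read from plus a scale factor; the 4096 multiplier presumably converts
    # page counts to bytes (4 KiB pages).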
def get_default_config_help(self):
config_help = super(VMSDomsCollector, self).get_default_config_help()
config_help.update({
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(VMSDomsCollector, self).get_default_config()
config.update({
'path': 'vms'
})
return config
def collect(self):
if vms is None:
self.log.error('Unable to import vms')
return {}
vms.virt.init()
hypervisor = vms.virt.AUTO.Hypervisor()
# Get list of domains and iterate.
domains = hypervisor.domain_list()
vms_domains = []
count = 0
# Pre filter VMS domains.
for d in domains:
# Skip non-VMS domains.
if not vms.control.exists(d):
continue
# Grab a control connection.
dom = hypervisor.domain_lookup(d)
if dom is None:
continue
ctrl = dom._wait_for_control(wait=False)
if ctrl is None:
continue
try:
# Skip ghost domains.
if ctrl.get('gd.isghost') == '1':
continue
except vms.control.ControlException:
continue
vms_domains.append((dom, ctrl))
count += 1
# Add the number of domains.
self.publish('domains', count)
# For each stat,
for stat in self.PLUGIN_STATS:
key = self.PLUGIN_STATS[stat][0]
scale = self.PLUGIN_STATS[stat][1]
total = 0
# For each domain,
for dom, ctrl in vms_domains:
try:
# Get value and scale.
                    value = int(ctrl.get(key)) * scale
except vms.control.ControlException:
continue
# Dispatch.
self.publish(stat, value, instance=dom.name())
# Add to total.
total = total + value
# Dispatch total value.
self.publish(stat, total)
# ---
import logging
import textwrap
import pytest
import py.path # pylint: disable=no-name-in-module
from PyQt5.QtCore import QUrl
from qutebrowser.utils import usertypes
from qutebrowser.browser import greasemonkey
from qutebrowser.misc import objects
test_gm_script = r"""
// ==UserScript==
// @name qutebrowser test userscript
// @namespace invalid.org
// @include http://localhost:*/data/title.html
// @match http://*.trolol.com/*
// @exclude https://badhost.xxx/*
// @run-at document-start
// ==/UserScript==
console.log("Script is running.");
"""
pytestmark = pytest.mark.usefixtures('data_tmpdir')
def _save_script(script_text, filename):
# pylint: disable=no-member
file_path = py.path.local(greasemonkey._scripts_dir()) / filename
# pylint: enable=no-member
file_path.write_text(script_text, encoding='utf-8', ensure=True)
def test_all():
"""Test that a script gets read from file, parsed and returned."""
_save_script(test_gm_script, 'test.user.js')
gm_manager = greasemonkey.GreasemonkeyManager()
assert (gm_manager.all_scripts()[0].name ==
"qutebrowser test userscript")
@pytest.mark.parametrize("url, expected_matches", [
# included
('http://trolol.com/', 1),
# neither included nor excluded
('http://aaaaaaaaaa.com/', 0),
# excluded
('https://badhost.xxx/', 0),
])
def test_get_scripts_by_url(url, expected_matches):
"""Check Greasemonkey include/exclude rules work."""
_save_script(test_gm_script, 'test.user.js')
gm_manager = greasemonkey.GreasemonkeyManager()
scripts = gm_manager.scripts_for(QUrl(url))
assert (len(scripts.start + scripts.end + scripts.idle) ==
expected_matches)
@pytest.mark.parametrize("url, expected_matches", [
# included
('https://github.com/qutebrowser/qutebrowser/', 1),
# neither included nor excluded
('http://aaaaaaaaaa.com/', 0),
# excluded takes priority
('http://github.com/foo', 0),
])
def test_regex_includes_scripts_for(url, expected_matches):
"""Ensure our GM @*clude support supports regular expressions."""
gh_dark_example = textwrap.dedent(r"""
// ==UserScript==
// @include /^https?://((gist|guides|help|raw|status|developer)\.)?github\.com/((?!generated_pages\/preview).)*$/
// @exclude /https?://github\.com/foo/
// @run-at document-start
// ==/UserScript==
""")
_save_script(gh_dark_example, 'test.user.js')
gm_manager = greasemonkey.GreasemonkeyManager()
scripts = gm_manager.scripts_for(QUrl(url))
assert (len(scripts.start + scripts.end + scripts.idle) ==
expected_matches)
def test_no_metadata(caplog):
"""Run on all sites at document-end is the default."""
_save_script("var nothing = true;\n", 'nothing.user.js')
with caplog.at_level(logging.WARNING):
gm_manager = greasemonkey.GreasemonkeyManager()
scripts = gm_manager.scripts_for(QUrl('http://notamatch.invalid/'))
assert len(scripts.start + scripts.end + scripts.idle) == 1
assert len(scripts.end) == 1
def test_no_name():
"""Ensure that GreaseMonkeyScripts must have a name."""
msg = "@name key required or pass filename to init."
with pytest.raises(ValueError, match=msg):
greasemonkey.GreasemonkeyScript([("something", "else")], "")
def test_no_name_with_fallback():
"""Ensure that script's name can fallback to the provided filename."""
script = greasemonkey.GreasemonkeyScript(
[("something", "else")], "", filename=r"C:\COM1")
assert script
assert script.name == r"C:\COM1"
def test_bad_scheme(caplog):
"""qute:// isn't in the list of allowed schemes."""
_save_script("var nothing = true;\n", 'nothing.user.js')
with caplog.at_level(logging.WARNING):
gm_manager = greasemonkey.GreasemonkeyManager()
scripts = gm_manager.scripts_for(QUrl('qute://settings'))
assert len(scripts.start + scripts.end + scripts.idle) == 0
def test_load_emits_signal(qtbot):
gm_manager = greasemonkey.GreasemonkeyManager()
with qtbot.wait_signal(gm_manager.scripts_reloaded):
gm_manager.load_scripts()
def test_utf8_bom():
"""Make sure UTF-8 BOMs are stripped from scripts.
If we don't strip them, we'll have a BOM in the middle of the file, causing
QtWebEngine to not catch the "// ==UserScript==" line.
"""
script = textwrap.dedent("""
\N{BYTE ORDER MARK}// ==UserScript==
// @name qutebrowser test userscript
// ==/UserScript==
""".lstrip('\n'))
_save_script(script, 'bom.user.js')
gm_manager = greasemonkey.GreasemonkeyManager()
scripts = gm_manager.all_scripts()
assert len(scripts) == 1
script = scripts[0]
assert '// ==UserScript==' in script.code().splitlines()
class TestForceDocumentEnd:
def _get_script(self, *, namespace, name):
source = textwrap.dedent("""
// ==UserScript==
// @namespace {}
// @name {}
// ==/UserScript==
""".format(namespace, name))
_save_script(source, 'force.user.js')
gm_manager = greasemonkey.GreasemonkeyManager()
scripts = gm_manager.all_scripts()
assert len(scripts) == 1
return scripts[0]
@pytest.mark.parametrize('namespace, name, force', [
('http://userstyles.org', 'foobar', True),
('https://github.com/ParticleCore', 'Iridium', True),
('https://github.com/ParticleCore', 'Foo', False),
('https://example.org', 'Iridium', False),
])
def test_matching(self, monkeypatch, namespace, name, force):
"""Test matching based on namespace/name."""
monkeypatch.setattr(objects, 'backend', usertypes.Backend.QtWebEngine)
script = self._get_script(namespace=namespace, name=name)
assert script.needs_document_end_workaround() == force
def test_required_scripts_are_included(download_stub, tmpdir):
test_require_script = textwrap.dedent("""
// ==UserScript==
// @name qutebrowser test userscript
// @namespace invalid.org
// @include http://localhost:*/data/title.html
// @match http://trolol*
// @exclude https://badhost.xxx/*
// @run-at document-start
// @require http://localhost/test.js
// ==/UserScript==
console.log("Script is running.");
""")
_save_script(test_require_script, 'requiring.user.js')
(tmpdir / 'test.js').write_text('REQUIRED SCRIPT', encoding='UTF-8')
gm_manager = greasemonkey.GreasemonkeyManager()
assert len(gm_manager._in_progress_dls) == 1
for download in gm_manager._in_progress_dls:
download.finished.emit()
scripts = gm_manager.all_scripts()
assert len(scripts) == 1
assert "REQUIRED SCRIPT" in scripts[0].code()
# Additionally check that the base script is still being parsed correctly
assert "Script is running." in scripts[0].code()
assert scripts[0].excludes
class TestWindowIsolation:
"""Check that greasemonkey scripts get a shadowed global scope."""
@pytest.fixture
def setup(self):
# pylint: disable=attribute-defined-outside-init
class SetupData:
pass
ret = SetupData()
# Change something in the global scope
ret.setup_script = "window.$ = 'global'"
# Greasemonkey script to report back on its scope.
test_script = greasemonkey.GreasemonkeyScript.parse(
textwrap.dedent("""
// ==UserScript==
// @name scopetest
// ==/UserScript==
// Check the thing the page set is set to the expected type
result.push(window.$);
result.push($);
// Now overwrite it
window.$ = 'shadowed';
// And check everything is how the script would expect it to be
// after just writing to the "global" scope
result.push(window.$);
result.push($);
""")
)
# The compiled source of that scripts with some additional setup
# bookending it.
ret.test_script = "\n".join([
"""
const result = [];
""",
test_script.code(),
"""
// Now check that the actual global scope has
// not been overwritten
result.push(window.$);
result.push($);
// And return our findings
result;
"""
])
# What we expect the script to report back.
ret.expected = ["global", "global",
"shadowed", "shadowed",
"global", "global"]
return ret
def test_webengine(self, qtbot, webengineview, setup):
page = webengineview.page()
page.runJavaScript(setup.setup_script)
with qtbot.wait_callback() as callback:
page.runJavaScript(setup.test_script, callback)
callback.assert_called_with(setup.expected)
# The JSCore in 602.1 doesn't fully support Proxy.
@pytest.mark.qtwebkit6021_xfail
def test_webkit(self, webview, setup):
elem = webview.page().mainFrame().documentElement()
elem.evaluateJavaScript(setup.setup_script)
result = elem.evaluateJavaScript(setup.test_script)
assert result == setup.expected
class TestSharedWindowProxy:
"""Check that all scripts have access to the same window proxy."""
@pytest.fixture
def setup(self):
# pylint: disable=attribute-defined-outside-init
class SetupData:
pass
ret = SetupData()
# Greasemonkey script to add a property to the window proxy.
ret.test_script_a = greasemonkey.GreasemonkeyScript.parse(
textwrap.dedent("""
// ==UserScript==
// @name a
// ==/UserScript==
// Set a value from script a
window.$ = 'test';
""")
).code()
# Greasemonkey script to retrieve a property from the window proxy.
ret.test_script_b = greasemonkey.GreasemonkeyScript.parse(
textwrap.dedent("""
// ==UserScript==
// @name b
// ==/UserScript==
// Check that the value is accessible from script b
return [window.$, $];
""")
).code()
# What we expect the script to report back.
ret.expected = ["test", "test"]
return ret
def test_webengine(self, qtbot, webengineview, setup):
page = webengineview.page()
with qtbot.wait_callback() as callback:
page.runJavaScript(setup.test_script_a, callback)
with qtbot.wait_callback() as callback:
page.runJavaScript(setup.test_script_b, callback)
callback.assert_called_with(setup.expected)
def test_webkit(self, webview, setup):
elem = webview.page().mainFrame().documentElement()
elem.evaluateJavaScript(setup.test_script_a)
result = elem.evaluateJavaScript(setup.test_script_b)
assert result == setup.expected
# ---
import re
from jinja2.exceptions import TemplateSyntaxError
from jinja2.ext import Extension
from jinja2.lexer import count_newlines
from jinja2.lexer import Token
_outside_re = re.compile(r"\\?(gettext|_)\(")
_inside_re = re.compile(r"\\?[()]")
class InlineGettext(Extension):
"""This extension implements support for inline gettext blocks::
<h1>_(Welcome)</h1>
<p>_(This is a paragraph)</p>
Requires the i18n extension to be loaded and configured.
"""
def filter_stream(self, stream):
paren_stack = 0
for token in stream:
if token.type != "data":
yield token
continue
pos = 0
lineno = token.lineno
            while True:
if not paren_stack:
match = _outside_re.search(token.value, pos)
else:
match = _inside_re.search(token.value, pos)
if match is None:
break
new_pos = match.start()
if new_pos > pos:
preval = token.value[pos:new_pos]
yield Token(lineno, "data", preval)
lineno += count_newlines(preval)
gtok = match.group()
if gtok[0] == "\\":
yield Token(lineno, "data", gtok[1:])
elif not paren_stack:
yield Token(lineno, "block_begin", None)
yield Token(lineno, "name", "trans")
yield Token(lineno, "block_end", None)
paren_stack = 1
else:
if gtok == "(" or paren_stack > 1:
yield Token(lineno, "data", gtok)
paren_stack += -1 if gtok == ")" else 1
if not paren_stack:
yield Token(lineno, "block_begin", None)
yield Token(lineno, "name", "endtrans")
yield Token(lineno, "block_end", None)
pos = match.end()
if pos < len(token.value):
yield Token(lineno, "data", token.value[pos:])
if paren_stack:
raise TemplateSyntaxError(
"unclosed gettext expression",
token.lineno,
stream.name,
stream.filename,
)
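# A minimal usage sketch (hedged: relies on jinja2's standard i18n
# extension, which provides install_null_translations()):
#
#   from jinja2 import Environment
#   env = Environment(extensions=["jinja2.ext.i18n", InlineGettext])
#   env.install_null_translations()
#   env.from_string("<h1>_(Welcome)</h1>").render()
#   # -> '<h1>Welcome</h1>'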
# ---
import pytest
from homeassistant.components.mold_indicator.sensor import (
ATTR_CRITICAL_TEMP,
ATTR_DEWPOINT,
)
import homeassistant.components.sensor as sensor
from homeassistant.const import (
ATTR_UNIT_OF_MEASUREMENT,
PERCENTAGE,
STATE_UNKNOWN,
TEMP_CELSIUS,
)
from homeassistant.setup import async_setup_component
@pytest.fixture(autouse=True)
def init_sensors_fixture(hass):
"""Set up things to be run when tests are started."""
hass.states.async_set(
"test.indoortemp", "20", {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}
)
hass.states.async_set(
"test.outdoortemp", "10", {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}
)
hass.states.async_set(
"test.indoorhumidity", "50", {ATTR_UNIT_OF_MEASUREMENT: PERCENTAGE}
)
async def test_setup(hass):
"""Test the mold indicator sensor setup."""
assert await async_setup_component(
hass,
sensor.DOMAIN,
{
"sensor": {
"platform": "mold_indicator",
"indoor_temp_sensor": "test.indoortemp",
"outdoor_temp_sensor": "test.outdoortemp",
"indoor_humidity_sensor": "test.indoorhumidity",
"calibration_factor": 2.0,
}
},
)
await hass.async_block_till_done()
moldind = hass.states.get("sensor.mold_indicator")
assert moldind
    assert moldind.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE
async def test_invalidcalib(hass):
"""Test invalid sensor values."""
hass.states.async_set(
"test.indoortemp", "10", {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}
)
hass.states.async_set(
"test.outdoortemp", "10", {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}
)
hass.states.async_set(
"test.indoorhumidity", "0", {ATTR_UNIT_OF_MEASUREMENT: PERCENTAGE}
)
assert await async_setup_component(
hass,
sensor.DOMAIN,
{
"sensor": {
"platform": "mold_indicator",
"indoor_temp_sensor": "test.indoortemp",
"outdoor_temp_sensor": "test.outdoortemp",
"indoor_humidity_sensor": "test.indoorhumidity",
"calibration_factor": 0,
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
moldind = hass.states.get("sensor.mold_indicator")
assert moldind
assert moldind.state == "unavailable"
assert moldind.attributes.get(ATTR_DEWPOINT) is None
assert moldind.attributes.get(ATTR_CRITICAL_TEMP) is None
async def test_invalidhum(hass):
"""Test invalid sensor values."""
hass.states.async_set(
"test.indoortemp", "10", {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}
)
hass.states.async_set(
"test.outdoortemp", "10", {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}
)
hass.states.async_set(
"test.indoorhumidity", "-1", {ATTR_UNIT_OF_MEASUREMENT: PERCENTAGE}
)
assert await async_setup_component(
hass,
sensor.DOMAIN,
{
"sensor": {
"platform": "mold_indicator",
"indoor_temp_sensor": "test.indoortemp",
"outdoor_temp_sensor": "test.outdoortemp",
"indoor_humidity_sensor": "test.indoorhumidity",
"calibration_factor": 2.0,
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
moldind = hass.states.get("sensor.mold_indicator")
assert moldind
assert moldind.state == "unavailable"
assert moldind.attributes.get(ATTR_DEWPOINT) is None
assert moldind.attributes.get(ATTR_CRITICAL_TEMP) is None
hass.states.async_set(
"test.indoorhumidity", "A", {ATTR_UNIT_OF_MEASUREMENT: PERCENTAGE}
)
await hass.async_block_till_done()
moldind = hass.states.get("sensor.mold_indicator")
assert moldind
assert moldind.state == "unavailable"
assert moldind.attributes.get(ATTR_DEWPOINT) is None
assert moldind.attributes.get(ATTR_CRITICAL_TEMP) is None
hass.states.async_set(
"test.indoorhumidity", "10", {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}
)
await hass.async_block_till_done()
moldind = hass.states.get("sensor.mold_indicator")
assert moldind
assert moldind.state == "unavailable"
assert moldind.attributes.get(ATTR_DEWPOINT) is None
assert moldind.attributes.get(ATTR_CRITICAL_TEMP) is None
async def test_calculation(hass):
"""Test the mold indicator internal calculations."""
assert await async_setup_component(
hass,
sensor.DOMAIN,
{
"sensor": {
"platform": "mold_indicator",
"indoor_temp_sensor": "test.indoortemp",
"outdoor_temp_sensor": "test.outdoortemp",
"indoor_humidity_sensor": "test.indoorhumidity",
"calibration_factor": 2.0,
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
moldind = hass.states.get("sensor.mold_indicator")
assert moldind
# assert dewpoint
dewpoint = moldind.attributes.get(ATTR_DEWPOINT)
assert dewpoint
assert dewpoint > 9.25
assert dewpoint < 9.26
# assert temperature estimation
esttemp = moldind.attributes.get(ATTR_CRITICAL_TEMP)
assert esttemp
assert esttemp > 14.9
assert esttemp < 15.1
# assert mold indicator value
state = moldind.state
assert state
assert state == "68"
async def test_unknown_sensor(hass):
"""Test the sensor_changed function."""
assert await async_setup_component(
hass,
sensor.DOMAIN,
{
"sensor": {
"platform": "mold_indicator",
"indoor_temp_sensor": "test.indoortemp",
"outdoor_temp_sensor": "test.outdoortemp",
"indoor_humidity_sensor": "test.indoorhumidity",
"calibration_factor": 2.0,
}
},
)
await hass.async_block_till_done()
await hass.async_start()
hass.states.async_set(
"test.indoortemp", STATE_UNKNOWN, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}
)
await hass.async_block_till_done()
moldind = hass.states.get("sensor.mold_indicator")
assert moldind
assert moldind.state == "unavailable"
assert moldind.attributes.get(ATTR_DEWPOINT) is None
assert moldind.attributes.get(ATTR_CRITICAL_TEMP) is None
hass.states.async_set(
"test.indoortemp", "30", {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}
)
hass.states.async_set(
"test.outdoortemp", STATE_UNKNOWN, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}
)
await hass.async_block_till_done()
moldind = hass.states.get("sensor.mold_indicator")
assert moldind
assert moldind.state == "unavailable"
assert moldind.attributes.get(ATTR_DEWPOINT) is None
assert moldind.attributes.get(ATTR_CRITICAL_TEMP) is None
hass.states.async_set(
"test.outdoortemp", "25", {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}
)
hass.states.async_set(
"test.indoorhumidity",
STATE_UNKNOWN,
{ATTR_UNIT_OF_MEASUREMENT: PERCENTAGE},
)
await hass.async_block_till_done()
moldind = hass.states.get("sensor.mold_indicator")
assert moldind
assert moldind.state == "unavailable"
assert moldind.attributes.get(ATTR_DEWPOINT) is None
assert moldind.attributes.get(ATTR_CRITICAL_TEMP) is None
hass.states.async_set(
"test.indoorhumidity", "20", {ATTR_UNIT_OF_MEASUREMENT: PERCENTAGE}
)
await hass.async_block_till_done()
moldind = hass.states.get("sensor.mold_indicator")
assert moldind
assert moldind.state == "23"
dewpoint = moldind.attributes.get(ATTR_DEWPOINT)
assert dewpoint
assert dewpoint > 4.58
assert dewpoint < 4.59
esttemp = moldind.attributes.get(ATTR_CRITICAL_TEMP)
assert esttemp
assert esttemp == 27.5
async def test_sensor_changed(hass):
"""Test the sensor_changed function."""
assert await async_setup_component(
hass,
sensor.DOMAIN,
{
"sensor": {
"platform": "mold_indicator",
"indoor_temp_sensor": "test.indoortemp",
"outdoor_temp_sensor": "test.outdoortemp",
"indoor_humidity_sensor": "test.indoorhumidity",
"calibration_factor": 2.0,
}
},
)
await hass.async_block_till_done()
await hass.async_start()
hass.states.async_set(
"test.indoortemp", "30", {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}
)
await hass.async_block_till_done()
assert hass.states.get("sensor.mold_indicator").state == "90"
hass.states.async_set(
"test.outdoortemp", "25", {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}
)
await hass.async_block_till_done()
assert hass.states.get("sensor.mold_indicator").state == "57"
hass.states.async_set(
"test.indoorhumidity", "20", {ATTR_UNIT_OF_MEASUREMENT: PERCENTAGE}
)
await hass.async_block_till_done()
assert hass.states.get("sensor.mold_indicator").state == "23"
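# Hedged sketch of the arithmetic behind the assertions above, assuming the
# module fixture sets indoor temp to 20 degC, outdoor temp to 10 degC and
# indoor humidity to 50% (the values that reproduce the asserted windows).
# The platform uses the Magnus approximation for the dew point; the constants
# below are the common 17.62/243.12 pair and may differ slightly from the
# integration's internals.
def _approx_dewpoint(temp_c, rel_humidity_pct):
    """Approximate the dew point in degC via the Magnus formula."""
    import math
    alpha = math.log(rel_humidity_pct / 100) + 17.62 * temp_c / (243.12 + temp_c)
    return 243.12 * alpha / (17.62 - alpha)
# _approx_dewpoint(20, 50) evaluates to ~9.26, matching the 9.25..9.26 window
# asserted in test_calculation, and the critical surface temperature is
# estimated as outdoor + (indoor - outdoor) / calibration_factor
# = 10 + 10 / 2.0 = 15.0, matching the 14.9..15.1 window.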
|
import logging
from kalliope.core.Lifo.LIFOBuffer import LIFOBuffer
from six import with_metaclass
from kalliope.core.Models import Singleton
logging.basicConfig()
logger = logging.getLogger("kalliope")
class LifoManager(with_metaclass(Singleton, object)):
lifo_buffer = None
@classmethod
def get_singleton_lifo(cls):
if cls.lifo_buffer is None:
cls._new_lifo_buffer()
return cls.lifo_buffer
@classmethod
def get_new_lifo(cls):
"""
This class is used to manage hooks "on_start_speaking" and "on_stop_speaking".
:return:
"""
return LIFOBuffer()
@classmethod
def clean_saved_lifo(cls):
cls.lifo_buffer = LIFOBuffer()
@classmethod
def _new_lifo_buffer(cls):
cls.lifo_buffer = LIFOBuffer()
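if __name__ == "__main__":
    # Hedged usage sketch, not part of the original module: the singleton
    # accessor lazily builds one shared LIFOBuffer, while get_new_lifo()
    # hands out an independent buffer for the speaking hooks above.
    shared = LifoManager.get_singleton_lifo()
    assert shared is LifoManager.get_singleton_lifo()  # always the same buffer
    assert LifoManager.get_new_lifo() is not shared  # independent instance
    LifoManager.clean_saved_lifo()  # replaces the shared buffer with a fresh one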
|
from django.db import models
from django.template.loader import select_template
from django.utils.translation import gettext_lazy as _
from shop import deferred
from shop.conf import app_settings
class AddressManager(models.Manager):
def get_max_priority(self, customer):
aggr = self.get_queryset().filter(customer=customer).aggregate(models.Max('priority'))
priority = aggr['priority__max'] or 0
return priority
def get_fallback(self, customer):
"""
        Return a fallback address whenever the customer has not declared one.
"""
return self.get_queryset().filter(customer=customer).order_by('priority').last()
class BaseAddress(models.Model):
customer = deferred.ForeignKey(
'BaseCustomer',
on_delete=models.CASCADE,
)
priority = models.SmallIntegerField(
default=0,
db_index=True,
help_text=_("Priority for using this address"),
)
class Meta:
abstract = True
objects = AddressManager()
def as_text(self):
"""
Return the address as plain text to be used for printing, etc.
"""
template_names = [
'{}/{}-address.txt'.format(app_settings.APP_LABEL, self.address_type),
'{}/address.txt'.format(app_settings.APP_LABEL),
'shop/address.txt',
]
template = select_template(template_names)
return template.render({'address': self})
class BaseShippingAddress(BaseAddress, metaclass=deferred.ForeignKeyBuilder):
address_type = 'shipping'
class Meta:
abstract = True
ShippingAddressModel = deferred.MaterializedModel(BaseShippingAddress)
class BaseBillingAddress(BaseAddress, metaclass=deferred.ForeignKeyBuilder):
address_type = 'billing'
class Meta:
abstract = True
BillingAddressModel = deferred.MaterializedModel(BaseBillingAddress)
ISO_3166_CODES = [
('AF', _("Afghanistan")),
('AX', _("Aland Islands")),
('AL', _("Albania")),
('DZ', _("Algeria")),
('AS', _("American Samoa")),
('AD', _("Andorra")),
('AO', _("Angola")),
('AI', _("Anguilla")),
('AQ', _("Antarctica")),
('AG', _("Antigua And Barbuda")),
('AR', _("Argentina")),
('AM', _("Armenia")),
('AW', _("Aruba")),
('AU', _("Australia")),
('AT', _("Austria")),
('AZ', _("Azerbaijan")),
('BS', _("Bahamas")),
('BH', _("Bahrain")),
('BD', _("Bangladesh")),
('BB', _("Barbados")),
('BY', _("Belarus")),
('BE', _("Belgium")),
('BZ', _("Belize")),
('BJ', _("Benin")),
('BM', _("Bermuda")),
('BT', _("Bhutan")),
('BO', _("Bolivia, Plurinational State Of")),
('BQ', _("Bonaire, Saint Eustatius And Saba")),
('BA', _("Bosnia And Herzegovina")),
('BW', _("Botswana")),
('BV', _("Bouvet Island")),
('BR', _("Brazil")),
('IO', _("British Indian Ocean Territory")),
('BN', _("Brunei Darussalam")),
('BG', _("Bulgaria")),
('BF', _("Burkina Faso")),
('BI', _("Burundi")),
('KH', _("Cambodia")),
('CM', _("Cameroon")),
('CA', _("Canada")),
('CV', _("Cape Verde")),
('KY', _("Cayman Islands")),
('CF', _("Central African Republic")),
('TD', _("Chad")),
('CL', _("Chile")),
('CN', _("China")),
('CX', _("Christmas Island")),
('CC', _("Cocos (Keeling) Islands")),
('CO', _("Colombia")),
('KM', _("Comoros")),
('CG', _("Congo")),
('CD', _("Congo, The Democratic Republic Of The")),
('CK', _("Cook Islands")),
('CR', _("Costa Rica")),
('HR', _("Croatia")),
('CU', _("Cuba")),
('CW', _("Curacao")),
('CY', _("Cyprus")),
('CZ', _("Czech Republic")),
('DK', _("Denmark")),
('DJ', _("Djibouti")),
('DM', _("Dominica")),
('DO', _("Dominican Republic")),
('EC', _("Ecuador")),
('EG', _("Egypt")),
('SV', _("El Salvador")),
('GQ', _("Equatorial Guinea")),
('ER', _("Eritrea")),
('EE', _("Estonia")),
('ET', _("Ethiopia")),
('FK', _("Falkland Islands (Malvinas)")),
('FO', _("Faroe Islands")),
('FJ', _("Fiji")),
('FI', _("Finland")),
('FR', _("France")),
('GF', _("French Guiana")),
('PF', _("French Polynesia")),
('TF', _("French Southern Territories")),
('GA', _("Gabon")),
('GM', _("Gambia")),
('DE', _("Germany")),
('GH', _("Ghana")),
('GI', _("Gibraltar")),
('GR', _("Greece")),
('GL', _("Greenland")),
('GD', _("Grenada")),
('GP', _("Guadeloupe")),
('GU', _("Guam")),
('GT', _("Guatemala")),
('GG', _("Guernsey")),
('GN', _("Guinea")),
('GW', _("Guinea-Bissau")),
('GY', _("Guyana")),
('HT', _("Haiti")),
('HM', _("Heard Island and McDonald Islands")),
('VA', _("Holy See (Vatican City State)")),
('HN', _("Honduras")),
('HK', _("Hong Kong")),
('HU', _("Hungary")),
('IS', _("Iceland")),
('IN', _("India")),
('ID', _("Indonesia")),
('IR', _("Iran, Islamic Republic Of")),
('IQ', _("Iraq")),
('IE', _("Ireland")),
('IL', _("Israel")),
('IT', _("Italy")),
('CI', _("Ivory Coast")),
('JM', _("Jamaica")),
('JP', _("Japan")),
('JE', _("Jersey")),
('JO', _("Jordan")),
('KZ', _("Kazakhstan")),
('KE', _("Kenya")),
('KI', _("Kiribati")),
('KP', _("Korea, Democratic People's Republic Of")),
('KR', _("Korea, Republic Of")),
('KS', _("Kosovo")),
('KW', _("Kuwait")),
('KG', _("Kyrgyzstan")),
('LA', _("Lao People's Democratic Republic")),
('LV', _("Latvia")),
('LB', _("Lebanon")),
('LS', _("Lesotho")),
('LR', _("Liberia")),
('LY', _("Libyan Arab Jamahiriya")),
('LI', _("Liechtenstein")),
('LT', _("Lithuania")),
('LU', _("Luxembourg")),
('MO', _("Macao")),
('MK', _("Macedonia")),
('MG', _("Madagascar")),
('MW', _("Malawi")),
('MY', _("Malaysia")),
('MV', _("Maldives")),
('ML', _("Mali")),
('ML', _("Malta")),
('MH', _("Marshall Islands")),
('MQ', _("Martinique")),
('MR', _("Mauritania")),
('MU', _("Mauritius")),
('YT', _("Mayotte")),
('MX', _("Mexico")),
('FM', _("Micronesia")),
('MD', _("Moldova")),
('MC', _("Monaco")),
('MN', _("Mongolia")),
('ME', _("Montenegro")),
('MS', _("Montserrat")),
('MA', _("Morocco")),
('MZ', _("Mozambique")),
('MM', _("Myanmar")),
('NA', _("Namibia")),
('NR', _("Nauru")),
('NP', _("Nepal")),
('NL', _("Netherlands")),
('AN', _("Netherlands Antilles")),
('NC', _("New Caledonia")),
('NZ', _("New Zealand")),
('NI', _("Nicaragua")),
('NE', _("Niger")),
('NG', _("Nigeria")),
('NU', _("Niue")),
('NF', _("Norfolk Island")),
('MP', _("Northern Mariana Islands")),
('NO', _("Norway")),
('OM', _("Oman")),
('PK', _("Pakistan")),
('PW', _("Palau")),
('PS', _("Palestinian Territory, Occupied")),
('PA', _("Panama")),
('PG', _("Papua New Guinea")),
('PY', _("Paraguay")),
('PE', _("Peru")),
('PH', _("Philippines")),
('PN', _("Pitcairn")),
('PL', _("Poland")),
('PT', _("Portugal")),
('PR', _("Puerto Rico")),
('QA', _("Qatar")),
('RE', _("Reunion")),
('RO', _("Romania")),
('RU', _("Russian Federation")),
('RW', _("Rwanda")),
('BL', _("Saint Barthelemy")),
('SH', _("Saint Helena, Ascension & Tristan Da Cunha")),
('KN', _("Saint Kitts and Nevis")),
('LC', _("Saint Lucia")),
('MF', _("Saint Martin (French Part)")),
('PM', _("Saint Pierre and Miquelon")),
('VC', _("Saint Vincent And The Grenadines")),
('WS', _("Samoa")),
('SM', _("San Marino")),
('ST', _("Sao Tome And Principe")),
('SA', _("Saudi Arabia")),
('SN', _("Senegal")),
('RS', _("Serbia")),
('SC', _("Seychelles")),
('SL', _("Sierra Leone")),
('SG', _("Singapore")),
('SX', _("Sint Maarten (Dutch Part)")),
('SK', _("Slovakia")),
('SI', _("Slovenia")),
('SB', _("Solomon Islands")),
('SO', _("Somalia")),
('ZA', _("South Africa")),
('GS', _("South Georgia And The South Sandwich Islands")),
('ES', _("Spain")),
('LK', _("Sri Lanka")),
('SD', _("Sudan")),
('SR', _("Suriname")),
('SJ', _("Svalbard And Jan Mayen")),
('SZ', _("Swaziland")),
('SE', _("Sweden")),
('CH', _("Switzerland")),
('SY', _("Syrian Arab Republic")),
('TW', _("Taiwan")),
('TJ', _("Tajikistan")),
('TZ', _("Tanzania")),
('TH', _("Thailand")),
('TL', _("Timor-Leste")),
('TG', _("Togo")),
('TK', _("Tokelau")),
('TO', _("Tonga")),
('TT', _("Trinidad and Tobago")),
('TN', _("Tunisia")),
('TR', _("Turkey")),
('TM', _("Turkmenistan")),
('TC', _("Turks And Caicos Islands")),
('TV', _("Tuvalu")),
('UG', _("Uganda")),
('UA', _("Ukraine")),
('AE', _("United Arab Emirates")),
('GB', _("United Kingdom")),
('US', _("United States")),
('UM', _("United States Minor Outlying Islands")),
('UY', _("Uruguay")),
('UZ', _("Uzbekistan")),
('VU', _("Vanuatu")),
('VE', _("Venezuela, Bolivarian Republic Of")),
('VN', _("Viet Nam")),
('VG', _("Virgin Islands, British")),
('VI', _("Virgin Islands, U.S.")),
('WF', _("Wallis and Futuna")),
('EH', _("Western Sahara")),
('YE', _("Yemen")),
('ZM', _("Zambia")),
('ZW', _("Zimbabwe")),
]
class CountryField(models.CharField):
"""
This creates a simple input field to choose a country.
"""
def __init__(self, *args, **kwargs):
defaults = {
'max_length': 3,
'choices': ISO_3166_CODES,
}
defaults.update(kwargs)
super().__init__(*args, **defaults)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if kwargs['max_length'] == 3:
kwargs.pop('max_length')
if kwargs['choices'] == ISO_3166_CODES:
kwargs.pop('choices')
return name, path, args, kwargs
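if __name__ == '__main__':
    # Hedged sketch, not part of django-shop: deconstruct() strips the two
    # defaults so generated migrations stay terse, and __init__ restores them
    # when the field is rebuilt from the deconstructed form.
    field = CountryField()
    name, path, args, kwargs = field.deconstruct()
    assert 'max_length' not in kwargs and 'choices' not in kwargs
    clone = CountryField(*args, **kwargs)
    assert clone.max_length == 3 and clone.choices == ISO_3166_CODES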
|
import pytest
import voluptuous as vol
from homeassistant.components import water_heater
from homeassistant.setup import async_setup_component
from homeassistant.util.unit_system import IMPERIAL_SYSTEM
from tests.components.water_heater import common
ENTITY_WATER_HEATER = "water_heater.demo_water_heater"
ENTITY_WATER_HEATER_CELSIUS = "water_heater.demo_water_heater_celsius"
@pytest.fixture(autouse=True)
async def setup_comp(hass):
"""Set up demo component."""
hass.config.units = IMPERIAL_SYSTEM
assert await async_setup_component(
hass, water_heater.DOMAIN, {"water_heater": {"platform": "demo"}}
)
await hass.async_block_till_done()
async def test_setup_params(hass):
"""Test the initial parameters."""
state = hass.states.get(ENTITY_WATER_HEATER)
assert state.attributes.get("temperature") == 119
assert state.attributes.get("away_mode") == "off"
assert state.attributes.get("operation_mode") == "eco"
async def test_default_setup_params(hass):
"""Test the setup with default parameters."""
state = hass.states.get(ENTITY_WATER_HEATER)
assert state.attributes.get("min_temp") == 110
assert state.attributes.get("max_temp") == 140
async def test_set_only_target_temp_bad_attr(hass):
"""Test setting the target temperature without required attribute."""
state = hass.states.get(ENTITY_WATER_HEATER)
assert state.attributes.get("temperature") == 119
with pytest.raises(vol.Invalid):
await common.async_set_temperature(hass, None, ENTITY_WATER_HEATER)
assert state.attributes.get("temperature") == 119
async def test_set_only_target_temp(hass):
"""Test the setting of the target temperature."""
state = hass.states.get(ENTITY_WATER_HEATER)
assert state.attributes.get("temperature") == 119
await common.async_set_temperature(hass, 110, ENTITY_WATER_HEATER)
state = hass.states.get(ENTITY_WATER_HEATER)
assert state.attributes.get("temperature") == 110
async def test_set_operation_bad_attr_and_state(hass):
"""Test setting operation mode without required attribute.
Also check the state.
"""
state = hass.states.get(ENTITY_WATER_HEATER)
assert state.attributes.get("operation_mode") == "eco"
assert state.state == "eco"
with pytest.raises(vol.Invalid):
await common.async_set_operation_mode(hass, None, ENTITY_WATER_HEATER)
state = hass.states.get(ENTITY_WATER_HEATER)
assert state.attributes.get("operation_mode") == "eco"
assert state.state == "eco"
async def test_set_operation(hass):
"""Test setting of new operation mode."""
state = hass.states.get(ENTITY_WATER_HEATER)
assert state.attributes.get("operation_mode") == "eco"
assert state.state == "eco"
await common.async_set_operation_mode(hass, "electric", ENTITY_WATER_HEATER)
state = hass.states.get(ENTITY_WATER_HEATER)
assert state.attributes.get("operation_mode") == "electric"
assert state.state == "electric"
async def test_set_away_mode_bad_attr(hass):
"""Test setting the away mode without required attribute."""
state = hass.states.get(ENTITY_WATER_HEATER)
assert state.attributes.get("away_mode") == "off"
with pytest.raises(vol.Invalid):
await common.async_set_away_mode(hass, None, ENTITY_WATER_HEATER)
assert state.attributes.get("away_mode") == "off"
async def test_set_away_mode_on(hass):
"""Test setting the away mode on/true."""
await common.async_set_away_mode(hass, True, ENTITY_WATER_HEATER)
state = hass.states.get(ENTITY_WATER_HEATER)
assert state.attributes.get("away_mode") == "on"
async def test_set_away_mode_off(hass):
"""Test setting the away mode off/false."""
await common.async_set_away_mode(hass, False, ENTITY_WATER_HEATER_CELSIUS)
state = hass.states.get(ENTITY_WATER_HEATER_CELSIUS)
assert state.attributes.get("away_mode") == "off"
async def test_set_only_target_temp_with_convert(hass):
"""Test the setting of the target temperature."""
state = hass.states.get(ENTITY_WATER_HEATER_CELSIUS)
assert state.attributes.get("temperature") == 113
await common.async_set_temperature(hass, 114, ENTITY_WATER_HEATER_CELSIUS)
state = hass.states.get(ENTITY_WATER_HEATER_CELSIUS)
assert state.attributes.get("temperature") == 114
|
from __future__ import print_function
import codecs
import os
try:
from cStringIO import StringIO
python2 = True
except ImportError:
from io import StringIO
python2 = False
import re
import xml.etree.ElementTree as ET
from functools import reduce
from xml.dom import Node as DomNode
from xml.dom.minidom import parseString
import rospkg
pattern = r'[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\xFF\u0100-\uD7FF\uE000-\uFDCF\uFDE0-\uFFFD]'
if python2:
pattern = pattern.decode('unicode_escape')
else:
pattern = codecs.decode(pattern, 'unicode_escape')
invalid_chars = re.compile(pattern)
def invalid_char_replacer(m):
return '&#x' + ('%04X' % ord(m.group(0))) + ';'
def filter_nonprintable_text(text):
return re.sub(invalid_chars, invalid_char_replacer, text)
def cdata(cdata_text):
return '<![CDATA[\n{}\n]]>'.format(cdata_text)
class TestInfo(object):
"""
Common container for 'error' and 'failure' results
"""
def __init__(self, type_, text):
"""
@param type_: type attribute from xml
@type type_: str
@param text: text property from xml
@type text: str
"""
self.type = type_
self.text = text
class TestError(TestInfo):
"""
'error' result container
"""
def xml(self):
"""
@return XML tag representing the object, with non-XML text filtered out
@rtype: str
"""
return ET.tostring(self.xml_element(), encoding='utf-8', method='xml')
def xml_element(self):
"""
@return XML tag representing the object, with non-XML text filtered out
@rtype: xml.etree.ElementTree.Element
"""
error = ET.Element('error')
error.set('type', self.type)
error.text = cdata(filter_nonprintable_text(self.text))
return error
class TestFailure(TestInfo):
"""
'failure' result container
"""
def xml(self):
"""
@return XML tag representing the object, with non-XML text filtered out
@rtype: str
"""
return ET.tostring(self.xml_element(), encoding='utf-8', method='xml')
def xml_element(self):
"""
@return XML tag representing the object, with non-XML text filtered out
@rtype: xml.etree.ElementTree.Element
"""
error = ET.Element('failure')
error.set('type', self.type)
error.text = cdata(filter_nonprintable_text(self.text))
return error
class TestCaseResult(object):
"""
'testcase' result container
"""
def __init__(self, name):
"""
@param name: name of testcase
@type name: str
"""
self.name = name
self.failures = []
self.errors = []
self.time = 0.0
self.classname = ''
def _passed(self):
"""
@return: True if test passed
@rtype: bool
"""
return not self.errors and not self.failures
# bool: True if test passed without errors or failures
passed = property(_passed)
def _failure_description(self):
"""
@return: description of testcase failure
@rtype: str
"""
if self.failures:
tmpl = '[%s][FAILURE]' % self.name
tmpl = tmpl + '-'*(80 - len(tmpl))
tmpl = tmpl+'\n%s\n' + '-' * 80 + '\n\n'
return '\n'.join(tmpl % x.text for x in self.failures)
return ''
def _error_description(self):
"""
@return: description of testcase error
@rtype: str
"""
if self.errors:
tmpl = '[%s][ERROR]' % self.name
tmpl = tmpl + '-' * (80 - len(tmpl))
tmpl = tmpl+'\n%s\n' + '-' * 80 + '\n\n'
return '\n'.join(tmpl % x.text for x in self.errors)
return ''
def _description(self):
"""
@return: description of testcase result
@rtype: str
"""
if self.passed:
return '[%s][passed]\n' % self.name
else:
return self._failure_description() + \
self._error_description()
# str: printable description of testcase result
description = property(_description)
def add_failure(self, failure):
"""
        @param failure: TestFailure
"""
self.failures.append(failure)
def add_error(self, error):
"""
        @param error: TestError
"""
self.errors.append(error)
def xml(self):
"""
@return XML tag representing the object, with non-XML text filtered out
@rtype: str
"""
return ET.tostring(self.xml_element(), encoding='utf-8', method='xml')
def xml_element(self):
"""
@return XML tag representing the object, with non-XML text filtered out
@rtype: xml.etree.ElementTree.Element
"""
testcase = ET.Element('testcase')
testcase.set('classname', self.classname)
testcase.set('name', self.name)
testcase.set('time', str(self.time))
for f in self.failures:
testcase.append(f.xml_element())
for e in self.errors:
testcase.append(e.xml_element())
return testcase
class Result(object):
__slots__ = ['name', 'num_errors', 'num_failures', 'num_tests',
'test_case_results', 'system_out', 'system_err', 'time']
def __init__(self, name, num_errors=0, num_failures=0, num_tests=0):
self.name = name
self.num_errors = num_errors
self.num_failures = num_failures
self.num_tests = num_tests
self.test_case_results = []
self.system_out = ''
self.system_err = ''
self.time = 0.0
def accumulate(self, r):
"""
Add results from r to this result
@param r: results to aggregate with this result
@type r: Result
"""
self.num_errors += r.num_errors
self.num_failures += r.num_failures
self.num_tests += r.num_tests
self.time += r.time
self.test_case_results.extend(r.test_case_results)
if r.system_out:
self.system_out += '\n'+r.system_out
if r.system_err:
self.system_err += '\n'+r.system_err
def add_test_case_result(self, r):
"""
Add results from a testcase to this result container
@param r: TestCaseResult
@type r: TestCaseResult
"""
self.test_case_results.append(r)
    def xml(self):
"""
@return: document as unicode (UTF-8 declared) XML according to Ant JUnit spec
"""
testsuite = ET.Element('testsuite')
testsuite.set('tests', str(self.num_tests))
testsuite.set('failures', str(self.num_failures))
testsuite.set('time', str(self.time))
testsuite.set('errors', str(self.num_errors))
testsuite.set('name', self.name)
for tc in self.test_case_results:
            testsuite.append(tc.xml_element())
system_out = ET.SubElement(testsuite, 'system-out')
system_out.text = cdata(filter_nonprintable_text(self.system_out))
system_err = ET.SubElement(testsuite, 'system-err')
system_err.text = cdata(filter_nonprintable_text(self.system_err))
return ET.tostring(testsuite, encoding='utf-8', method='xml')
def _text(tag):
return reduce(lambda x, y: x + y, [c.data for c in tag.childNodes if c.nodeType in [DomNode.TEXT_NODE, DomNode.CDATA_SECTION_NODE]], '').strip()
def _load_suite_results(test_suite_name, test_suite, result):
nodes = [n for n in test_suite.childNodes
if n.nodeType == DomNode.ELEMENT_NODE]
for node in nodes:
name = node.tagName
if name == 'testsuite':
# for now we flatten this hierarchy
_load_suite_results(test_suite_name, node, result)
elif name == 'system-out':
if _text(node):
system_out = '[%s] stdout' % test_suite_name + '-' * (71 - len(test_suite_name))
system_out += '\n'+_text(node)
result.system_out += system_out
elif name == 'system-err':
if _text(node):
system_err = '[%s] stderr' % test_suite_name + '-' * (71 - len(test_suite_name))
system_err += '\n'+_text(node)
result.system_err += system_err
elif name == 'testcase':
name = node.getAttribute('name') or 'unknown'
classname = node.getAttribute('classname') or 'unknown'
# mangle the classname for some sense of uniformity
# between rostest/unittest/gtest
if '__main__.' in classname:
classname = classname[classname.find('__main__.') + 9:]
if classname == 'rostest.rostest.RosTest':
classname = 'rostest'
elif not classname.startswith(result.name):
classname = '%s.%s' % (result.name, classname)
time = float(node.getAttribute('time')) or 0.0
tc_result = TestCaseResult('%s/%s' % (test_suite_name, name))
tc_result.classname = classname
tc_result.time = time
result.add_test_case_result(tc_result)
for d in [n for n in node.childNodes
if n.nodeType == DomNode.ELEMENT_NODE]:
# convert 'message' attributes to text elements to keep
# python unittest and gtest consistent
if d.tagName == 'failure':
message = d.getAttribute('message') or ''
text = _text(d) or message
x = TestFailure(d.getAttribute('type') or '', text)
tc_result.add_failure(x)
elif d.tagName == 'error':
message = d.getAttribute('message') or ''
text = _text(d) or message
x = TestError(d.getAttribute('type') or '', text)
tc_result.add_error(x)
# #603: unit test suites are not good about screening out illegal
# unicode characters. This little recipe is from http://boodebr.org/main/python/all-about-python-and-unicode#UNI_XML
# and screens these out
try:
char = unichr
except NameError:
char = chr
RE_XML_ILLEGAL = '([%s-%s%s-%s%s-%s%s-%s])' + \
'|' + \
'([%s-%s][^%s-%s])|([^%s-%s][%s-%s])|([%s-%s]$)|(^[%s-%s])'
try:
RE_XML_ILLEGAL = unicode(RE_XML_ILLEGAL)
except NameError:
pass
RE_XML_ILLEGAL = RE_XML_ILLEGAL % \
(char(0x0000), char(0x0008), char(0x000b), char(0x000c),
char(0x000e), char(0x001f), char(0xfffe), char(0xffff),
char(0xd800), char(0xdbff), char(0xdc00), char(0xdfff),
char(0xd800), char(0xdbff), char(0xdc00), char(0xdfff),
char(0xd800), char(0xdbff), char(0xdc00), char(0xdfff))
_safe_xml_regex = re.compile(RE_XML_ILLEGAL)
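# Hedged illustration, not part of the original module: the compiled pattern
# matches control characters and unpaired surrogates that XML 1.0 forbids.
# Replacing a match mirrors what _read_file_safe_xml() does file-wide:
#
#     >>> _safe_xml_regex.sub('?', 'ok\x00bad')
#     'ok?bad'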
def _read_file_safe_xml(test_file, write_back_sanitized=True):
"""
read in file, screen out unsafe unicode characters
"""
f = None
try:
# this is ugly, but the files in question that are problematic
# do not declare unicode type.
if not os.path.isfile(test_file):
raise Exception('test file does not exist')
try:
f = codecs.open(test_file, 'r', 'utf-8')
x = f.read()
except Exception:
if f is not None:
f.close()
f = codecs.open(test_file, 'r', 'iso8859-1')
x = f.read()
for match in _safe_xml_regex.finditer(x):
x = x[:match.start()] + '?' + x[match.end():]
x = x.encode('utf-8')
if write_back_sanitized:
with open(test_file, 'wb') as h:
h.write(x)
return x
finally:
if f is not None:
f.close()
def read(test_file, test_name):
"""
Read in the test_result file
@param test_file: test file path
@type test_file: str
@param test_name: name of test
@type test_name: str
@return: test results
@rtype: Result
"""
try:
xml_str = _read_file_safe_xml(test_file)
if not xml_str.strip():
print('WARN: test result file is empty [%s]' % (test_file))
return Result(test_name, 0, 0, 0)
test_suites = parseString(xml_str).getElementsByTagName('testsuite')
except Exception as e:
print('WARN: cannot read test result file [%s]: %s' % (test_file, str(e)))
return Result(test_name, 0, 0, 0)
if not test_suites:
print('WARN: test result file [%s] contains no results' % (test_file))
return Result(test_name, 0, 0, 0)
results = Result(test_name, 0, 0, 0)
for index, test_suite in enumerate(test_suites):
# skip test suites which are already covered by a parent test suite
if index > 0 and test_suite.parentNode in test_suites[0:index]:
continue
# test_suite = test_suite[0]
vals = [test_suite.getAttribute(attr) for attr in ['errors', 'failures', 'tests']]
vals = [v or 0 for v in vals]
err, fail, tests = [int(val) for val in vals]
result = Result(test_name, err, fail, tests)
result.time = 0.0 if not len(test_suite.getAttribute('time')) else float(test_suite.getAttribute('time'))
# Create a prefix based on the test result filename. The idea is to
# disambiguate the case when tests of the same name are provided in
# different .xml files. We use the name of the parent directory
test_file_base = os.path.basename(os.path.dirname(os.path.abspath(test_file)))
fname = os.path.basename(test_file)
if fname.startswith('TEST-'):
fname = fname[5:]
if fname.endswith('.xml'):
fname = fname[:-4]
test_file_base = '%s.%s' % (test_file_base, fname)
_load_suite_results(test_file_base, test_suite, result)
results.accumulate(result)
return results
def read_all(filter_=[]):
"""
Read in the test_results and aggregate into a single Result object
@param filter_: list of packages that should be processed
@type filter_: [str]
@return: aggregated result
@rtype: L{Result}
"""
dir_ = rospkg.get_test_results_dir()
root_result = Result('ros', 0, 0, 0)
if not os.path.exists(dir_):
return root_result
for d in os.listdir(dir_):
if filter_ and d not in filter_:
continue
subdir = os.path.join(dir_, d)
if os.path.isdir(subdir):
for filename in os.listdir(subdir):
if filename.endswith('.xml'):
filename = os.path.join(subdir, filename)
result = read(filename, os.path.basename(subdir))
root_result.accumulate(result)
return root_result
def test_failure_junit_xml(test_name, message, stdout=None, class_name='Results', testcase_name='test_ran'):
"""
Generate JUnit XML file for a unary test suite where the test failed
@param test_name: Name of test that failed
@type test_name: str
@param message: failure message
@type message: str
@param stdout: stdout data to include in report
@type stdout: str
"""
testsuite = ET.Element('testsuite')
testsuite.set('tests', '1')
testsuite.set('failures', '1')
testsuite.set('time', '1')
testsuite.set('errors', '0')
testsuite.set('name', test_name)
testcase = ET.SubElement(testsuite, 'testcase')
testcase.set('name', testcase_name)
testcase.set('status', 'run')
testcase.set('time', '1')
testcase.set('classname', class_name)
failure = ET.SubElement(testcase, 'failure')
failure.set('message', message)
failure.set('type', '')
if stdout:
system_out = ET.SubElement(testsuite, 'system-out')
system_out.text = cdata(filter_nonprintable_text(stdout))
return ET.tostring(testsuite, encoding='utf-8', method='xml')
def test_success_junit_xml(test_name, class_name='Results', testcase_name='test_ran'):
"""
Generate JUnit XML file for a unary test suite where the test succeeded.
@param test_name: Name of test that passed
@type test_name: str
"""
testsuite = ET.Element('testsuite')
testsuite.set('tests', '1')
testsuite.set('failures', '0')
testsuite.set('time', '1')
testsuite.set('errors', '0')
testsuite.set('name', test_name)
testcase = ET.SubElement(testsuite, 'testcase')
testcase.set('name', testcase_name)
testcase.set('status', 'run')
testcase.set('time', '1')
testcase.set('classname', class_name)
return ET.tostring(testsuite, encoding='utf-8', method='xml')
def print_summary(junit_results, runner_name='ROSUNIT'):
"""
Print summary of junitxml results to stdout.
"""
    # 'junit_results' aggregates the outcomes of the tests themselves;
    # whether the tests managed to *run* at all is tracked separately by the
    # caller, so only tests that produced a result file are counted here.
buff = StringIO()
buff.write('[%s]' % runner_name + '-' * 71 + '\n\n')
for tc_result in junit_results.test_case_results:
buff.write(tc_result.description)
buff.write('\nSUMMARY\n')
if (junit_results.num_errors + junit_results.num_failures) == 0:
buff.write('\033[32m * RESULT: SUCCESS\033[0m\n')
else:
buff.write('\033[1;31m * RESULT: FAIL\033[0m\n')
    # TODO: still some issues with the numbers adding up if tests fail to
    # launch; tests that never ran are counted by the caller, not here, so
    # they may be missing from the totals below.
buff.write(' * TESTS: %s\n' % junit_results.num_tests)
num_errors = junit_results.num_errors
if num_errors:
buff.write('\033[1;31m * ERRORS: %s\033[0m\n' % num_errors)
else:
buff.write(' * ERRORS: 0\n')
num_failures = junit_results.num_failures
if num_failures:
buff.write('\033[1;31m * FAILURES: %s\033[0m\n' % num_failures)
else:
buff.write(' * FAILURES: 0\n')
print(buff.getvalue())
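if __name__ == '__main__':
    # Hedged usage sketch, not part of rosunit: build a result tree by hand
    # and render the same summary the test runner prints.
    demo = Result('demo_pkg', num_errors=0, num_failures=1, num_tests=1)
    tc = TestCaseResult('demo_pkg/test_example')
    tc.add_failure(TestFailure('AssertionError', 'expected 1, got 2'))
    demo.add_test_case_result(tc)
    print_summary(demo, runner_name='DEMO')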
|
import logging
import queue
import threading
import time
from ibmiotf import MissingMessageEncoderException
from ibmiotf.gateway import Client
import voluptuous as vol
from homeassistant.const import (
CONF_DOMAINS,
CONF_ENTITIES,
CONF_EXCLUDE,
CONF_ID,
CONF_INCLUDE,
CONF_TOKEN,
CONF_TYPE,
EVENT_HOMEASSISTANT_STOP,
EVENT_STATE_CHANGED,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
)
from homeassistant.core import callback
from homeassistant.helpers import state as state_helper
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
CONF_ORG = "organization"
DOMAIN = "watson_iot"
MAX_TRIES = 3
RETRY_DELAY = 20
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.All(
vol.Schema(
{
vol.Required(CONF_ORG): cv.string,
vol.Required(CONF_TYPE): cv.string,
vol.Required(CONF_ID): cv.string,
vol.Required(CONF_TOKEN): cv.string,
vol.Optional(CONF_EXCLUDE, default={}): vol.Schema(
{
vol.Optional(CONF_ENTITIES, default=[]): cv.entity_ids,
vol.Optional(CONF_DOMAINS, default=[]): vol.All(
cv.ensure_list, [cv.string]
),
}
),
vol.Optional(CONF_INCLUDE, default={}): vol.Schema(
{
vol.Optional(CONF_ENTITIES, default=[]): cv.entity_ids,
vol.Optional(CONF_DOMAINS, default=[]): vol.All(
cv.ensure_list, [cv.string]
),
}
),
}
)
)
},
extra=vol.ALLOW_EXTRA,
)
def setup(hass, config):
"""Set up the Watson IoT Platform component."""
conf = config[DOMAIN]
include = conf[CONF_INCLUDE]
exclude = conf[CONF_EXCLUDE]
include_e = set(include[CONF_ENTITIES])
include_d = set(include[CONF_DOMAINS])
exclude_e = set(exclude[CONF_ENTITIES])
exclude_d = set(exclude[CONF_DOMAINS])
client_args = {
"org": conf[CONF_ORG],
"type": conf[CONF_TYPE],
"id": conf[CONF_ID],
"auth-method": "token",
"auth-token": conf[CONF_TOKEN],
}
watson_gateway = Client(client_args)
def event_to_json(event):
"""Add an event to the outgoing list."""
state = event.data.get("new_state")
if (
state is None
or state.state in (STATE_UNKNOWN, "", STATE_UNAVAILABLE)
or state.entity_id in exclude_e
or state.domain in exclude_d
):
return
if (include_e and state.entity_id not in include_e) or (
include_d and state.domain not in include_d
):
return
try:
_state_as_value = float(state.state)
except ValueError:
_state_as_value = None
if _state_as_value is None:
try:
_state_as_value = float(state_helper.state_as_number(state))
except ValueError:
_state_as_value = None
out_event = {
"tags": {"domain": state.domain, "entity_id": state.object_id},
"time": event.time_fired.isoformat(),
"fields": {"state": state.state},
}
if _state_as_value is not None:
out_event["fields"]["state_value"] = _state_as_value
for key, value in state.attributes.items():
if key != "unit_of_measurement":
# If the key is already in fields
if key in out_event["fields"]:
key = f"{key}_"
                # For each value we try to cast it as float,
                # but if we cannot, we store the value
                # as a string
try:
out_event["fields"][key] = float(value)
except (ValueError, TypeError):
out_event["fields"][key] = str(value)
return out_event
instance = hass.data[DOMAIN] = WatsonIOTThread(hass, watson_gateway, event_to_json)
instance.start()
def shutdown(event):
"""Shut down the thread."""
instance.queue.put(None)
instance.join()
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, shutdown)
return True
class WatsonIOTThread(threading.Thread):
"""A threaded event handler class."""
def __init__(self, hass, gateway, event_to_json):
"""Initialize the listener."""
threading.Thread.__init__(self, name="WatsonIOT")
self.queue = queue.Queue()
self.gateway = gateway
self.gateway.connect()
self.event_to_json = event_to_json
self.write_errors = 0
self.shutdown = False
hass.bus.listen(EVENT_STATE_CHANGED, self._event_listener)
@callback
def _event_listener(self, event):
"""Listen for new messages on the bus and queue them for Watson IoT."""
item = (time.monotonic(), event)
self.queue.put(item)
def get_events_json(self):
"""Return an event formatted for writing."""
events = []
try:
item = self.queue.get()
if item is None:
self.shutdown = True
else:
event_json = self.event_to_json(item[1])
if event_json:
events.append(event_json)
except queue.Empty:
pass
return events
def write_to_watson(self, events):
"""Write preprocessed events to watson."""
for event in events:
for retry in range(MAX_TRIES + 1):
try:
for field in event["fields"]:
value = event["fields"][field]
device_success = self.gateway.publishDeviceEvent(
event["tags"]["domain"],
event["tags"]["entity_id"],
field,
"json",
value,
)
if not device_success:
_LOGGER.error("Failed to publish message to Watson IoT")
continue
break
except (MissingMessageEncoderException, OSError):
if retry < MAX_TRIES:
time.sleep(RETRY_DELAY)
else:
_LOGGER.exception("Failed to publish message to Watson IoT")
def run(self):
"""Process incoming events."""
while not self.shutdown:
event = self.get_events_json()
if event:
self.write_to_watson(event)
self.queue.task_done()
def block_till_done(self):
"""Block till all events processed."""
self.queue.join()
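if __name__ == "__main__":
    # Hedged sketch, not part of the integration: CONFIG_SCHEMA validates the
    # YAML configuration and fills in empty include/exclude blocks, which is
    # why setup() can index them unconditionally. All values are placeholders.
    validated = CONFIG_SCHEMA(
        {
            DOMAIN: {
                CONF_ORG: "my-org",
                CONF_TYPE: "gateway",
                CONF_ID: "ha-gateway",
                CONF_TOKEN: "secret",
            }
        }
    )
    assert validated[DOMAIN][CONF_INCLUDE][CONF_ENTITIES] == []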
|
from __future__ import division
import chainer
import chainer.functions as F
import numpy as np
from chainercv.experimental.links.model.fcis.utils.mask_voting \
import mask_voting
from chainercv.transforms.image.resize import resize
class FCIS(chainer.Chain):
"""Base class for FCIS.
This is a base class for FCIS links supporting instance segmentation
API [#]_. The following three stages constitute FCIS.
1. **Feature extraction**: Images are taken and their \
feature maps are calculated.
2. **Region Proposal Networks**: Given the feature maps calculated in \
        the previous stage, produce a set of RoIs around objects.
3. **Localization, Segmentation and Classification Heads**: Using feature \
maps that belong to the proposed RoIs, segment regions of the \
objects, classify the categories of the objects in the RoIs and \
improve localizations.
Each stage is carried out by one of the callable
:class:`chainer.Chain` objects :obj:`feature`, :obj:`rpn` and :obj:`head`.
There are two functions :meth:`predict` and :meth:`forward` to conduct
instance segmentation.
:meth:`predict` takes images and returns masks, object labels
and their scores.
    :meth:`forward` is provided for scenarios in which intermediate outputs
are needed, for instance, for training and debugging.
Links that support instance segmentation API have method :meth:`predict`
with the same interface. Please refer to :meth:`predict` for further
details.
.. [#] Yi Li, Haozhi Qi, Jifeng Dai, Xiangyang Ji, Yichen Wei. \
Fully Convolutional Instance-aware Semantic Segmentation. CVPR 2017.
Args:
extractor (callable Chain): A callable that takes a BCHW image
array and returns feature maps.
rpn (callable Chain): A callable that has the same interface as
:class:`~chainercv.links.model.faster_rcnn.RegionProposalNetwork`.
Please refer to the documentation found there.
head (callable Chain): A callable that takes a BCHW array,
RoIs and batch indices for RoIs.
This returns class-agnostic segmentation scores, class-agnostic
localization parameters, class scores, improved RoIs and batch
indices for RoIs.
mean (numpy.ndarray): A value to be subtracted from an image
in :meth:`prepare`.
min_size (int): A preprocessing parameter for :meth:`prepare`. Please
refer to a docstring found for :meth:`prepare`.
max_size (int): A preprocessing parameter for :meth:`prepare`.
loc_normalize_mean (tuple of four floats): Mean values of
localization estimates.
        loc_normalize_std (tuple of four floats): Standard deviation
of localization estimates.
"""
def __init__(
self, extractor, rpn, head,
mean, min_size, max_size,
loc_normalize_mean, loc_normalize_std,
):
super(FCIS, self).__init__()
with self.init_scope():
self.extractor = extractor
self.rpn = rpn
self.head = head
self.mean = mean
self.min_size = min_size
self.max_size = max_size
self.loc_normalize_mean = loc_normalize_mean
self.loc_normalize_std = loc_normalize_std
self.use_preset('visualize')
@property
def n_class(self):
# Total number of classes including the background.
return self.head.n_class
def forward(self, x, scales=None):
"""Forward FCIS.
        The scaling parameter :obj:`scales` is used by the RPN to determine the
threshold to select small objects, which are going to be
rejected irrespective of their confidence scores.
Here are notations used.
        * :math:`N` is the batch size
* :math:`R'` is the total number of RoIs produced across batches. \
Given :math:`R_i` proposed RoIs from the :math:`i` th image, \
:math:`R' = \\sum _{i=1} ^ N R_i`.
* :math:`L` is the number of classes excluding the background.
* :math:`RH` is the height of pooled image by Position Sensitive \
ROI pooling.
        * :math:`RW` is the width of pooled image by Position Sensitive \
            ROI pooling.
Classes are ordered by the background, the first class, ..., and
the :math:`L` th class.
Args:
x (~chainer.Variable): 4D image variable.
scales (tuple of floats): Amount of scaling applied to each input
image during preprocessing.
Returns:
Variable, Variable, Variable, array, array:
Returns tuple of five values listed below.
* **roi_ag_seg_scores**: Class-agnostic clipped mask scores for \
the proposed ROIs. Its shape is :math:`(R', 2, RH, RW)`
* **ag_locs**: Class-agnostic offsets and scalings for \
the proposed RoIs. Its shape is :math:`(R', 2, 4)`.
* **roi_cls_scores**: Class predictions for the proposed RoIs. \
Its shape is :math:`(R', L + 1)`.
* **rois**: RoIs proposed by RPN. Its shape is \
:math:`(R', 4)`.
* **roi_indices**: Batch indices of RoIs. Its shape is \
:math:`(R',)`.
"""
img_size = x.shape[2:]
# Feature Extractor
rpn_features, roi_features = self.extractor(x)
rpn_locs, rpn_scores, rois, roi_indices, anchor = self.rpn(
rpn_features, img_size, scales)
roi_ag_seg_scores, roi_ag_locs, roi_cls_scores, rois, roi_indices = \
self.head(roi_features, rois, roi_indices, img_size)
return roi_ag_seg_scores, roi_ag_locs, roi_cls_scores, \
rois, roi_indices
def prepare(self, img):
"""Preprocess an image for feature extraction.
The length of the shorter edge is scaled to :obj:`self.min_size`.
After the scaling, if the length of the longer edge is longer than
:obj:`self.max_size`, the image is scaled to fit the longer edge
to :obj:`self.max_size`.
        After resizing, the mean image value :obj:`self.mean` is subtracted
        from the image.
Args:
img (~numpy.ndarray): An image. This is in CHW and RGB format.
The range of its value is :math:`[0, 255]`.
Returns:
~numpy.ndarray:
A preprocessed image.
"""
_, H, W = img.shape
scale = self.min_size / min(H, W)
if scale * max(H, W) > self.max_size:
scale = self.max_size / max(H, W)
img = resize(img, (int(H * scale), int(W * scale)))
img = (img - self.mean).astype(np.float32, copy=False)
return img
def use_preset(self, preset):
"""Use the given preset during prediction.
        This method changes values of :obj:`self.nms_thresh`,
        :obj:`self.score_thresh`, :obj:`self.mask_merge_thresh`,
        :obj:`self.binary_thresh`, :obj:`self.limit` and
        :obj:`self.min_drop_size`. These values are a threshold value
        used for non maximum suppression, a threshold value
        to discard low confidence proposals in :meth:`predict`,
        a threshold value to merge masks in :meth:`predict`,
        a threshold value to binarize segmentation scores in :meth:`predict`,
        a limit on the number of predicted masks per image and
        a threshold value to discard small bounding boxes respectively.
If the attributes need to be changed to something
other than the values provided in the presets, please modify
them by directly accessing the public attributes.
Args:
            preset ({'visualize', 'evaluate', 'coco_evaluate'}): A string to
                determine the preset to use.
"""
if preset == 'visualize':
self.nms_thresh = 0.3
self.score_thresh = 0.7
self.mask_merge_thresh = 0.5
self.binary_thresh = 0.4
self.limit = 100
self.min_drop_size = 16
elif preset == 'evaluate':
self.nms_thresh = 0.3
self.score_thresh = 1e-3
self.mask_merge_thresh = 0.5
self.binary_thresh = 0.4
self.limit = 100
self.min_drop_size = 16
elif preset == 'coco_evaluate':
self.nms_thresh = 0.3
self.score_thresh = 1e-3
self.mask_merge_thresh = 0.5
self.binary_thresh = 0.4
self.limit = 100
self.min_drop_size = 2
else:
            raise ValueError('preset must be visualize, evaluate or coco_evaluate')
def predict(self, imgs):
"""Segment object instances from images.
This method predicts instance-aware object regions for each image.
Args:
            imgs (iterable of numpy.ndarray): Arrays holding images. All
                images are in CHW and RGB format and the range of their
                values is :math:`[0, 255]`.
Returns:
tuple of lists:
This method returns a tuple of three lists,
:obj:`(masks, labels, scores)`.
* **masks**: A list of boolean arrays of shape :math:`(R, H, W)`, \
               where :math:`R` is the number of masks in an image. \
               Each pixel holds a boolean indicating whether it belongs \
               to the object or not.
* **labels** : A list of integer arrays of shape :math:`(R,)`. \
Each value indicates the class of the masks. \
Values are in range :math:`[0, L - 1]`, where :math:`L` is the \
number of the foreground classes.
* **scores** : A list of float arrays of shape :math:`(R,)`. \
Each value indicates how confident the prediction is.
"""
prepared_imgs = []
sizes = []
for img in imgs:
size = img.shape[1:]
img = self.prepare(img.astype(np.float32))
prepared_imgs.append(img)
sizes.append(size)
masks = []
labels = []
scores = []
for img, size in zip(prepared_imgs, sizes):
with chainer.using_config('train', False), \
chainer.function.no_backprop_mode():
# inference
img_var = chainer.Variable(self.xp.array(img[None]))
scale = img_var.shape[3] / size[1]
roi_ag_seg_scores, _, roi_cls_scores, bboxes, _ = \
self.forward(img_var, scales=[scale])
# We are assuming that batch size is 1.
roi_ag_seg_score = chainer.cuda.to_cpu(roi_ag_seg_scores.array)
roi_cls_score = chainer.cuda.to_cpu(roi_cls_scores.array)
bbox = chainer.cuda.to_cpu(bboxes)
# filter bounding boxes with min_size
height = bbox[:, 2] - bbox[:, 0]
width = bbox[:, 3] - bbox[:, 1]
keep_indices = np.where(
(height >= self.min_drop_size) &
(width >= self.min_drop_size))[0]
roi_ag_seg_score = roi_ag_seg_score[keep_indices, :, :]
roi_cls_score = roi_cls_score[keep_indices]
bbox = bbox[keep_indices, :]
# scale bbox
bbox = bbox / scale
# shape: (n_rois, 4)
bbox[:, 0::2] = self.xp.clip(bbox[:, 0::2], 0, size[0])
bbox[:, 1::2] = self.xp.clip(bbox[:, 1::2], 0, size[1])
# shape: (n_roi, roi_size, roi_size)
roi_seg_prob = F.softmax(roi_ag_seg_score).array[:, 1]
roi_cls_prob = F.softmax(roi_cls_score).array
roi_seg_prob, bbox, label, roi_cls_prob = mask_voting(
roi_seg_prob, bbox, roi_cls_prob, size,
self.score_thresh, self.nms_thresh,
self.mask_merge_thresh, self.binary_thresh,
limit=self.limit, bg_label=0)
mask = np.zeros(
                (len(roi_seg_prob), size[0], size[1]), dtype=bool)
for i, (roi_seg_pb, bb) in enumerate(zip(roi_seg_prob, bbox)):
bb = np.round(bb).astype(np.int32)
y_min, x_min, y_max, x_max = bb
roi_msk_pb = resize(
roi_seg_pb.astype(np.float32)[None],
(y_max - y_min, x_max - x_min))
roi_msk = (roi_msk_pb > self.binary_thresh)[0]
mask[i, y_min:y_max, x_min:x_max] = roi_msk
masks.append(mask)
labels.append(label)
scores.append(roi_cls_prob)
return masks, labels, scores
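if __name__ == '__main__':
    # Hedged sketch, not part of chainercv: prepare() scales the shorter edge
    # to min_size unless that would push the longer edge past max_size, in
    # which case the longer edge is pinned to max_size instead.
    def _expected_scale(h, w, min_size=600, max_size=1000):
        scale = min_size / min(h, w)
        if scale * max(h, w) > max_size:
            scale = max_size / max(h, w)
        return scale
    assert _expected_scale(600, 800) == 1.0
    assert _expected_scale(500, 2000) == 0.5  # longer edge pinned to 1000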
|
import unittest
from trashcli.fstab import FakeIsMount
class TestOnDefault(unittest.TestCase):
def setUp(self):
self.ismount = FakeIsMount()
def test_by_default_root_is_mount(self):
assert self.ismount('/')
def test_while_by_default_any_other_is_not_a_mount_point(self):
assert not self.ismount('/any/other')
class WhenOneFakeVolumeIsDefined(unittest.TestCase):
def setUp(self):
self.ismount = FakeIsMount()
self.ismount.add_mount('/fake-vol')
def test_accept_fake_mount_point(self):
assert self.ismount('/fake-vol')
def test_other_still_are_not_mounts(self):
assert not self.ismount('/other')
    def test_dont_get_confused_by_trailing_slash(self):
assert self.ismount('/fake-vol/')
class TestWhenMultipleFakesMountPoints(unittest.TestCase):
def setUp(self):
self.ismount = FakeIsMount()
self.ismount.add_mount('/vol1')
self.ismount.add_mount('/vol2')
def test_recognize_both(self):
assert self.ismount('/vol1')
assert self.ismount('/vol2')
assert not self.ismount('/other')
def test_should_handle_relative_volumes():
ismount = FakeIsMount()
ismount.add_mount('fake-vol')
assert ismount('fake-vol')
|
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.forms import UserChangeForm, UserCreationForm
from django.utils.safestring import mark_safe
from django.utils.translation import gettext_lazy as _
from weblate.accounts.forms import FullNameField, UniqueEmailMixin, UniqueUsernameField
from weblate.accounts.utils import remove_user
from weblate.auth.data import ROLES
from weblate.auth.models import AutoGroup, Group, User
from weblate.wladmin.models import WeblateModelAdmin
BUILT_IN_ROLES = {role[0] for role in ROLES}
def block_group_edit(obj):
"""Whether to allo user editing of an group."""
return obj and obj.internal and "@" in obj.name
def block_role_edit(obj):
return obj and obj.name in BUILT_IN_ROLES
class InlineAutoGroupAdmin(admin.TabularInline):
model = AutoGroup
extra = 0
def has_add_permission(self, request, obj=None):
if block_group_edit(obj):
return False
return super().has_add_permission(request, obj)
def has_change_permission(self, request, obj=None):
if block_group_edit(obj):
return False
return super().has_change_permission(request, obj)
def has_delete_permission(self, request, obj=None):
if block_group_edit(obj):
return False
return super().has_delete_permission(request, obj)
class RoleAdmin(WeblateModelAdmin):
list_display = ("name",)
filter_horizontal = ("permissions",)
def has_change_permission(self, request, obj=None):
if block_role_edit(obj):
return False
return super().has_change_permission(request, obj)
def has_delete_permission(self, request, obj=None):
if block_role_edit(obj):
return False
return super().has_delete_permission(request, obj)
class WeblateUserChangeForm(UserChangeForm):
class Meta:
model = User
fields = "__all__"
field_classes = {"username": UniqueUsernameField, "full_name": FullNameField}
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields["email"].required = True
self.fields["username"].valid = self.instance.username
class WeblateUserCreationForm(UserCreationForm, UniqueEmailMixin):
validate_unique_mail = True
class Meta:
model = User
fields = ("username", "email", "full_name")
field_classes = {"username": UniqueUsernameField, "full_name": FullNameField}
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields["email"].required = True
class WeblateUserAdmin(WeblateModelAdmin, UserAdmin):
"""Custom UserAdmin class.
Used to add listing of group membership and whether user is active.
"""
list_display = (
"username",
"email",
"full_name",
"user_groups",
"is_active",
"is_superuser",
)
search_fields = ("username", "full_name", "email")
form = WeblateUserChangeForm
add_form = WeblateUserCreationForm
add_fieldsets = (
(None, {"fields": ("username",)}),
(_("Personal info"), {"fields": ("full_name", "email")}),
(_("Authentication"), {"fields": ("password1", "password2")}),
)
fieldsets = (
(None, {"fields": ("username", "password")}),
(_("Personal info"), {"fields": ("full_name", "email")}),
(_("Permissions"), {"fields": ("is_active", "is_superuser", "groups")}),
(_("Important dates"), {"fields": ("last_login", "date_joined")}),
)
list_filter = ("is_superuser", "is_active", "groups")
filter_horizontal = ("groups",)
def user_groups(self, obj):
"""Display comma separated list of user groups."""
return ",".join(g.name for g in obj.groups.iterator())
def action_checkbox(self, obj):
if obj.is_anonymous:
return ""
return super().action_checkbox(obj)
action_checkbox.short_description = mark_safe(
'<input type="checkbox" id="action-toggle" />'
)
def has_delete_permission(self, request, obj=None):
if obj and obj.is_anonymous:
return False
return super().has_delete_permission(request, obj)
def delete_model(self, request, obj):
"""Given a model instance delete it from the database."""
remove_user(obj, request)
def delete_queryset(self, request, queryset):
"""Given a queryset, delete it from the database."""
for obj in queryset.iterator():
self.delete_model(request, obj)
class WeblateGroupAdmin(WeblateModelAdmin):
save_as = True
model = Group
inlines = [InlineAutoGroupAdmin]
search_fields = ("name",)
ordering = ("name",)
list_filter = ("internal", "project_selection", "language_selection")
filter_horizontal = ("roles", "projects", "languages")
new_obj = None
def action_checkbox(self, obj):
if obj.internal:
return ""
return super().action_checkbox(obj)
action_checkbox.short_description = mark_safe(
'<input type="checkbox" id="action-toggle" />'
)
def has_delete_permission(self, request, obj=None):
if obj and obj.internal:
return False
return super().has_delete_permission(request, obj)
def has_change_permission(self, request, obj=None):
if block_group_edit(obj):
return False
return super().has_change_permission(request, obj)
def save_model(self, request, obj, form, change):
"""Fix saving of automatic language/project selection, part 1.
Stores saved object as an attribute to be used by save_related.
"""
super().save_model(request, obj, form, change)
self.new_obj = obj
def save_related(self, request, form, formsets, change):
"""Fix saving of automatic language/project selection, part 2.
Uses stored attribute to save the model again. Saving triggers the automation
and adjusts project/language selection according to the chosen value.
"""
super().save_related(request, form, formsets, change)
self.new_obj.save()
|
import numpy as np
import networkx as nx
import unittest
from pgmpy.readwrite import UAIReader, UAIWriter
from pgmpy.models import BayesianModel, MarkovModel
from pgmpy.factors.discrete import TabularCPD, DiscreteFactor
class TestUAIReader(unittest.TestCase):
def setUp(self):
string = """MARKOV
3
2 2 3
2
2 0 1
3 0 1 2
4
4.000 2.400
1.000 0.000
12
2.2500 3.2500 3.7500
0.0000 0.0000 10.0000
1.8750 4.0000 3.3330
2.0000 2.0000 3.4000"""
self.maxDiff = None
self.reader_string = UAIReader(string=string)
self.reader_file = UAIReader("pgmpy/tests/test_readwrite/testdata/grid4x4.uai")
def test_get_network_type(self):
network_type_expected = "MARKOV"
self.assertEqual(self.reader_string.network_type, network_type_expected)
def test_get_variables(self):
variables_expected = ["var_0", "var_1", "var_2"]
self.assertListEqual(self.reader_string.variables, variables_expected)
def test_get_domain(self):
domain_expected = {"var_1": "2", "var_2": "3", "var_0": "2"}
self.assertDictEqual(self.reader_string.domain, domain_expected)
def test_get_edges(self):
edges_expected = {("var_0", "var_1"), ("var_0", "var_2"), ("var_1", "var_2")}
self.assertSetEqual(self.reader_string.edges, edges_expected)
def test_get_tables(self):
tables_expected = [
(["var_0", "var_1"], ["4.000", "2.400", "1.000", "0.000"]),
(
["var_0", "var_1", "var_2"],
[
"2.2500",
"3.2500",
"3.7500",
"0.0000",
"0.0000",
"10.0000",
"1.8750",
"4.0000",
"3.3330",
"2.0000",
"2.0000",
"3.4000",
],
),
]
self.assertListEqual(self.reader_string.tables, tables_expected)
def test_get_model(self):
model = self.reader_string.get_model()
edge_expected = {
"var_2": {"var_0": {"weight": None}, "var_1": {"weight": None}},
"var_0": {"var_2": {"weight": None}, "var_1": {"weight": None}},
"var_1": {"var_2": {"weight": None}, "var_0": {"weight": None}},
}
self.assertListEqual(sorted(model.nodes()), sorted(["var_0", "var_2", "var_1"]))
if nx.__version__.startswith("1"):
self.assertDictEqual(dict(model.edge), edge_expected)
else:
self.assertDictEqual(dict(model.adj), edge_expected)
def test_read_file(self):
model = self.reader_file.get_model()
node_expected = {
"var_3": {},
"var_8": {},
"var_5": {},
"var_14": {},
"var_15": {},
"var_0": {},
"var_9": {},
"var_7": {},
"var_6": {},
"var_13": {},
"var_10": {},
"var_12": {},
"var_1": {},
"var_11": {},
"var_2": {},
"var_4": {},
}
self.assertDictEqual(dict(model.nodes), node_expected)
class TestUAIWriter(unittest.TestCase):
def setUp(self):
self.maxDiff = None
variables = [
"kid",
"bowel-problem",
"dog-out",
"family-out",
"hear-bark",
"light-on",
]
edges = [
["family-out", "dog-out"],
["bowel-problem", "dog-out"],
["family-out", "light-on"],
["dog-out", "hear-bark"],
]
cpds = {
"kid": np.array([[0.3], [0.7]]),
"bowel-problem": np.array([[0.01], [0.99]]),
"dog-out": np.array([[0.99, 0.01, 0.97, 0.03], [0.9, 0.1, 0.3, 0.7]]),
"family-out": np.array([[0.15], [0.85]]),
"hear-bark": np.array([[0.7, 0.3], [0.01, 0.99]]),
"light-on": np.array([[0.6, 0.4], [0.05, 0.95]]),
}
states = {
"kid": ["true", "false"],
"bowel-problem": ["true", "false"],
"dog-out": ["true", "false"],
"family-out": ["true", "false"],
"hear-bark": ["true", "false"],
"light-on": ["true", "false"],
}
parents = {
"kid": [],
"bowel-problem": [],
"dog-out": ["bowel-problem", "family-out"],
"family-out": [],
"hear-bark": ["dog-out"],
"light-on": ["family-out"],
}
self.bayesmodel = BayesianModel()
self.bayesmodel.add_nodes_from(variables)
self.bayesmodel.add_edges_from(edges)
tabular_cpds = []
for var, values in cpds.items():
cpd = TabularCPD(
var,
len(states[var]),
values,
evidence=parents[var],
evidence_card=[
len(states[evidence_var]) for evidence_var in parents[var]
],
)
tabular_cpds.append(cpd)
self.bayesmodel.add_cpds(*tabular_cpds)
self.bayeswriter = UAIWriter(self.bayesmodel)
edges = {("var_0", "var_1"), ("var_0", "var_2"), ("var_1", "var_2")}
self.markovmodel = MarkovModel(edges)
tables = [
(["var_0", "var_1"], ["4.000", "2.400", "1.000", "0.000"]),
(
["var_0", "var_1", "var_2"],
[
"2.2500",
"3.2500",
"3.7500",
"0.0000",
"0.0000",
"10.0000",
"1.8750",
"4.0000",
"3.3330",
"2.0000",
"2.0000",
"3.4000",
],
),
]
domain = {"var_1": "2", "var_2": "3", "var_0": "2"}
factors = []
for table in tables:
variables = table[0]
cardinality = [int(domain[var]) for var in variables]
values = list(map(float, table[1]))
factor = DiscreteFactor(variables, cardinality, values)
factors.append(factor)
self.markovmodel.add_factors(*factors)
self.markovwriter = UAIWriter(self.markovmodel)
def test_bayes_model(self):
self.expected_bayes_file = """BAYES
6
2 2 2 2 2 2
6
1 0
3 2 0 1
1 2
2 1 3
1 4
2 2 5
2
0.01 0.99
8
0.99 0.01 0.97 0.03 0.9 0.1 0.3 0.7
2
0.15 0.85
4
0.7 0.3 0.01 0.99
2
0.3 0.7
4
0.6 0.4 0.05 0.95"""
        self.assertEqual(str(self.bayeswriter), self.expected_bayes_file)
def test_markov_model(self):
self.expected_markov_file = """MARKOV
3
2 2 3
2
2 0 1
3 0 1 2
4
4.0 2.4 1.0 0.0
12
2.25 3.25 3.75 0.0 0.0 10.0 1.875 4.0 3.333 2.0 2.0 3.4"""
        self.assertEqual(str(self.markovwriter), self.expected_markov_file)
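# A quick orientation to the UAI payloads asserted above (a reading of the
# expected strings, not a restatement of the UAI spec): the first line is the
# network type, followed by the variable count ("3"), the cardinalities
# ("2 2 3"), and the number of factors ("2"); each factor line then gives its
# scope size and variable indices, and the trailing sections list every
# factor's table size and values in row-major order.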
|
from django.contrib.auth.decorators import login_required
from django.core.exceptions import PermissionDenied
from django.db import transaction
from django.shortcuts import get_object_or_404, redirect
from django.urls import reverse
from django.utils.translation import gettext as _
from django.utils.translation import ngettext
from django.views.decorators.cache import never_cache
from django.views.decorators.http import require_POST
from weblate.lang.models import Language
from weblate.trans.bulk import bulk_perform
from weblate.trans.forms import (
BulkEditForm,
ReplaceConfirmForm,
ReplaceForm,
SearchForm,
)
from weblate.trans.models import Change, Unit
from weblate.trans.util import render
from weblate.utils import messages
from weblate.utils.ratelimit import check_rate_limit
from weblate.utils.views import (
get_component,
get_paginator,
get_project,
get_sort_name,
get_translation,
import_message,
show_form_errors,
)
def parse_url(request, project, component=None, lang=None):
context = {}
if component is None:
obj = get_project(request, project)
unit_set = Unit.objects.filter(translation__component__project=obj)
context["project"] = obj
elif lang is None:
obj = get_component(request, project, component)
unit_set = Unit.objects.filter(translation__component=obj)
context["component"] = obj
context["project"] = obj.project
else:
obj = get_translation(request, project, component, lang)
unit_set = obj.unit_set.all()
context["translation"] = obj
context["component"] = obj.component
context["project"] = obj.component.project
if not request.user.has_perm("unit.edit", obj):
raise PermissionDenied()
return obj, unit_set, context
@login_required
@require_POST
def search_replace(request, project, component=None, lang=None):
obj, unit_set, context = parse_url(request, project, component, lang)
form = ReplaceForm(request.POST)
if not form.is_valid():
messages.error(request, _("Failed to process form!"))
show_form_errors(request, form)
return redirect(obj)
search_text = form.cleaned_data["search"]
replacement = form.cleaned_data["replacement"]
matching = unit_set.filter(target__contains=search_text)
updated = 0
if matching.exists():
confirm = ReplaceConfirmForm(matching, request.POST)
limited = False
if matching.count() > 300:
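            # Preview at most 250 of the matches to keep the confirmation page fast.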
matching = matching.order_by("id")[:250]
limited = True
if not confirm.is_valid():
for unit in matching:
unit.replacement = unit.target.replace(search_text, replacement)
context.update(
{
"matching": matching,
"search_query": search_text,
"replacement": replacement,
"form": form,
"limited": limited,
"confirm": ReplaceConfirmForm(matching),
}
)
return render(request, "replace.html", context)
matching = confirm.cleaned_data["units"]
with transaction.atomic():
for unit in matching.select_for_update():
if not request.user.has_perm("unit.edit", unit):
continue
unit.translate(
request.user,
unit.target.replace(search_text, replacement),
unit.state,
change_action=Change.ACTION_REPLACE,
)
updated += 1
import_message(
request,
updated,
_("Search and replace completed, no strings were updated."),
ngettext(
"Search and replace completed, %d string was updated.",
"Search and replace completed, %d strings were updated.",
updated,
),
)
return redirect(obj)
@never_cache
def search(request, project=None, component=None, lang=None):
"""Perform site-wide search on units."""
is_ratelimited = not check_rate_limit("search", request)
search_form = SearchForm(user=request.user, data=request.GET)
sort = get_sort_name(request)
context = {"search_form": search_form}
if component:
obj = get_component(request, project, component)
context["component"] = obj
context["project"] = obj.project
context["back_url"] = obj.get_absolute_url()
elif project:
obj = get_project(request, project)
context["project"] = obj
context["back_url"] = obj.get_absolute_url()
else:
obj = None
context["back_url"] = None
if lang:
s_language = get_object_or_404(Language, code=lang)
context["language"] = s_language
if obj:
if component:
context["back_url"] = obj.translation_set.get(
language=s_language
).get_absolute_url()
else:
context["back_url"] = reverse(
"project-language", kwargs={"project": project, "lang": lang}
)
else:
context["back_url"] = s_language.get_absolute_url()
if not is_ratelimited and request.GET and search_form.is_valid():
            # This is an ugly way to hide the query builder when showing results
search_form = SearchForm(
user=request.user, data=request.GET, show_builder=False
)
search_form.is_valid()
# Filter results by ACL
units = Unit.objects.prefetch_full().prefetch()
if component:
units = units.filter(translation__component=obj)
elif project:
units = units.filter(translation__component__project=obj)
else:
units = units.filter_access(request.user)
units = units.search(search_form.cleaned_data.get("q", "")).distinct()
if lang:
units = units.filter(translation__language=context["language"])
units = get_paginator(request, units.order_by_request(search_form.cleaned_data))
# Rebuild context from scratch here to get new form
context = {
"search_form": search_form,
"show_results": True,
"page_obj": units,
"title": _("Search for %s") % (search_form.cleaned_data["q"]),
"query_string": search_form.urlencode(),
"search_query": search_form.cleaned_data["q"],
"search_items": search_form.items(),
"filter_name": search_form.get_name(),
"sort_name": sort["name"],
"sort_query": sort["query"],
}
elif is_ratelimited:
messages.error(request, _("Too many search queries, please try again later."))
elif request.GET:
messages.error(request, _("Invalid search query!"))
show_form_errors(request, search_form)
return render(request, "search.html", context)
@login_required
@require_POST
@never_cache
def bulk_edit(request, project, component=None, lang=None):
obj, unit_set, context = parse_url(request, project, component, lang)
if not request.user.has_perm("translation.auto", obj):
raise PermissionDenied()
form = BulkEditForm(request.user, obj, request.POST, project=context["project"])
if not form.is_valid():
messages.error(request, _("Failed to process form!"))
show_form_errors(request, form)
return redirect(obj)
updated = bulk_perform(
request.user,
unit_set,
query=form.cleaned_data["q"],
target_state=form.cleaned_data["state"],
add_flags=form.cleaned_data["add_flags"],
remove_flags=form.cleaned_data["remove_flags"],
add_labels=form.cleaned_data["add_labels"],
remove_labels=form.cleaned_data["remove_labels"],
)
import_message(
request,
updated,
_("Bulk edit completed, no strings were updated."),
ngettext(
"Bulk edit completed, %d string was updated.",
"Bulk edit completed, %d strings were updated.",
updated,
),
)
return redirect(obj)
|
import json
from homeassistant.util import slugify
from .const import COMPONENT_DIR
from .error import ExitApp
from .model import Info
CHECK_EMPTY = ["Cannot be empty", lambda value: value]
def gather_info(arguments) -> Info:
"""Gather info."""
if arguments.integration:
info = {"domain": arguments.integration}
elif arguments.develop:
print("Running in developer mode. Automatically filling in info.")
print()
info = {"domain": "develop"}
else:
info = _gather_info(
{
"domain": {
"prompt": "What is the domain?",
"validators": [
CHECK_EMPTY,
[
"Domains cannot contain spaces or special characters.",
lambda value: value == slugify(value),
],
],
}
}
)
info["is_new"] = not (COMPONENT_DIR / info["domain"] / "manifest.json").exists()
if not info["is_new"]:
return _load_existing_integration(info["domain"])
if arguments.develop:
info.update(
{
"name": "Develop Hub",
"codeowner": "@developer",
"requirement": "aiodevelop==1.2.3",
"oauth2": True,
}
)
else:
info.update(gather_new_integration(arguments.template == "integration"))
return Info(**info)
YES_NO = {
"validators": [["Type either 'yes' or 'no'", lambda value: value in ("yes", "no")]],
"converter": lambda value: value == "yes",
}
def gather_new_integration(determine_auth: bool) -> Info:
"""Gather info about new integration from user."""
fields = {
"name": {
"prompt": "What is the name of your integration?",
"validators": [CHECK_EMPTY],
},
"codeowner": {
"prompt": "What is your GitHub handle?",
"validators": [
CHECK_EMPTY,
[
'GitHub handles need to start with an "@"',
lambda value: value.startswith("@"),
],
],
},
"requirement": {
"prompt": "What PyPI package and version do you depend on? Leave blank for none.",
"validators": [
[
"Versions should be pinned using '=='.",
lambda value: not value or "==" in value,
]
],
},
}
if determine_auth:
fields.update(
{
"authentication": {
"prompt": "Does Home Assistant need the user to authenticate to control the device/service? (yes/no)",
"default": "yes",
**YES_NO,
},
"discoverable": {
"prompt": "Is the device/service discoverable on the local network? (yes/no)",
"default": "no",
**YES_NO,
},
"oauth2": {
"prompt": "Can the user authenticate the device using OAuth2? (yes/no)",
"default": "no",
**YES_NO,
},
}
)
return _gather_info(fields)
def _load_existing_integration(domain) -> Info:
"""Load an existing integration."""
if not (COMPONENT_DIR / domain).exists():
raise ExitApp("Integration does not exist", 1)
manifest = json.loads((COMPONENT_DIR / domain / "manifest.json").read_text())
return Info(domain=domain, name=manifest["name"], is_new=False)
def _gather_info(fields) -> dict:
"""Gather info from user."""
answers = {}
for key, info in fields.items():
hint = None
while key not in answers:
if hint is not None:
print()
print(f"Error: {hint}")
try:
print()
msg = info["prompt"]
if "default" in info:
msg += f" [{info['default']}]"
value = input(f"{msg}\n> ")
except (KeyboardInterrupt, EOFError):
raise ExitApp("Interrupted!", 1)
value = value.strip()
if value == "" and "default" in info:
value = info["default"]
hint = None
for validator_hint, validator in info["validators"]:
if not validator(value):
hint = validator_hint
break
if hint is None:
if "converter" in info:
value = info["converter"](value)
answers[key] = value
return answers
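# Example field spec for _gather_info (a sketch; the "port" key, prompt, and
# values are made up): each validator is a [hint, predicate] pair, "default"
# fills in empty input, and an optional "converter" post-processes the value.
#
#     _gather_info(
#         {
#             "port": {
#                 "prompt": "Which port?",
#                 "default": "8080",
#                 "validators": [["Ports must be numeric", str.isdigit]],
#                 "converter": int,
#             }
#         }
#     )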
|
import os
import sys
import random
import numpy as np
import tensorflow as tf
import xml.etree.ElementTree as ET
from datasets.dataset_utils import int64_feature, float_feature, bytes_feature
from datasets.pascalvoc_common import VOC_LABELS
# Original dataset organisation.
DIRECTORY_ANNOTATIONS = 'Annotations/'
DIRECTORY_IMAGES = 'JPEGImages/'
# TFRecords convertion parameters.
RANDOM_SEED = 4242
SAMPLES_PER_FILES = 200
def _process_image(directory, name):
    """Process an image and its annotation file.
    Args:
      directory: string, root directory of the Pascal VOC dataset.
      name: string, name of the sample (image filename without extension).
    Returns:
      image_data: string, JPEG encoding of the RGB image.
      shape: list of 3 integers, image height, width and depth in pixels.
      bboxes: list of float tuples [ymin, xmin, ymax, xmax], normalized
          to [0, 1] by the image size.
      labels: list of integers, class identifiers.
      labels_text: list of byte strings, human-readable labels.
      difficult: list of integers, VOC 'difficult' flags.
      truncated: list of integers, VOC 'truncated' flags.
    """
# Read the image file.
filename = os.path.join(directory, DIRECTORY_IMAGES, name + '.jpg')
    image_data = tf.gfile.FastGFile(filename, 'rb').read()
# Read the XML annotation file.
filename = os.path.join(directory, DIRECTORY_ANNOTATIONS, name + '.xml')
tree = ET.parse(filename)
root = tree.getroot()
# Image shape.
size = root.find('size')
shape = [int(size.find('height').text),
int(size.find('width').text),
int(size.find('depth').text)]
# Find annotations.
bboxes = []
labels = []
labels_text = []
difficult = []
truncated = []
for obj in root.findall('object'):
label = obj.find('name').text
labels.append(int(VOC_LABELS[label][0]))
labels_text.append(label.encode('ascii'))
        if obj.find('difficult') is not None:
difficult.append(int(obj.find('difficult').text))
else:
difficult.append(0)
        if obj.find('truncated') is not None:
truncated.append(int(obj.find('truncated').text))
else:
truncated.append(0)
bbox = obj.find('bndbox')
bboxes.append((float(bbox.find('ymin').text) / shape[0],
float(bbox.find('xmin').text) / shape[1],
float(bbox.find('ymax').text) / shape[0],
float(bbox.find('xmax').text) / shape[1]
))
return image_data, shape, bboxes, labels, labels_text, difficult, truncated
def _convert_to_example(image_data, labels, labels_text, bboxes, shape,
difficult, truncated):
"""Build an Example proto for an image example.
Args:
image_data: string, JPEG encoding of RGB image;
labels: list of integers, identifier for the ground truth;
labels_text: list of strings, human-readable labels;
      bboxes: list of bounding boxes; each box is a tuple of floats
          specifying [ymin, xmin, ymax, xmax], normalized to [0, 1] by the
          image size. All boxes are assumed to belong to the same label as
          the image label.
      shape: 3 integers, image shape (height, width, channels) in pixels.
Returns:
Example proto
"""
xmin = []
ymin = []
xmax = []
ymax = []
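    # Transpose the per-box (ymin, xmin, ymax, xmax) tuples into four
    # per-coordinate lists, matching the TF Example feature layout below.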
for b in bboxes:
assert len(b) == 4
# pylint: disable=expression-not-assigned
[l.append(point) for l, point in zip([ymin, xmin, ymax, xmax], b)]
# pylint: enable=expression-not-assigned
image_format = b'JPEG'
example = tf.train.Example(features=tf.train.Features(feature={
'image/height': int64_feature(shape[0]),
'image/width': int64_feature(shape[1]),
'image/channels': int64_feature(shape[2]),
'image/shape': int64_feature(shape),
'image/object/bbox/xmin': float_feature(xmin),
'image/object/bbox/xmax': float_feature(xmax),
'image/object/bbox/ymin': float_feature(ymin),
'image/object/bbox/ymax': float_feature(ymax),
'image/object/bbox/label': int64_feature(labels),
'image/object/bbox/label_text': bytes_feature(labels_text),
'image/object/bbox/difficult': int64_feature(difficult),
'image/object/bbox/truncated': int64_feature(truncated),
'image/format': bytes_feature(image_format),
'image/encoded': bytes_feature(image_data)}))
return example
def _add_to_tfrecord(dataset_dir, name, tfrecord_writer):
"""Loads data from image and annotations files and add them to a TFRecord.
Args:
dataset_dir: Dataset directory;
name: Image name to add to the TFRecord;
tfrecord_writer: The TFRecord writer to use for writing.
"""
image_data, shape, bboxes, labels, labels_text, difficult, truncated = \
_process_image(dataset_dir, name)
example = _convert_to_example(image_data, labels, labels_text,
bboxes, shape, difficult, truncated)
tfrecord_writer.write(example.SerializeToString())
def _get_output_filename(output_dir, name, idx):
return '%s/%s_%03d.tfrecord' % (output_dir, name, idx)
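# e.g. _get_output_filename('/tmp/voc', 'train', 3) -> '/tmp/voc/train_003.tfrecord'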
def run(voc_root, year, split, output_dir, shuffling=False):
    """Runs the Pascal VOC to TFRecords conversion.
    Args:
      voc_root: VOC root directory.
      year: VOC year, 2007/2012/0712.
      split: ImageSets split to convert, e.g. 'train' or 'test'.
      output_dir: Output directory.
      shuffling: Whether to shuffle the image list before conversion.
    """
if not tf.gfile.Exists(output_dir):
tf.gfile.MakeDirs(output_dir)
# Dataset filenames, and shuffling.
split_file_path = os.path.join(voc_root,
'VOC%s'%year,
'ImageSets',
'Main',
'%s.txt'%split)
with open(split_file_path) as f:
filenames = f.readlines()
if shuffling:
random.seed(RANDOM_SEED)
random.shuffle(filenames)
# Process dataset files.
i = 0
fidx = 0
dataset_dir = os.path.join(voc_root, 'VOC%s'%year)
while i < len(filenames):
# Open new TFRecord file.
tf_filename = _get_output_filename(output_dir, split, fidx)
with tf.python_io.TFRecordWriter(tf_filename) as tfrecord_writer:
j = 0
while i < len(filenames) and j < SAMPLES_PER_FILES:
sys.stdout.write('\r>> Converting image %d/%d' % (i+1, len(filenames)))
sys.stdout.flush()
filename = filenames[i].strip()
_add_to_tfrecord(dataset_dir, filename, tfrecord_writer)
i += 1
j += 1
fidx += 1
print('\nFinished converting the Pascal VOC dataset!')
|
from __future__ import print_function
import sys
import argparse
import threading
def main(args):
ap = argparse.ArgumentParser()
ap.add_argument('job_id', nargs='?', type=int, help='ID of a running background job')
ns = ap.parse_args(args)
_stash = globals()['_stash']
worker_registry = _stash.runtime.worker_registry
if ns.job_id is None:
worker = worker_registry.get_first_bg_worker()
else:
worker = worker_registry.get_worker(ns.job_id)
if worker is None:
        print('no background job running'
              + (' with id {}'.format(ns.job_id) if ns.job_id is not None else ''))
return
def f():
_stash.runtime.push_to_foreground(worker)
t = threading.Timer(1.0, f)
print('pushing job {} to foreground ...'.format(worker.job_id))
t.start()
if __name__ == '__main__':
main(sys.argv[1:])
|
import dataclasses
import re
from gogogate2_api.common import AbstractInfoResponse, ApiError
from gogogate2_api.const import GogoGate2ApiErrorCode, ISmartGateApiErrorCode
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.config_entries import SOURCE_IMPORT, ConfigFlow
from homeassistant.const import (
CONF_DEVICE,
CONF_IP_ADDRESS,
CONF_PASSWORD,
CONF_USERNAME,
)
from .common import get_api
from .const import DEVICE_TYPE_GOGOGATE2, DEVICE_TYPE_ISMARTGATE
from .const import DOMAIN # pylint: disable=unused-import
class Gogogate2FlowHandler(ConfigFlow, domain=DOMAIN):
"""Gogogate2 config flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL
def __init__(self):
"""Initialize the config flow."""
self._ip_address = None
self._device_type = None
async def async_step_import(self, config_data: dict = None):
"""Handle importing of configuration."""
result = await self.async_step_user(config_data)
self._abort_if_unique_id_configured()
return result
async def async_step_homekit(self, discovery_info):
"""Handle homekit discovery."""
await self.async_set_unique_id(discovery_info["properties"]["id"])
self._abort_if_unique_id_configured({CONF_IP_ADDRESS: discovery_info["host"]})
ip_address = discovery_info["host"]
for entry in self._async_current_entries():
if entry.data.get(CONF_IP_ADDRESS) == ip_address:
return self.async_abort(reason="already_configured")
self._ip_address = ip_address
self._device_type = DEVICE_TYPE_ISMARTGATE
return await self.async_step_user()
async def async_step_user(self, user_input: dict = None):
"""Handle user initiated flow."""
user_input = user_input or {}
errors = {}
if user_input:
api = get_api(user_input)
try:
data: AbstractInfoResponse = await self.hass.async_add_executor_job(
api.info
)
data_dict = dataclasses.asdict(data)
title = data_dict.get(
"gogogatename", data_dict.get("ismartgatename", "Cover")
)
await self.async_set_unique_id(re.sub("\\..*$", "", data.remoteaccess))
return self.async_create_entry(title=title, data=user_input)
except ApiError as api_error:
device_type = user_input[CONF_DEVICE]
is_invalid_auth = (
device_type == DEVICE_TYPE_GOGOGATE2
and api_error.code
in (
GogoGate2ApiErrorCode.CREDENTIALS_NOT_SET,
GogoGate2ApiErrorCode.CREDENTIALS_INCORRECT,
)
) or (
device_type == DEVICE_TYPE_ISMARTGATE
and api_error.code
in (
ISmartGateApiErrorCode.CREDENTIALS_NOT_SET,
ISmartGateApiErrorCode.CREDENTIALS_INCORRECT,
)
)
if is_invalid_auth:
errors["base"] = "invalid_auth"
else:
errors["base"] = "cannot_connect"
except Exception: # pylint: disable=broad-except
errors["base"] = "cannot_connect"
if errors and self.source == SOURCE_IMPORT:
return self.async_abort(reason="cannot_connect")
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(
CONF_DEVICE,
default=self._device_type
or user_input.get(CONF_DEVICE, DEVICE_TYPE_GOGOGATE2),
): vol.In((DEVICE_TYPE_GOGOGATE2, DEVICE_TYPE_ISMARTGATE)),
vol.Required(
CONF_IP_ADDRESS,
default=user_input.get(CONF_IP_ADDRESS, self._ip_address),
): str,
vol.Required(
CONF_USERNAME, default=user_input.get(CONF_USERNAME, "")
): str,
vol.Required(
CONF_PASSWORD, default=user_input.get(CONF_PASSWORD, "")
): str,
}
),
errors=errors,
)
|
from test import CollectorTestCase
from test import get_collector_config
from ossec import OssecCollector
class TestOssecCollector(CollectorTestCase):
def setUp(self, allowed_names=None):
if not allowed_names:
allowed_names = []
config = get_collector_config('OssecCollector', {
})
self.collector = OssecCollector(config, None)
def test_import(self):
self.assertTrue(OssecCollector)
|
import cherrypy
from cherrypy._cpcompat import ntou
from cherrypy.test import helper
class ETagTest(helper.CPWebCase):
@staticmethod
def setup_server():
class Root:
@cherrypy.expose
def resource(self):
return 'Oh wah ta goo Siam.'
@cherrypy.expose
def fail(self, code):
code = int(code)
if 300 <= code <= 399:
raise cherrypy.HTTPRedirect([], code)
else:
raise cherrypy.HTTPError(code)
@cherrypy.expose
# In Python 3, tools.encode is on by default
@cherrypy.config(**{'tools.encode.on': True})
def unicoded(self):
return ntou('I am a \u1ee4nicode string.', 'escape')
conf = {'/': {'tools.etags.on': True,
'tools.etags.autotags': True,
}}
cherrypy.tree.mount(Root(), config=conf)
def test_etags(self):
self.getPage('/resource')
self.assertStatus('200 OK')
self.assertHeader('Content-Type', 'text/html;charset=utf-8')
self.assertBody('Oh wah ta goo Siam.')
etag = self.assertHeader('ETag')
# Test If-Match (both valid and invalid)
self.getPage('/resource', headers=[('If-Match', etag)])
self.assertStatus('200 OK')
self.getPage('/resource', headers=[('If-Match', '*')])
self.assertStatus('200 OK')
self.getPage('/resource', headers=[('If-Match', '*')], method='POST')
self.assertStatus('200 OK')
self.getPage('/resource', headers=[('If-Match', 'a bogus tag')])
self.assertStatus('412 Precondition Failed')
# Test If-None-Match (both valid and invalid)
self.getPage('/resource', headers=[('If-None-Match', etag)])
self.assertStatus(304)
self.getPage('/resource', method='POST',
headers=[('If-None-Match', etag)])
self.assertStatus('412 Precondition Failed')
self.getPage('/resource', headers=[('If-None-Match', '*')])
self.assertStatus(304)
self.getPage('/resource', headers=[('If-None-Match', 'a bogus tag')])
self.assertStatus('200 OK')
def test_errors(self):
self.getPage('/resource')
self.assertStatus(200)
etag = self.assertHeader('ETag')
# Test raising errors in page handler
self.getPage('/fail/412', headers=[('If-Match', etag)])
self.assertStatus(412)
self.getPage('/fail/304', headers=[('If-Match', etag)])
self.assertStatus(304)
self.getPage('/fail/412', headers=[('If-None-Match', '*')])
self.assertStatus(412)
self.getPage('/fail/304', headers=[('If-None-Match', '*')])
self.assertStatus(304)
def test_unicode_body(self):
self.getPage('/unicoded')
self.assertStatus(200)
etag1 = self.assertHeader('ETag')
self.getPage('/unicoded', headers=[('If-Match', etag1)])
self.assertStatus(200)
self.assertHeader('ETag', etag1)
|
from datetime import timedelta
import logging
import random
import discogs_client
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_MONITORED_CONDITIONS,
CONF_NAME,
CONF_TOKEN,
)
from homeassistant.helpers.aiohttp_client import SERVER_SOFTWARE
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
ATTR_IDENTITY = "identity"
ATTRIBUTION = "Data provided by Discogs"
DEFAULT_NAME = "Discogs"
ICON_RECORD = "mdi:album"
ICON_PLAYER = "mdi:record-player"
UNIT_RECORDS = "records"
SCAN_INTERVAL = timedelta(minutes=10)
SENSOR_COLLECTION_TYPE = "collection"
SENSOR_WANTLIST_TYPE = "wantlist"
SENSOR_RANDOM_RECORD_TYPE = "random_record"
SENSORS = {
SENSOR_COLLECTION_TYPE: {
"name": "Collection",
"icon": ICON_RECORD,
"unit_of_measurement": UNIT_RECORDS,
},
SENSOR_WANTLIST_TYPE: {
"name": "Wantlist",
"icon": ICON_RECORD,
"unit_of_measurement": UNIT_RECORDS,
},
SENSOR_RANDOM_RECORD_TYPE: {
"name": "Random Record",
"icon": ICON_PLAYER,
"unit_of_measurement": None,
},
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_TOKEN): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_MONITORED_CONDITIONS, default=list(SENSORS)): vol.All(
cv.ensure_list, [vol.In(SENSORS)]
),
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Discogs sensor."""
token = config[CONF_TOKEN]
name = config[CONF_NAME]
    try:
        _discogs_client = discogs_client.Client(SERVER_SOFTWARE, user_token=token)
        identity = _discogs_client.identity()
        discogs_data = {
            "user": identity.name,
            "folders": identity.collection_folders,
            "collection_count": identity.num_collection,
            "wantlist_count": identity.num_wantlist,
        }
except discogs_client.exceptions.HTTPError:
_LOGGER.error("API token is not valid")
return
sensors = []
for sensor_type in config[CONF_MONITORED_CONDITIONS]:
sensors.append(DiscogsSensor(discogs_data, name, sensor_type))
add_entities(sensors, True)
class DiscogsSensor(Entity):
"""Create a new Discogs sensor for a specific type."""
def __init__(self, discogs_data, name, sensor_type):
"""Initialize the Discogs sensor."""
self._discogs_data = discogs_data
self._name = name
self._type = sensor_type
self._state = None
self._attrs = {}
@property
def name(self):
"""Return the name of the sensor."""
return f"{self._name} {SENSORS[self._type]['name']}"
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return SENSORS[self._type]["icon"]
@property
def unit_of_measurement(self):
"""Return the unit this state is expressed in."""
return SENSORS[self._type]["unit_of_measurement"]
@property
def device_state_attributes(self):
"""Return the device state attributes of the sensor."""
        if self._state is None or self._attrs is None:
            return None
        if self._type == SENSOR_RANDOM_RECORD_TYPE:
return {
"cat_no": self._attrs["labels"][0]["catno"],
"cover_image": self._attrs["cover_image"],
"format": f"{self._attrs['formats'][0]['name']} ({self._attrs['formats'][0]['descriptions'][0]})",
"label": self._attrs["labels"][0]["name"],
"released": self._attrs["year"],
ATTR_ATTRIBUTION: ATTRIBUTION,
ATTR_IDENTITY: self._discogs_data["user"],
}
return {
ATTR_ATTRIBUTION: ATTRIBUTION,
ATTR_IDENTITY: self._discogs_data["user"],
}
def get_random_record(self):
"""Get a random record suggestion from the user's collection."""
# Index 0 in the folders is the 'All' folder
collection = self._discogs_data["folders"][0]
if collection.count > 0:
random_index = random.randrange(collection.count)
random_record = collection.releases[random_index].release
self._attrs = random_record.data
return f"{random_record.data['artists'][0]['name']} - {random_record.data['title']}"
return None
def update(self):
"""Set state to the amount of records in user's collection."""
if self._type == SENSOR_COLLECTION_TYPE:
self._state = self._discogs_data["collection_count"]
elif self._type == SENSOR_WANTLIST_TYPE:
self._state = self._discogs_data["wantlist_count"]
else:
self._state = self.get_random_record()
|
import pytest
from homeassistant.components.tibber.const import DOMAIN
from homeassistant.const import CONF_ACCESS_TOKEN
from tests.async_mock import AsyncMock, MagicMock, PropertyMock, patch
from tests.common import MockConfigEntry
@pytest.fixture(name="tibber_setup", autouse=True)
def tibber_setup_fixture():
"""Patch tibber setup entry."""
with patch("homeassistant.components.tibber.async_setup_entry", return_value=True):
yield
async def test_show_config_form(hass):
"""Test show configuration form."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
async def test_create_entry(hass):
"""Test create entry from user input."""
test_data = {
CONF_ACCESS_TOKEN: "valid",
}
unique_user_id = "unique_user_id"
title = "title"
tibber_mock = MagicMock()
type(tibber_mock).update_info = AsyncMock(return_value=True)
type(tibber_mock).user_id = PropertyMock(return_value=unique_user_id)
type(tibber_mock).name = PropertyMock(return_value=title)
with patch("tibber.Tibber", return_value=tibber_mock):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=test_data
)
assert result["type"] == "create_entry"
assert result["title"] == title
assert result["data"] == test_data
async def test_flow_entry_already_exists(hass):
"""Test user input for config_entry that already exists."""
first_entry = MockConfigEntry(
domain="tibber",
data={CONF_ACCESS_TOKEN: "valid"},
unique_id="tibber",
)
first_entry.add_to_hass(hass)
test_data = {
CONF_ACCESS_TOKEN: "valid",
}
with patch("tibber.Tibber.update_info", return_value=None):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=test_data
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
|
from __future__ import unicode_literals
from rules.BaseTrick import numjoinum
def NNrule(nums1, nums2):
for num1 in nums1:
for num2 in nums2:
            for result in numjoinum(num1, num2):
                yield result
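# A minimal usage sketch (assuming, as the name suggests, that numjoinum
# yields the joined spellings of the two numbers in both orders):
#
#     for candidate in NNrule(["1", "2"], ["3"]):
#         print(candidate)  # e.g. "13", "31", "23", "32", ...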
|
import os.path
import sys
import threading
import warnings
from contextlib import contextmanager
import vim # noqa
from ._compat import StringIO
DEBUG = int(vim.eval('g:pymode_debug'))
warnings.filterwarnings('ignore')
@contextmanager
def silence_stderr():
"""Redirect stderr."""
if DEBUG:
yield
else:
with threading.Lock():
stderr = sys.stderr
sys.stderr = StringIO()
yield
with threading.Lock():
sys.stderr = stderr
def patch_paths():
"""Patch python sys.path.
Load required modules from the plugin's sources.
"""
dir_script = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, os.path.join(dir_script, 'libs'))
if sys.platform == 'win32' or sys.platform == 'msys':
dir_submodule = os.path.abspath(os.path.join(dir_script,
'..', 'submodules'))
sub_modules = os.listdir(dir_submodule)
for module in sub_modules:
module_full_path = os.path.join(dir_submodule, module)
if module_full_path not in sys.path:
sys.path.insert(0, module_full_path)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import logging
import os
import posixpath
import time
from absl import flags
from perfkitbenchmarker import data
from perfkitbenchmarker import errors
from perfkitbenchmarker import linux_packages
from perfkitbenchmarker import os_types
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.linux_packages.ant import ANT_HOME_DIR
from six.moves import range
JNA_JAR_URL = ('https://maven.java.net/content/repositories/releases/'
'net/java/dev/jna/jna/4.1.0/jna-4.1.0.jar')
CASSANDRA_GIT_REPO = 'https://github.com/apache/cassandra.git'
CASSANDRA_VERSION = 'cassandra-2.1'
CASSANDRA_YAML_TEMPLATE = 'cassandra/cassandra.yaml.j2'
CASSANDRA_ENV_TEMPLATE = 'cassandra/cassandra-env.sh.j2'
CASSANDRA_DIR = posixpath.join(linux_packages.INSTALL_DIR, 'cassandra')
CASSANDRA_PID = posixpath.join(CASSANDRA_DIR, 'cassandra.pid')
CASSANDRA_OUT = posixpath.join(CASSANDRA_DIR, 'cassandra.out')
CASSANDRA_ERR = posixpath.join(CASSANDRA_DIR, 'cassandra.err')
NODETOOL = posixpath.join(CASSANDRA_DIR, 'bin', 'nodetool')
# Number of times to attempt to start the cluster.
CLUSTER_START_TRIES = 10
CLUSTER_START_SLEEP = 60
# Time, in seconds, to sleep between node starts.
NODE_START_SLEEP = 5
# for setting a maven repo with --cassandra_maven_repo_url
_MAVEN_REPO_PARAMS = """
artifact.remoteRepository.central: {0}
artifact.remoteRepository.apache: {0}
"""
FLAGS = flags.FLAGS
flags.DEFINE_integer('cassandra_replication_factor', 3, 'Num of replicas.')
flags.DEFINE_integer('cassandra_concurrent_reads', 32,
'Concurrent read requests each server accepts.')
# Partial list of known mirrors:
# https://repo.maven.apache.org/maven2/.meta/repository-metadata.xml
# See instructions for setting up own mirror:
# https://maven.apache.org/guides/mini/guide-mirror-settings.html
flags.DEFINE_string('cassandra_maven_repo_url', None,
                    'Optional maven repo mirror to use.')
def CheckPrerequisites():
"""Verifies that the required resources are present.
Raises:
perfkitbenchmarker.data.ResourceNotFound: On missing resource.
"""
for resource in (CASSANDRA_YAML_TEMPLATE,
CASSANDRA_ENV_TEMPLATE):
data.ResourcePath(resource)
def _Install(vm):
"""Installs Cassandra from a tarball."""
vm.Install('ant')
vm.Install('build_tools')
vm.Install('openjdk')
vm.Install('curl')
vm.RemoteCommand('cd {0}; git clone {1}; cd {2}; git checkout {3}'.format(
      linux_packages.INSTALL_DIR, CASSANDRA_GIT_REPO, CASSANDRA_DIR,
CASSANDRA_VERSION))
if FLAGS.cassandra_maven_repo_url:
# sets maven repo properties in the build.properties
file_contents = _MAVEN_REPO_PARAMS.format(FLAGS.cassandra_maven_repo_url)
vm.RemoteCommand('echo "{}" > {}/build.properties'.format(
file_contents, CASSANDRA_DIR))
vm.RemoteCommand('cd {}; {}/bin/ant'.format(CASSANDRA_DIR, ANT_HOME_DIR))
# Add JNA
vm.RemoteCommand('cd {0} && curl -LJO {1}'.format(
posixpath.join(CASSANDRA_DIR, 'lib'),
JNA_JAR_URL))
def YumInstall(vm):
"""Installs Cassandra on the VM."""
_Install(vm)
def AptInstall(vm):
"""Installs Cassandra on the VM."""
_Install(vm)
def JujuInstall(vm, vm_group_name):
"""Installs the Cassandra charm on the VM."""
vm.JujuDeploy('cs:trusty/cassandra', vm_group_name)
# The charm defaults to Cassandra 2.2.x, which has deprecated
# cassandra-cli. Specify the sources to downgrade to Cassandra 2.1.x
# to match the cassandra benchmark(s) expectations.
sources = ['deb https://www.apache.org/dist/cassandra/debian 21x main',
'ppa:openjdk-r/ppa',
'ppa:stub/cassandra']
keys = ['F758CE318D77295D',
'null',
'null']
vm.JujuSet(
'cassandra',
[
# Allow authentication from all units
'authenticator=AllowAllAuthenticator',
'install_sources="[%s]"' %
', '.join(["'" + x + "'" for x in sources]),
'install_keys="[%s]"' % ', '.join(keys)
])
# Wait for cassandra to be installed and configured
vm.JujuWait()
for unit in vm.units:
# Make sure the cassandra/conf dir is created, since we're skipping
# the manual installation to /opt/pkb.
remote_path = posixpath.join(CASSANDRA_DIR, 'conf')
unit.RemoteCommand('mkdir -p %s' % remote_path)
def Configure(vm, seed_vms):
"""Configure Cassandra on 'vm'.
Args:
vm: VirtualMachine. The VM to configure.
seed_vms: List of VirtualMachine. The seed virtual machine(s).
"""
context = {'ip_address': vm.internal_ip,
'data_path': posixpath.join(vm.GetScratchDir(), 'cassandra'),
'seeds': ','.join(vm.internal_ip for vm in seed_vms),
'num_cpus': vm.NumCpusForBenchmark(),
'cluster_name': 'Test cluster',
'concurrent_reads': FLAGS.cassandra_concurrent_reads}
for config_file in [CASSANDRA_ENV_TEMPLATE, CASSANDRA_YAML_TEMPLATE]:
local_path = data.ResourcePath(config_file)
remote_path = posixpath.join(
CASSANDRA_DIR, 'conf',
os.path.splitext(os.path.basename(config_file))[0])
vm.RenderTemplate(local_path, remote_path, context=context)
def Start(vm):
"""Start Cassandra on a VM.
Args:
vm: The target vm. Should already be configured via 'Configure'.
"""
if vm.OS_TYPE == os_types.JUJU:
return
vm.RemoteCommand(
'nohup {0}/bin/cassandra -p "{1}" 1> {2} 2> {3} &'.format(
CASSANDRA_DIR, CASSANDRA_PID, CASSANDRA_OUT, CASSANDRA_ERR))
def Stop(vm):
"""Stops Cassandra on 'vm'."""
if vm.OS_TYPE == os_types.JUJU:
return
vm.RemoteCommand('kill $(cat {0})'.format(CASSANDRA_PID),
ignore_failure=True)
def IsRunning(vm):
"""Returns a boolean indicating whether Cassandra is running on 'vm'."""
cassandra_pid = vm.RemoteCommand(
'cat {0} || true'.format(CASSANDRA_PID))[0].strip()
if not cassandra_pid:
return False
try:
vm.RemoteCommand('kill -0 {0}'.format(cassandra_pid))
return True
except errors.VirtualMachine.RemoteCommandError:
    logging.warning(
        '%s: Cassandra is not running. '
        'Startup STDOUT:\n%s\n\nSTDERR:\n%s',
        vm,
        vm.RemoteCommand('cat ' + CASSANDRA_OUT),
        vm.RemoteCommand('cat ' + CASSANDRA_ERR))
return False
def CleanNode(vm):
"""Remove Cassandra data from 'vm'.
Args:
vm: VirtualMachine. VM to clean.
"""
if vm.OS_TYPE == os_types.JUJU:
return
data_path = posixpath.join(vm.GetScratchDir(), 'cassandra')
vm.RemoteCommand('rm -rf {0}'.format(data_path))
def _StartCassandraIfNotRunning(vm):
"""Starts Cassandra on 'vm' if not currently running."""
if not IsRunning(vm):
logging.info('Retrying starting cassandra on %s', vm)
Start(vm)
def GetCassandraCliPath(vm):
if vm.OS_TYPE == os_types.JUJU:
# Replace the stock CASSANDRA_CLI so that it uses the binary
# installed by the cassandra charm.
return '/usr/bin/cassandra-cli'
return posixpath.join(CASSANDRA_DIR, 'bin',
'cassandra-cli')
def GetCassandraStressPath(vm):
if vm.OS_TYPE == os_types.JUJU:
# Replace the stock CASSANDRA_STRESS so that it uses the binary
# installed by the cassandra-stress charm.
return '/usr/bin/cassandra-stress'
return posixpath.join(CASSANDRA_DIR, 'tools', 'bin',
'cassandra-stress')
def GetNumberOfNodesUp(vm):
"""Gets the number of VMs which are up in a Cassandra cluster.
Args:
vm: VirtualMachine. The VM to use to check the cluster status.
"""
vms_up = vm.RemoteCommand(
'{0} status | grep -c "^UN"'.format(NODETOOL))[0].strip()
return int(vms_up)
def StartCluster(seed_vm, vms):
"""Starts a Cassandra cluster.
Starts a Cassandra cluster, first starting 'seed_vm', then remaining VMs in
'vms'.
Args:
seed_vm: VirtualMachine. Machine which will function as the sole seed. It
will be started before all other VMs.
vms: list of VirtualMachines. VMs *other than* seed_vm which should be
started.
"""
if seed_vm.OS_TYPE == os_types.JUJU:
# Juju automatically configures and starts the Cassandra cluster.
return
vm_count = len(vms) + 1
# Cassandra setup
logging.info('Starting seed VM %s', seed_vm)
Start(seed_vm)
logging.info('Waiting %ds for seed to start', NODE_START_SLEEP)
time.sleep(NODE_START_SLEEP)
for i in range(5):
if not IsRunning(seed_vm):
      logging.warning(
          'Seed %s: Cassandra not running yet (try %d). Waiting %ds.',
          seed_vm, i, NODE_START_SLEEP)
time.sleep(NODE_START_SLEEP)
else:
break
else:
raise ValueError('Cassandra failed to start on seed.')
if vms:
logging.info('Starting remaining %d nodes', len(vms))
# Start the VMs with a small pause in between each, to allow the node to
# join.
# Starting Cassandra nodes fails when multiple nodes attempt to join the
# cluster concurrently.
for i, vm in enumerate(vms):
time.sleep(NODE_START_SLEEP)
logging.info('Starting non-seed VM %d/%d.', i + 1, len(vms))
Start(vm)
logging.info('Waiting %ds for nodes to join', CLUSTER_START_SLEEP)
time.sleep(CLUSTER_START_SLEEP)
for i in range(CLUSTER_START_TRIES):
vms_up = GetNumberOfNodesUp(seed_vm)
if vms_up == vm_count:
logging.info('All %d nodes up!', vm_count)
break
    logging.warning(
        'Try %d: only %s of %s up. Restarting and sleeping %ds', i,
        vms_up, vm_count, NODE_START_SLEEP)
vm_util.RunThreaded(_StartCassandraIfNotRunning, vms)
time.sleep(NODE_START_SLEEP)
else:
raise IOError('Failed to start Cassandra cluster.')
|
from __future__ import division
import itertools
import numpy as np
import chainer
from chainer.backends import cuda
import chainer.functions as F
from chainer.links import Convolution2D
from chainercv.links import Conv2DBNActiv
from chainercv import utils
from chainercv.links.model.yolo.yolo_base import YOLOBase
def _leaky_relu(x):
return F.leaky_relu(x, slope=0.1)
def _maxpool(x, ksize, stride=None):
if stride is None:
stride = ksize
h = F.max_pooling_2d(x, ksize, stride=stride, pad=ksize - stride)
if ksize > stride:
h = h[:, :, ksize - stride:, ksize - stride:]
return h
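# A shape sketch for _maxpool (derived from the code above): with the default
# stride the spatial dimensions are divided by ksize; when ksize > stride the
# input is padded by ksize - stride and the duplicated leading rows/columns
# are cropped, giving darknet-style "same" pooling, e.g.
#
#     _maxpool(x, 2)     # (n, c, 26, 26) -> (n, c, 13, 13)
#     _maxpool(x, 2, 1)  # (n, c, 13, 13) -> (n, c, 13, 13)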
def _reorg(x):
n, c, h, w = x.shape
x = F.reshape(x, (n, c // 4, h, 2, w, 2))
x = F.transpose(x, (0, 3, 5, 1, 2, 4))
return F.reshape(x, (n, c * 4, h // 2, w // 2))
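# A concrete shape example for _reorg (following the reshape/transpose above):
# it folds each 2x2 spatial block into the channel axis using darknet's reorg
# layout, so for YOLOv2's skip connection
#
#     x: (n, 64, 26, 26) -> _reorg(x): (n, 256, 13, 13)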
class YOLOv2Base(YOLOBase):
"""Base class for YOLOv2 and YOLOv2Tiny.
A subclass of this class should have :obj:`_extractor`,
:obj:`_models`, and :obj:`_anchors`.
"""
def __init__(self, n_fg_class=None, pretrained_model=None):
super(YOLOv2Base, self).__init__()
param, path = utils.prepare_pretrained_model(
{'n_fg_class': n_fg_class}, pretrained_model, self._models)
self.n_fg_class = param['n_fg_class']
self.use_preset('visualize')
with self.init_scope():
self.extractor = self._extractor()
self.subnet = Convolution2D(
len(self._anchors) * (4 + 1 + self.n_fg_class), 1)
default_bbox = []
for v, u in itertools.product(range(self.extractor.grid), repeat=2):
for h, w in self._anchors:
default_bbox.append((v, u, h, w))
self._default_bbox = np.array(default_bbox, dtype=np.float32)
if path:
chainer.serializers.load_npz(path, self, strict=False)
def to_cpu(self):
super(YOLOv2Base, self).to_cpu()
self._default_bbox = cuda.to_cpu(self._default_bbox)
def to_gpu(self, device=None):
super(YOLOv2Base, self).to_gpu(device)
self._default_bbox = cuda.to_gpu(self._default_bbox, device)
def forward(self, x):
"""Compute localization, objectness, and classification from a batch of images.
This method computes three variables, :obj:`locs`, :obj:`objs`,
and :obj:`confs`.
:meth:`self._decode` converts these variables to bounding box
coordinates and confidence scores.
These variables are also used in training YOLOv2.
Args:
x (chainer.Variable): A variable holding a batch of images.
Returns:
tuple of chainer.Variable:
This method returns three variables, :obj:`locs`,
:obj:`objs`, and :obj:`confs`.
* **locs**: A variable of float arrays of shape \
:math:`(B, K, 4)`, \
where :math:`B` is the number of samples in the batch and \
:math:`K` is the number of default bounding boxes.
* **objs**: A variable of float arrays of shape \
:math:`(B, K)`.
* **confs**: A variable of float arrays of shape \
:math:`(B, K, n\_fg\_class)`.
"""
h = self.subnet(self.extractor(x))
h = F.transpose(h, (0, 2, 3, 1))
h = F.reshape(h, (h.shape[0], -1, 4 + 1 + self.n_fg_class))
locs = h[:, :, :4]
objs = h[:, :, 4]
confs = h[:, :, 5:]
return locs, objs, confs
    def _decode(self, loc, obj, conf):
        raw_bbox = self._default_bbox.copy()
        # Sigmoid offsets move each box centre within its grid cell.
        raw_bbox[:, :2] += 1 / (1 + self.xp.exp(-loc[:, :2]))
        # Exponential factors scale the anchor height and width.
        raw_bbox[:, 2:] *= self.xp.exp(loc[:, 2:])
        # Convert (centre, size) to corner coordinates, then to input pixels.
        raw_bbox[:, :2] -= raw_bbox[:, 2:] / 2
        raw_bbox[:, 2:] += raw_bbox[:, :2]
        raw_bbox *= self.insize / self.extractor.grid
        # Objectness via sigmoid; class scores via softmax over classes.
        obj = 1 / (1 + self.xp.exp(-obj))
        conf = self.xp.exp(conf)
        conf /= conf.sum(axis=1, keepdims=True)
        raw_score = obj[:, None] * conf
bbox = []
label = []
score = []
for l in range(self.n_fg_class):
bbox_l = raw_bbox
score_l = raw_score[:, l]
mask = score_l >= self.score_thresh
bbox_l = bbox_l[mask]
score_l = score_l[mask]
indices = utils.non_maximum_suppression(
bbox_l, self.nms_thresh, score_l)
bbox_l = bbox_l[indices]
score_l = score_l[indices]
bbox.append(bbox_l)
label.append(self.xp.array((l,) * len(bbox_l)))
score.append(score_l)
bbox = self.xp.vstack(bbox).astype(np.float32)
label = self.xp.hstack(label).astype(np.int32)
score = self.xp.hstack(score).astype(np.float32)
return bbox, label, score
class Darknet19Extractor(chainer.ChainList):
"""A Darknet19 based feature extractor for YOLOv2.
This is a feature extractor for :class:`~chainercv.links.model.yolo.YOLOv2`
"""
insize = 416
grid = 13
def __init__(self):
super(Darknet19Extractor, self).__init__()
# Darknet19
for k, n_conv in enumerate((1, 1, 3, 3, 5, 5)):
for i in range(n_conv):
if i % 2 == 0:
self.append(
Conv2DBNActiv(32 << k, 3, pad=1, activ=_leaky_relu))
else:
self.append(
Conv2DBNActiv(32 << (k - 1), 1, activ=_leaky_relu))
# additional links
self.append(Conv2DBNActiv(1024, 3, pad=1, activ=_leaky_relu))
self.append(Conv2DBNActiv(1024, 3, pad=1, activ=_leaky_relu))
self.append(Conv2DBNActiv(64, 1, activ=_leaky_relu))
self.append(Conv2DBNActiv(1024, 3, pad=1, activ=_leaky_relu))
def forward(self, x):
"""Compute a feature map from a batch of images.
Args:
x (ndarray): An array holding a batch of images.
The images should be resized to :math:`416\\times 416`.
Returns:
Variable:
"""
        h = x
        for i, link in enumerate(self):
            h = link(h)
            if i == 12:
                # Keep the higher-resolution feature map for the skip path.
                tmp = h
            elif i == 19:
                # Swap so the 64-channel 1x1 conv (link 20) runs on the
                # saved skip feature while the deep feature waits in tmp.
                h, tmp = tmp, h
            elif i == 20:
                # Fold the skip feature into channels and merge both paths.
                h = F.concat((_reorg(h), tmp))
            if i in {0, 1, 4, 7, 12}:
                h = _maxpool(h, 2)
        return h
class YOLOv2(YOLOv2Base):
"""YOLOv2.
This is a model of YOLOv2 [#]_.
This model uses :class:`~chainercv.links.model.yolo.Darknet19Extractor` as
its feature extractor.
.. [#] Joseph Redmon, Ali Farhadi.
YOLO9000: Better, Faster, Stronger. CVPR 2017.
Args:
n_fg_class (int): The number of classes excluding the background.
pretrained_model (string): The weight file to be loaded.
This can take :obj:`'voc0712'`, `filepath` or :obj:`None`.
The default value is :obj:`None`.
* :obj:`'voc0712'`: Load weights trained on trainval split of \
PASCAL VOC 2007 and 2012. \
The weight file is downloaded and cached automatically. \
:obj:`n_fg_class` must be :obj:`20` or :obj:`None`. \
These weights were converted from the darknet model \
provided by `the original implementation \
<https://pjreddie.com/darknet/yolov2/>`_. \
The conversion code is \
`chainercv/examples/yolo/darknet2npz.py`.
* `filepath`: A path of npz file. In this case, :obj:`n_fg_class` \
must be specified properly.
* :obj:`None`: Do not load weights.
"""
_extractor = Darknet19Extractor
_models = {
'voc0712': {
'param': {'n_fg_class': 20},
'url': 'https://chainercv-models.preferred.jp/'
'yolo_v2_voc0712_converted_2018_05_03.npz',
'cv2': True
},
}
_anchors = (
(1.73145, 1.3221),
(4.00944, 3.19275),
(8.09892, 5.05587),
(4.84053, 9.47112),
(10.0071, 11.2364))
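# Note: each anchor is (height, width) in units of grid cells on the 13x13
# Darknet19 output, matching the (v, u, h, w) layout used for _default_bbox.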
|
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.components.mqtt import valid_subscribe_topic
from .const import ( # pylint:disable=unused-import
CONF_DISCOVERY_PREFIX,
DEFAULT_PREFIX,
DOMAIN,
)
class FlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_PUSH
def __init__(self):
"""Initialize flow."""
self._prefix = DEFAULT_PREFIX
async def async_step_mqtt(self, discovery_info=None):
"""Handle a flow initialized by MQTT discovery."""
if self._async_in_progress() or self._async_current_entries():
return self.async_abort(reason="single_instance_allowed")
await self.async_set_unique_id(DOMAIN)
# Validate the topic, will throw if it fails
prefix = discovery_info.subscribed_topic
if prefix.endswith("/#"):
prefix = prefix[:-2]
try:
valid_subscribe_topic(f"{prefix}/#")
except vol.Invalid:
return self.async_abort(reason="invalid_discovery_info")
self._prefix = prefix
return await self.async_step_confirm()
async def async_step_user(self, user_input=None):
"""Handle a flow initialized by the user."""
if self._async_current_entries():
return self.async_abort(reason="single_instance_allowed")
if self.show_advanced_options:
return await self.async_step_config()
return await self.async_step_confirm()
async def async_step_config(self, user_input=None):
"""Confirm the setup."""
errors = {}
data = {CONF_DISCOVERY_PREFIX: self._prefix}
if user_input is not None:
bad_prefix = False
prefix = user_input[CONF_DISCOVERY_PREFIX]
if prefix.endswith("/#"):
prefix = prefix[:-2]
try:
valid_subscribe_topic(f"{prefix}/#")
except vol.Invalid:
errors["base"] = "invalid_discovery_topic"
bad_prefix = True
else:
data[CONF_DISCOVERY_PREFIX] = prefix
if not bad_prefix:
return self.async_create_entry(title="Tasmota", data=data)
fields = {}
fields[vol.Optional(CONF_DISCOVERY_PREFIX, default=self._prefix)] = str
return self.async_show_form(
step_id="config", data_schema=vol.Schema(fields), errors=errors
)
async def async_step_confirm(self, user_input=None):
"""Confirm the setup."""
data = {CONF_DISCOVERY_PREFIX: self._prefix}
if user_input is not None:
return self.async_create_entry(title="Tasmota", data=data)
return self.async_show_form(step_id="confirm")
|
import logging
from blebox_uniapi.error import Error, UnsupportedBoxVersion
from blebox_uniapi.products import Products
from blebox_uniapi.session import ApiHost
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_HOST, CONF_PORT
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import (
ADDRESS_ALREADY_CONFIGURED,
CANNOT_CONNECT,
DEFAULT_HOST,
DEFAULT_PORT,
DEFAULT_SETUP_TIMEOUT,
DOMAIN,
UNKNOWN,
UNSUPPORTED_VERSION,
)
_LOGGER = logging.getLogger(__name__)
def host_port(data):
"""Return a list with host and port."""
return (data[CONF_HOST], data[CONF_PORT])
def create_schema(previous_input=None):
"""Create a schema with given values as default."""
if previous_input is not None:
host, port = host_port(previous_input)
else:
host = DEFAULT_HOST
port = DEFAULT_PORT
return vol.Schema(
{
vol.Required(CONF_HOST, default=host): str,
vol.Required(CONF_PORT, default=port): int,
}
)
LOG_MSG = {
UNSUPPORTED_VERSION: "Outdated firmware",
CANNOT_CONNECT: "Failed to identify device",
UNKNOWN: "Unknown error while identifying device",
}
class BleBoxConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow for BleBox devices."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL
def __init__(self):
"""Initialize the BleBox config flow."""
self.device_config = {}
def handle_step_exception(
self, step, exception, schema, host, port, message_id, log_fn
):
"""Handle step exceptions."""
log_fn("%s at %s:%d (%s)", LOG_MSG[message_id], host, port, exception)
return self.async_show_form(
step_id="user",
data_schema=schema,
errors={"base": message_id},
description_placeholders={"address": f"{host}:{port}"},
)
async def async_step_user(self, user_input=None):
"""Handle initial user-triggered config step."""
hass = self.hass
schema = create_schema(user_input)
if user_input is None:
return self.async_show_form(
step_id="user",
data_schema=schema,
errors={},
description_placeholders={},
)
addr = host_port(user_input)
for entry in hass.config_entries.async_entries(DOMAIN):
if addr == host_port(entry.data):
host, port = addr
return self.async_abort(
reason=ADDRESS_ALREADY_CONFIGURED,
description_placeholders={"address": f"{host}:{port}"},
)
websession = async_get_clientsession(hass)
api_host = ApiHost(*addr, DEFAULT_SETUP_TIMEOUT, websession, hass.loop, _LOGGER)
try:
product = await Products.async_from_host(api_host)
except UnsupportedBoxVersion as ex:
return self.handle_step_exception(
"user", ex, schema, *addr, UNSUPPORTED_VERSION, _LOGGER.debug
)
except Error as ex:
return self.handle_step_exception(
"user", ex, schema, *addr, CANNOT_CONNECT, _LOGGER.warning
)
except RuntimeError as ex:
return self.handle_step_exception(
"user", ex, schema, *addr, UNKNOWN, _LOGGER.error
)
# Check if configured but IP changed since
await self.async_set_unique_id(product.unique_id)
self._abort_if_unique_id_configured()
return self.async_create_entry(title=product.name, data=user_input)
|
import unittest
import numpy as np
import PIL
from chainercv.transforms import resize_contain
from chainercv.utils import testing
@testing.parameterize(*testing.product_dict(
[
{'size': (48, 96), 'scaled_size': (32, 64),
'y_offset': 8, 'x_offset': 16},
{'size': (16, 68), 'scaled_size': (16, 32),
'y_offset': 0, 'x_offset': 18},
{'size': (24, 16), 'scaled_size': (8, 16),
'y_offset': 8, 'x_offset': 0},
{'size': (47, 97), 'scaled_size': (32, 64),
'y_offset': 7, 'x_offset': 16},
],
[
{'fill': 128},
{'fill': (104, 117, 123)},
{'fill': np.random.uniform(255, size=(3, 1, 1))},
],
[
{'interpolation': PIL.Image.NEAREST},
{'interpolation': PIL.Image.BILINEAR},
{'interpolation': PIL.Image.BICUBIC},
{'interpolation': PIL.Image.LANCZOS},
]
))
class TestResizeContain(unittest.TestCase):
def test_resize_contain(self):
H, W = 32, 64
        img = np.random.uniform(255, size=(3, H, W))
out, param = resize_contain(
img, self.size, fill=self.fill,
interpolation=self.interpolation,
return_param=True)
self.assertEqual(param['scaled_size'], self.scaled_size)
self.assertEqual(param['y_offset'], self.y_offset)
self.assertEqual(param['x_offset'], self.x_offset)
if self.scaled_size == (H, W):
np.testing.assert_array_equal(
out[:,
self.y_offset:self.y_offset + H,
self.x_offset:self.x_offset + W],
img)
if self.y_offset > 0 or self.x_offset > 0:
if isinstance(self.fill, int):
fill = (self.fill,) * 3
else:
fill = self.fill
np.testing.assert_array_equal(
out[:, 0, 0], np.array(fill).flatten())
testing.run_module(__name__, __file__)
|
import time
class PersistenceAutoSave(object):
    def __init__(self, persistence, persistence_file, persistence_status,
                 logger, interval, persistence_save_fn):
self.persistence = persistence
self.persistence_file = persistence_file
self.persistence_status = persistence_status
self.persistence_save_fn = persistence_save_fn
self.logger = logger
self.interval = interval
def watch(self):
# Run indefinitely until process is terminated
while True:
time.sleep(self.interval)
if self.persistence_status["changed"]:
self.logger.debug("State recently changed, writing to disk")
self.persistence_status["changed"] = False
self.persistence_save_fn(self.persistence_file)
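# A minimal usage sketch (the names below are illustrative, not part of this
# module): watch() loops forever, so it is meant to run in a daemon thread.
#
#     import threading
#
#     autosave = PersistenceAutoSave(
#         persistence, "state.json", {"changed": False}, logger, 300, save_fn)
#     threading.Thread(target=autosave.watch, daemon=True).start()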
|
import warnings
import functools
def unsafe(f, message=None):
"""
Decorator form for marking a function as unsafe.
This form may not get used much, but there are a few cases
we may want to add something unsafe generally, but safe in specific uses.
The warning can be suppressed in the safe context with warnings.catch_warnings
This should be used sparingly at most.
"""
def wrapper(func):
@functools.wraps(func)
def get_wrapped(*args, **kwargs):
actual_message = message or f"{func.__name__} is unsafe for use"
warnings.warn(actual_message, stacklevel=3, category=RuntimeWarning)
return func(*args, **kwargs)
return get_wrapped
return wrapper
def warn_unsafe(f, message=None):
"""
Function to mark function from dependencies as unsafe for use.
Warning: There is no check that a function has already been modified.
This form should only be used in init, if you want to mark an internal function
as unsafe, use the decorator form above.
The warning can be suppressed in safe contexts with warnings.catch_warnings
This should be used sparingly at most.
"""
def wrapper(func):
@functools.wraps(func)
def get_wrapped(*args, **kwargs):
actual_message = message or f"{func.__name__} is unsafe for use"
warnings.warn(actual_message, stacklevel=3, category=RuntimeWarning)
return func(*args, **kwargs)
return get_wrapped
return wrapper(f)
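# A minimal usage sketch (some_dependency.risky is illustrative): rebind a
# third-party function at import time so later calls emit a RuntimeWarning.
#
#     import some_dependency
#
#     some_dependency.risky = warn_unsafe(
#         some_dependency.risky, "risky() bypasses validation"
#     )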
|
from typing import Any, Dict, Iterable, Optional
from homeassistant.core import Context, State
from homeassistant.helpers.state import async_reproduce_state
from homeassistant.helpers.typing import HomeAssistantType
from . import get_entity_ids
async def async_reproduce_states(
hass: HomeAssistantType,
states: Iterable[State],
*,
context: Optional[Context] = None,
reproduce_options: Optional[Dict[str, Any]] = None,
) -> None:
"""Reproduce component states."""
states_copy = []
for state in states:
members = get_entity_ids(hass, state.entity_id)
for member in members:
states_copy.append(
State(
member,
state.state,
state.attributes,
last_changed=state.last_changed,
last_updated=state.last_updated,
context=state.context,
)
)
await async_reproduce_state(
hass, states_copy, context=context, reproduce_options=reproduce_options
)
|
import boto3
from django.conf import settings
from weblate.machinery.base import MachineTranslation
class AWSTranslation(MachineTranslation):
"""AWS machine translation."""
name = "AWS"
max_score = 88
def __init__(self):
super().__init__()
self.client = boto3.client(
"translate",
region_name=settings.MT_AWS_REGION,
aws_access_key_id=settings.MT_AWS_ACCESS_KEY_ID,
aws_secret_access_key=settings.MT_AWS_SECRET_ACCESS_KEY,
)
def download_languages(self):
return (
"en",
"ar",
"zh",
"fr",
"de",
"pt",
"es",
"ja",
"ru",
"it",
"zh-TW",
"tr",
"cs",
)
def download_translations(
self,
source,
language,
text: str,
unit,
user,
search: bool,
threshold: int = 75,
):
response = self.client.translate_text(
Text=text, SourceLanguageCode=source, TargetLanguageCode=language
)
yield {
"text": response["TranslatedText"],
"quality": self.max_score,
"service": self.name,
"source": text,
}
|
import logging
from bimmer_connected.account import ConnectedDriveAccount
from bimmer_connected.country_selector import get_region_from_name
import voluptuous as vol
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import track_utc_time_change
import homeassistant.util.dt as dt_util
_LOGGER = logging.getLogger(__name__)
DOMAIN = "bmw_connected_drive"
CONF_REGION = "region"
CONF_READ_ONLY = "read_only"
ATTR_VIN = "vin"
ACCOUNT_SCHEMA = vol.Schema(
{
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_REGION): vol.Any("north_america", "china", "rest_of_world"),
vol.Optional(CONF_READ_ONLY, default=False): cv.boolean,
}
)
CONFIG_SCHEMA = vol.Schema({DOMAIN: {cv.string: ACCOUNT_SCHEMA}}, extra=vol.ALLOW_EXTRA)
SERVICE_SCHEMA = vol.Schema({vol.Required(ATTR_VIN): cv.string})
BMW_COMPONENTS = ["binary_sensor", "device_tracker", "lock", "notify", "sensor"]
UPDATE_INTERVAL = 5 # in minutes
SERVICE_UPDATE_STATE = "update_state"
_SERVICE_MAP = {
"light_flash": "trigger_remote_light_flash",
"sound_horn": "trigger_remote_horn",
"activate_air_conditioning": "trigger_remote_air_conditioning",
"find_vehicle": "trigger_remote_vehicle_finder",
}
def setup(hass, config: dict):
"""Set up the BMW connected drive components."""
accounts = []
for name, account_config in config[DOMAIN].items():
accounts.append(setup_account(account_config, hass, name))
hass.data[DOMAIN] = accounts
def _update_all(call) -> None:
"""Update all BMW accounts."""
for cd_account in hass.data[DOMAIN]:
cd_account.update()
# Service to manually trigger updates for all accounts.
hass.services.register(DOMAIN, SERVICE_UPDATE_STATE, _update_all)
_update_all(None)
for component in BMW_COMPONENTS:
discovery.load_platform(hass, component, DOMAIN, {}, config)
return True
def setup_account(account_config: dict, hass, name: str) -> "BMWConnectedDriveAccount":
"""Set up a new BMWConnectedDriveAccount based on the config."""
username = account_config[CONF_USERNAME]
password = account_config[CONF_PASSWORD]
region = account_config[CONF_REGION]
read_only = account_config[CONF_READ_ONLY]
_LOGGER.debug("Adding new account %s", name)
cd_account = BMWConnectedDriveAccount(username, password, region, name, read_only)
def execute_service(call):
"""Execute a service for a vehicle.
        This must be a nested function (closure), as we need access to the
        cd_account object here.
"""
vin = call.data[ATTR_VIN]
vehicle = cd_account.account.get_vehicle(vin)
if not vehicle:
_LOGGER.error("Could not find a vehicle for VIN %s", vin)
return
function_name = _SERVICE_MAP[call.service]
function_call = getattr(vehicle.remote_services, function_name)
function_call()
if not read_only:
# register the remote services
for service in _SERVICE_MAP:
hass.services.register(
DOMAIN, service, execute_service, schema=SERVICE_SCHEMA
)
# update every UPDATE_INTERVAL minutes, starting now
# this should even out the load on the servers
now = dt_util.utcnow()
track_utc_time_change(
hass,
cd_account.update,
minute=range(now.minute % UPDATE_INTERVAL, 60, UPDATE_INTERVAL),
second=now.second,
)
return cd_account
class BMWConnectedDriveAccount:
"""Representation of a BMW vehicle."""
def __init__(
self, username: str, password: str, region_str: str, name: str, read_only
) -> None:
"""Initialize account."""
region = get_region_from_name(region_str)
self.read_only = read_only
self.account = ConnectedDriveAccount(username, password, region)
self.name = name
self._update_listeners = []
def update(self, *_):
"""Update the state of all vehicles.
Notify all listeners about the update.
"""
_LOGGER.debug(
"Updating vehicle state for account %s, notifying %d listeners",
self.name,
len(self._update_listeners),
)
try:
self.account.update_vehicle_states()
for listener in self._update_listeners:
listener()
except OSError as exception:
_LOGGER.error(
"Could not connect to the BMW Connected Drive portal. "
"The vehicle state could not be updated"
)
_LOGGER.exception(exception)
def add_update_listener(self, listener):
"""Add a listener for update notifications."""
self._update_listeners.append(listener)
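# A minimal sketch (hypothetical credentials) of how ACCOUNT_SCHEMA validates
# one account block from configuration.yaml and fills in the read_only default.
if __name__ == "__main__":
    validated = ACCOUNT_SCHEMA(
        {
            CONF_USERNAME: "user@example.com",
            CONF_PASSWORD: "secret",
            CONF_REGION: "rest_of_world",
        }
    )
    assert validated[CONF_READ_ONLY] is False  # default applied by the schema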
|
import asyncio
from datetime import timedelta
import logging
from pytradfri import Gateway, RequestError
from pytradfri.api.aiocoap_api import APIFactory
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
from homeassistant.exceptions import ConfigEntryNotReady
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from homeassistant.util.json import load_json
from . import config_flow # noqa: F401
from .const import (
ATTR_TRADFRI_GATEWAY,
ATTR_TRADFRI_GATEWAY_MODEL,
ATTR_TRADFRI_MANUFACTURER,
CONF_ALLOW_TRADFRI_GROUPS,
CONF_GATEWAY_ID,
CONF_HOST,
CONF_IDENTITY,
CONF_IMPORT_GROUPS,
CONF_KEY,
CONFIG_FILE,
DEFAULT_ALLOW_TRADFRI_GROUPS,
DEVICES,
DOMAIN,
GROUPS,
KEY_API,
PLATFORMS,
)
_LOGGER = logging.getLogger(__name__)
FACTORY = "tradfri_factory"
LISTENERS = "tradfri_listeners"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Optional(CONF_HOST): cv.string,
vol.Optional(
CONF_ALLOW_TRADFRI_GROUPS, default=DEFAULT_ALLOW_TRADFRI_GROUPS
): cv.boolean,
}
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass: HomeAssistantType, config: ConfigType):
"""Set up the Tradfri component."""
conf = config.get(DOMAIN)
if conf is None:
return True
configured_hosts = [
entry.data.get("host") for entry in hass.config_entries.async_entries(DOMAIN)
]
legacy_hosts = await hass.async_add_executor_job(
load_json, hass.config.path(CONFIG_FILE)
)
for host, info in legacy_hosts.items():
if host in configured_hosts:
continue
info[CONF_HOST] = host
info[CONF_IMPORT_GROUPS] = conf[CONF_ALLOW_TRADFRI_GROUPS]
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=info
)
)
host = conf.get(CONF_HOST)
import_groups = conf[CONF_ALLOW_TRADFRI_GROUPS]
if host is None or host in configured_hosts or host in legacy_hosts:
return True
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={CONF_HOST: host, CONF_IMPORT_GROUPS: import_groups},
)
)
return True
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry):
"""Create a gateway."""
# host, identity, key, allow_tradfri_groups
tradfri_data = hass.data.setdefault(DOMAIN, {})[entry.entry_id] = {}
listeners = tradfri_data[LISTENERS] = []
factory = await APIFactory.init(
entry.data[CONF_HOST],
psk_id=entry.data[CONF_IDENTITY],
psk=entry.data[CONF_KEY],
)
async def on_hass_stop(event):
"""Close connection when hass stops."""
await factory.shutdown()
listeners.append(hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, on_hass_stop))
api = factory.request
gateway = Gateway()
try:
gateway_info = await api(gateway.get_gateway_info())
devices_commands = await api(gateway.get_devices())
devices = await api(devices_commands)
groups_commands = await api(gateway.get_groups())
groups = await api(groups_commands)
except RequestError as err:
await factory.shutdown()
raise ConfigEntryNotReady from err
tradfri_data[KEY_API] = api
tradfri_data[FACTORY] = factory
tradfri_data[DEVICES] = devices
tradfri_data[GROUPS] = groups
dev_reg = await hass.helpers.device_registry.async_get_registry()
dev_reg.async_get_or_create(
config_entry_id=entry.entry_id,
connections=set(),
identifiers={(DOMAIN, entry.data[CONF_GATEWAY_ID])},
manufacturer=ATTR_TRADFRI_MANUFACTURER,
name=ATTR_TRADFRI_GATEWAY,
# They just have 1 gateway model. Type is not exposed yet.
model=ATTR_TRADFRI_GATEWAY_MODEL,
sw_version=gateway_info.firmware_version,
)
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
async def async_keep_alive(now):
if hass.is_stopping:
return
try:
await api(gateway.get_gateway_info())
except RequestError:
_LOGGER.error("Keep-alive failed")
listeners.append(
async_track_time_interval(hass, async_keep_alive, timedelta(seconds=60))
)
return True
async def async_unload_entry(hass: HomeAssistantType, entry: ConfigEntry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
if unload_ok:
tradfri_data = hass.data[DOMAIN].pop(entry.entry_id)
factory = tradfri_data[FACTORY]
await factory.shutdown()
# unsubscribe listeners
for listener in tradfri_data[LISTENERS]:
listener()
return unload_ok
|
import os.path as op
import numpy as np
from numpy.testing import assert_allclose
import pytest
from mne import pick_types
from mne.io import read_raw_fif
from mne.datasets import testing
from mne.io.tag import _loc_to_coil_trans
from mne.preprocessing import (read_fine_calibration, write_fine_calibration,
compute_fine_calibration, maxwell_filter)
from mne.preprocessing.tests.test_maxwell import _assert_shielding
from mne.transforms import rot_to_quat, _angle_between_quats
from mne.utils import object_diff
# Define fine calibration filepaths
data_path = testing.data_path(download=False)
fine_cal_fname = op.join(data_path, 'SSS', 'sss_cal_3053.dat')
fine_cal_fname_3d = op.join(data_path, 'SSS', 'sss_cal_3053_3d.dat')
erm_fname = op.join(data_path, 'SSS', '141027_cropped_90Hz_raw.fif')
ctc = op.join(data_path, 'SSS', 'ct_sparse.fif')
cal_mf_fname = op.join(data_path, 'SSS', '141027.dat')
@pytest.mark.parametrize('fname', (cal_mf_fname, fine_cal_fname,
fine_cal_fname_3d))
@testing.requires_testing_data
def test_fine_cal_io(tmpdir, fname):
"""Test round trip reading/writing of fine calibration .dat file."""
temp_fname = op.join(str(tmpdir), 'fine_cal_temp.dat')
# Load fine calibration file
fine_cal_dict = read_fine_calibration(fname)
# Save temp version of fine calibration file
write_fine_calibration(temp_fname, fine_cal_dict)
fine_cal_dict_reload = read_fine_calibration(temp_fname)
# Load temp version of fine calibration file and compare hashes
assert object_diff(fine_cal_dict, fine_cal_dict_reload) == ''
@pytest.mark.slowtest
@testing.requires_testing_data
def test_compute_fine_cal():
"""Test computing fine calibration coefficients."""
raw = read_raw_fif(erm_fname)
want_cal = read_fine_calibration(cal_mf_fname)
got_cal, counts = compute_fine_calibration(
raw, cross_talk=ctc, n_imbalance=1, verbose='debug')
assert counts == 1
assert set(got_cal.keys()) == set(want_cal.keys())
assert got_cal['ch_names'] == want_cal['ch_names']
# in practice these should never be exactly 1.
assert sum([(ic == 1.).any() for ic in want_cal['imb_cals']]) == 0
assert sum([(ic == 1.).any() for ic in got_cal['imb_cals']]) == 0
got_imb = np.array(got_cal['imb_cals'], float)
want_imb = np.array(want_cal['imb_cals'], float)
assert got_imb.shape == want_imb.shape == (306, 1)
got_imb, want_imb = got_imb[:, 0], want_imb[:, 0]
orig_locs = np.array([ch['loc'] for ch in raw.info['chs'][:306]])
want_locs = want_cal['locs']
got_locs = got_cal['locs']
assert want_locs.shape == got_locs.shape
orig_trans = _loc_to_coil_trans(orig_locs)
want_trans = _loc_to_coil_trans(want_locs)
got_trans = _loc_to_coil_trans(got_locs)
dist = np.linalg.norm(got_trans[:, :3, 3] - want_trans[:, :3, 3], axis=1)
assert_allclose(dist, 0., atol=1e-6)
dist = np.linalg.norm(got_trans[:, :3, 3] - orig_trans[:, :3, 3], axis=1)
assert_allclose(dist, 0., atol=1e-6)
orig_quat = rot_to_quat(orig_trans[:, :3, :3])
want_quat = rot_to_quat(want_trans[:, :3, :3])
got_quat = rot_to_quat(got_trans[:, :3, :3])
want_orig_angles = np.rad2deg(_angle_between_quats(want_quat, orig_quat))
got_want_angles = np.rad2deg(_angle_between_quats(got_quat, want_quat))
got_orig_angles = np.rad2deg(_angle_between_quats(got_quat, orig_quat))
for key in ('mag', 'grad'):
# imb_cals value
p = pick_types(raw.info, meg=key, exclude=())
r2 = np.dot(got_imb[p], want_imb[p]) / (
np.linalg.norm(want_imb[p]) * np.linalg.norm(got_imb[p]))
assert 0.99 < r2 <= 1.00001, f'{key}: {r2:0.3f}'
# rotation angles
want_orig_max_angle = want_orig_angles[p].max()
got_orig_max_angle = got_orig_angles[p].max()
got_want_max_angle = got_want_angles[p].max()
if key == 'mag':
assert 8 < want_orig_max_angle < 11, want_orig_max_angle
assert 1 < got_orig_max_angle < 2, got_orig_max_angle
assert 9 < got_want_max_angle < 11, got_want_max_angle
else:
            # Some of these angles are large, but this mostly has to do with
            # processing a very short (one 10-second segment), downsampled
            # (90 Hz) file
assert 66 < want_orig_max_angle < 68, want_orig_max_angle
assert 67 < got_orig_max_angle < 107, got_orig_max_angle
assert 53 < got_want_max_angle < 60, got_want_max_angle
kwargs = dict(bad_condition='warning', cross_talk=ctc, coord_frame='meg')
raw_sss = maxwell_filter(raw, **kwargs)
raw_sss_mf = maxwell_filter(raw, calibration=cal_mf_fname, **kwargs)
raw_sss_py = maxwell_filter(raw, calibration=got_cal, **kwargs)
_assert_shielding(raw_sss, raw, 26, 27)
_assert_shielding(raw_sss_mf, raw, 61, 63)
_assert_shielding(raw_sss_py, raw, 61, 63)
# redoing with given mag data should yield same result
got_cal_redo, _ = compute_fine_calibration(
raw, cross_talk=ctc, n_imbalance=1, calibration=got_cal,
verbose='debug')
assert got_cal['ch_names'] == got_cal_redo['ch_names']
assert_allclose(got_cal['imb_cals'], got_cal_redo['imb_cals'], atol=5e-5)
assert_allclose(got_cal['locs'], got_cal_redo['locs'], atol=1e-6)
assert sum([(ic == 1.).any() for ic in got_cal['imb_cals']]) == 0
    # redoing with 3 imbalance parameters should improve the shielding factor
grad_picks = pick_types(raw.info, meg='grad')
assert len(grad_picks) == 204 and grad_picks[0] == 0
got_grad_imbs = np.array(
[got_cal['imb_cals'][pick] for pick in grad_picks])
assert got_grad_imbs.shape == (204, 1)
got_cal_3, _ = compute_fine_calibration(
raw, cross_talk=ctc, n_imbalance=3, calibration=got_cal,
verbose='debug')
got_grad_3_imbs = np.array([
got_cal_3['imb_cals'][pick] for pick in grad_picks])
assert got_grad_3_imbs.shape == (204, 3)
corr = np.corrcoef(got_grad_3_imbs[:, 0], got_grad_imbs[:, 0])[0, 1]
assert 0.6 < corr < 0.7
raw_sss_py = maxwell_filter(raw, calibration=got_cal_3, **kwargs)
_assert_shielding(raw_sss_py, raw, 68, 70)
|
from homeassistant.components.water_heater import (
ATTR_AWAY_MODE,
ATTR_OPERATION_MODE,
ATTR_TEMPERATURE,
SERVICE_SET_AWAY_MODE,
SERVICE_SET_OPERATION_MODE,
SERVICE_SET_TEMPERATURE,
STATE_ECO,
STATE_GAS,
)
from homeassistant.const import SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON
from homeassistant.core import State
from tests.common import async_mock_service
async def test_reproducing_states(hass, caplog):
"""Test reproducing Water heater states."""
hass.states.async_set("water_heater.entity_off", STATE_OFF, {})
hass.states.async_set("water_heater.entity_on", STATE_ON, {ATTR_TEMPERATURE: 45})
hass.states.async_set("water_heater.entity_away", STATE_ON, {ATTR_AWAY_MODE: True})
hass.states.async_set("water_heater.entity_gas", STATE_GAS, {})
hass.states.async_set(
"water_heater.entity_all",
STATE_ECO,
{ATTR_AWAY_MODE: True, ATTR_TEMPERATURE: 45},
)
turn_on_calls = async_mock_service(hass, "water_heater", SERVICE_TURN_ON)
turn_off_calls = async_mock_service(hass, "water_heater", SERVICE_TURN_OFF)
set_op_calls = async_mock_service(hass, "water_heater", SERVICE_SET_OPERATION_MODE)
set_temp_calls = async_mock_service(hass, "water_heater", SERVICE_SET_TEMPERATURE)
set_away_calls = async_mock_service(hass, "water_heater", SERVICE_SET_AWAY_MODE)
# These calls should do nothing as entities already in desired state
await hass.helpers.state.async_reproduce_state(
[
State("water_heater.entity_off", STATE_OFF),
State("water_heater.entity_on", STATE_ON, {ATTR_TEMPERATURE: 45}),
State("water_heater.entity_away", STATE_ON, {ATTR_AWAY_MODE: True}),
State("water_heater.entity_gas", STATE_GAS, {}),
State(
"water_heater.entity_all",
STATE_ECO,
{ATTR_AWAY_MODE: True, ATTR_TEMPERATURE: 45},
),
]
)
assert len(turn_on_calls) == 0
assert len(turn_off_calls) == 0
assert len(set_op_calls) == 0
assert len(set_temp_calls) == 0
assert len(set_away_calls) == 0
# Test invalid state is handled
await hass.helpers.state.async_reproduce_state(
[State("water_heater.entity_off", "not_supported")]
)
assert "not_supported" in caplog.text
assert len(turn_on_calls) == 0
assert len(turn_off_calls) == 0
assert len(set_op_calls) == 0
assert len(set_temp_calls) == 0
assert len(set_away_calls) == 0
# Make sure correct services are called
await hass.helpers.state.async_reproduce_state(
[
State("water_heater.entity_on", STATE_OFF),
State("water_heater.entity_off", STATE_ON, {ATTR_TEMPERATURE: 45}),
State("water_heater.entity_all", STATE_ECO, {ATTR_AWAY_MODE: False}),
State("water_heater.entity_away", STATE_GAS, {}),
State(
"water_heater.entity_gas",
STATE_ECO,
{ATTR_AWAY_MODE: True, ATTR_TEMPERATURE: 45},
),
# Should not raise
State("water_heater.non_existing", "on"),
],
)
assert len(turn_on_calls) == 1
assert turn_on_calls[0].domain == "water_heater"
assert turn_on_calls[0].data == {"entity_id": "water_heater.entity_off"}
assert len(turn_off_calls) == 1
assert turn_off_calls[0].domain == "water_heater"
assert turn_off_calls[0].data == {"entity_id": "water_heater.entity_on"}
valid_op_calls = [
{"entity_id": "water_heater.entity_away", ATTR_OPERATION_MODE: STATE_GAS},
{"entity_id": "water_heater.entity_gas", ATTR_OPERATION_MODE: STATE_ECO},
]
assert len(set_op_calls) == 2
for call in set_op_calls:
assert call.domain == "water_heater"
assert call.data in valid_op_calls
valid_op_calls.remove(call.data)
valid_temp_calls = [
{"entity_id": "water_heater.entity_off", ATTR_TEMPERATURE: 45},
{"entity_id": "water_heater.entity_gas", ATTR_TEMPERATURE: 45},
]
assert len(set_temp_calls) == 2
for call in set_temp_calls:
assert call.domain == "water_heater"
assert call.data in valid_temp_calls
valid_temp_calls.remove(call.data)
valid_away_calls = [
{"entity_id": "water_heater.entity_all", ATTR_AWAY_MODE: False},
{"entity_id": "water_heater.entity_gas", ATTR_AWAY_MODE: True},
]
assert len(set_away_calls) == 2
for call in set_away_calls:
assert call.domain == "water_heater"
assert call.data in valid_away_calls
valid_away_calls.remove(call.data)
|
from functools import partial
from itertools import product
import pytest
from numpy.testing import (assert_array_almost_equal, assert_allclose,
assert_array_less)
import numpy as np
import scipy.stats
import mne
from mne.stats.parametric import (f_mway_rm, f_threshold_mway_rm,
_map_effects)
# hardcoded external test results, manually transferred
test_external = {
# SPSS, manually conducted analysis
'spss_fvals': np.array([2.568, 0.240, 1.756]),
'spss_pvals_uncorrected': np.array([0.126, 0.788, 0.186]),
'spss_pvals_corrected': np.array([0.126, 0.784, 0.192]),
# R 15.2
# data generated using this code http://goo.gl/7UcKb
'r_fvals': np.array([2.567619, 0.24006, 1.756380]),
'r_pvals_uncorrected': np.array([0.12557, 0.78776, 0.1864]),
# and https://gist.github.com/dengemann/5539403
'r_fvals_3way': np.array([
0.74783999999999995, # A
0.20895, # B
0.21378, # A:B
0.99404000000000003, # C
0.094039999999999999, # A:C
0.11685, # B:C
2.78749]), # A:B:C
'r_fvals_1way': np.array([0.67571999999999999])
}
def generate_data(n_subjects, n_conditions):
"""Generate testing data."""
rng = np.random.RandomState(42)
data = rng.randn(n_subjects * n_conditions).reshape(
n_subjects, n_conditions)
return data
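# A short sketch (not part of the test suite) of calling f_mway_rm directly:
# with factor_levels [2, 3] the six conditions form a 2x3 within-subject
# design, and the default effects 'all' yields values for A, B and A:B.
if __name__ == "__main__":
    data = generate_data(n_subjects=20, n_conditions=6)
    fvals, pvals = f_mway_rm(data, factor_levels=[2, 3])
    assert fvals.size == pvals.size == 3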
def test_map_effects():
"""Test ANOVA effects parsing."""
selection, names = _map_effects(n_factors=2, effects='A')
assert names == ['A']
selection, names = _map_effects(n_factors=2, effects=['A', 'A:B'])
assert names == ['A', 'A:B']
selection, names = _map_effects(n_factors=3, effects='A*B')
assert names == ['A', 'B', 'A:B']
# XXX this might be wrong?
selection, names = _map_effects(n_factors=3, effects='A*C')
assert names == ['A', 'B', 'A:B', 'C', 'A:C']
pytest.raises(ValueError, _map_effects, n_factors=2, effects='C')
pytest.raises(ValueError, _map_effects, n_factors=27, effects='all')
def test_f_twoway_rm():
"""Test 2-way anova."""
rng = np.random.RandomState(42)
iter_params = product([4, 10], [2, 15], [4, 6, 8],
['A', 'B', 'A:B'],
[False, True])
_effects = {
4: [2, 2],
6: [2, 3],
8: [2, 4]
}
for params in iter_params:
n_subj, n_obs, n_levels, effects, correction = params
data = rng.random_sample([n_subj, n_levels, n_obs])
fvals, pvals = f_mway_rm(data, _effects[n_levels], effects,
correction=correction)
assert (fvals >= 0).all()
if pvals.any():
assert ((0 <= pvals) & (1 >= pvals)).all()
n_effects = len(_map_effects(n_subj, effects)[0])
assert fvals.size == n_obs * n_effects
if n_effects == 1: # test for principle of least surprise ...
assert fvals.ndim == 1
fvals_ = f_threshold_mway_rm(n_subj, _effects[n_levels], effects)
assert (fvals_ >= 0).all()
assert fvals_.size == n_effects
data = rng.random_sample([n_subj, n_levels, 1])
pytest.raises(ValueError, f_mway_rm, data, _effects[n_levels],
effects='C', correction=correction)
data = rng.random_sample([n_subj, n_levels, n_obs, 3])
# check for dimension handling
f_mway_rm(data, _effects[n_levels], effects, correction=correction)
# now check against external software results
test_data = generate_data(n_subjects=20, n_conditions=6)
fvals, pvals = f_mway_rm(test_data, [2, 3])
assert_array_almost_equal(fvals, test_external['spss_fvals'], 3)
assert_array_almost_equal(pvals, test_external['spss_pvals_uncorrected'],
3)
assert_array_almost_equal(fvals, test_external['r_fvals'], 4)
assert_array_almost_equal(pvals, test_external['r_pvals_uncorrected'], 3)
_, pvals = f_mway_rm(test_data, [2, 3], correction=True)
assert_array_almost_equal(pvals, test_external['spss_pvals_corrected'], 3)
test_data = generate_data(n_subjects=20, n_conditions=8)
fvals, _ = f_mway_rm(test_data, [2, 2, 2])
assert_array_almost_equal(fvals, test_external['r_fvals_3way'], 5)
fvals, _ = f_mway_rm(test_data, [8], 'A')
assert_array_almost_equal(fvals, test_external['r_fvals_1way'], 5)
@pytest.mark.parametrize('kind, kwargs', [
('1samp', {}),
('ind', {}), # equal_var=True is the default
('ind', dict(equal_var=True)),
('ind', dict(equal_var=False)),
])
@pytest.mark.parametrize('sigma', (0., 1e-3,))
@pytest.mark.parametrize('seed', [0, 42, 1337])
def test_ttest_equiv(kind, kwargs, sigma, seed):
"""Test t-test equivalence."""
rng = np.random.RandomState(seed)
def theirs(*a, **kw):
f = getattr(scipy.stats, 'ttest_%s' % (kind,))
if kind == '1samp':
func = partial(f, popmean=0, **kwargs)
else:
func = partial(f, **kwargs)
return func(*a, **kw)[0]
ours = partial(getattr(mne.stats, 'ttest_%s_no_p' % (kind,)),
sigma=sigma, **kwargs)
X = rng.randn(3, 4, 5)
if kind == 'ind':
X = [X, rng.randn(30, 4, 5)] # should differ based on equal_var
got = ours(*X)
want = theirs(*X)
else:
got = ours(X)
want = theirs(X)
if sigma == 0.:
assert_allclose(got, want, rtol=1e-7, atol=1e-6)
else:
assert not np.allclose(got, want, rtol=1e-7, atol=1e-6)
# should mostly be similar, but uniformly smaller because we add
# something to the divisor (var)
assert_allclose(got, want, rtol=2e-1, atol=1e-2)
assert_array_less(np.abs(got), np.abs(want))
|
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
from homeassistant.core import callback
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC
from .config_flow import get_controller_id_from_config_entry
from .const import (
ATTR_MANUFACTURER,
DOMAIN as UNIFI_DOMAIN,
LOGGER,
UNIFI_WIRELESS_CLIENTS,
)
from .controller import UniFiController
SAVE_DELAY = 10
STORAGE_KEY = "unifi_data"
STORAGE_VERSION = 1
async def async_setup(hass, config):
"""Component doesn't support configuration through configuration.yaml."""
hass.data[UNIFI_WIRELESS_CLIENTS] = wireless_clients = UnifiWirelessClients(hass)
await wireless_clients.async_load()
return True
async def async_setup_entry(hass, config_entry):
"""Set up the UniFi component."""
hass.data.setdefault(UNIFI_DOMAIN, {})
controller = UniFiController(hass, config_entry)
if not await controller.async_setup():
return False
hass.data[UNIFI_DOMAIN][config_entry.entry_id] = controller
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, controller.shutdown)
LOGGER.debug("UniFi config options %s", config_entry.options)
if controller.mac is None:
return True
device_registry = await hass.helpers.device_registry.async_get_registry()
device_registry.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(CONNECTION_NETWORK_MAC, controller.mac)},
default_manufacturer=ATTR_MANUFACTURER,
default_model="UniFi Controller",
default_name="UniFi Controller",
)
return True
async def async_unload_entry(hass, config_entry):
"""Unload a config entry."""
controller = hass.data[UNIFI_DOMAIN].pop(config_entry.entry_id)
return await controller.async_reset()
class UnifiWirelessClients:
"""Class to store clients known to be wireless.
This is needed since wireless devices going offline might get marked as wired by UniFi.
"""
def __init__(self, hass):
"""Set up client storage."""
self.hass = hass
self.data = {}
self._store = hass.helpers.storage.Store(STORAGE_VERSION, STORAGE_KEY)
async def async_load(self):
"""Load data from file."""
data = await self._store.async_load()
if data is not None:
self.data = data
@callback
def get_data(self, config_entry):
"""Get data related to a specific controller."""
controller_id = get_controller_id_from_config_entry(config_entry)
key = config_entry.entry_id
if controller_id in self.data:
key = controller_id
data = self.data.get(key, {"wireless_devices": []})
return set(data["wireless_devices"])
@callback
def update_data(self, data, config_entry):
"""Update data and schedule to save to file."""
controller_id = get_controller_id_from_config_entry(config_entry)
if controller_id in self.data:
self.data.pop(controller_id)
self.data[config_entry.entry_id] = {"wireless_devices": list(data)}
self._store.async_delay_save(self._data_to_save, SAVE_DELAY)
@callback
def _data_to_save(self):
"""Return data of UniFi wireless clients to store in a file."""
return self.data
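# A short sketch of the storage shape used above (hypothetical MAC address):
#     clients.update_data({"00:11:22:33:44:55"}, config_entry)
#     # -> self.data[config_entry.entry_id] == {"wireless_devices": ["00:11:22:33:44:55"]}
#     clients.get_data(config_entry)
#     # -> {"00:11:22:33:44:55"}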
|
import logging
from aiohttp import ContentTypeError
from requests.exceptions import ConnectTimeout, HTTPError
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, EVENT_HOMEASSISTANT_STOP
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
import homeassistant.helpers.config_validation as cv
from .const import DOMAIN
from .utils import load_plum
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
}
)
},
extra=vol.ALLOW_EXTRA,
)
PLATFORMS = ["light"]
async def async_setup(hass: HomeAssistant, config: dict):
"""Plum Lightpad Platform initialization."""
if DOMAIN not in config:
return True
conf = config[DOMAIN]
_LOGGER.info("Found Plum Lightpad configuration in config, importing...")
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=conf
)
)
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Set up Plum Lightpad from a config entry."""
_LOGGER.debug("Setting up config entry with ID = %s", entry.unique_id)
username = entry.data.get(CONF_USERNAME)
password = entry.data.get(CONF_PASSWORD)
try:
plum = await load_plum(username, password, hass)
except ContentTypeError as ex:
_LOGGER.error("Unable to authenticate to Plum cloud: %s", ex)
return False
except (ConnectTimeout, HTTPError) as ex:
_LOGGER.error("Unable to connect to Plum cloud: %s", ex)
raise ConfigEntryNotReady from ex
hass.data.setdefault(DOMAIN, {})
hass.data[DOMAIN][entry.entry_id] = plum
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
def cleanup(event):
"""Clean up resources."""
plum.cleanup()
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, cleanup)
return True
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from sqs import SqsCollector
###############################################################################
class TestSqsCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('SqsCollector', {
})
self.collector = SqsCollector(config, None)
def test_import(self):
self.assertTrue(SqsCollector)
###############################################################################
if __name__ == "__main__":
unittest.main()
|
from os import path
from collections import OrderedDict
import numpy as np
from .io.meas_info import Info
from .io.pick import _pick_data_channels, pick_types
from .utils import logger, verbose, _get_stim_channel
_SELECTIONS = ['Vertex', 'Left-temporal', 'Right-temporal', 'Left-parietal',
'Right-parietal', 'Left-occipital', 'Right-occipital',
'Left-frontal', 'Right-frontal']
_EEG_SELECTIONS = ['EEG 1-32', 'EEG 33-64', 'EEG 65-96', 'EEG 97-128']
@verbose
def read_selection(name, fname=None, info=None, verbose=None):
"""Read channel selection from file.
By default, the selections used in ``mne_browse_raw`` are supported.
Additional selections can be added by specifying a selection file (e.g.
produced using ``mne_browse_raw``) using the ``fname`` parameter.
    The ``name`` parameter can be a string or a list of strings. The returned
selection will be the combination of all selections in the file where
(at least) one element in name is a substring of the selection name in
the file. For example, ``name=['temporal', 'Right-frontal']`` will produce
a combination of ``'Left-temporal'``, ``'Right-temporal'``, and
``'Right-frontal'``.
The included selections are:
* ``'Vertex'``
* ``'Left-temporal'``
* ``'Right-temporal'``
* ``'Left-parietal'``
* ``'Right-parietal'``
* ``'Left-occipital'``
* ``'Right-occipital'``
* ``'Left-frontal'``
* ``'Right-frontal'``
Parameters
----------
name : str or list of str
        Name of the selection. If a list, the selections are combined.
fname : str
Filename of the selection file (if None, built-in selections are used).
info : instance of Info
        Measurement info, which will be used to determine the spacing
of channel names to return, e.g. ``'MEG 0111'`` for old Neuromag
systems and ``'MEG0111'`` for new ones.
%(verbose)s
Returns
-------
    sel : list of str
List with channel names in the selection.
"""
# convert name to list of string
if not isinstance(name, (list, tuple)):
name = [name]
if isinstance(info, Info):
picks = pick_types(info, meg=True, exclude=())
if len(picks) > 0 and ' ' not in info['ch_names'][picks[0]]:
spacing = 'new'
else:
spacing = 'old'
elif info is not None:
raise TypeError('info must be an instance of Info or None, not %s'
% (type(info),))
else: # info is None
spacing = 'old'
# use built-in selections by default
if fname is None:
fname = path.join(path.dirname(__file__), 'data', 'mne_analyze.sel')
if not path.isfile(fname):
raise ValueError('The file %s does not exist.' % fname)
# use this to make sure we find at least one match for each name
name_found = {n: False for n in name}
with open(fname, 'r') as fid:
sel = []
for line in fid:
line = line.strip()
# skip blank lines and comments
if len(line) == 0 or line[0] == '#':
continue
# get the name of the selection in the file
pos = line.find(':')
if pos < 0:
logger.info('":" delimiter not found in selections file, '
'skipping line')
continue
sel_name_file = line[:pos]
# search for substring match with name provided
for n in name:
if sel_name_file.find(n) >= 0:
sel.extend(line[pos + 1:].split('|'))
name_found[n] = True
break
# make sure we found at least one match for each name
for n, found in name_found.items():
if not found:
raise ValueError('No match for selection name "%s" found' % n)
# make the selection a sorted list with unique elements
sel = list(set(sel))
sel.sort()
if spacing == 'new': # "new" or "old" by now, "old" is default
sel = [s.replace('MEG ', 'MEG') for s in sel]
return sel
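# A minimal usage sketch (built-in selection file, default 'old' spacing):
if __name__ == "__main__":
    channels = read_selection(['temporal'])
    # Combines 'Left-temporal' and 'Right-temporal' into one sorted list,
    # e.g. 'MEG 0111', 'MEG 0112', ...
    assert all(ch.startswith('MEG') for ch in channels)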
def _divide_to_regions(info, add_stim=True):
"""Divide channels to regions by positions."""
from scipy.stats import zscore
picks = _pick_data_channels(info, exclude=[])
chs_in_lobe = len(picks) // 4
pos = np.array([ch['loc'][:3] for ch in info['chs']])
x, y, z = pos.T
frontal = picks[np.argsort(y[picks])[-chs_in_lobe:]]
picks = np.setdiff1d(picks, frontal)
occipital = picks[np.argsort(y[picks])[:chs_in_lobe]]
picks = np.setdiff1d(picks, occipital)
temporal = picks[np.argsort(z[picks])[:chs_in_lobe]]
picks = np.setdiff1d(picks, temporal)
lt, rt = _divide_side(temporal, x)
lf, rf = _divide_side(frontal, x)
lo, ro = _divide_side(occipital, x)
lp, rp = _divide_side(picks, x) # Parietal lobe from the remaining picks.
# Because of the way the sides are divided, there may be outliers in the
# temporal lobes. Here we switch the sides for these outliers. For other
# lobes it is not a big problem because of the vicinity of the lobes.
with np.errstate(invalid='ignore'): # invalid division, greater compare
zs = np.abs(zscore(x[rt]))
outliers = np.array(rt)[np.where(zs > 2.)[0]]
rt = list(np.setdiff1d(rt, outliers))
with np.errstate(invalid='ignore'): # invalid division, greater compare
zs = np.abs(zscore(x[lt]))
outliers = np.append(outliers, (np.array(lt)[np.where(zs > 2.)[0]]))
lt = list(np.setdiff1d(lt, outliers))
l_mean = np.mean(x[lt])
r_mean = np.mean(x[rt])
for outlier in outliers:
if abs(l_mean - x[outlier]) < abs(r_mean - x[outlier]):
lt.append(outlier)
else:
rt.append(outlier)
if add_stim:
stim_ch = _get_stim_channel(None, info, raise_error=False)
if len(stim_ch) > 0:
for region in [lf, rf, lo, ro, lp, rp, lt, rt]:
region.append(info['ch_names'].index(stim_ch[0]))
return OrderedDict([('Left-frontal', lf), ('Right-frontal', rf),
('Left-parietal', lp), ('Right-parietal', rp),
('Left-occipital', lo), ('Right-occipital', ro),
('Left-temporal', lt), ('Right-temporal', rt)])
def _divide_side(lobe, x):
"""Make a separation between left and right lobe evenly."""
lobe = np.asarray(lobe)
median = np.median(x[lobe])
left = lobe[np.where(x[lobe] < median)[0]]
right = lobe[np.where(x[lobe] > median)[0]]
medians = np.where(x[lobe] == median)[0]
left = np.sort(np.concatenate([left, lobe[medians[1::2]]]))
right = np.sort(np.concatenate([right, lobe[medians[::2]]]))
return list(left), list(right)
|
import glob
import json
import os
from nikola.plugin_categories import ShortcodePlugin
from nikola import utils
TABLE = {}
LOGGER = utils.get_logger('emoji')
def _populate():
for fname in glob.glob(os.path.join(os.path.dirname(__file__), 'data', '*.json')):
with open(fname, encoding="utf-8-sig") as inf:
data = json.load(inf)
data = data[list(data.keys())[0]]
data = data[list(data.keys())[0]]
for item in data:
if item['key'] in TABLE:
LOGGER.warning('Repeated emoji {}'.format(item['key']))
else:
TABLE[item['key']] = item['value']
class Plugin(ShortcodePlugin):
"""Plugin for gist directive."""
name = "emoji"
def handler(self, name, filename=None, site=None, data=None, lang=None, post=None):
"""Create HTML for emoji."""
if not TABLE:
_populate()
try:
output = u'''<span class="emoji">{}</span>'''.format(TABLE[name])
except KeyError:
LOGGER.warning('Unknown emoji {}'.format(name))
output = u'''<span class="emoji error">{}</span>'''.format(name)
return output, []
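# A minimal usage sketch (hypothetical emoji key; real keys come from the
# bundled JSON data files): in a post,
#     {{% emoji smile %}}
# calls Plugin.handler('smile') and renders
#     <span class="emoji">...</span>
# falling back to <span class="emoji error">smile</span> for unknown keys.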
|
from pyopenuv.errors import InvalidApiKeyError
import pytest
from homeassistant import data_entry_flow
from homeassistant.components.openuv import DOMAIN
from homeassistant.config_entries import SOURCE_USER
from homeassistant.const import (
CONF_API_KEY,
CONF_ELEVATION,
CONF_LATITUDE,
CONF_LONGITUDE,
)
from tests.async_mock import patch
from tests.common import MockConfigEntry
@pytest.fixture(autouse=True)
def mock_setup():
"""Prevent setup."""
with patch(
"homeassistant.components.openuv.async_setup",
return_value=True,
), patch(
"homeassistant.components.openuv.async_setup_entry",
return_value=True,
):
yield
async def test_duplicate_error(hass):
"""Test that errors are shown when duplicates are added."""
conf = {
CONF_API_KEY: "12345abcde",
CONF_ELEVATION: 59.1234,
CONF_LATITUDE: 39.128712,
CONF_LONGITUDE: -104.9812612,
}
MockConfigEntry(
domain=DOMAIN, unique_id="39.128712, -104.9812612", data=conf
).add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data=conf
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_invalid_api_key(hass):
"""Test that an invalid API key throws an error."""
conf = {
CONF_API_KEY: "12345abcde",
CONF_ELEVATION: 59.1234,
CONF_LATITUDE: 39.128712,
CONF_LONGITUDE: -104.9812612,
}
with patch(
"pyopenuv.client.Client.uv_index",
side_effect=InvalidApiKeyError,
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data=conf
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {CONF_API_KEY: "invalid_api_key"}
async def test_step_user(hass):
"""Test that the user step works."""
conf = {
CONF_API_KEY: "12345abcde",
CONF_ELEVATION: 59.1234,
CONF_LATITUDE: 39.128712,
CONF_LONGITUDE: -104.9812612,
}
with patch(
"homeassistant.components.airvisual.async_setup_entry", return_value=True
), patch("pyopenuv.client.Client.uv_index"):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data=conf
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "39.128712, -104.9812612"
assert result["data"] == {
CONF_API_KEY: "12345abcde",
CONF_ELEVATION: 59.1234,
CONF_LATITUDE: 39.128712,
CONF_LONGITUDE: -104.9812612,
}
|
from tempfile import TemporaryFile
from xmlrpc.client import Binary
from xmlrpc.client import Fault
from xmlrpc.client import ServerProxy
from django.contrib.sites.models import Site
from django.core.files.storage import default_storage
from django.test import TestCase
from django.test.utils import override_settings
from tagging.models import Tag
from zinnia.managers import DRAFT
from zinnia.managers import PUBLISHED
from zinnia.models.author import Author
from zinnia.models.category import Category
from zinnia.models.entry import Entry
from zinnia.settings import UPLOAD_TO
from zinnia.signals import disconnect_entry_signals
from zinnia.tests.utils import TestTransport
from zinnia.tests.utils import datetime
from zinnia.tests.utils import skip_if_custom_user
from zinnia.xmlrpc.metaweblog import authenticate
from zinnia.xmlrpc.metaweblog import post_structure
@skip_if_custom_user
@override_settings(
ROOT_URLCONF='zinnia.tests.implementations.urls.default'
)
class MetaWeblogTestCase(TestCase):
"""Test cases for MetaWeblog"""
def setUp(self):
disconnect_entry_signals()
# Create data
self.webmaster = Author.objects.create_superuser(
username='webmaster',
email='[email protected]',
password='password')
self.contributor = Author.objects.create_user(
username='contributor',
email='[email protected]',
password='password')
self.site = Site.objects.get_current()
self.categories = [
Category.objects.create(title='Category 1',
slug='category-1'),
Category.objects.create(title='Category 2',
slug='category-2')]
params = {'title': 'My entry 1', 'content': 'My content 1',
'tags': 'zinnia, test', 'slug': 'my-entry-1',
'publication_date': datetime(2010, 1, 1, 12),
'creation_date': datetime(2010, 1, 1, 12),
'status': PUBLISHED}
self.entry_1 = Entry.objects.create(**params)
self.entry_1.authors.add(self.webmaster)
self.entry_1.categories.add(*self.categories)
self.entry_1.sites.add(self.site)
params = {'title': 'My entry 2', 'content': 'My content 2',
'publication_date': datetime(2010, 3, 15),
'creation_date': datetime(2010, 3, 15),
'tags': 'zinnia, test', 'slug': 'my-entry-2'}
self.entry_2 = Entry.objects.create(**params)
self.entry_2.authors.add(self.webmaster)
self.entry_2.categories.add(self.categories[0])
self.entry_2.sites.add(self.site)
        # Instantiating the server proxy
self.server = ServerProxy('http://localhost:8000/xmlrpc/',
transport=TestTransport())
def test_authenticate(self):
self.assertRaises(Fault, authenticate, 'badcontributor', 'badpassword')
self.assertRaises(Fault, authenticate, 'contributor', 'badpassword')
self.assertRaises(Fault, authenticate, 'contributor', 'password')
self.contributor.is_staff = True
self.contributor.save()
self.assertEqual(authenticate('contributor', 'password'),
self.contributor)
self.assertRaises(Fault, authenticate, 'contributor',
'password', 'zinnia.change_entry')
self.assertEqual(authenticate('webmaster', 'password'),
self.webmaster)
self.assertEqual(authenticate('webmaster', 'password',
'zinnia.change_entry'),
self.webmaster)
def test_get_users_blogs(self):
self.assertRaises(Fault, self.server.blogger.getUsersBlogs,
'apikey', 'contributor', 'password')
self.assertEqual(
self.server.blogger.getUsersBlogs(
'apikey', 'webmaster', 'password'),
[{'url': 'http://example.com/',
'blogid': 1,
'blogName': 'example.com'}])
def test_get_user_info(self):
self.assertRaises(Fault, self.server.blogger.getUserInfo,
'apikey', 'contributor', 'password')
self.webmaster.first_name = 'John'
self.webmaster.save()
self.assertEqual(self.server.blogger.getUserInfo(
'apikey', 'webmaster', 'password'),
{'firstname': 'John', 'lastname': '',
'url': 'http://example.com/authors/webmaster/',
'userid': self.webmaster.pk,
'nickname': 'webmaster',
'email': '[email protected]'})
self.webmaster.last_name = 'Doe'
self.webmaster.save()
self.assertEqual(self.server.blogger.getUserInfo(
'apikey', 'webmaster', 'password'),
{'firstname': 'John', 'lastname': 'Doe',
'url': 'http://example.com/authors/webmaster/',
'userid': self.webmaster.pk,
'nickname': 'webmaster',
'email': '[email protected]'})
def test_get_authors(self):
self.assertRaises(Fault, self.server.wp.getAuthors,
'apikey', 'contributor', 'password')
self.assertEqual(
self.server.wp.getAuthors(
'apikey', 'webmaster', 'password'),
[{'user_login': 'webmaster',
'user_id': self.webmaster.pk,
'user_email': '[email protected]',
'display_name': 'webmaster'}])
def test_get_tags(self):
self.assertRaises(Fault, self.server.wp.getTags,
1, 'contributor', 'password')
self.assertEqual(
self.server.wp.getTags('apikey', 'webmaster', 'password'),
[{'count': 1,
'html_url': 'http://example.com/tags/test/',
'name': 'test',
'rss_url': 'http://example.com/feeds/tags/test/',
'slug': 'test',
'tag_id': Tag.objects.get(name='test').pk},
{'count': 1,
'html_url': 'http://example.com/tags/zinnia/',
'name': 'zinnia',
'rss_url': 'http://example.com/feeds/tags/zinnia/',
'slug': 'zinnia',
'tag_id': Tag.objects.get(name='zinnia').pk}])
def test_get_categories(self):
self.assertRaises(Fault, self.server.metaWeblog.getCategories,
1, 'contributor', 'password')
self.assertEqual(
self.server.metaWeblog.getCategories('apikey',
'webmaster', 'password'),
[{'rssUrl': 'http://example.com/feeds/categories/category-1/',
'description': 'Category 1',
'htmlUrl': 'http://example.com/categories/category-1/',
'categoryId': self.categories[0].pk, 'parentId': 0,
'categoryName': 'Category 1',
'categoryDescription': ''},
{'rssUrl': 'http://example.com/feeds/categories/category-2/',
'description': 'Category 2',
'htmlUrl': 'http://example.com/categories/category-2/',
'categoryId': self.categories[1].pk, 'parentId': 0,
'categoryName': 'Category 2',
'categoryDescription': ''}])
self.categories[1].parent = self.categories[0]
self.categories[1].description = 'category 2 description'
self.categories[1].save()
self.assertEqual(
self.server.metaWeblog.getCategories('apikey',
'webmaster', 'password'),
[{'rssUrl': 'http://example.com/feeds/categories/category-1/',
'description': 'Category 1',
'htmlUrl': 'http://example.com/categories/category-1/',
'categoryId': self.categories[0].pk, 'parentId': 0,
'categoryName': 'Category 1',
'categoryDescription': ''},
{'rssUrl':
'http://example.com/feeds/categories/category-1/category-2/',
'description': 'Category 2',
'htmlUrl':
'http://example.com/categories/category-1/category-2/',
'categoryId': self.categories[1].pk,
'parentId': self.categories[0].pk,
'categoryName': 'Category 2',
'categoryDescription': 'category 2 description'}])
def test_new_category(self):
category_struct = {'name': 'Category 3', 'slug': 'category-3',
'description': 'Category 3 description',
'parent_id': self.categories[0].pk}
self.assertRaises(Fault, self.server.wp.newCategory,
1, 'contributor', 'password', category_struct)
self.assertEqual(Category.objects.count(), 2)
new_category_id = self.server.wp.newCategory(
1, 'webmaster', 'password', category_struct)
self.assertEqual(Category.objects.count(), 3)
category = Category.objects.get(pk=new_category_id)
self.assertEqual(category.title, 'Category 3')
self.assertEqual(category.description, 'Category 3 description')
self.assertEqual(category.slug, 'category-3')
self.assertEqual(category.parent, self.categories[0])
def test_get_recent_posts(self):
self.assertRaises(Fault, self.server.metaWeblog.getRecentPosts,
1, 'contributor', 'password', 10)
self.assertEqual(len(self.server.metaWeblog.getRecentPosts(
1, 'webmaster', 'password', 10)), 2)
def test_delete_post(self):
self.assertRaises(Fault, self.server.blogger.deletePost,
'apikey', 1, 'contributor', 'password', 'publish')
self.assertEqual(Entry.objects.count(), 2)
self.assertTrue(
self.server.blogger.deletePost(
'apikey', self.entry_1.pk, 'webmaster', 'password', 'publish'))
self.assertEqual(Entry.objects.count(), 1)
def test_get_post(self):
self.assertRaises(Fault, self.server.metaWeblog.getPost,
1, 'contributor', 'password')
post = self.server.metaWeblog.getPost(
self.entry_1.pk, 'webmaster', 'password')
self.assertEqual(post['title'], self.entry_1.title)
self.assertEqual(post['description'], '<p>My content 1</p>')
self.assertEqual(post['categories'], ['Category 1', 'Category 2'])
self.assertTrue('2010-01-01T12:00:00' in post['dateCreated'].value)
self.assertEqual(post['link'],
'http://example.com/2010/01/01/my-entry-1/')
self.assertEqual(post['permaLink'],
'http://example.com/2010/01/01/my-entry-1/')
self.assertEqual(post['postid'], self.entry_1.pk)
self.assertEqual(post['userid'], 'webmaster')
self.assertEqual(post['mt_excerpt'], 'My content 1')
self.assertEqual(post['mt_allow_comments'], 1)
self.assertEqual(post['mt_allow_pings'], 1)
self.assertEqual(post['mt_keywords'], self.entry_1.tags)
self.assertEqual(post['wp_author'], 'webmaster')
self.assertEqual(post['wp_author_id'], self.webmaster.pk)
self.assertEqual(post['wp_author_display_name'], 'webmaster')
self.assertEqual(post['wp_password'], '')
self.assertEqual(post['wp_slug'], self.entry_1.slug)
def test_new_post(self):
post = post_structure(self.entry_2, self.site)
self.assertRaises(Fault, self.server.metaWeblog.newPost,
1, 'contributor', 'password', post, 1)
self.assertEqual(Entry.objects.count(), 2)
self.assertEqual(Entry.published.count(), 1)
self.server.metaWeblog.newPost(
1, 'webmaster', 'password', post, 1)
self.assertEqual(Entry.objects.count(), 3)
self.assertEqual(Entry.published.count(), 2)
del post['dateCreated']
post['wp_author_id'] = self.contributor.pk
self.server.metaWeblog.newPost(
1, 'webmaster', 'password', post, 0)
self.assertEqual(Entry.objects.count(), 4)
self.assertEqual(Entry.published.count(), 2)
def test_edit_post(self):
post = post_structure(self.entry_2, self.site)
self.assertRaises(Fault, self.server.metaWeblog.editPost,
1, 'contributor', 'password', post, 1)
new_post_id = self.server.metaWeblog.newPost(
1, 'webmaster', 'password', post, 0)
entry = Entry.objects.get(pk=new_post_id)
self.assertEqual(entry.title, self.entry_2.title)
self.assertEqual(entry.content, self.entry_2.html_content)
self.assertEqual(entry.excerpt, self.entry_2.excerpt)
self.assertEqual(entry.slug, self.entry_2.slug)
self.assertEqual(entry.status, DRAFT)
self.assertEqual(entry.password, self.entry_2.password)
self.assertEqual(entry.comment_enabled, True)
self.assertEqual(entry.pingback_enabled, True)
self.assertEqual(entry.categories.count(), 1)
self.assertEqual(entry.authors.count(), 1)
self.assertEqual(entry.authors.all()[0], self.webmaster)
self.assertEqual(entry.creation_date, self.entry_2.creation_date)
self.assertEqual(entry.publication_date, self.entry_2.creation_date)
entry.title = 'Title edited'
entry.creation_date = datetime(2000, 1, 1)
post = post_structure(entry, self.site)
post['categories'] = ''
post['description'] = 'Content edited'
post['mt_excerpt'] = 'Content edited'
post['wp_slug'] = 'slug-edited'
post['wp_password'] = 'password'
post['mt_allow_comments'] = 2
post['mt_allow_pings'] = 0
response = self.server.metaWeblog.editPost(
new_post_id, 'webmaster', 'password', post, 1)
self.assertEqual(response, True)
entry = Entry.objects.get(pk=new_post_id)
self.assertEqual(entry.title, post['title'])
self.assertEqual(entry.content, post['description'])
self.assertEqual(entry.excerpt, post['mt_excerpt'])
self.assertEqual(entry.slug, 'slug-edited')
self.assertEqual(entry.status, PUBLISHED)
self.assertEqual(entry.password, 'password')
self.assertEqual(entry.comment_enabled, False)
self.assertEqual(entry.pingback_enabled, False)
self.assertEqual(entry.categories.count(), 0)
self.assertEqual(entry.creation_date, datetime(2000, 1, 1))
self.assertEqual(entry.publication_date, datetime(2000, 1, 1))
del post['dateCreated']
post['wp_author_id'] = self.contributor.pk
response = self.server.metaWeblog.editPost(
new_post_id, 'webmaster', 'password', post, 1)
entry = Entry.objects.get(pk=new_post_id)
self.assertEqual(entry.authors.count(), 1)
self.assertEqual(entry.authors.all()[0], self.contributor)
self.assertEqual(entry.creation_date, datetime(2000, 1, 1))
self.assertEqual(entry.publication_date, datetime(2000, 1, 1))
def test_new_media_object(self):
file_ = TemporaryFile()
file_.write('My test content'.encode('utf-8'))
file_.seek(0)
media = {'name': 'test file.txt',
'type': 'text/plain',
'bits': Binary(file_.read())}
file_.close()
self.assertRaises(Fault, self.server.metaWeblog.newMediaObject,
1, 'contributor', 'password', media)
new_media = self.server.metaWeblog.newMediaObject(
1, 'webmaster', 'password', media)
self.assertTrue('/test-file' in new_media['url'])
default_storage.delete('/'.join([
UPLOAD_TO, new_media['url'].split('/')[-1]]))
|
import logging
import struct
from typing import Any, Optional, Union
from pymodbus.exceptions import ConnectionException, ModbusException
from pymodbus.pdu import ExceptionResponse
import voluptuous as vol
from homeassistant.components.sensor import DEVICE_CLASSES_SCHEMA, PLATFORM_SCHEMA
from homeassistant.const import (
CONF_DEVICE_CLASS,
CONF_NAME,
CONF_OFFSET,
CONF_SLAVE,
CONF_STRUCTURE,
CONF_UNIT_OF_MEASUREMENT,
)
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.restore_state import RestoreEntity
from .const import (
CALL_TYPE_REGISTER_HOLDING,
CALL_TYPE_REGISTER_INPUT,
CONF_COUNT,
CONF_DATA_TYPE,
CONF_HUB,
CONF_PRECISION,
CONF_REGISTER,
CONF_REGISTER_TYPE,
CONF_REGISTERS,
CONF_REVERSE_ORDER,
CONF_SCALE,
DATA_TYPE_CUSTOM,
DATA_TYPE_FLOAT,
DATA_TYPE_INT,
DATA_TYPE_STRING,
DATA_TYPE_UINT,
DEFAULT_HUB,
DEFAULT_STRUCT_FORMAT,
MODBUS_DOMAIN,
)
_LOGGER = logging.getLogger(__name__)
def number(value: Any) -> Union[int, float]:
"""Coerce a value to number without losing precision."""
if isinstance(value, int):
return value
if isinstance(value, str):
try:
value = int(value)
return value
except (TypeError, ValueError):
pass
try:
value = float(value)
return value
except (TypeError, ValueError) as err:
raise vol.Invalid(f"invalid number {value}") from err
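# A short sketch of the coercion behaviour: ints and int-like strings stay
# exact ints (no float round-trip); anything else falls back to float or
# raises vol.Invalid.
if __name__ == "__main__":
    assert number(7) == 7 and isinstance(number("7"), int)
    assert number("7.5") == 7.5
    try:
        number("not-a-number")
    except vol.Invalid:
        pass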
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_REGISTERS): [
{
vol.Required(CONF_NAME): cv.string,
vol.Required(CONF_REGISTER): cv.positive_int,
vol.Optional(CONF_COUNT, default=1): cv.positive_int,
vol.Optional(CONF_DATA_TYPE, default=DATA_TYPE_INT): vol.In(
[
DATA_TYPE_INT,
DATA_TYPE_UINT,
DATA_TYPE_FLOAT,
DATA_TYPE_STRING,
DATA_TYPE_CUSTOM,
]
),
vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_HUB, default=DEFAULT_HUB): cv.string,
vol.Optional(CONF_OFFSET, default=0): number,
vol.Optional(CONF_PRECISION, default=0): cv.positive_int,
vol.Optional(
CONF_REGISTER_TYPE, default=CALL_TYPE_REGISTER_HOLDING
): vol.In([CALL_TYPE_REGISTER_HOLDING, CALL_TYPE_REGISTER_INPUT]),
vol.Optional(CONF_REVERSE_ORDER, default=False): cv.boolean,
vol.Optional(CONF_SCALE, default=1): number,
vol.Optional(CONF_SLAVE): cv.positive_int,
vol.Optional(CONF_STRUCTURE): cv.string,
vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
}
]
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Modbus sensors."""
sensors = []
for register in config[CONF_REGISTERS]:
if register[CONF_DATA_TYPE] == DATA_TYPE_STRING:
structure = str(register[CONF_COUNT] * 2) + "s"
elif register[CONF_DATA_TYPE] != DATA_TYPE_CUSTOM:
try:
structure = f">{DEFAULT_STRUCT_FORMAT[register[CONF_DATA_TYPE]][register[CONF_COUNT]]}"
except KeyError:
_LOGGER.error(
"Unable to detect data type for %s sensor, try a custom type",
register[CONF_NAME],
)
continue
else:
structure = register.get(CONF_STRUCTURE)
try:
size = struct.calcsize(structure)
except struct.error as err:
_LOGGER.error("Error in sensor %s structure: %s", register[CONF_NAME], err)
continue
if register[CONF_COUNT] * 2 != size:
_LOGGER.error(
"Structure size (%d bytes) mismatch registers count (%d words)",
size,
register[CONF_COUNT],
)
continue
hub_name = register[CONF_HUB]
hub = hass.data[MODBUS_DOMAIN][hub_name]
sensors.append(
ModbusRegisterSensor(
hub,
register[CONF_NAME],
register.get(CONF_SLAVE),
register[CONF_REGISTER],
register[CONF_REGISTER_TYPE],
register.get(CONF_UNIT_OF_MEASUREMENT),
register[CONF_COUNT],
register[CONF_REVERSE_ORDER],
register[CONF_SCALE],
register[CONF_OFFSET],
structure,
register[CONF_PRECISION],
register[CONF_DATA_TYPE],
register.get(CONF_DEVICE_CLASS),
)
)
if not sensors:
return False
add_entities(sensors)
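# A short sketch of the size check above: assuming DEFAULT_STRUCT_FORMAT maps
# a float with count 2 to "f", the structure becomes ">f" and
# struct.calcsize(">f") == 4 bytes == 2 registers * 2 bytes each.
if __name__ == "__main__":
    assert struct.calcsize(">f") == 2 * 2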
class ModbusRegisterSensor(RestoreEntity):
"""Modbus register sensor."""
def __init__(
self,
hub,
name,
slave,
register,
register_type,
unit_of_measurement,
count,
reverse_order,
scale,
offset,
structure,
precision,
data_type,
device_class,
):
"""Initialize the modbus register sensor."""
self._hub = hub
self._name = name
self._slave = int(slave) if slave else None
self._register = int(register)
self._register_type = register_type
self._unit_of_measurement = unit_of_measurement
self._count = int(count)
self._reverse_order = reverse_order
self._scale = scale
self._offset = offset
self._precision = precision
self._structure = structure
self._data_type = data_type
self._device_class = device_class
self._value = None
self._available = True
async def async_added_to_hass(self):
"""Handle entity which will be added."""
state = await self.async_get_last_state()
if not state:
return
self._value = state.state
@property
def state(self):
"""Return the state of the sensor."""
return self._value
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return self._unit_of_measurement
@property
def device_class(self) -> Optional[str]:
"""Return the device class of the sensor."""
return self._device_class
@property
def available(self) -> bool:
"""Return True if entity is available."""
return self._available
def update(self):
"""Update the state of the sensor."""
try:
if self._register_type == CALL_TYPE_REGISTER_INPUT:
result = self._hub.read_input_registers(
self._slave, self._register, self._count
)
else:
result = self._hub.read_holding_registers(
self._slave, self._register, self._count
)
except ConnectionException:
self._available = False
return
if isinstance(result, (ModbusException, ExceptionResponse)):
self._available = False
return
registers = result.registers
if self._reverse_order:
registers.reverse()
byte_string = b"".join([x.to_bytes(2, byteorder="big") for x in registers])
if self._data_type != DATA_TYPE_STRING:
val = struct.unpack(self._structure, byte_string)[0]
val = self._scale * val + self._offset
if isinstance(val, int):
self._value = str(val)
if self._precision > 0:
self._value += "." + "0" * self._precision
else:
self._value = f"{val:.{self._precision}f}"
else:
self._value = byte_string.decode()
self._available = True
|
import logging
from enum import Enum
from gi.repository import Gdk, Gio, GLib, GObject, Gtk, GtkSource, Pango
from meld.meldbuffer import MeldBuffer
from meld.settings import bind_settings, get_meld_settings, settings
from meld.style import colour_lookup_with_fallback, get_common_theme
log = logging.getLogger(__name__)
def get_custom_encoding_candidates():
custom_candidates = []
try:
for charset in settings.get_value('detect-encodings'):
encoding = GtkSource.Encoding.get_from_charset(charset)
if not encoding:
log.warning('Invalid charset "%s" skipped', charset)
continue
custom_candidates.append(encoding)
if custom_candidates:
custom_candidates.extend(
GtkSource.Encoding.get_default_candidates())
except AttributeError:
# get_default_candidates() is only available in GtkSourceView 3.18
# and we'd rather use their defaults than our old detect list.
pass
return custom_candidates
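# A short sketch of the behaviour above: with 'detect-encodings' set to, e.g.,
# ['UTF-8', 'SHIFT_JIS'], the result is those GtkSource.Encoding objects
# followed by GtkSourceView's default candidates; an unrecognised charset is
# skipped with a warning, and an empty setting returns [].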
class LanguageManager:
manager = GtkSource.LanguageManager()
@classmethod
def get_language_from_file(cls, gfile):
try:
info = gfile.query_info(
Gio.FILE_ATTRIBUTE_STANDARD_CONTENT_TYPE, 0, None)
except (GLib.GError, AttributeError):
return None
content_type = info.get_content_type()
return cls.manager.guess_language(gfile.get_basename(), content_type)
@classmethod
def get_language_from_mime_type(cls, mime_type):
content_type = Gio.content_type_from_mime_type(mime_type)
return cls.manager.guess_language(None, content_type)
class TextviewLineAnimationType(Enum):
fill = 'fill'
stroke = 'stroke'
class TextviewLineAnimation:
__slots__ = ("start_mark", "end_mark", "start_rgba", "end_rgba",
"start_time", "duration", "anim_type")
def __init__(self, mark0, mark1, rgba0, rgba1, duration, anim_type):
self.start_mark = mark0
self.end_mark = mark1
self.start_rgba = rgba0
self.end_rgba = rgba1
self.start_time = GLib.get_monotonic_time()
self.duration = duration
self.anim_type = anim_type
class SourceViewHelperMixin:
def get_y_for_line_num(self, line):
buf = self.get_buffer()
it = buf.get_iter_at_line(line)
y, h = self.get_line_yrange(it)
if line >= buf.get_line_count():
return y + h
return y
def get_line_num_for_y(self, y):
return self.get_line_at_y(y)[0].get_line()
class MeldSourceView(GtkSource.View, SourceViewHelperMixin):
__gtype_name__ = "MeldSourceView"
__gsettings_bindings_view__ = (
('highlight-current-line', 'highlight-current-line-local'),
('indent-width', 'tab-width'),
('insert-spaces-instead-of-tabs', 'insert-spaces-instead-of-tabs'),
('enable-space-drawer', 'draw-spaces-bool'),
('wrap-mode', 'wrap-mode'),
('show-line-numbers', 'show-line-numbers'),
)
# Named so as not to conflict with the GtkSourceView property
highlight_current_line_local = GObject.Property(type=bool, default=False)
def get_show_line_numbers(self):
return self._show_line_numbers
def set_show_line_numbers(self, show):
if show == self._show_line_numbers:
return
if getattr(self, 'line_renderer', None):
self.line_renderer.set_visible(show)
self._show_line_numbers = bool(show)
self.notify("show-line-numbers")
show_line_numbers = GObject.Property(
type=bool, default=False, getter=get_show_line_numbers,
setter=set_show_line_numbers)
wrap_mode_bool = GObject.Property(
type=bool, default=False,
nick="Wrap mode (Boolean version)",
blurb=(
"Mirror of the wrap-mode GtkTextView property, reduced to "
"a single Boolean for UI ease-of-use."
),
)
draw_spaces_bool = GObject.Property(
type=bool, default=False,
nick="Draw spaces (Boolean version)",
blurb=(
"Mirror of the draw-spaces GtkSourceView property, "
"reduced to a single Boolean for UI ease-of-use."
),
)
overscroll_num_lines = GObject.Property(
type=int, default=5, minimum=0, maximum=100,
nick="Overscroll line count",
flags=(
GObject.ParamFlags.READWRITE |
GObject.ParamFlags.CONSTRUCT
),
)
replaced_entries = (
# We replace the default GtkSourceView undo mechanism
(Gdk.KEY_z, Gdk.ModifierType.CONTROL_MASK),
(Gdk.KEY_z, Gdk.ModifierType.CONTROL_MASK |
Gdk.ModifierType.SHIFT_MASK),
# We replace the default line movement behaviour of Alt+Up/Down
(Gdk.KEY_Up, Gdk.ModifierType.MOD1_MASK),
(Gdk.KEY_KP_Up, Gdk.ModifierType.MOD1_MASK),
(Gdk.KEY_KP_Up, Gdk.ModifierType.MOD1_MASK |
Gdk.ModifierType.SHIFT_MASK),
(Gdk.KEY_Down, Gdk.ModifierType.MOD1_MASK),
(Gdk.KEY_KP_Down, Gdk.ModifierType.MOD1_MASK),
(Gdk.KEY_KP_Down, Gdk.ModifierType.MOD1_MASK |
Gdk.ModifierType.SHIFT_MASK),
# ...and Alt+Left/Right
(Gdk.KEY_Left, Gdk.ModifierType.MOD1_MASK),
(Gdk.KEY_KP_Left, Gdk.ModifierType.MOD1_MASK),
(Gdk.KEY_Right, Gdk.ModifierType.MOD1_MASK),
(Gdk.KEY_KP_Right, Gdk.ModifierType.MOD1_MASK),
# ...and Ctrl+Page Up/Down
(Gdk.KEY_Page_Up, Gdk.ModifierType.CONTROL_MASK),
(Gdk.KEY_KP_Page_Up, Gdk.ModifierType.CONTROL_MASK),
(Gdk.KEY_Page_Down, Gdk.ModifierType.CONTROL_MASK),
(Gdk.KEY_KP_Page_Down, Gdk.ModifierType.CONTROL_MASK),
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.drag_dest_add_uri_targets()
# Most bindings are on SourceView, except the Page Up/Down ones
# which are on TextView.
binding_set_names = ('GtkSourceView', 'GtkTextView')
for set_name in binding_set_names:
binding_set = Gtk.binding_set_find(set_name)
for key, modifiers in self.replaced_entries:
Gtk.binding_entry_remove(binding_set, key, modifiers)
self.anim_source_id = None
self.animating_chunks = []
self.syncpoints = []
self._show_line_numbers = None
buf = MeldBuffer()
inline_tag = GtkSource.Tag.new("inline")
inline_tag.props.draw_spaces = True
buf.get_tag_table().add(inline_tag)
buf.create_tag("dimmed")
self.set_buffer(buf)
self.connect('notify::overscroll-num-lines', self.notify_overscroll)
@property
def line_height(self) -> int:
if not getattr(self, '_approx_line_height', None):
context = self.get_pango_context()
layout = Pango.Layout(context)
            layout.set_text('X', 1)
_width, self._approx_line_height = layout.get_pixel_size()
return self._approx_line_height
def notify_overscroll(self, view, param):
self.props.bottom_margin = self.overscroll_num_lines * self.line_height
def do_paste_clipboard(self, *args):
# This is an awful hack to replace another awful hack. The idea
# here is to sanitise the clipboard contents so that it doesn't
# contain GtkTextTags, by requesting and setting plain text.
def text_received_cb(clipboard, text, *user_data):
# Manual encoding is required here, or the length will be
# incorrect, and the API requires a UTF-8 bytestring.
utf8_text = text.encode('utf-8')
clipboard.set_text(text, len(utf8_text))
self.get_buffer().paste_clipboard(
clipboard, None, self.get_editable())
clipboard = self.get_clipboard(Gdk.SELECTION_CLIPBOARD)
clipboard.request_text(text_received_cb)
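    # Worked example of the note above (illustrative): 'naïve' is five code
    # points but six UTF-8 bytes, and set_text() expects the byte length, so
    # passing the code-point count would truncate multi-byte content.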
def add_fading_highlight(
self, mark0, mark1, colour_name, duration,
anim_type=TextviewLineAnimationType.fill, starting_alpha=1.0):
if not self.get_realized():
return
rgba0 = self.fill_colors[colour_name].copy()
rgba1 = self.fill_colors[colour_name].copy()
rgba0.alpha = starting_alpha
rgba1.alpha = 0.0
anim = TextviewLineAnimation(
mark0, mark1, rgba0, rgba1, duration, anim_type)
self.animating_chunks.append(anim)
def on_setting_changed(self, settings, key):
if key == 'font':
self.override_font(settings.font)
self._approx_line_height = None
elif key == 'style-scheme':
self.highlight_color = colour_lookup_with_fallback(
"meld:current-line-highlight", "background")
self.syncpoint_color = colour_lookup_with_fallback(
"meld:syncpoint-outline", "foreground")
self.fill_colors, self.line_colors = get_common_theme()
buf = self.get_buffer()
buf.set_style_scheme(settings.style_scheme)
tag = buf.get_tag_table().lookup("inline")
tag.props.background_rgba = colour_lookup_with_fallback(
"meld:inline", "background")
tag = buf.get_tag_table().lookup("dimmed")
tag.props.foreground_rgba = colour_lookup_with_fallback(
"meld:dimmed", "foreground")
def do_realize(self):
bind_settings(self)
def wrap_mode_from_bool(binding, from_value):
if from_value:
settings_mode = settings.get_enum('wrap-mode')
if settings_mode == Gtk.WrapMode.NONE:
mode = Gtk.WrapMode.WORD
else:
mode = settings_mode
else:
mode = Gtk.WrapMode.NONE
return mode
def wrap_mode_to_bool(binding, from_value):
return bool(from_value)
self.bind_property(
'wrap-mode-bool', self, 'wrap-mode',
GObject.BindingFlags.BIDIRECTIONAL,
wrap_mode_from_bool,
wrap_mode_to_bool,
)
self.wrap_mode_bool = wrap_mode_to_bool(None, self.props.wrap_mode)
self.bind_property(
'draw-spaces-bool', self.props.space_drawer, 'enable-matrix',
GObject.BindingFlags.DEFAULT | GObject.BindingFlags.SYNC_CREATE,
)
meld_settings = get_meld_settings()
self.on_setting_changed(meld_settings, 'font')
self.on_setting_changed(meld_settings, 'style-scheme')
self.get_buffer().set_style_scheme(meld_settings.style_scheme)
meld_settings.connect('changed', self.on_setting_changed)
return GtkSource.View.do_realize(self)
def do_draw_layer(self, layer, context):
if layer != Gtk.TextViewLayer.BELOW_TEXT:
return GtkSource.View.do_draw_layer(self, layer, context)
context.save()
context.set_line_width(1.0)
_, clip = Gdk.cairo_get_clip_rectangle(context)
clip_end = clip.y + clip.height
bounds = (
self.get_line_num_for_y(clip.y),
self.get_line_num_for_y(clip_end),
)
x = clip.x - 0.5
width = clip.width + 1
# Paint chunk backgrounds and outlines
for change in self.chunk_iter(bounds):
ypos0 = self.get_y_for_line_num(change[1])
ypos1 = self.get_y_for_line_num(change[2])
height = max(0, ypos1 - ypos0 - 1)
context.rectangle(x, ypos0 + 0.5, width, height)
if change[1] != change[2]:
context.set_source_rgba(*self.fill_colors[change[0]])
context.fill_preserve()
if self.current_chunk_check(change):
highlight = self.fill_colors['current-chunk-highlight']
context.set_source_rgba(*highlight)
context.fill_preserve()
context.set_source_rgba(*self.line_colors[change[0]])
context.stroke()
textbuffer = self.get_buffer()
# Check whether we're drawing past the last line in the buffer
# (i.e., the overscroll) and draw a custom background if so.
end_y, end_height = self.get_line_yrange(textbuffer.get_end_iter())
end_y += end_height
visible_bottom_margin = clip_end - end_y
if visible_bottom_margin > 0:
context.rectangle(x + 1, end_y, width - 1, visible_bottom_margin)
context.set_source_rgba(*self.fill_colors['overscroll'])
context.fill()
# Paint current line highlight
if self.props.highlight_current_line_local and self.is_focus():
it = textbuffer.get_iter_at_mark(textbuffer.get_insert())
ypos, line_height = self.get_line_yrange(it)
context.rectangle(x, ypos, width, line_height)
context.set_source_rgba(*self.highlight_color)
context.fill()
# Draw syncpoint indicator lines
for syncpoint in self.syncpoints:
if syncpoint is None:
continue
syncline = textbuffer.get_iter_at_mark(syncpoint).get_line()
if bounds[0] <= syncline <= bounds[1]:
ypos = self.get_y_for_line_num(syncline)
context.rectangle(x, ypos - 0.5, width, 1)
context.set_source_rgba(*self.syncpoint_color)
context.stroke()
# Overdraw all animated chunks, and update animation states
new_anim_chunks = []
for c in self.animating_chunks:
current_time = GLib.get_monotonic_time()
percent = min(
1.0, (current_time - c.start_time) / float(c.duration))
rgba_pairs = zip(c.start_rgba, c.end_rgba)
rgba = [s + (e - s) * percent for s, e in rgba_pairs]
it = textbuffer.get_iter_at_mark(c.start_mark)
ystart, _ = self.get_line_yrange(it)
it = textbuffer.get_iter_at_mark(c.end_mark)
yend, _ = self.get_line_yrange(it)
if ystart == yend:
ystart -= 1
context.set_source_rgba(*rgba)
context.rectangle(x, ystart, width, yend - ystart)
if c.anim_type == TextviewLineAnimationType.stroke:
context.stroke()
else:
context.fill()
if current_time <= c.start_time + c.duration:
new_anim_chunks.append(c)
else:
textbuffer.delete_mark(c.start_mark)
textbuffer.delete_mark(c.end_mark)
self.animating_chunks = new_anim_chunks
if self.animating_chunks and self.anim_source_id is None:
def anim_cb():
self.queue_draw()
return True
# Using timeout_add interferes with recalculation of inline
# highlighting; this mechanism could be improved.
self.anim_source_id = GLib.idle_add(anim_cb)
elif not self.animating_chunks and self.anim_source_id:
GLib.source_remove(self.anim_source_id)
self.anim_source_id = None
context.restore()
return GtkSource.View.do_draw_layer(self, layer, context)
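# Stand-alone sketch (not used by Meld itself) of the per-frame colour fade
# in do_draw_layer above: each RGBA channel is linearly interpolated between
# the animation's start and end colours.
def _demo_rgba_lerp(start_rgba, end_rgba, percent):
    # e.g. _demo_rgba_lerp((1, 0, 0, 1), (1, 0, 0, 0), 0.5) -> [1, 0, 0, 0.5]
    return [s + (e - s) * percent for s, e in zip(start_rgba, end_rgba)]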
class CommitMessageSourceView(GtkSource.View):
__gtype_name__ = "CommitMessageSourceView"
__gsettings_bindings_view__ = (
('indent-width', 'tab-width'),
('insert-spaces-instead-of-tabs', 'insert-spaces-instead-of-tabs'),
('enable-space-drawer', 'enable-space-drawer'),
)
enable_space_drawer = GObject.Property(type=bool, default=False)
def do_realize(self):
bind_settings(self)
self.bind_property(
'enable-space-drawer', self.props.space_drawer, 'enable-matrix',
GObject.BindingFlags.DEFAULT | GObject.BindingFlags.SYNC_CREATE,
)
return GtkSource.View.do_realize(self)
class MeldSourceMap(GtkSource.Map, SourceViewHelperMixin):
__gtype_name__ = "MeldSourceMap"
compact_view = GObject.Property(
type=bool,
nick="Limit the view to a fixed width",
default=False,
)
COMPACT_MODE_WIDTH = 40
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.connect('notify::compact-view', lambda *args: self.queue_resize())
def do_draw_layer(self, layer, context):
if layer != Gtk.TextViewLayer.BELOW_TEXT:
return GtkSource.Map.do_draw_layer(self, layer, context)
# Handle bad view assignments and partial initialisation
parent_view = self.props.view
if not hasattr(parent_view, 'chunk_iter'):
return GtkSource.Map.do_draw_layer(self, layer, context)
context.save()
context.set_line_width(1.0)
_, clip = Gdk.cairo_get_clip_rectangle(context)
x = clip.x - 0.5
width = clip.width + 1
bounds = (
self.get_line_num_for_y(clip.y),
self.get_line_num_for_y(clip.y + clip.height),
)
# Paint chunk backgrounds
for change in parent_view.chunk_iter(bounds):
if change[1] == change[2]:
# We don't have room to paint inserts in this widget
continue
ypos0 = self.get_y_for_line_num(change[1])
ypos1 = self.get_y_for_line_num(change[2])
height = max(0, ypos1 - ypos0 - 1)
context.rectangle(x, ypos0 + 0.5, width, height)
context.set_source_rgba(*parent_view.fill_colors[change[0]])
context.fill()
context.restore()
return GtkSource.Map.do_draw_layer(self, layer, context)
def do_get_preferred_width(self):
if self.props.compact_view:
return (self.COMPACT_MODE_WIDTH, self.COMPACT_MODE_WIDTH)
else:
return GtkSource.Map.do_get_preferred_width(self)
|
import copy
import json
import pytest
from tests.async_mock import ANY, patch
from tests.common import (
async_fire_mqtt_message,
async_get_device_automations,
mock_device_registry,
mock_registry,
)
DEFAULT_CONFIG_DEVICE = {
"device": {"identifiers": ["0AFFD2"]},
"topic": "foobar/tag_scanned",
}
DEFAULT_CONFIG = {
"topic": "foobar/tag_scanned",
}
DEFAULT_CONFIG_JSON = {
"device": {"identifiers": ["0AFFD2"]},
"topic": "foobar/tag_scanned",
"value_template": "{{ value_json.PN532.UID }}",
}
DEFAULT_TAG_ID = "E9F35959"
DEFAULT_TAG_SCAN = "E9F35959"
DEFAULT_TAG_SCAN_JSON = (
'{"Time":"2020-09-28T17:02:10","PN532":{"UID":"E9F35959", "DATA":"ILOVETASMOTA"}}'
)
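# For reference: the value_template in DEFAULT_CONFIG_JSON plucks the UID out
# of the JSON payload, i.e. json.loads(DEFAULT_TAG_SCAN_JSON)["PN532"]["UID"]
# equals DEFAULT_TAG_ID.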
@pytest.fixture
def device_reg(hass):
"""Return an empty, loaded, registry."""
return mock_device_registry(hass)
@pytest.fixture
def entity_reg(hass):
"""Return an empty, loaded, registry."""
return mock_registry(hass)
@pytest.fixture
def tag_mock():
"""Fixture to mock tag."""
with patch("homeassistant.components.tag.async_scan_tag") as mock_tag:
yield mock_tag
@pytest.mark.no_fail_on_log_exception
async def test_discover_bad_tag(hass, device_reg, entity_reg, mqtt_mock, tag_mock):
"""Test bad discovery message."""
config1 = copy.deepcopy(DEFAULT_CONFIG_DEVICE)
# Test sending bad data
data0 = '{ "device":{"identifiers":["0AFFD2"]}, "topics": "foobar/tag_scanned" }'
async_fire_mqtt_message(hass, "homeassistant/tag/bla/config", data0)
await hass.async_block_till_done()
assert device_reg.async_get_device({("mqtt", "0AFFD2")}, set()) is None
# Test sending correct data
async_fire_mqtt_message(hass, "homeassistant/tag/bla/config", json.dumps(config1))
await hass.async_block_till_done()
device_entry = device_reg.async_get_device({("mqtt", "0AFFD2")}, set())
# Fake tag scan.
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, device_entry.id)
async def test_if_fires_on_mqtt_message_with_device(
hass, device_reg, mqtt_mock, tag_mock
):
"""Test tag scanning, with device."""
config = copy.deepcopy(DEFAULT_CONFIG_DEVICE)
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config))
await hass.async_block_till_done()
device_entry = device_reg.async_get_device({("mqtt", "0AFFD2")}, set())
# Fake tag scan.
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, device_entry.id)
async def test_if_fires_on_mqtt_message_without_device(
hass, device_reg, mqtt_mock, tag_mock
):
"""Test tag scanning, without device."""
config = copy.deepcopy(DEFAULT_CONFIG)
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config))
await hass.async_block_till_done()
# Fake tag scan.
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, None)
async def test_if_fires_on_mqtt_message_with_template(
hass, device_reg, mqtt_mock, tag_mock
):
"""Test tag scanning, with device."""
config = copy.deepcopy(DEFAULT_CONFIG_JSON)
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config))
await hass.async_block_till_done()
device_entry = device_reg.async_get_device({("mqtt", "0AFFD2")}, set())
# Fake tag scan.
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN_JSON)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, device_entry.id)
async def test_strip_tag_id(hass, device_reg, mqtt_mock, tag_mock):
"""Test strip whitespace from tag_id."""
config = copy.deepcopy(DEFAULT_CONFIG)
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config))
await hass.async_block_till_done()
# Fake tag scan.
async_fire_mqtt_message(hass, "foobar/tag_scanned", "123456 ")
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, "123456", None)
async def test_if_fires_on_mqtt_message_after_update_with_device(
hass, device_reg, mqtt_mock, tag_mock
):
"""Test tag scanning after update."""
config1 = copy.deepcopy(DEFAULT_CONFIG_DEVICE)
config2 = copy.deepcopy(DEFAULT_CONFIG_DEVICE)
config2["topic"] = "foobar/tag_scanned2"
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config1))
await hass.async_block_till_done()
device_entry = device_reg.async_get_device({("mqtt", "0AFFD2")}, set())
# Fake tag scan.
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, device_entry.id)
# Update the tag scanner with different topic
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config2))
await hass.async_block_till_done()
tag_mock.reset_mock()
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_not_called()
async_fire_mqtt_message(hass, "foobar/tag_scanned2", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, device_entry.id)
# Update the tag scanner with same topic
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config2))
await hass.async_block_till_done()
tag_mock.reset_mock()
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_not_called()
async_fire_mqtt_message(hass, "foobar/tag_scanned2", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, device_entry.id)
async def test_if_fires_on_mqtt_message_after_update_without_device(
hass, device_reg, mqtt_mock, tag_mock
):
"""Test tag scanning after update."""
config1 = copy.deepcopy(DEFAULT_CONFIG)
config2 = copy.deepcopy(DEFAULT_CONFIG)
config2["topic"] = "foobar/tag_scanned2"
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config1))
await hass.async_block_till_done()
# Fake tag scan.
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, None)
# Update the tag scanner with different topic
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config2))
await hass.async_block_till_done()
tag_mock.reset_mock()
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_not_called()
async_fire_mqtt_message(hass, "foobar/tag_scanned2", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, None)
# Update the tag scanner with same topic
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config2))
await hass.async_block_till_done()
tag_mock.reset_mock()
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_not_called()
async_fire_mqtt_message(hass, "foobar/tag_scanned2", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, None)
async def test_if_fires_on_mqtt_message_after_update_with_template(
hass, device_reg, mqtt_mock, tag_mock
):
"""Test tag scanning after update."""
config1 = copy.deepcopy(DEFAULT_CONFIG_JSON)
config2 = copy.deepcopy(DEFAULT_CONFIG_JSON)
config2["value_template"] = "{{ value_json.RDM6300.UID }}"
tag_scan_2 = '{"Time":"2020-09-28T17:02:10","RDM6300":{"UID":"E9F35959", "DATA":"ILOVETASMOTA"}}'
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config1))
await hass.async_block_till_done()
device_entry = device_reg.async_get_device({("mqtt", "0AFFD2")}, set())
# Fake tag scan.
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN_JSON)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, device_entry.id)
# Update the tag scanner with different template
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config2))
await hass.async_block_till_done()
tag_mock.reset_mock()
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN_JSON)
await hass.async_block_till_done()
tag_mock.assert_not_called()
async_fire_mqtt_message(hass, "foobar/tag_scanned", tag_scan_2)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, device_entry.id)
# Update the tag scanner with same template
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config2))
await hass.async_block_till_done()
tag_mock.reset_mock()
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN_JSON)
await hass.async_block_till_done()
tag_mock.assert_not_called()
async_fire_mqtt_message(hass, "foobar/tag_scanned", tag_scan_2)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, device_entry.id)
async def test_no_resubscribe_same_topic(hass, device_reg, mqtt_mock):
"""Test subscription to topics without change."""
config = copy.deepcopy(DEFAULT_CONFIG_DEVICE)
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config))
await hass.async_block_till_done()
assert device_reg.async_get_device({("mqtt", "0AFFD2")}, set())
call_count = mqtt_mock.async_subscribe.call_count
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config))
await hass.async_block_till_done()
assert mqtt_mock.async_subscribe.call_count == call_count
async def test_not_fires_on_mqtt_message_after_remove_by_mqtt_with_device(
hass, device_reg, mqtt_mock, tag_mock
):
"""Test tag scanning after removal."""
config = copy.deepcopy(DEFAULT_CONFIG_DEVICE)
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config))
await hass.async_block_till_done()
device_entry = device_reg.async_get_device({("mqtt", "0AFFD2")}, set())
# Fake tag scan.
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, device_entry.id)
# Remove the tag scanner
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", "")
await hass.async_block_till_done()
tag_mock.reset_mock()
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_not_called()
# Rediscover the tag scanner
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config))
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, device_entry.id)
async def test_not_fires_on_mqtt_message_after_remove_by_mqtt_without_device(
hass, device_reg, mqtt_mock, tag_mock
):
"""Test tag scanning not firing after removal."""
config = copy.deepcopy(DEFAULT_CONFIG)
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config))
await hass.async_block_till_done()
# Fake tag scan.
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, None)
# Remove the tag scanner
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", "")
await hass.async_block_till_done()
tag_mock.reset_mock()
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_not_called()
# Rediscover the tag scanner
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config))
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, None)
async def test_not_fires_on_mqtt_message_after_remove_from_registry(
hass,
device_reg,
mqtt_mock,
tag_mock,
):
"""Test tag scanning after removal."""
config = copy.deepcopy(DEFAULT_CONFIG_DEVICE)
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config))
await hass.async_block_till_done()
device_entry = device_reg.async_get_device({("mqtt", "0AFFD2")}, set())
# Fake tag scan.
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, device_entry.id)
# Remove the device
device_reg.async_remove_device(device_entry.id)
await hass.async_block_till_done()
tag_mock.reset_mock()
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_not_called()
async def test_entity_device_info_with_connection(hass, mqtt_mock):
"""Test MQTT device registry integration."""
registry = await hass.helpers.device_registry.async_get_registry()
data = json.dumps(
{
"topic": "test-topic",
"device": {
"connections": [["mac", "02:5b:26:a8:dc:12"]],
"manufacturer": "Whatever",
"name": "Beer",
"model": "Glass",
"sw_version": "0.1-beta",
},
}
)
async_fire_mqtt_message(hass, "homeassistant/tag/bla/config", data)
await hass.async_block_till_done()
device = registry.async_get_device(set(), {("mac", "02:5b:26:a8:dc:12")})
assert device is not None
assert device.connections == {("mac", "02:5b:26:a8:dc:12")}
assert device.manufacturer == "Whatever"
assert device.name == "Beer"
assert device.model == "Glass"
assert device.sw_version == "0.1-beta"
async def test_entity_device_info_with_identifier(hass, mqtt_mock):
"""Test MQTT device registry integration."""
registry = await hass.helpers.device_registry.async_get_registry()
data = json.dumps(
{
"topic": "test-topic",
"device": {
"identifiers": ["helloworld"],
"manufacturer": "Whatever",
"name": "Beer",
"model": "Glass",
"sw_version": "0.1-beta",
},
}
)
async_fire_mqtt_message(hass, "homeassistant/tag/bla/config", data)
await hass.async_block_till_done()
device = registry.async_get_device({("mqtt", "helloworld")}, set())
assert device is not None
assert device.identifiers == {("mqtt", "helloworld")}
assert device.manufacturer == "Whatever"
assert device.name == "Beer"
assert device.model == "Glass"
assert device.sw_version == "0.1-beta"
async def test_entity_device_info_update(hass, mqtt_mock):
"""Test device registry update."""
registry = await hass.helpers.device_registry.async_get_registry()
config = {
"topic": "test-topic",
"device": {
"identifiers": ["helloworld"],
"connections": [["mac", "02:5b:26:a8:dc:12"]],
"manufacturer": "Whatever",
"name": "Beer",
"model": "Glass",
"sw_version": "0.1-beta",
},
}
data = json.dumps(config)
async_fire_mqtt_message(hass, "homeassistant/tag/bla/config", data)
await hass.async_block_till_done()
device = registry.async_get_device({("mqtt", "helloworld")}, set())
assert device is not None
assert device.name == "Beer"
config["device"]["name"] = "Milk"
data = json.dumps(config)
async_fire_mqtt_message(hass, "homeassistant/tag/bla/config", data)
await hass.async_block_till_done()
device = registry.async_get_device({("mqtt", "helloworld")}, set())
assert device is not None
assert device.name == "Milk"
async def test_cleanup_tag(hass, device_reg, entity_reg, mqtt_mock):
"""Test tag discovery topic is cleaned when device is removed from registry."""
config = {
"topic": "test-topic",
"device": {"identifiers": ["helloworld"]},
}
data = json.dumps(config)
async_fire_mqtt_message(hass, "homeassistant/tag/bla/config", data)
await hass.async_block_till_done()
# Verify device registry entry is created
device_entry = device_reg.async_get_device({("mqtt", "helloworld")}, set())
assert device_entry is not None
device_reg.async_remove_device(device_entry.id)
await hass.async_block_till_done()
await hass.async_block_till_done()
# Verify device registry entry is cleared
device_entry = device_reg.async_get_device({("mqtt", "helloworld")}, set())
assert device_entry is None
# Verify retained discovery topic has been cleared
mqtt_mock.async_publish.assert_called_once_with(
"homeassistant/tag/bla/config", "", 0, True
)
async def test_cleanup_device(hass, device_reg, entity_reg, mqtt_mock):
"""Test removal from device registry when tag is removed."""
config = {
"topic": "test-topic",
"device": {"identifiers": ["helloworld"]},
}
data = json.dumps(config)
async_fire_mqtt_message(hass, "homeassistant/tag/bla/config", data)
await hass.async_block_till_done()
# Verify device registry entry is created
device_entry = device_reg.async_get_device({("mqtt", "helloworld")}, set())
assert device_entry is not None
async_fire_mqtt_message(hass, "homeassistant/tag/bla/config", "")
await hass.async_block_till_done()
# Verify device registry entry is cleared
device_entry = device_reg.async_get_device({("mqtt", "helloworld")}, set())
assert device_entry is None
async def test_cleanup_device_several_tags(
hass, device_reg, entity_reg, mqtt_mock, tag_mock
):
"""Test removal from device registry when the last tag is removed."""
config1 = {
"topic": "test-topic1",
"device": {"identifiers": ["helloworld"]},
}
config2 = {
"topic": "test-topic2",
"device": {"identifiers": ["helloworld"]},
}
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config1))
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "homeassistant/tag/bla2/config", json.dumps(config2))
await hass.async_block_till_done()
# Verify device registry entry is created
device_entry = device_reg.async_get_device({("mqtt", "helloworld")}, set())
assert device_entry is not None
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", "")
await hass.async_block_till_done()
# Verify device registry entry is not cleared
device_entry = device_reg.async_get_device({("mqtt", "helloworld")}, set())
assert device_entry is not None
# Fake tag scan.
async_fire_mqtt_message(hass, "test-topic1", "12345")
async_fire_mqtt_message(hass, "test-topic2", "23456")
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, "23456", device_entry.id)
async_fire_mqtt_message(hass, "homeassistant/tag/bla2/config", "")
await hass.async_block_till_done()
# Verify device registry entry is cleared
device_entry = device_reg.async_get_device({("mqtt", "helloworld")}, set())
assert device_entry is None
async def test_cleanup_device_with_entity_and_trigger_1(
hass, device_reg, entity_reg, mqtt_mock
):
"""Test removal from device registry for device with tag, entity and trigger.
Tag removed first, then trigger and entity.
"""
config1 = {
"topic": "test-topic",
"device": {"identifiers": ["helloworld"]},
}
config2 = {
"automation_type": "trigger",
"topic": "test-topic",
"type": "foo",
"subtype": "bar",
"device": {"identifiers": ["helloworld"]},
}
config3 = {
"name": "test_binary_sensor",
"state_topic": "test-topic",
"device": {"identifiers": ["helloworld"]},
"unique_id": "veryunique",
}
data1 = json.dumps(config1)
data2 = json.dumps(config2)
data3 = json.dumps(config3)
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", data1)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "homeassistant/device_automation/bla2/config", data2)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "homeassistant/binary_sensor/bla3/config", data3)
await hass.async_block_till_done()
# Verify device registry entry is created
device_entry = device_reg.async_get_device({("mqtt", "helloworld")}, set())
assert device_entry is not None
triggers = await async_get_device_automations(hass, "trigger", device_entry.id)
assert len(triggers) == 3 # 2 binary_sensor triggers + device trigger
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", "")
await hass.async_block_till_done()
# Verify device registry entry is not cleared
device_entry = device_reg.async_get_device({("mqtt", "helloworld")}, set())
assert device_entry is not None
async_fire_mqtt_message(hass, "homeassistant/device_automation/bla2/config", "")
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "homeassistant/binary_sensor/bla3/config", "")
await hass.async_block_till_done()
# Verify device registry entry is cleared
device_entry = device_reg.async_get_device({("mqtt", "helloworld")}, set())
assert device_entry is None
async def test_cleanup_device_with_entity_and_trigger_2(
    hass, device_reg, entity_reg, mqtt_mock
):
"""Test removal from device registry for device with tag, entity and trigger.
Trigger and entity removed first, then tag.
"""
config1 = {
"topic": "test-topic",
"device": {"identifiers": ["helloworld"]},
}
config2 = {
"automation_type": "trigger",
"topic": "test-topic",
"type": "foo",
"subtype": "bar",
"device": {"identifiers": ["helloworld"]},
}
config3 = {
"name": "test_binary_sensor",
"state_topic": "test-topic",
"device": {"identifiers": ["helloworld"]},
"unique_id": "veryunique",
}
data1 = json.dumps(config1)
data2 = json.dumps(config2)
data3 = json.dumps(config3)
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", data1)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "homeassistant/device_automation/bla2/config", data2)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "homeassistant/binary_sensor/bla3/config", data3)
await hass.async_block_till_done()
# Verify device registry entry is created
device_entry = device_reg.async_get_device({("mqtt", "helloworld")}, set())
assert device_entry is not None
triggers = await async_get_device_automations(hass, "trigger", device_entry.id)
assert len(triggers) == 3 # 2 binary_sensor triggers + device trigger
async_fire_mqtt_message(hass, "homeassistant/device_automation/bla2/config", "")
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "homeassistant/binary_sensor/bla3/config", "")
await hass.async_block_till_done()
# Verify device registry entry is not cleared
device_entry = device_reg.async_get_device({("mqtt", "helloworld")}, set())
assert device_entry is not None
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", "")
await hass.async_block_till_done()
# Verify device registry entry is cleared
device_entry = device_reg.async_get_device({("mqtt", "helloworld")}, set())
assert device_entry is None
|
import melissa
import voluptuous as vol
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.discovery import async_load_platform
DOMAIN = "melissa"
DATA_MELISSA = "MELISSA"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
}
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, config):
"""Set up the Melissa Climate component."""
conf = config[DOMAIN]
username = conf.get(CONF_USERNAME)
password = conf.get(CONF_PASSWORD)
api = melissa.AsyncMelissa(username=username, password=password)
await api.async_connect()
hass.data[DATA_MELISSA] = api
hass.async_create_task(async_load_platform(hass, "climate", DOMAIN, {}, config))
return True
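# Example configuration.yaml entry matching CONFIG_SCHEMA above; the
# credentials are placeholders:
#
# melissa:
#   username: YOUR_USERNAME
#   password: YOUR_PASSWORD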
|
from datetime import timedelta
import logging
from requests import HTTPError
from tmb import IBus
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ATTR_ATTRIBUTION, CONF_NAME, TIME_MINUTES
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
ATTRIBUTION = "Data provided by Transport Metropolitans de Barcelona"
ICON = "mdi:bus-clock"
CONF_APP_ID = "app_id"
CONF_APP_KEY = "app_key"
CONF_LINE = "line"
CONF_BUS_STOP = "stop"
CONF_BUS_STOPS = "stops"
ATTR_BUS_STOP = "stop"
ATTR_LINE = "line"
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60)
LINE_STOP_SCHEMA = vol.Schema(
{
vol.Required(CONF_BUS_STOP): cv.string,
vol.Required(CONF_LINE): cv.string,
vol.Optional(CONF_NAME): cv.string,
}
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_APP_ID): cv.string,
vol.Required(CONF_APP_KEY): cv.string,
vol.Required(CONF_BUS_STOPS): vol.All(cv.ensure_list, [LINE_STOP_SCHEMA]),
}
)
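# Example configuration.yaml entry accepted by PLATFORM_SCHEMA above; the
# platform key and all values are illustrative placeholders:
#
# sensor:
#   - platform: tmb
#     app_id: YOUR_APP_ID
#     app_key: YOUR_APP_KEY
#     stops:
#       - line: V25
#         stop: "3258"
#         name: Home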
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the sensors."""
ibus_client = IBus(config[CONF_APP_ID], config[CONF_APP_KEY])
sensors = []
for line_stop in config.get(CONF_BUS_STOPS):
line = line_stop[CONF_LINE]
stop = line_stop[CONF_BUS_STOP]
if line_stop.get(CONF_NAME):
name = f"{line} - {line_stop[CONF_NAME]} ({stop})"
else:
name = f"{line} - {stop}"
sensors.append(TMBSensor(ibus_client, stop, line, name))
add_entities(sensors, True)
class TMBSensor(Entity):
"""Implementation of a TMB line/stop Sensor."""
def __init__(self, ibus_client, stop, line, name):
"""Initialize the sensor."""
self._ibus_client = ibus_client
self._stop = stop
self._line = line.upper()
self._name = name
self._unit = TIME_MINUTES
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def icon(self):
"""Return the icon for the frontend."""
return ICON
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return self._unit
@property
def unique_id(self):
"""Return a unique, HASS-friendly identifier for this entity."""
return f"{self._stop}_{self._line}"
@property
def state(self):
"""Return the next departure time."""
return self._state
@property
def device_state_attributes(self):
"""Return the state attributes of the last update."""
return {
ATTR_ATTRIBUTION: ATTRIBUTION,
ATTR_BUS_STOP: self._stop,
ATTR_LINE: self._line,
}
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Get the next bus information."""
try:
self._state = self._ibus_client.get_stop_forecast(self._stop, self._line)
except HTTPError:
            _LOGGER.error(
                "Unable to fetch data from the TMB API; check that your API keys are valid"
            )
|
from datetime import timedelta
import logging
from homeassistant.const import PRECISION_TENTHS, PRECISION_WHOLE, TEMP_CELSIUS
from homeassistant.helpers.config_validation import ( # noqa: F401
PLATFORM_SCHEMA,
PLATFORM_SCHEMA_BASE,
)
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.temperature import display_temp as show_temp
# mypy: allow-untyped-defs, no-check-untyped-defs
_LOGGER = logging.getLogger(__name__)
ATTR_CONDITION_CLASS = "condition_class"
ATTR_CONDITION_CLEAR_NIGHT = "clear-night"
ATTR_CONDITION_CLOUDY = "cloudy"
ATTR_CONDITION_EXCEPTIONAL = "exceptional"
ATTR_CONDITION_FOG = "fog"
ATTR_CONDITION_HAIL = "hail"
ATTR_CONDITION_LIGHTNING = "lightning"
ATTR_CONDITION_LIGHTNING_RAINY = "lightning-rainy"
ATTR_CONDITION_PARTLYCLOUDY = "partlycloudy"
ATTR_CONDITION_POURING = "pouring"
ATTR_CONDITION_RAINY = "rainy"
ATTR_CONDITION_SNOWY = "snowy"
ATTR_CONDITION_SNOWY_RAINY = "snowy-rainy"
ATTR_CONDITION_SUNNY = "sunny"
ATTR_CONDITION_WINDY = "windy"
ATTR_CONDITION_WINDY_VARIANT = "windy-variant"
ATTR_FORECAST = "forecast"
ATTR_FORECAST_CONDITION = "condition"
ATTR_FORECAST_PRECIPITATION = "precipitation"
ATTR_FORECAST_PRECIPITATION_PROBABILITY = "precipitation_probability"
ATTR_FORECAST_TEMP = "temperature"
ATTR_FORECAST_TEMP_LOW = "templow"
ATTR_FORECAST_TIME = "datetime"
ATTR_FORECAST_WIND_BEARING = "wind_bearing"
ATTR_FORECAST_WIND_SPEED = "wind_speed"
ATTR_WEATHER_ATTRIBUTION = "attribution"
ATTR_WEATHER_HUMIDITY = "humidity"
ATTR_WEATHER_OZONE = "ozone"
ATTR_WEATHER_PRESSURE = "pressure"
ATTR_WEATHER_TEMPERATURE = "temperature"
ATTR_WEATHER_VISIBILITY = "visibility"
ATTR_WEATHER_WIND_BEARING = "wind_bearing"
ATTR_WEATHER_WIND_SPEED = "wind_speed"
DOMAIN = "weather"
ENTITY_ID_FORMAT = DOMAIN + ".{}"
SCAN_INTERVAL = timedelta(seconds=30)
async def async_setup(hass, config):
"""Set up the weather component."""
component = hass.data[DOMAIN] = EntityComponent(
_LOGGER, DOMAIN, hass, SCAN_INTERVAL
)
await component.async_setup(config)
return True
async def async_setup_entry(hass, entry):
"""Set up a config entry."""
return await hass.data[DOMAIN].async_setup_entry(entry)
async def async_unload_entry(hass, entry):
"""Unload a config entry."""
return await hass.data[DOMAIN].async_unload_entry(entry)
class WeatherEntity(Entity):
"""ABC for weather data."""
@property
def temperature(self):
"""Return the platform temperature."""
raise NotImplementedError()
@property
def temperature_unit(self):
"""Return the unit of measurement."""
raise NotImplementedError()
@property
def pressure(self):
"""Return the pressure."""
return None
@property
def humidity(self):
"""Return the humidity."""
raise NotImplementedError()
@property
def wind_speed(self):
"""Return the wind speed."""
return None
@property
def wind_bearing(self):
"""Return the wind bearing."""
return None
@property
def ozone(self):
"""Return the ozone level."""
return None
@property
def attribution(self):
"""Return the attribution."""
return None
@property
def visibility(self):
"""Return the visibility."""
return None
@property
def forecast(self):
"""Return the forecast."""
return None
@property
def precision(self):
"""Return the precision of the temperature value."""
return (
PRECISION_TENTHS
if self.temperature_unit == TEMP_CELSIUS
else PRECISION_WHOLE
)
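    # Example: with PRECISION_TENTHS a reading of 21.64 °C is displayed as
    # 21.6; non-Celsius units fall back to PRECISION_WHOLE, so a reading of
    # 71.6 °F is displayed as 72 (values are illustrative).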
@property
def state_attributes(self):
"""Return the state attributes."""
data = {}
if self.temperature is not None:
data[ATTR_WEATHER_TEMPERATURE] = show_temp(
self.hass, self.temperature, self.temperature_unit, self.precision
)
humidity = self.humidity
if humidity is not None:
data[ATTR_WEATHER_HUMIDITY] = round(humidity)
ozone = self.ozone
if ozone is not None:
data[ATTR_WEATHER_OZONE] = ozone
pressure = self.pressure
if pressure is not None:
data[ATTR_WEATHER_PRESSURE] = pressure
wind_bearing = self.wind_bearing
if wind_bearing is not None:
data[ATTR_WEATHER_WIND_BEARING] = wind_bearing
wind_speed = self.wind_speed
if wind_speed is not None:
data[ATTR_WEATHER_WIND_SPEED] = wind_speed
visibility = self.visibility
if visibility is not None:
data[ATTR_WEATHER_VISIBILITY] = visibility
attribution = self.attribution
if attribution is not None:
data[ATTR_WEATHER_ATTRIBUTION] = attribution
if self.forecast is not None:
forecast = []
for forecast_entry in self.forecast:
forecast_entry = dict(forecast_entry)
forecast_entry[ATTR_FORECAST_TEMP] = show_temp(
self.hass,
forecast_entry[ATTR_FORECAST_TEMP],
self.temperature_unit,
self.precision,
)
if ATTR_FORECAST_TEMP_LOW in forecast_entry:
forecast_entry[ATTR_FORECAST_TEMP_LOW] = show_temp(
self.hass,
forecast_entry[ATTR_FORECAST_TEMP_LOW],
self.temperature_unit,
self.precision,
)
forecast.append(forecast_entry)
data[ATTR_FORECAST] = forecast
return data
@property
def state(self):
"""Return the current state."""
return self.condition
@property
def condition(self):
"""Return the current condition."""
raise NotImplementedError()
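# Minimal sketch of a concrete WeatherEntity (illustrative only, not a real
# platform): condition, temperature, temperature_unit and humidity are the
# abstract properties a subclass must provide.
class _DemoWeather(WeatherEntity):
    @property
    def temperature(self):
        return 21.6
    @property
    def temperature_unit(self):
        return TEMP_CELSIUS
    @property
    def humidity(self):
        return 54
    @property
    def condition(self):
        return ATTR_CONDITION_PARTLYCLOUDY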
|
import os
try:
import json
except ImportError:
import simplejson as json
import diamond.collector
class OpenstackSwiftReconCollector(diamond.collector.Collector):
def get_default_config_help(self):
config_help = super(OpenstackSwiftReconCollector,
self).get_default_config_help()
config_help.update({
'recon_account_cache': 'path to swift recon account cache '
'(default /var/cache/swift/account.recon)',
'recon_container_cache': 'path to swift recon container cache '
'(default /var/cache/swift/container.recon)',
'recon_object_cache': 'path to swift recon object cache '
'(default /var/cache/swift/object.recon)'
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(OpenstackSwiftReconCollector, self).get_default_config()
config.update({
'path': 'swiftrecon',
'recon_account_cache': '/var/cache/swift/account.recon',
'recon_container_cache': '/var/cache/swift/container.recon',
'recon_object_cache': '/var/cache/swift/object.recon',
'interval': 300,
})
return config
def _process_cache(self, d, path=()):
"""Recusively walk a nested recon cache dict to obtain path/values"""
for k, v in d.iteritems():
if not isinstance(v, dict):
self.metrics.append((path + (k,), v))
else:
self._process_cache(v, path + (k,))
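    # Example (illustrative): a cache of {'replication': {'time': 0.5}}
    # flattens to [(('replication', 'time'), 0.5)], which collect() joins
    # into a metric name such as 'object.replication.time'.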
def collect(self):
self.metrics = []
recon_cache = {'account': self.config['recon_account_cache'],
'container': self.config['recon_container_cache'],
'object': self.config['recon_object_cache']}
for recon_type in recon_cache:
if not os.access(recon_cache[recon_type], os.R_OK):
continue
try:
f = open(recon_cache[recon_type])
try:
rmetrics = json.loads(f.readlines()[0].strip())
self.metrics = []
self._process_cache(rmetrics)
for k, v in self.metrics:
metric_name = '%s.%s' % (recon_type, ".".join(k))
if isinstance(v, (int, float)):
self.publish(metric_name, v)
except (ValueError, IndexError):
continue
finally:
f.close()
|
from __future__ import division
import numpy as np
import warnings
import chainer
from chainer.backends import cuda
import chainer.functions as F
from chainercv.links.model.faster_rcnn.utils.anchor_target_creator import\
AnchorTargetCreator
from chainercv.links.model.faster_rcnn.utils.proposal_target_creator \
import ProposalTargetCreator
class LightHeadRCNNTrainChain(chainer.Chain):
"""Calculate losses for Light Head R-CNN and report them.
This is used to train Light Head R-CNN in the joint training scheme
[#LHRCNN]_.
.. [#LHRCNN] Zeming Li, Chao Peng, Gang Yu, Xiangyu Zhang, Yangdong Deng, \
Jian Sun. Light-Head R-CNN: In Defense of Two-Stage Object Detector. \
arXiv preprint arXiv:1711.07264.
The losses include:
* :obj:`rpn_loc_loss`: The localization loss for \
Region Proposal Network (RPN).
* :obj:`rpn_cls_loss`: The classification loss for RPN.
* :obj:`roi_loc_loss`: The localization loss for the head module.
* :obj:`roi_cls_loss`: The classification loss for the head module.
Args:
light_head_rcnn (~light_head_rcnn.links.light_head_rcnn.LightHeadRCNN):
A Light Head R-CNN model that is going to be trained.
rpn_sigma (float): Sigma parameter for the localization loss
of Region Proposal Network (RPN). The default value is 3,
which is the value used in [#LHRCNN]_.
        roi_sigma (float): Sigma parameter for the localization loss of
the head. The default value is 1, which is the value used
in [#LHRCNN]_.
anchor_target_creator: An instantiation of
:class:`~chainercv.links.model.faster_rcnn.AnchorTargetCreator`.
proposal_target_creator: An instantiation of
:class:`~light_head_rcnn.links.model.utils.ProposalTargetCreator`.
"""
def __init__(
self, light_head_rcnn,
rpn_sigma=3., roi_sigma=1., n_ohem_sample=256,
anchor_target_creator=None, proposal_target_creator=None,
):
super(LightHeadRCNNTrainChain, self).__init__()
with self.init_scope():
self.light_head_rcnn = light_head_rcnn
self.rpn_sigma = rpn_sigma
self.roi_sigma = roi_sigma
self.n_ohem_sample = n_ohem_sample
if anchor_target_creator is None:
self.anchor_target_creator = AnchorTargetCreator()
else:
self.anchor_target_creator = anchor_target_creator
if proposal_target_creator is None:
self.proposal_target_creator = ProposalTargetCreator(n_sample=None)
else:
self.proposal_target_creator = proposal_target_creator
self.loc_normalize_mean = light_head_rcnn.loc_normalize_mean
self.loc_normalize_std = light_head_rcnn.loc_normalize_std
def __call__(self, imgs, bboxes, labels, scales):
"""Forward Faster R-CNN and calculate losses.
Here are notations used.
* :math:`N` is the batch size.
* :math:`R` is the number of bounding boxes per image.
Currently, only :math:`N=1` is supported.
Args:
imgs (~chainer.Variable): A variable with a batch of images.
bboxes (~chainer.Variable): A batch of bounding boxes.
Its shape is :math:`(N, R, 4)`.
labels (~chainer.Variable): A batch of labels.
Its shape is :math:`(N, R)`. The background is excluded from
the definition, which means that the range of the value
is :math:`[0, L - 1]`. :math:`L` is the number of foreground
classes.
            scales (~numpy.ndarray or ~chainer.Variable): Amount of scaling
                applied to each raw image during preprocessing.
Returns:
chainer.Variable:
Scalar loss variable.
This is the sum of losses for Region Proposal Network and
the head module.
"""
if isinstance(bboxes, chainer.Variable):
bboxes = bboxes.array
if isinstance(labels, chainer.Variable):
labels = labels.array
if isinstance(scales, chainer.Variable):
scales = scales.array
scales = cuda.to_cpu(scales)
batch_size, _, H, W = imgs.shape
img_size = (H, W)
rpn_features, roi_features = self.light_head_rcnn.extractor(imgs)
rpn_locs, rpn_scores, rois, roi_indices, anchor = \
self.light_head_rcnn.rpn(rpn_features, img_size, scales)
rpn_locs = rpn_locs.reshape((-1, rpn_locs.shape[2]))
rpn_scores = rpn_scores.reshape((-1, rpn_scores.shape[2]))
gt_rpn_locs = []
gt_rpn_labels = []
for bbox in bboxes:
gt_rpn_loc, gt_rpn_label = self.anchor_target_creator(
bbox, anchor, img_size)
if cuda.get_array_module(rpn_locs.array) != np:
gt_rpn_loc = cuda.to_gpu(gt_rpn_loc)
gt_rpn_label = cuda.to_gpu(gt_rpn_label)
gt_rpn_locs.append(gt_rpn_loc)
gt_rpn_labels.append(gt_rpn_label)
del gt_rpn_loc, gt_rpn_label
gt_rpn_locs = self.xp.concatenate(gt_rpn_locs, axis=0)
gt_rpn_labels = self.xp.concatenate(gt_rpn_labels, axis=0)
batch_indices = range(batch_size)
sample_rois = []
sample_roi_indices = []
gt_roi_locs = []
gt_roi_labels = []
for batch_index, bbox, label in \
zip(batch_indices, bboxes, labels):
roi = rois[roi_indices == batch_index]
sample_roi, gt_roi_loc, gt_roi_label = \
self.proposal_target_creator(
roi, bbox, label,
self.loc_normalize_mean, self.loc_normalize_std)
del roi
sample_roi_index = self.xp.full(
(len(sample_roi),), batch_index, dtype=np.int32)
sample_rois.append(sample_roi)
sample_roi_indices.append(sample_roi_index)
del sample_roi, sample_roi_index
gt_roi_locs.append(gt_roi_loc)
gt_roi_labels.append(gt_roi_label)
del gt_roi_loc, gt_roi_label
sample_rois = self.xp.concatenate(sample_rois, axis=0)
sample_roi_indices = self.xp.concatenate(sample_roi_indices, axis=0)
gt_roi_locs = self.xp.concatenate(gt_roi_locs, axis=0)
gt_roi_labels = self.xp.concatenate(gt_roi_labels, axis=0)
roi_cls_locs, roi_scores = self.light_head_rcnn.head(
roi_features, sample_rois, sample_roi_indices)
# RPN losses
rpn_loc_loss = _fast_rcnn_loc_loss(
rpn_locs, gt_rpn_locs, gt_rpn_labels, self.rpn_sigma)
rpn_cls_loss = F.softmax_cross_entropy(rpn_scores, gt_rpn_labels)
# Losses for outputs of the head.
roi_loc_loss, roi_cls_loss = _ohem_loss(
roi_cls_locs, roi_scores, gt_roi_locs, gt_roi_labels,
self.n_ohem_sample * batch_size, self.roi_sigma)
roi_loc_loss = 2 * roi_loc_loss
loss = rpn_loc_loss + rpn_cls_loss + roi_loc_loss + roi_cls_loss
chainer.reporter.report(
{'rpn_loc_loss': rpn_loc_loss,
'rpn_cls_loss': rpn_cls_loss,
'roi_loc_loss': roi_loc_loss,
'roi_cls_loss': roi_cls_loss,
'loss': loss},
self)
return loss
def _ohem_loss(
roi_cls_locs, roi_scores, gt_roi_locs, gt_roi_labels,
n_ohem_sample, roi_sigma=1.0
):
xp = cuda.get_array_module(roi_cls_locs)
n_sample = roi_cls_locs.shape[0]
roi_cls_locs = roi_cls_locs.reshape((n_sample, -1, 4))
roi_locs = roi_cls_locs[xp.arange(n_sample), gt_roi_labels]
roi_loc_loss = _fast_rcnn_loc_loss(
roi_locs, gt_roi_locs, gt_roi_labels, roi_sigma, reduce='no')
roi_cls_loss = F.softmax_cross_entropy(
roi_scores, gt_roi_labels, reduce='no')
assert roi_loc_loss.shape == roi_cls_loss.shape
n_ohem_sample = min(n_ohem_sample, n_sample)
# sort in CPU because of GPU memory
roi_cls_loc_loss = cuda.to_cpu(roi_loc_loss.array + roi_cls_loss.array)
indices = roi_cls_loc_loss.argsort(axis=0)[::-1]
# filter nan
indices = np.array(
[i for i in indices if not np.isnan(roi_cls_loc_loss[i])],
dtype=np.int32)
indices = indices[:n_ohem_sample]
if cuda.get_array_module(roi_loc_loss.array) != np:
indices = cuda.to_gpu(indices)
if len(indices) > 0:
roi_loc_loss = F.sum(roi_loc_loss[indices]) / n_ohem_sample
roi_cls_loss = F.sum(roi_cls_loss[indices]) / len(indices)
else:
roi_loc_loss = chainer.Variable(xp.array(0.0, dtype=xp.float32))
roi_cls_loss = chainer.Variable(xp.array(0.0, dtype=xp.float32))
roi_loc_loss.zerograd()
roi_cls_loss.zerograd()
return roi_loc_loss, roi_cls_loss
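# Plain-NumPy sketch of the hard-example selection in _ohem_loss above; the
# loss values are made up. Keep the indices of the n largest finite combined
# losses, dropping NaNs exactly as the code above does.
def _demo_ohem_selection(n=2):
    losses = np.array([0.1, 2.0, 0.5, np.nan])
    order = losses.argsort(axis=0)[::-1]  # descending order, NaNs first
    order = [i for i in order if not np.isnan(losses[i])]
    return np.array(order[:n], dtype=np.int32)  # -> indices [1, 2]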
def _smooth_l1_loss_base(x, t, in_weight, sigma):
sigma2 = sigma ** 2
diff = in_weight * (x - t)
abs_diff = F.absolute(diff)
flag = (abs_diff.array < (1. / sigma2)).astype(np.float32)
y = (flag * (sigma2 / 2.) * F.square(diff) +
(1 - flag) * (abs_diff - 0.5 / sigma2))
return F.sum(y, axis=1)
def _fast_rcnn_loc_loss(pred_loc, gt_loc, gt_label, sigma, reduce='mean'):
xp = cuda.get_array_module(pred_loc)
in_weight = xp.zeros_like(gt_loc)
# Localization loss is calculated only for positive rois.
in_weight[gt_label > 0] = 1
loc_loss = _smooth_l1_loss_base(pred_loc, gt_loc, in_weight, sigma)
    # Normalize by total number of negative and positive rois.
if reduce == 'mean':
loc_loss = F.sum(loc_loss) / xp.sum(gt_label >= 0)
elif reduce != 'no':
        warnings.warn('unknown reduce option: {}'.format(reduce))
return loc_loss
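# Hedged scalar reference for the smooth L1 kernel in _smooth_l1_loss_base
# above; the default sigma is an assumption for illustration.
def _demo_smooth_l1(x, t, sigma=1.0):
    sigma2 = sigma ** 2
    diff = abs(x - t)
    if diff < 1.0 / sigma2:
        return 0.5 * sigma2 * diff ** 2  # quadratic near zero
    return diff - 0.5 / sigma2  # linear in the tails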
|
import time
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from snmpraw import SNMPRawCollector
from diamond.collector import Collector
###############################################################################
class TestSNMPRawCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('SNMPRawCollector', {
})
self.collector = SNMPRawCollector(config, None)
def test_import(self):
self.assertTrue(SNMPRawCollector)
@patch.object(Collector, 'publish_metric')
@patch.object(time, 'time', Mock(return_value=1000))
@patch.object(SNMPRawCollector, '_get_value', Mock(return_value=5))
def test_metric(self, collect_mock):
test_config = {'devices': {'test': {'oids': {'1.1.1.1': 'test'}}}}
self.collector.config.update(test_config)
path = '.'.join([self.collector.config['path_prefix'], 'test',
self.collector.config['path_suffix'], 'test'])
self.collector.collect_snmp('test', None, None, None)
metric = collect_mock.call_args[0][0]
self.assertEqual(metric.metric_type, 'GAUGE')
self.assertEqual(metric.ttl, None)
self.assertEqual(metric.value, self.collector._get_value())
self.assertEqual(metric.precision, self.collector._precision(5))
self.assertEqual(metric.host, None)
self.assertEqual(metric.path, path)
self.assertEqual(metric.timestamp, 1000)
###############################################################################
if __name__ == "__main__":
unittest.main()
|
import logging
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_DOOR,
DEVICE_CLASS_GAS,
DEVICE_CLASS_SMOKE,
BinarySensorEntity,
)
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, entry, async_add_entities) -> None:
"""Set up TotalConnect device sensors based on a config entry."""
sensors = []
client_locations = hass.data[DOMAIN][entry.entry_id].locations
for location_id, location in client_locations.items():
for zone_id, zone in location.zones.items():
sensors.append(TotalConnectBinarySensor(zone_id, location_id, zone))
async_add_entities(sensors, True)
class TotalConnectBinarySensor(BinarySensorEntity):
"""Represent an TotalConnect zone."""
def __init__(self, zone_id, location_id, zone):
"""Initialize the TotalConnect status."""
self._zone_id = zone_id
self._location_id = location_id
self._zone = zone
self._name = self._zone.description
self._unique_id = f"{location_id} {zone_id}"
self._is_on = None
self._is_tampered = None
self._is_low_battery = None
@property
def unique_id(self):
"""Return the unique id."""
return self._unique_id
@property
def name(self):
"""Return the name of the device."""
return self._name
    def update(self):
        """Update the state of the device."""
        self._is_tampered = self._zone.is_tampered()
        self._is_low_battery = self._zone.is_low_battery()
        self._is_on = self._zone.is_faulted() or self._zone.is_triggered()
@property
def is_on(self):
"""Return true if the binary sensor is on."""
return self._is_on
@property
def device_class(self):
"""Return the class of this device, from component DEVICE_CLASSES."""
if self._zone.is_type_security():
return DEVICE_CLASS_DOOR
if self._zone.is_type_fire():
return DEVICE_CLASS_SMOKE
if self._zone.is_type_carbon_monoxide():
return DEVICE_CLASS_GAS
return None
@property
def device_state_attributes(self):
"""Return the state attributes."""
attributes = {
"zone_id": self._zone_id,
"location_id": self._location_id,
"low_battery": self._is_low_battery,
"tampered": self._is_tampered,
}
return attributes
|
import asyncio
from datetime import timedelta
import logging
from adext import AdExt
from alarmdecoder.devices import SerialDevice, SocketDevice
from alarmdecoder.util import NoDeviceError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_HOST,
CONF_PORT,
CONF_PROTOCOL,
EVENT_HOMEASSISTANT_STOP,
)
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.util import dt as dt_util
from .const import (
CONF_DEVICE_BAUD,
CONF_DEVICE_PATH,
DATA_AD,
DATA_REMOVE_STOP_LISTENER,
DATA_REMOVE_UPDATE_LISTENER,
DATA_RESTART,
DOMAIN,
PROTOCOL_SERIAL,
PROTOCOL_SOCKET,
SIGNAL_PANEL_MESSAGE,
SIGNAL_REL_MESSAGE,
SIGNAL_RFX_MESSAGE,
SIGNAL_ZONE_FAULT,
SIGNAL_ZONE_RESTORE,
)
_LOGGER = logging.getLogger(__name__)
PLATFORMS = ["alarm_control_panel", "sensor", "binary_sensor"]
async def async_setup(hass, config):
"""Set up for the AlarmDecoder devices."""
return True
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool:
    """Set up AlarmDecoder from a config entry."""
undo_listener = entry.add_update_listener(_update_listener)
ad_connection = entry.data
protocol = ad_connection[CONF_PROTOCOL]
def stop_alarmdecoder(event):
"""Handle the shutdown of AlarmDecoder."""
if not hass.data.get(DOMAIN):
return
_LOGGER.debug("Shutting down alarmdecoder")
hass.data[DOMAIN][entry.entry_id][DATA_RESTART] = False
controller.close()
async def open_connection(now=None):
"""Open a connection to AlarmDecoder."""
try:
await hass.async_add_executor_job(controller.open, baud)
except NoDeviceError:
_LOGGER.debug("Failed to connect. Retrying in 5 seconds")
hass.helpers.event.async_track_point_in_time(
open_connection, dt_util.utcnow() + timedelta(seconds=5)
)
return
_LOGGER.debug("Established a connection with the alarmdecoder")
hass.data[DOMAIN][entry.entry_id][DATA_RESTART] = True
def handle_closed_connection(event):
"""Restart after unexpected loss of connection."""
if not hass.data[DOMAIN][entry.entry_id][DATA_RESTART]:
return
hass.data[DOMAIN][entry.entry_id][DATA_RESTART] = False
_LOGGER.warning("AlarmDecoder unexpectedly lost connection")
hass.add_job(open_connection)
def handle_message(sender, message):
"""Handle message from AlarmDecoder."""
hass.helpers.dispatcher.dispatcher_send(SIGNAL_PANEL_MESSAGE, message)
def handle_rfx_message(sender, message):
"""Handle RFX message from AlarmDecoder."""
hass.helpers.dispatcher.dispatcher_send(SIGNAL_RFX_MESSAGE, message)
def zone_fault_callback(sender, zone):
"""Handle zone fault from AlarmDecoder."""
hass.helpers.dispatcher.dispatcher_send(SIGNAL_ZONE_FAULT, zone)
def zone_restore_callback(sender, zone):
"""Handle zone restore from AlarmDecoder."""
hass.helpers.dispatcher.dispatcher_send(SIGNAL_ZONE_RESTORE, zone)
def handle_rel_message(sender, message):
"""Handle relay or zone expander message from AlarmDecoder."""
hass.helpers.dispatcher.dispatcher_send(SIGNAL_REL_MESSAGE, message)
baud = ad_connection.get(CONF_DEVICE_BAUD)
if protocol == PROTOCOL_SOCKET:
host = ad_connection[CONF_HOST]
port = ad_connection[CONF_PORT]
controller = AdExt(SocketDevice(interface=(host, port)))
if protocol == PROTOCOL_SERIAL:
path = ad_connection[CONF_DEVICE_PATH]
controller = AdExt(SerialDevice(interface=path))
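    # Only PROTOCOL_SOCKET and PROTOCOL_SERIAL are handled above; this assumes
    # the config flow never stores any other protocol value, otherwise
    # `controller` would be unbound past this point.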
controller.on_message += handle_message
controller.on_rfx_message += handle_rfx_message
controller.on_zone_fault += zone_fault_callback
controller.on_zone_restore += zone_restore_callback
controller.on_close += handle_closed_connection
controller.on_expander_message += handle_rel_message
remove_stop_listener = hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_STOP, stop_alarmdecoder
)
hass.data.setdefault(DOMAIN, {})
hass.data[DOMAIN][entry.entry_id] = {
DATA_AD: controller,
DATA_REMOVE_UPDATE_LISTENER: undo_listener,
DATA_REMOVE_STOP_LISTENER: remove_stop_listener,
DATA_RESTART: False,
}
await open_connection()
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
return True
async def async_unload_entry(hass: HomeAssistantType, entry: ConfigEntry):
    """Unload an AlarmDecoder entry."""
hass.data[DOMAIN][entry.entry_id][DATA_RESTART] = False
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
if not unload_ok:
return False
hass.data[DOMAIN][entry.entry_id][DATA_REMOVE_UPDATE_LISTENER]()
hass.data[DOMAIN][entry.entry_id][DATA_REMOVE_STOP_LISTENER]()
await hass.async_add_executor_job(hass.data[DOMAIN][entry.entry_id][DATA_AD].close)
if hass.data[DOMAIN][entry.entry_id]:
hass.data[DOMAIN].pop(entry.entry_id)
if not hass.data[DOMAIN]:
hass.data.pop(DOMAIN)
return True
async def _update_listener(hass: HomeAssistantType, entry: ConfigEntry):
"""Handle options update."""
_LOGGER.debug("AlarmDecoder options updated: %s", entry.as_dict()["options"])
await hass.config_entries.async_reload(entry.entry_id)
|
import fnmatch
import logging
import os
import signal
import sys
from configparser import RawConfigParser, NoOptionError, NoSectionError
from threading import Thread
from netort.resource import manager as resource_manager
from pkg_resources import resource_filename
from yandextank.common.util import Cleanup, Finish, Status
from yandextank.core.tankworker import TankWorker, is_ini
logger = logging.getLogger()
class RealConsoleMarkup(object):
'''
Took colors from here: https://www.siafoo.net/snippet/88
'''
WHITE_ON_BLACK = '\033[37;40m'
TOTAL_RESET = '\033[0m'
clear = "\x1b[2J\x1b[H"
new_line = "\n"
YELLOW = '\033[1;33m'
RED = '\033[1;31m'
RED_DARK = '\033[31;3m'
RESET = '\033[1;m'
CYAN = "\033[1;36m"
GREEN = "\033[1;32m"
WHITE = "\033[1;37m"
MAGENTA = '\033[1;35m'
BG_MAGENTA = '\033[1;45m'
BG_GREEN = '\033[1;42m'
BG_BROWN = '\033[1;43m'
BG_CYAN = '\033[1;46m'
def clean_markup(self, orig_str):
''' clean markup from string '''
for val in [
self.YELLOW, self.RED, self.RESET, self.CYAN, self.BG_MAGENTA,
self.WHITE, self.BG_GREEN, self.GREEN, self.BG_BROWN,
self.RED_DARK, self.MAGENTA, self.BG_CYAN
]:
orig_str = orig_str.replace(val, '')
return orig_str
def signal_handler(sig, frame):
    """ Required so that non-tty python runs can be interrupted """
raise KeyboardInterrupt()
signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)
def apply_shorthand_options(config, options, default_section='DEFAULT'):
"""
:type config: ConfigParser
"""
if not options:
return config
for option_str in options:
key, value = option_str.split('=')
try:
section, option = key.split('.')
except ValueError:
section = default_section
option = key
if not config.has_section(section):
config.add_section(section)
config.set(section, option, value)
return config
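# Usage sketch (hypothetical option strings): keys of the form "section.option"
# are routed to their section, anything else goes to default_section:
#   cfg = apply_shorthand_options(RawConfigParser(), ['tank.artifacts_base_dir=/tmp'])
#   # -> sets artifacts_base_dir=/tmp in the [tank] section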
def load_ini_cfgs(config_files):
config_filenames = [resource_manager.resource_filename(config) for config in config_files]
cfg = RawConfigParser()
cfg.read(config_filenames)
dotted_options = []
if cfg.has_section('tank'):
for option, value in cfg.items('tank'):
if '.' in option:
dotted_options += [option + '=' + value]
else:
cfg.add_section('tank')
cfg = apply_shorthand_options(cfg, dotted_options)
cfg.set('tank', 'pid', str(os.getpid()))
return cfg
def get_default_configs():
""" returns default configs list, from /etc and home dir """
# initialize basic defaults
configs = [resource_filename(__name__, 'config/00-base.ini')]
baseconfigs_location = '/etc/yandex-tank'
try:
conf_files = sorted(os.listdir(baseconfigs_location))
for filename in conf_files:
if fnmatch.fnmatch(filename, '*.ini'):
configs += [
os.path.realpath(
baseconfigs_location + os.sep + filename)
]
except OSError:
logger.info(
baseconfigs_location + ' is not accessible to get configs list')
configs += [os.path.expanduser('~/.yandex-tank')]
return configs
def get_depr_cfg(config_files, no_rc, cmd_options, depr_options):
try:
all_config_files = []
if not no_rc:
all_config_files = get_default_configs()
if not config_files:
if os.path.exists(os.path.realpath('load.ini')):
all_config_files += [os.path.realpath('load.ini')]
elif os.path.exists(os.path.realpath('load.conf')):
# just for old 'lunapark' compatibility
conf_file = os.path.realpath('load.conf')
all_config_files += [conf_file]
else:
for config_file in config_files:
all_config_files.append(config_file)
cfg_ini = load_ini_cfgs([cfg_file for cfg_file in all_config_files if is_ini(cfg_file)])
# substitute telegraf config
def patch_ini_config_with_monitoring(ini_config, mon_section_name):
"""
:type ini_config: ConfigParser
"""
CONFIG = 'config'
telegraf_cfg = ini_config.get(mon_section_name, CONFIG)
if not telegraf_cfg.startswith('<') and not telegraf_cfg.lower() == 'auto':
with open(resource_manager.resource_filename(telegraf_cfg), 'rb') as telegraf_cfg_file:
config_contents = telegraf_cfg_file.read()
ini_config.set(mon_section_name, CONFIG, config_contents)
return ini_config
try:
cfg_ini = patch_ini_config_with_monitoring(cfg_ini, 'monitoring')
except (NoSectionError, NoOptionError):
try:
patch_ini_config_with_monitoring(cfg_ini, 'telegraf')
except (NoOptionError, NoSectionError):
pass
for section, key, value in depr_options:
if not cfg_ini.has_section(section):
cfg_ini.add_section(section)
cfg_ini.set(section, key, value)
return apply_shorthand_options(cfg_ini, cmd_options)
    except Exception as ex:
        sys.stderr.write(RealConsoleMarkup.RED)
        sys.stderr.write('%s\n' % ex)
        sys.stderr.write(RealConsoleMarkup.RESET)
        sys.stderr.write(RealConsoleMarkup.TOTAL_RESET)
        raise ex
class ConsoleWorker(Thread, TankWorker):
def __init__(self, configs, cli_options=None, cfg_patches=None, cli_args=None, no_local=False,
log_handlers=None, wait_lock=False, files=None, ammo_file=None, debug=False):
Thread.__init__(self)
TankWorker.__init__(self, configs=configs, cli_options=cli_options, cfg_patches=cfg_patches,
cli_args=cli_args, no_local=no_local, log_handlers=log_handlers,
wait_lock=wait_lock, files=files, ammo_file=ammo_file, debug=debug)
self.daemon = True
self.status = Status.TEST_INITIATED
self.test_id = self.core.test_id
self.retcode = None
self.msg = ''
def run(self):
with Cleanup(self) as add_cleanup:
lock = self.get_lock()
add_cleanup('release lock', lock.release)
self.status = Status.TEST_PREPARING
            logger.info('Created a folder for the test: %s' % self.folder)
self.core.plugins_configure()
add_cleanup('plugins cleanup', self.core.plugins_cleanup)
self.core.plugins_prepare_test()
with Finish(self):
self.status = Status.TEST_RUNNING
self.core.plugins_start_test()
self.retcode = self.core.wait_for_finish()
self.status = Status.TEST_POST_PROCESS
self.retcode = self.core.plugins_post_process(self.retcode)
class DevNullOpts:
def __init__(self):
pass
log = "/dev/null"
|
import time
import CloudFlare
from flask import current_app
def cf_api_call():
cf_key = current_app.config.get("ACME_CLOUDFLARE_KEY", "")
cf_email = current_app.config.get("ACME_CLOUDFLARE_EMAIL", "")
return CloudFlare.CloudFlare(email=cf_email, token=cf_key)
def find_zone_id(host):
elements = host.split(".")
cf = cf_api_call()
    zone = []
    n = 1
while n < 5:
n = n + 1
domain = ".".join(elements[-n:])
current_app.logger.debug("Trying to get ID for zone {0}".format(domain))
try:
zone = cf.zones.get(params={"name": domain, "per_page": 1})
except Exception as e:
            current_app.logger.error("Cloudflare API error: %s" % e)
            continue
if len(zone) == 1:
break
if len(zone) == 0:
current_app.logger.error("No zone found")
return
else:
return zone[0]["id"]
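# Walk-through (hypothetical host): for "_acme-challenge.www.example.com" the
# loop first queries the API for "example.com" (n=2), then widens to
# "www.example.com", stopping as soon as exactly one zone matches.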
def wait_for_dns_change(change_id, account_number=None):
cf = cf_api_call()
zone_id, record_id = change_id
while True:
r = cf.zones.get(zone_id, record_id)
current_app.logger.debug("Record status: %s" % r["status"])
if r["status"] == "active":
break
time.sleep(1)
return
def create_txt_record(host, value, account_number):
cf = cf_api_call()
zone_id = find_zone_id(host)
if not zone_id:
return
txt_record = {"name": host, "type": "TXT", "content": value}
current_app.logger.debug(
"Creating TXT record {0} with value {1}".format(host, value)
)
try:
r = cf.zones.dns_records.post(zone_id, data=txt_record)
    except Exception as e:
        current_app.logger.error(
            "/zones.dns_records.post %s: %s" % (txt_record["name"], e)
        )
        raise
return zone_id, r["id"]
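# The (zone_id, record_id) pair returned above is the change_id tuple that
# wait_for_dns_change() and delete_txt_record() unpack later on.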
def delete_txt_record(change_ids, account_number, host, value):
cf = cf_api_call()
for change_id in change_ids:
zone_id, record_id = change_id
current_app.logger.debug("Removing record with id {0}".format(record_id))
try:
cf.zones.dns_records.delete(zone_id, record_id)
except Exception as e:
            current_app.logger.error("/zones.dns_records.delete: %s" % e)
|
from homeassistant.components.homematicip_cloud import DOMAIN as HMIPC_DOMAIN
from homeassistant.components.homematicip_cloud.generic_entity import (
ATTR_GROUP_MEMBER_UNREACHABLE,
)
from homeassistant.components.switch import (
ATTR_CURRENT_POWER_W,
ATTR_TODAY_ENERGY_KWH,
DOMAIN as SWITCH_DOMAIN,
)
from homeassistant.const import STATE_OFF, STATE_ON
from homeassistant.setup import async_setup_component
from .helper import async_manipulate_test_data, get_and_check_entity_basics
async def test_manually_configured_platform(hass):
"""Test that we do not set up an access point."""
assert await async_setup_component(
hass, SWITCH_DOMAIN, {SWITCH_DOMAIN: {"platform": HMIPC_DOMAIN}}
)
assert not hass.data.get(HMIPC_DOMAIN)
async def test_hmip_switch(hass, default_mock_hap_factory):
"""Test HomematicipSwitch."""
entity_id = "switch.schrank"
entity_name = "Schrank"
device_model = "HMIP-PS"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=[entity_name]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == STATE_ON
service_call_counter = len(hmip_device.mock_calls)
await hass.services.async_call(
"switch", "turn_off", {"entity_id": entity_id}, blocking=True
)
assert len(hmip_device.mock_calls) == service_call_counter + 1
assert hmip_device.mock_calls[-1][0] == "turn_off"
assert hmip_device.mock_calls[-1][1] == ()
await async_manipulate_test_data(hass, hmip_device, "on", False)
ha_state = hass.states.get(entity_id)
assert ha_state.state == STATE_OFF
await hass.services.async_call(
"switch", "turn_on", {"entity_id": entity_id}, blocking=True
)
assert len(hmip_device.mock_calls) == service_call_counter + 3
assert hmip_device.mock_calls[-1][0] == "turn_on"
assert hmip_device.mock_calls[-1][1] == ()
await async_manipulate_test_data(hass, hmip_device, "on", True)
ha_state = hass.states.get(entity_id)
assert ha_state.state == STATE_ON
async def test_hmip_switch_input(hass, default_mock_hap_factory):
"""Test HomematicipSwitch."""
entity_id = "switch.wohnzimmer_beleuchtung"
entity_name = "Wohnzimmer Beleuchtung"
device_model = "HmIP-FSI16"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=[entity_name]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == STATE_ON
service_call_counter = len(hmip_device.mock_calls)
await hass.services.async_call(
"switch", "turn_off", {"entity_id": entity_id}, blocking=True
)
assert len(hmip_device.mock_calls) == service_call_counter + 1
assert hmip_device.mock_calls[-1][0] == "turn_off"
assert hmip_device.mock_calls[-1][1] == ()
await async_manipulate_test_data(hass, hmip_device, "on", False)
ha_state = hass.states.get(entity_id)
assert ha_state.state == STATE_OFF
await hass.services.async_call(
"switch", "turn_on", {"entity_id": entity_id}, blocking=True
)
assert len(hmip_device.mock_calls) == service_call_counter + 3
assert hmip_device.mock_calls[-1][0] == "turn_on"
assert hmip_device.mock_calls[-1][1] == ()
await async_manipulate_test_data(hass, hmip_device, "on", True)
ha_state = hass.states.get(entity_id)
assert ha_state.state == STATE_ON
async def test_hmip_switch_measuring(hass, default_mock_hap_factory):
"""Test HomematicipSwitchMeasuring."""
entity_id = "switch.pc"
entity_name = "Pc"
device_model = "HMIP-PSM"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=[entity_name]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == STATE_ON
service_call_counter = len(hmip_device.mock_calls)
await hass.services.async_call(
"switch", "turn_off", {"entity_id": entity_id}, blocking=True
)
assert len(hmip_device.mock_calls) == service_call_counter + 1
assert hmip_device.mock_calls[-1][0] == "turn_off"
assert hmip_device.mock_calls[-1][1] == ()
await async_manipulate_test_data(hass, hmip_device, "on", False)
ha_state = hass.states.get(entity_id)
assert ha_state.state == STATE_OFF
await hass.services.async_call(
"switch", "turn_on", {"entity_id": entity_id}, blocking=True
)
assert len(hmip_device.mock_calls) == service_call_counter + 3
assert hmip_device.mock_calls[-1][0] == "turn_on"
assert hmip_device.mock_calls[-1][1] == ()
await async_manipulate_test_data(hass, hmip_device, "on", True)
await async_manipulate_test_data(hass, hmip_device, "currentPowerConsumption", 50)
ha_state = hass.states.get(entity_id)
assert ha_state.state == STATE_ON
assert ha_state.attributes[ATTR_CURRENT_POWER_W] == 50
assert ha_state.attributes[ATTR_TODAY_ENERGY_KWH] == 36
await async_manipulate_test_data(hass, hmip_device, "energyCounter", None)
ha_state = hass.states.get(entity_id)
assert not ha_state.attributes.get(ATTR_TODAY_ENERGY_KWH)
async def test_hmip_group_switch(hass, default_mock_hap_factory):
"""Test HomematicipGroupSwitch."""
entity_id = "switch.strom_group"
entity_name = "Strom Group"
device_model = None
mock_hap = await default_mock_hap_factory.async_get_mock_hap(test_groups=["Strom"])
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == STATE_ON
service_call_counter = len(hmip_device.mock_calls)
await hass.services.async_call(
"switch", "turn_off", {"entity_id": entity_id}, blocking=True
)
assert len(hmip_device.mock_calls) == service_call_counter + 1
assert hmip_device.mock_calls[-1][0] == "turn_off"
assert hmip_device.mock_calls[-1][1] == ()
await async_manipulate_test_data(hass, hmip_device, "on", False)
ha_state = hass.states.get(entity_id)
assert ha_state.state == STATE_OFF
await hass.services.async_call(
"switch", "turn_on", {"entity_id": entity_id}, blocking=True
)
assert len(hmip_device.mock_calls) == service_call_counter + 3
assert hmip_device.mock_calls[-1][0] == "turn_on"
assert hmip_device.mock_calls[-1][1] == ()
await async_manipulate_test_data(hass, hmip_device, "on", True)
ha_state = hass.states.get(entity_id)
assert ha_state.state == STATE_ON
assert not ha_state.attributes.get(ATTR_GROUP_MEMBER_UNREACHABLE)
await async_manipulate_test_data(hass, hmip_device, "unreach", True)
ha_state = hass.states.get(entity_id)
assert ha_state.attributes[ATTR_GROUP_MEMBER_UNREACHABLE]
async def test_hmip_multi_switch(hass, default_mock_hap_factory):
"""Test HomematicipMultiSwitch."""
entity_id = "switch.jalousien_1_kizi_2_schlazi_channel1"
entity_name = "Jalousien - 1 KiZi, 2 SchlaZi Channel1"
device_model = "HmIP-PCBS2"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=[
"Jalousien - 1 KiZi, 2 SchlaZi",
"Multi IO Box",
"Heizungsaktor",
"ioBroker",
]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == STATE_OFF
service_call_counter = len(hmip_device.mock_calls)
await hass.services.async_call(
"switch", "turn_on", {"entity_id": entity_id}, blocking=True
)
assert len(hmip_device.mock_calls) == service_call_counter + 1
assert hmip_device.mock_calls[-1][0] == "turn_on"
assert hmip_device.mock_calls[-1][1] == (1,)
await async_manipulate_test_data(hass, hmip_device, "on", True)
ha_state = hass.states.get(entity_id)
assert ha_state.state == STATE_ON
await hass.services.async_call(
"switch", "turn_off", {"entity_id": entity_id}, blocking=True
)
assert len(hmip_device.mock_calls) == service_call_counter + 3
assert hmip_device.mock_calls[-1][0] == "turn_off"
assert hmip_device.mock_calls[-1][1] == (1,)
await async_manipulate_test_data(hass, hmip_device, "on", False)
ha_state = hass.states.get(entity_id)
assert ha_state.state == STATE_OFF
async def test_hmip_wired_multi_switch(hass, default_mock_hap_factory):
"""Test HomematicipMultiSwitch."""
entity_id = "switch.fernseher_wohnzimmer"
entity_name = "Fernseher (Wohnzimmer)"
device_model = "HmIPW-DRS8"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=[
"Wired Schaltaktor – 8-fach",
]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == STATE_ON
service_call_counter = len(hmip_device.mock_calls)
await hass.services.async_call(
"switch", "turn_off", {"entity_id": entity_id}, blocking=True
)
assert len(hmip_device.mock_calls) == service_call_counter + 1
assert hmip_device.mock_calls[-1][0] == "turn_off"
assert hmip_device.mock_calls[-1][1] == (1,)
await async_manipulate_test_data(hass, hmip_device, "on", False)
ha_state = hass.states.get(entity_id)
assert ha_state.state == STATE_OFF
await hass.services.async_call(
"switch", "turn_on", {"entity_id": entity_id}, blocking=True
)
assert len(hmip_device.mock_calls) == service_call_counter + 3
assert hmip_device.mock_calls[-1][0] == "turn_on"
assert hmip_device.mock_calls[-1][1] == (1,)
await async_manipulate_test_data(hass, hmip_device, "on", True)
ha_state = hass.states.get(entity_id)
assert ha_state.state == STATE_ON
|
from homeassistant import data_entry_flow
from homeassistant.auth import auth_manager_from_config
from homeassistant.components.auth import mfa_setup_flow
from homeassistant.setup import async_setup_component
from tests.common import CLIENT_ID, MockUser, ensure_auth_manager_loaded
async def test_ws_setup_depose_mfa(hass, hass_ws_client):
"""Test set up mfa module for current user."""
hass.auth = await auth_manager_from_config(
hass,
provider_configs=[
{
"type": "insecure_example",
"users": [
{
"username": "test-user",
"password": "test-pass",
"name": "Test Name",
}
],
}
],
module_configs=[
{
"type": "insecure_example",
"id": "example_module",
"data": [{"user_id": "mock-user", "pin": "123456"}],
}
],
)
ensure_auth_manager_loaded(hass.auth)
await async_setup_component(hass, "auth", {"http": {}})
user = MockUser(id="mock-user").add_to_hass(hass)
cred = await hass.auth.auth_providers[0].async_get_or_create_credentials(
{"username": "test-user"}
)
await hass.auth.async_link_user(user, cred)
refresh_token = await hass.auth.async_create_refresh_token(user, CLIENT_ID)
access_token = hass.auth.async_create_access_token(refresh_token)
client = await hass_ws_client(hass, access_token)
await client.send_json({"id": 10, "type": mfa_setup_flow.WS_TYPE_SETUP_MFA})
result = await client.receive_json()
assert result["id"] == 10
assert result["success"] is False
assert result["error"]["code"] == "no_module"
await client.send_json(
{
"id": 11,
"type": mfa_setup_flow.WS_TYPE_SETUP_MFA,
"mfa_module_id": "example_module",
}
)
result = await client.receive_json()
assert result["id"] == 11
assert result["success"]
flow = result["result"]
assert flow["type"] == data_entry_flow.RESULT_TYPE_FORM
assert flow["handler"] == "example_module"
assert flow["step_id"] == "init"
assert flow["data_schema"][0] == {"type": "string", "name": "pin"}
await client.send_json(
{
"id": 12,
"type": mfa_setup_flow.WS_TYPE_SETUP_MFA,
"flow_id": flow["flow_id"],
"user_input": {"pin": "654321"},
}
)
result = await client.receive_json()
assert result["id"] == 12
assert result["success"]
flow = result["result"]
assert flow["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert flow["handler"] == "example_module"
assert flow["data"]["result"] is None
await client.send_json(
{
"id": 13,
"type": mfa_setup_flow.WS_TYPE_DEPOSE_MFA,
"mfa_module_id": "invalid_id",
}
)
result = await client.receive_json()
assert result["id"] == 13
assert result["success"] is False
assert result["error"]["code"] == "disable_failed"
await client.send_json(
{
"id": 14,
"type": mfa_setup_flow.WS_TYPE_DEPOSE_MFA,
"mfa_module_id": "example_module",
}
)
result = await client.receive_json()
assert result["id"] == 14
assert result["success"]
assert result["result"] == "done"
|
from datetime import datetime
from homeassistant.components.homeassistant import (
DOMAIN as HA_DOMAIN,
SERVICE_UPDATE_ENTITY,
)
from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.async_mock import patch
DAY1 = datetime(2017, 1, 1, 1, tzinfo=dt_util.UTC)
DAY2 = datetime(2017, 1, 18, 1, tzinfo=dt_util.UTC)
async def test_moon_day1(hass):
"""Test the Moon sensor."""
config = {"sensor": {"platform": "moon", "name": "moon_day1"}}
await async_setup_component(hass, HA_DOMAIN, {})
assert await async_setup_component(hass, "sensor", config)
await hass.async_block_till_done()
assert hass.states.get("sensor.moon_day1")
with patch(
"homeassistant.components.moon.sensor.dt_util.utcnow", return_value=DAY1
):
await async_update_entity(hass, "sensor.moon_day1")
assert hass.states.get("sensor.moon_day1").state == "waxing_crescent"
async def test_moon_day2(hass):
"""Test the Moon sensor."""
config = {"sensor": {"platform": "moon", "name": "moon_day2"}}
await async_setup_component(hass, HA_DOMAIN, {})
assert await async_setup_component(hass, "sensor", config)
await hass.async_block_till_done()
assert hass.states.get("sensor.moon_day2")
with patch(
"homeassistant.components.moon.sensor.dt_util.utcnow", return_value=DAY2
):
await async_update_entity(hass, "sensor.moon_day2")
assert hass.states.get("sensor.moon_day2").state == "waning_gibbous"
async def async_update_entity(hass, entity_id):
"""Run an update action for an entity."""
await hass.services.async_call(
HA_DOMAIN,
SERVICE_UPDATE_ENTITY,
{ATTR_ENTITY_ID: entity_id},
blocking=True,
)
await hass.async_block_till_done()
|
from datetime import datetime, timedelta
from os import path
import unittest
import pytest
import pytz
from homeassistant import config as hass_config
from homeassistant.components.history_stats import DOMAIN
from homeassistant.components.history_stats.sensor import HistoryStatsSensor
from homeassistant.const import SERVICE_RELOAD, STATE_UNKNOWN
import homeassistant.core as ha
from homeassistant.helpers.template import Template
from homeassistant.setup import async_setup_component, setup_component
import homeassistant.util.dt as dt_util
from tests.async_mock import patch
from tests.common import get_test_home_assistant, init_recorder_component
class TestHistoryStatsSensor(unittest.TestCase):
"""Test the History Statistics sensor."""
def setUp(self):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
self.addCleanup(self.hass.stop)
def test_setup(self):
"""Test the history statistics sensor setup."""
self.init_recorder()
config = {
"history": {},
"sensor": {
"platform": "history_stats",
"entity_id": "binary_sensor.test_id",
"state": "on",
"start": "{{ now().replace(hour=0)"
".replace(minute=0).replace(second=0) }}",
"duration": "02:00",
"name": "Test",
},
}
assert setup_component(self.hass, "sensor", config)
self.hass.block_till_done()
state = self.hass.states.get("sensor.test")
assert state.state == STATE_UNKNOWN
@patch(
"homeassistant.helpers.template.TemplateEnvironment.is_safe_callable",
return_value=True,
)
def test_period_parsing(self, mock):
"""Test the conversion from templates to period."""
now = datetime(2019, 1, 1, 23, 30, 0, tzinfo=pytz.utc)
with patch("homeassistant.util.dt.now", return_value=now):
today = Template(
"{{ now().replace(hour=0).replace(minute=0).replace(second=0) }}",
self.hass,
)
duration = timedelta(hours=2, minutes=1)
sensor1 = HistoryStatsSensor(
self.hass, "test", "on", today, None, duration, "time", "test"
)
sensor2 = HistoryStatsSensor(
self.hass, "test", "on", None, today, duration, "time", "test"
)
sensor1.update_period()
sensor1_start, sensor1_end = sensor1._period
sensor2.update_period()
sensor2_start, sensor2_end = sensor2._period
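            # sensor1 anchors the window at its start template (00:00) and adds
            # the 2:01 duration; sensor2 anchors it at its end template (00:00)
            # and subtracts the duration, so it starts at 21:59 the day before.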
# Start = 00:00:00
assert sensor1_start.hour == 0
assert sensor1_start.minute == 0
assert sensor1_start.second == 0
# End = 02:01:00
assert sensor1_end.hour == 2
assert sensor1_end.minute == 1
assert sensor1_end.second == 0
# Start = 21:59:00
assert sensor2_start.hour == 21
assert sensor2_start.minute == 59
assert sensor2_start.second == 0
# End = 00:00:00
assert sensor2_end.hour == 0
assert sensor2_end.minute == 0
assert sensor2_end.second == 0
def test_measure(self):
"""Test the history statistics sensor measure."""
t0 = dt_util.utcnow() - timedelta(minutes=40)
t1 = t0 + timedelta(minutes=20)
t2 = dt_util.utcnow() - timedelta(minutes=10)
# Start t0 t1 t2 End
# |--20min--|--20min--|--10min--|--10min--|
# |---off---|---on----|---off---|---on----|
fake_states = {
"binary_sensor.test_id": [
ha.State("binary_sensor.test_id", "on", last_changed=t0),
ha.State("binary_sensor.test_id", "off", last_changed=t1),
ha.State("binary_sensor.test_id", "on", last_changed=t2),
]
}
start = Template("{{ as_timestamp(now()) - 3600 }}", self.hass)
end = Template("{{ now() }}", self.hass)
sensor1 = HistoryStatsSensor(
self.hass, "binary_sensor.test_id", "on", start, end, None, "time", "Test"
)
sensor2 = HistoryStatsSensor(
self.hass, "unknown.id", "on", start, end, None, "time", "Test"
)
sensor3 = HistoryStatsSensor(
self.hass, "binary_sensor.test_id", "on", start, end, None, "count", "test"
)
sensor4 = HistoryStatsSensor(
self.hass, "binary_sensor.test_id", "on", start, end, None, "ratio", "test"
)
assert sensor1._type == "time"
assert sensor3._type == "count"
assert sensor4._type == "ratio"
with patch(
"homeassistant.components.history.state_changes_during_period",
return_value=fake_states,
):
with patch("homeassistant.components.history.get_state", return_value=None):
sensor1.update()
sensor2.update()
sensor3.update()
sensor4.update()
assert sensor1.state == 0.5
assert sensor2.state is None
assert sensor3.state == 2
assert sensor4.state == 50
def test_wrong_date(self):
"""Test when start or end value is not a timestamp or a date."""
good = Template("{{ now() }}", self.hass)
bad = Template("{{ TEST }}", self.hass)
sensor1 = HistoryStatsSensor(
self.hass, "test", "on", good, bad, None, "time", "Test"
)
sensor2 = HistoryStatsSensor(
self.hass, "test", "on", bad, good, None, "time", "Test"
)
before_update1 = sensor1._period
before_update2 = sensor2._period
sensor1.update_period()
sensor2.update_period()
assert before_update1 == sensor1._period
assert before_update2 == sensor2._period
def test_wrong_duration(self):
"""Test when duration value is not a timedelta."""
self.init_recorder()
config = {
"history": {},
"sensor": {
"platform": "history_stats",
"entity_id": "binary_sensor.test_id",
"name": "Test",
"state": "on",
"start": "{{ now() }}",
"duration": "TEST",
},
}
setup_component(self.hass, "sensor", config)
assert self.hass.states.get("sensor.test") is None
with pytest.raises(TypeError):
setup_component(self.hass, "sensor", config)()
def test_bad_template(self):
"""Test Exception when the template cannot be parsed."""
bad = Template("{{ x - 12 }}", self.hass) # x is undefined
duration = "01:00"
sensor1 = HistoryStatsSensor(
self.hass, "test", "on", bad, None, duration, "time", "Test"
)
sensor2 = HistoryStatsSensor(
self.hass, "test", "on", None, bad, duration, "time", "Test"
)
before_update1 = sensor1._period
before_update2 = sensor2._period
sensor1.update_period()
sensor2.update_period()
assert before_update1 == sensor1._period
assert before_update2 == sensor2._period
def test_not_enough_arguments(self):
"""Test config when not enough arguments provided."""
self.init_recorder()
config = {
"history": {},
"sensor": {
"platform": "history_stats",
"entity_id": "binary_sensor.test_id",
"name": "Test",
"state": "on",
"start": "{{ now() }}",
},
}
setup_component(self.hass, "sensor", config)
assert self.hass.states.get("sensor.test") is None
with pytest.raises(TypeError):
setup_component(self.hass, "sensor", config)()
def test_too_many_arguments(self):
"""Test config when too many arguments provided."""
self.init_recorder()
config = {
"history": {},
"sensor": {
"platform": "history_stats",
"entity_id": "binary_sensor.test_id",
"name": "Test",
"state": "on",
"start": "{{ as_timestamp(now()) - 3600 }}",
"end": "{{ now() }}",
"duration": "01:00",
},
}
setup_component(self.hass, "sensor", config)
assert self.hass.states.get("sensor.test") is None
with pytest.raises(TypeError):
setup_component(self.hass, "sensor", config)()
def init_recorder(self):
"""Initialize the recorder."""
init_recorder_component(self.hass)
self.hass.start()
async def test_reload(hass):
"""Verify we can reload history_stats sensors."""
await hass.async_add_executor_job(
init_recorder_component, hass
) # force in memory db
hass.state = ha.CoreState.not_running
hass.states.async_set("binary_sensor.test_id", "on")
await async_setup_component(
hass,
"sensor",
{
"sensor": {
"platform": "history_stats",
"entity_id": "binary_sensor.test_id",
"name": "test",
"state": "on",
"start": "{{ as_timestamp(now()) - 3600 }}",
"duration": "01:00",
},
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 2
assert hass.states.get("sensor.test")
yaml_path = path.join(
_get_fixtures_base_path(),
"fixtures",
"history_stats/configuration.yaml",
)
with patch.object(hass_config, "YAML_CONFIG_FILE", yaml_path):
await hass.services.async_call(
DOMAIN,
SERVICE_RELOAD,
{},
blocking=True,
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 2
assert hass.states.get("sensor.test") is None
assert hass.states.get("sensor.second_test")
def _get_fixtures_base_path():
return path.dirname(path.dirname(path.dirname(__file__)))
|
import os
import unittest
import mock
from perfkitbenchmarker.linux_benchmarks import pgbench_benchmark
class PgbenchBenchmarkTestCase(unittest.TestCase):
def setUp(self):
p = mock.patch(pgbench_benchmark.__name__ + '.FLAGS')
p.start()
self.addCleanup(p.stop)
path = os.path.join(os.path.dirname(__file__), '../data',
'pgbench.stderr.txt')
with open(path) as fp:
self.stderr_output = fp.read()
def testMakeSamplesFromOutput(self):
testMetadata = {'foo': 'bar'}
num_clients = 32
num_jobs = 16
expected_tps_metadata = testMetadata.copy()
expected_tps_metadata.update({
'clients': num_clients,
'jobs': num_jobs,
'tps': [7.0, 14.0, 13.0, 14.0, 13.0],
})
expected_latency_metadata = testMetadata.copy()
expected_latency_metadata.update({
'clients': num_clients,
'jobs': num_jobs,
'latency': [435.396, 1038.548, 1055.813, 1123.461, 1358.214],
})
actual = pgbench_benchmark.MakeSamplesFromOutput(
self.stderr_output, num_clients, num_jobs, testMetadata)
self.assertEqual(2, len(actual))
tps_sample = [x for x in actual if x.metric == 'tps_array'][0]
self.assertEqual(tps_sample.value, -1)
self.assertEqual(tps_sample.unit, 'tps')
self.assertDictEqual(tps_sample.metadata, expected_tps_metadata)
latency_sample = [x for x in actual if x.metric == 'latency_array'][0]
self.assertEqual(latency_sample.value, -1)
self.assertEqual(latency_sample.unit, 'ms')
self.assertDictEqual(latency_sample.metadata, expected_latency_metadata)
if __name__ == '__main__':
unittest.main()
|
__docformat__ = "restructuredtext en"
import string
ATTRS_VAL = {
'algos': ('dfs', 'tree', 'minbackward',
'left_to_right', 'right_to_left',
'top_to_bottom', 'bottom_to_top',
'maxdepth', 'maxdepthslow', 'mindepth', 'mindepthslow',
'mindegree', 'minindegree', 'minoutdegree',
'maxdegree', 'maxindegree', 'maxoutdegree'),
'booleans': ('yes', 'no'),
'colors': ('black', 'white', 'blue', 'red', 'green', 'yellow',
'magenta', 'lightgrey',
'cyan', 'darkgrey', 'darkblue', 'darkred', 'darkgreen',
'darkyellow', 'darkmagenta', 'darkcyan', 'gold',
'lightblue', 'lightred', 'lightgreen', 'lightyellow',
'lightmagenta', 'lightcyan', 'lilac', 'turquoise',
'aquamarine', 'khaki', 'purple', 'yellowgreen', 'pink',
'orange', 'orchid'),
'shapes': ('box', 'ellipse', 'rhomb', 'triangle'),
'textmodes': ('center', 'left_justify', 'right_justify'),
'arrowstyles': ('solid', 'line', 'none'),
'linestyles': ('continuous', 'dashed', 'dotted', 'invisible'),
}
# meaning of possible values:
# 0 -> string
# 1 -> int
# list -> value in list
GRAPH_ATTRS = {
'title': 0,
'label': 0,
'color': ATTRS_VAL['colors'],
'textcolor': ATTRS_VAL['colors'],
'bordercolor': ATTRS_VAL['colors'],
'width': 1,
'height': 1,
'borderwidth': 1,
'textmode': ATTRS_VAL['textmodes'],
'shape': ATTRS_VAL['shapes'],
'shrink': 1,
'stretch': 1,
'orientation': ATTRS_VAL['algos'],
'vertical_order': 1,
'horizontal_order': 1,
'xspace': 1,
'yspace': 1,
'layoutalgorithm': ATTRS_VAL['algos'],
'late_edge_labels': ATTRS_VAL['booleans'],
'display_edge_labels': ATTRS_VAL['booleans'],
'dirty_edge_labels': ATTRS_VAL['booleans'],
'finetuning': ATTRS_VAL['booleans'],
'manhattan_edges': ATTRS_VAL['booleans'],
'smanhattan_edges': ATTRS_VAL['booleans'],
'port_sharing': ATTRS_VAL['booleans'],
'edges': ATTRS_VAL['booleans'],
'nodes': ATTRS_VAL['booleans'],
'splines': ATTRS_VAL['booleans'],
}
NODE_ATTRS = {
'title': 0,
'label': 0,
'color': ATTRS_VAL['colors'],
'textcolor': ATTRS_VAL['colors'],
'bordercolor': ATTRS_VAL['colors'],
'width': 1,
'height': 1,
'borderwidth': 1,
'textmode': ATTRS_VAL['textmodes'],
'shape': ATTRS_VAL['shapes'],
'shrink': 1,
'stretch': 1,
'vertical_order': 1,
'horizontal_order': 1,
}
EDGE_ATTRS = {
'sourcename': 0,
'targetname': 0,
'label': 0,
'linestyle': ATTRS_VAL['linestyles'],
'class': 1,
'thickness': 0,
'color': ATTRS_VAL['colors'],
'textcolor': ATTRS_VAL['colors'],
'arrowcolor': ATTRS_VAL['colors'],
'backarrowcolor': ATTRS_VAL['colors'],
'arrowsize': 1,
'backarrowsize': 1,
'arrowstyle': ATTRS_VAL['arrowstyles'],
'backarrowstyle': ATTRS_VAL['arrowstyles'],
'textmode': ATTRS_VAL['textmodes'],
'priority': 1,
'anchor': 1,
'horizontal_order': 1,
}
# Misc utilities ###############################################################
def latin_to_vcg(st):
"""Convert latin characters using vcg escape sequence.
"""
for char in st:
if char not in string.ascii_letters:
            try:
                num = ord(char)
                if num >= 192:
                    st = st.replace(char, r'\fi%d' % num)
            except (TypeError, ValueError):
                pass
return st
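# Example (hypothetical input): latin_to_vcg('café') replaces 'é' (ord 233)
# with the escape '\fi233', yielding 'caf\fi233'.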
class VCGPrinter:
"""A vcg graph writer.
"""
def __init__(self, output_stream):
self._stream = output_stream
self._indent = ''
def open_graph(self, **args):
"""open a vcg graph
"""
self._stream.write('%sgraph:{\n'%self._indent)
self._inc_indent()
self._write_attributes(GRAPH_ATTRS, **args)
def close_graph(self):
"""close a vcg graph
"""
self._dec_indent()
self._stream.write('%s}\n'%self._indent)
def node(self, title, **args):
"""draw a node
"""
self._stream.write('%snode: {title:"%s"' % (self._indent, title))
self._write_attributes(NODE_ATTRS, **args)
self._stream.write('}\n')
def edge(self, from_node, to_node, edge_type='', **args):
"""draw an edge from a node to another.
"""
self._stream.write(
'%s%sedge: {sourcename:"%s" targetname:"%s"' % (
self._indent, edge_type, from_node, to_node))
self._write_attributes(EDGE_ATTRS, **args)
self._stream.write('}\n')
# private ##################################################################
def _write_attributes(self, attributes_dict, **args):
"""write graph, node or edge attributes
"""
for key, value in args.items():
try:
_type = attributes_dict[key]
except KeyError:
raise Exception('''no such attribute %s
possible attributes are %s''' % (key, attributes_dict.keys()))
if not _type:
self._stream.write('%s%s:"%s"\n' % (self._indent, key, value))
elif _type == 1:
self._stream.write('%s%s:%s\n' % (self._indent, key,
int(value)))
elif value in _type:
self._stream.write('%s%s:%s\n' % (self._indent, key, value))
else:
raise Exception('''value %s isn\'t correct for attribute %s
correct values are %s''' % (value, key, _type))
def _inc_indent(self):
"""increment indentation
"""
self._indent = ' %s' % self._indent
def _dec_indent(self):
"""decrement indentation
"""
self._indent = self._indent[:-2]
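# Usage sketch (assumes an io.StringIO or other file-like sink):
#   printer = VCGPrinter(io.StringIO())
#   printer.open_graph(title='deps', layoutalgorithm='dfs')
#   printer.node('a', label='module a')
#   printer.edge('a', 'b', linestyle='dashed')
#   printer.close_graph()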
|
import argparse
import contextlib
import json
import logging
import os
import subprocess
import tempfile
import time
from http.client import HTTPConnection
import requests
import ruamel.yaml as yaml
from paasta_tools.utils import DEFAULT_SOA_CONFIGS_GIT_URL
from paasta_tools.utils import format_git_url
from paasta_tools.utils import load_system_paasta_config
requests_log = logging.getLogger("requests.packages.urllib3")
logging.basicConfig(level=logging.INFO)
log = logging.getLogger()
def parse_args():
parser = argparse.ArgumentParser(description="")
parser.add_argument(
"-s",
"--splunk-creds",
help="Service credentials for Splunk API, user:pass",
dest="splunk_creds",
required=True,
)
parser.add_argument(
"-f",
"--criteria-filter",
help="Filter Splunk search results criteria field. Default: *",
dest="criteria_filter",
required=False,
default="*",
)
parser.add_argument(
"-j",
"--jira-creds",
help="Service credentials for JIRA API, user:pass",
dest="jira_creds",
required=False,
)
parser.add_argument(
"-t",
"--ticket",
help="Create JIRA tickets for every service in corresponding project (not available in bulk mode)",
action="store_true",
dest="ticket",
default=False,
)
parser.add_argument(
"-r",
"--reviews",
help="Guess owners of each service and create reviews automatically",
action="store_true",
dest="create_reviews",
default=False,
)
parser.add_argument(
"-p",
"--publish-reviews",
help="Guess owners of each service and publish reviews automatically",
action="store_true",
dest="publish_reviews",
default=False,
)
parser.add_argument(
"-b",
"--bulk",
help="Patch all services in the report with only one code review",
action="store_true",
dest="bulk",
default=False,
)
parser.add_argument(
"--app",
help="Splunk app of the CSV file",
default="yelp_performance",
required=False,
dest="splunk_app",
)
parser.add_argument(
"-c",
"--csv-report",
help="Splunk csv file from which to pull data.",
required=True,
dest="csv_report",
)
parser.add_argument(
"-y",
"--yelpsoa-configs-dir",
        help="Use provided existing yelpsoa-configs instead of cloning the repo in a temporary dir. Only available with the -b option.",
dest="YELPSOA_DIR",
required=False,
)
parser.add_argument(
"-l",
"--local",
help="Do not create a branch. Implies -y and -b.",
action="store_true",
dest="no_branch",
default=False,
)
parser.add_argument(
"-v", "--verbose", help="Debug mode.", action="store_true", dest="verbose",
)
return parser.parse_args()
def tempdir():
tmp = tempfile.TemporaryDirectory(prefix="repo", dir="/nail/tmp")
log.debug(f"Created temp directory: {tmp.name}")
return tmp
@contextlib.contextmanager
def cwd(path):
pwd = os.getcwd()
os.chdir(path)
log.debug(f"Switching from directory {pwd} to {path}")
try:
yield
finally:
log.debug(f"Switching back from directory {path} to {pwd}")
os.chdir(pwd)
def get_report_from_splunk(creds, app, filename, criteria_filter):
""" Expect a table containing at least the following fields:
criteria (<service> [marathon|kubernetes]-<cluster_name> <instance>)
service_owner (Optional)
project (Required to create tickets)
estimated_monthly_savings (Optional)
search_time (Unix time)
one of the following pairs:
- current_cpus
suggested_cpus
- current_mem
suggested_mem
- current_disk
suggested_disk
"""
url = f"https://splunk-api.yelpcorp.com/servicesNS/nobody/{app}/search/jobs/export"
search = (
'| inputlookup {filename} | search criteria="{criteria_filter}"'
'| eval _time = search_time | where _time > relative_time(now(),"-7d")'
).format(filename=filename, criteria_filter=criteria_filter)
log.debug(f"Sending this query to Splunk: {search}\n")
data = {"output_mode": "json", "search": search}
creds = creds.split(":")
resp = requests.post(url, data=data, auth=(creds[0], creds[1]))
resp_text = resp.text.split("\n")
log.info("Found {} services to rightsize".format(len(resp_text) - 1))
resp_text = [x for x in resp_text if x]
resp_text = [json.loads(x) for x in resp_text]
services_to_update = {}
for d in resp_text:
if "result" not in d:
raise ValueError(f"Splunk request didn't return any results: {resp_text}")
criteria = d["result"]["criteria"]
serv = {}
serv["service"] = criteria.split(" ")[0]
serv["cluster"] = criteria.split(" ")[1]
serv["instance"] = criteria.split(" ")[2]
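        # e.g. a criteria of "myservice marathon-norcal-prod main" yields
        # service="myservice", cluster="marathon-norcal-prod", instance="main"
        # (hypothetical values, following the format documented above)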
serv["owner"] = d["result"].get("service_owner", "Unavailable")
serv["date"] = d["result"]["_time"].split(" ")[0]
serv["money"] = d["result"].get("estimated_monthly_savings", 0)
serv["project"] = d["result"].get("project", "Unavailable")
serv["cpus"] = d["result"].get("suggested_cpus")
serv["old_cpus"] = d["result"].get("current_cpus")
serv["mem"] = d["result"].get("suggested_mem")
serv["old_mem"] = d["result"].get("current_mem")
serv["disk"] = d["result"].get("suggested_disk")
serv["old_disk"] = d["result"].get("current_disk")
serv["hacheck_cpus"] = d["result"].get("suggested_hacheck_cpus")
services_to_update[criteria] = serv
return {
"search": search,
"results": services_to_update,
}
def clone_in(target_dir, system_paasta_config=None):
if not system_paasta_config:
system_paasta_config = load_system_paasta_config()
repo_config = system_paasta_config.get_git_repo_config("yelpsoa-configs")
remote = format_git_url(
system_paasta_config.get_git_config()["git_user"],
repo_config.get("git_server", DEFAULT_SOA_CONFIGS_GIT_URL),
repo_config["repo_name"],
)
subprocess.check_call(("git", "clone", remote, target_dir))
def create_branch(branch_name):
subprocess.check_call(("git", "checkout", "master"))
subprocess.check_call(("git", "checkout", "-b", branch_name))
def bulk_commit(filenames, originating_search):
message = f"Rightsizer bulk update\n\nSplunk search:\n{originating_search}"
subprocess.check_call(["git", "add"] + filenames)
subprocess.check_call(("git", "commit", "-n", "-m", message))
def bulk_review(filenames, originating_search, publish=False):
reviewers = set(get_reviewers_in_group("right-sizer"))
for filename in filenames:
reviewers = reviewers.union(get_reviewers(filename))
reviewers_arg = " ".join(list(reviewers))
summary = "Rightsizer bulk update"
description = (
"This is an automated bulk review. It will be shipped automatically if a primary reviewer gives a shipit. If you think this should not be shipped, talk to one of the primary reviewers. \n\n"
"This review is based on results from the following Splunk search:\n"
f"{originating_search}"
)
review_cmd = [
"review-branch",
f"--summary={summary}",
f"--description={description}",
"--reviewers",
reviewers_arg,
"--server",
"https://reviewboard.yelpcorp.com",
]
if publish:
review_cmd.append("-p")
subprocess.check_call(review_cmd)
def commit(filename, serv):
message = "Updating {} for {}provisioned cpu from {} to {} cpus".format(
filename, serv["state"], serv["old_cpus"], serv["cpus"]
)
log.debug(f"Commit {filename} with the following message: {message}")
subprocess.check_call(("git", "add", filename))
subprocess.check_call(("git", "commit", "-n", "-m", message))
def get_reviewers_in_group(group_name):
"""Using rbt's target-groups argument overrides our configured default review groups.
So we'll expand the group into usernames and pass those users in the group individually.
"""
rightsizer_reviewers = json.loads(
subprocess.check_output(
(
"rbt",
"api-get",
"--server",
"https://reviewboard.yelpcorp.com",
f"groups/{group_name}/users/",
)
).decode("UTF-8")
)
return [user.get("username", "") for user in rightsizer_reviewers.get("users", {})]
def get_reviewers(filename):
recent_authors = set()
authors = (
subprocess.check_output(("git", "log", "--format=%ae", "--", filename))
.decode("UTF-8")
.splitlines()
)
authors = [x.split("@")[0] for x in authors]
for author in authors:
if "no-reply" in author:
continue
recent_authors.add(author)
if len(recent_authors) >= 3:
break
return recent_authors
def review(filename, summary, description, publish_review):
all_reviewers = get_reviewers(filename).union(get_reviewers_in_group("right-sizer"))
reviewers_arg = " ".join(all_reviewers)
publish_arg = "-p" if publish_review is True else "-d"
subprocess.check_call(
(
"review-branch",
f"--summary={summary}",
f"--description={description}",
publish_arg,
"--reviewers",
reviewers_arg,
"--server",
"https://reviewboard.yelpcorp.com",
)
)
def edit_soa_configs(filename, instance, cpu, mem, disk):
if not os.path.exists(filename):
filename = filename.replace("marathon", "kubernetes")
if os.path.islink(filename):
real_filename = os.path.realpath(filename)
os.remove(filename)
else:
real_filename = filename
try:
with open(real_filename, "r") as fi:
yams = fi.read()
yams = yams.replace("cpus: .", "cpus: 0.")
data = yaml.round_trip_load(yams, preserve_quotes=True)
instdict = data[instance]
if cpu:
instdict["cpus"] = float(cpu)
if mem:
mem = max(128, round(float(mem)))
instdict["mem"] = mem
if disk:
instdict["disk"] = round(float(disk))
out = yaml.round_trip_dump(data, width=120)
with open(filename, "w") as fi:
fi.write(out)
except FileNotFoundError:
log.exception(f"Could not find {filename}")
except KeyError:
log.exception(f"Error in {filename}. Will continue")
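# Sketch of the effect (hypothetical YAML): with instance "main", cpu=0.5 and
# mem=300, {"main": {"cpus": 1.0, "mem": 1024}} becomes
# {"main": {"cpus": 0.5, "mem": 300}}; mem is never set below 128.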
def create_jira_ticket(serv, creds, description, JIRA):
creds = creds.split(":")
options = {"server": "https://jira.yelpcorp.com"}
jira_cli = JIRA(options=options, basic_auth=(creds[0], creds[1])) # noqa: F821
jira_ticket = {}
# Sometimes a project has required fields we can't predict
try:
jira_ticket = {
"project": {"key": serv["project"]},
"description": description,
"issuetype": {"name": "Improvement"},
"labels": ["perf-watching", "paasta-rightsizer"],
"summary": "{s}.{i} in {c} may be {o}provisioned".format(
s=serv["service"],
i=serv["instance"],
c=serv["cluster"],
o=serv["state"],
),
}
tick = jira_cli.create_issue(fields=jira_ticket)
except Exception:
jira_ticket["project"] = {"key": "PEOBS"}
jira_ticket["labels"].append(serv["service"])
tick = jira_cli.create_issue(fields=jira_ticket)
return tick.key
def _get_dashboard_qs_param(param, value):
# Some dashboards may ask for query string params like param=value, but not this provider.
return f"variables%5B%5D={param}%3D{param}:{value}"
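# e.g. _get_dashboard_qs_param("paasta_cluster", "norcal-prod") returns
# "variables%5B%5D=paasta_cluster%3Dpaasta_cluster:norcal-prod"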
def generate_ticket_content(serv):
cpus = float(serv["cpus"])
provisioned_state = "over"
if cpus > float(serv["old_cpus"]):
provisioned_state = "under"
serv["state"] = provisioned_state
ticket_desc = (
"This ticket and CR have been auto-generated to help keep PaaSTA right-sized."
"\nPEOBS will review this CR and give a shipit. Then an ops deputy from your team can merge"
" if these values look good for your service after review."
"\nOpen an issue with any concerns and someone from PEOBS will respond."
"\nWe suspect that {s}.{i} in {c} may have been {o}-provisioned"
" during the 1 week prior to {d}. It initially had {x} cpus, but based on the below dashboard,"
" we recommend {y} cpus."
"\n- Dashboard: https://y.yelpcorp.com/{o}provisioned?{cluster_param}&{service_param}&{instance_param}"
"\n- Service owner: {n}"
"\n- Estimated monthly excess cost: ${m}"
"\n\nFor more information and sizing examples for larger services:"
"\n- Runbook: https://y.yelpcorp.com/rb-provisioning-alert"
"\n- Alert owner: [email protected]"
).format(
s=serv["service"],
c=serv["cluster"],
i=serv["instance"],
o=provisioned_state,
d=serv["date"],
n=serv["owner"],
m=serv["money"],
x=serv["old_cpus"],
y=serv["cpus"],
cluster_param=_get_dashboard_qs_param(
"paasta_cluster", serv["cluster"].replace("marathon-", "")
),
service_param=_get_dashboard_qs_param("paasta_service", serv["service"]),
instance_param=_get_dashboard_qs_param("paasta_instance", serv["instance"]),
)
summary = f"Rightsizing {serv['service']}.{serv['instance']} in {serv['cluster']} to make it not have {provisioned_state}-provisioned cpu" # noqa: E501
return (summary, ticket_desc)
def bulk_rightsize(report, create_code_review, publish_code_review, create_new_branch):
if create_new_branch:
branch = "rightsize-bulk-{}".format(int(time.time()))
create_branch(branch)
filenames = []
for _, serv in report["results"].items():
filename = "{}/{}.yaml".format(serv["service"], serv["cluster"])
filenames.append(filename)
cpus = serv.get("cpus", None)
mem = serv.get("mem", None)
disk = serv.get("disk", None)
edit_soa_configs(filename, serv["instance"], cpus, mem, disk)
if create_code_review:
bulk_commit(filenames, report["search"])
bulk_review(filenames, report["search"], publish_code_review)
def individual_rightsize(
report, create_tickets, jira_creds, create_review, publish_review, JIRA
):
for _, serv in report["results"].items():
filename = "{}/{}.yaml".format(serv["service"], serv["cluster"])
summary, ticket_desc = generate_ticket_content(serv)
if create_tickets is True:
branch = create_jira_ticket(serv, jira_creds, ticket_desc, JIRA)
else:
branch = "rightsize-{}".format(int(time.time() * 1000))
create_branch(branch)
cpus = serv.get("cpus", None)
mem = serv.get("mem", None)
disk = serv.get("disk", None)
edit_soa_configs(filename, serv["instance"], cpus, mem, disk)
try:
commit(filename, serv)
if create_review:
review(filename, summary, ticket_desc, publish_review)
except Exception:
log.exception(
                (
                    "\nUnable to push changes to {f}. Check if {f} conforms to "
                    "yelpsoa-configs yaml rules. No review created. To see the "
                    "cpu suggestion for this service check {t}."
).format(f=filename, t=branch)
)
continue
def main():
args = parse_args()
if args.verbose:
log.setLevel(logging.DEBUG)
requests_log.setLevel(logging.DEBUG)
HTTPConnection.debuglevel = 2
requests_log.propagate = True
# Safety checks
if args.no_branch and not args.YELPSOA_DIR:
log.error(
"You must specify --yelpsoa-configs-dir to work on if you use the --local option"
)
return False
if args.ticket:
if not args.jira_creds:
raise ValueError("No JIRA creds specified")
        # Only import the jira module if we need to
from jira.client import JIRA # noqa: F401
else:
JIRA = None
report = get_report_from_splunk(
args.splunk_creds, args.splunk_app, args.csv_report, args.criteria_filter
)
tmpdir = tempdir() # Create a tmp dir even if we are not using it
working_dir = args.YELPSOA_DIR
system_paasta_config = load_system_paasta_config()
if working_dir is None:
# Working in a temporary directory
working_dir = os.path.join("rightsizer", tmpdir.name)
clone_in(working_dir, system_paasta_config=system_paasta_config)
with cwd(working_dir):
if args.bulk or args.no_branch:
log.info("Running in bulk mode")
bulk_rightsize(
report, args.create_reviews, args.publish_reviews, not args.no_branch
)
else:
individual_rightsize(
report,
args.ticket,
args.jira_creds,
args.create_reviews,
args.publish_reviews,
JIRA,
)
    tmpdir.cleanup()  # Clean up the temp dir if one was used
if __name__ == "__main__":
main()
|
import re
from homeassistant import config_entries, data_entry_flow, setup
from homeassistant.components import ssdp
from homeassistant.components.syncthru.config_flow import SyncThru
from homeassistant.components.syncthru.const import DOMAIN
from homeassistant.const import CONF_NAME, CONF_URL
from tests.async_mock import patch
from tests.common import MockConfigEntry, mock_coro
FIXTURE_USER_INPUT = {
CONF_URL: "http://192.168.1.2/",
CONF_NAME: "My Printer",
}
def mock_connection(aioclient_mock):
"""Mock syncthru connection."""
aioclient_mock.get(
re.compile("."),
text="""
{
\tstatus: {
\thrDeviceStatus: 2,
\tstatus1: " Sleeping... "
\t},
\tidentity: {
\tserial_num: "000000000000000",
\t}
}
""",
)
async def test_show_setup_form(hass):
"""Test that the setup form is served."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}, data=None
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
async def test_already_configured_by_url(hass, aioclient_mock):
"""Test we match and update already configured devices by URL."""
await setup.async_setup_component(hass, "persistent_notification", {})
udn = "uuid:XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"
MockConfigEntry(
domain=DOMAIN,
data={**FIXTURE_USER_INPUT, CONF_NAME: "Already configured"},
title="Already configured",
unique_id=udn,
).add_to_hass(hass)
mock_connection(aioclient_mock)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_USER},
data=FIXTURE_USER_INPUT,
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["data"][CONF_URL] == FIXTURE_USER_INPUT[CONF_URL]
assert result["data"][CONF_NAME] == FIXTURE_USER_INPUT[CONF_NAME]
assert result["result"].unique_id == udn
async def test_syncthru_not_supported(hass):
"""Test we show user form on unsupported device."""
with patch.object(SyncThru, "update", side_effect=ValueError):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_USER},
data=FIXTURE_USER_INPUT,
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
assert result["errors"] == {CONF_URL: "syncthru_not_supported"}
async def test_unknown_state(hass):
    """Test we show user form on unknown device state."""
with patch.object(SyncThru, "update", return_value=mock_coro()), patch.object(
SyncThru, "is_unknown_state", return_value=True
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_USER},
data=FIXTURE_USER_INPUT,
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
assert result["errors"] == {CONF_URL: "unknown_state"}
async def test_success(hass, aioclient_mock):
"""Test successful flow provides entry creation data."""
await setup.async_setup_component(hass, "persistent_notification", {})
mock_connection(aioclient_mock)
with patch(
"homeassistant.components.syncthru.async_setup_entry", return_value=True
) as mock_setup_entry:
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_USER},
data=FIXTURE_USER_INPUT,
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["data"][CONF_URL] == FIXTURE_USER_INPUT[CONF_URL]
await hass.async_block_till_done()
assert len(mock_setup_entry.mock_calls) == 1
async def test_ssdp(hass, aioclient_mock):
"""Test SSDP discovery initiates config properly."""
await setup.async_setup_component(hass, "persistent_notification", {})
mock_connection(aioclient_mock)
url = "http://192.168.1.2/"
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_SSDP},
data={
ssdp.ATTR_SSDP_LOCATION: "http://192.168.1.2:5200/Printer.xml",
ssdp.ATTR_UPNP_DEVICE_TYPE: "urn:schemas-upnp-org:device:Printer:1",
ssdp.ATTR_UPNP_MANUFACTURER: "Samsung Electronics",
ssdp.ATTR_UPNP_PRESENTATION_URL: url,
ssdp.ATTR_UPNP_SERIAL: "00000000",
ssdp.ATTR_UPNP_UDN: "uuid:XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "confirm"
assert CONF_URL in result["data_schema"].schema
for k in result["data_schema"].schema:
if k == CONF_URL:
assert k.default() == url
|
from aiohomekit.model.characteristics import CharacteristicsTypes
from homeassistant.components.fan import (
DIRECTION_FORWARD,
DIRECTION_REVERSE,
SPEED_HIGH,
SPEED_LOW,
SPEED_MEDIUM,
SPEED_OFF,
SUPPORT_DIRECTION,
SUPPORT_OSCILLATE,
SUPPORT_SET_SPEED,
FanEntity,
)
from homeassistant.core import callback
from . import KNOWN_DEVICES, HomeKitEntity
# 0 is clockwise, 1 is counter-clockwise. The mapping to forward and reverse is
# chosen so that it's consistent with homeassistant.components.homekit.
DIRECTION_TO_HK = {
DIRECTION_REVERSE: 1,
DIRECTION_FORWARD: 0,
}
HK_DIRECTION_TO_HA = {v: k for (k, v) in DIRECTION_TO_HK.items()}
SPEED_TO_PCNT = {
SPEED_HIGH: 100,
SPEED_MEDIUM: 50,
SPEED_LOW: 25,
SPEED_OFF: 0,
}
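# Worked example for the thresholds above: a reported rotation speed of 60%
# exceeds the 50% medium cutoff, so the `speed` property below reports
# SPEED_HIGH; 30% falls between 25% and 50% and reports SPEED_MEDIUM.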
class BaseHomeKitFan(HomeKitEntity, FanEntity):
"""Representation of a Homekit fan."""
# This must be set in subclasses to the name of a boolean characteristic
# that controls whether the fan is on or off.
on_characteristic = None
def get_characteristic_types(self):
"""Define the homekit characteristics the entity cares about."""
return [
CharacteristicsTypes.SWING_MODE,
CharacteristicsTypes.ROTATION_DIRECTION,
CharacteristicsTypes.ROTATION_SPEED,
self.on_characteristic,
]
@property
def is_on(self):
"""Return true if device is on."""
return self.service.value(self.on_characteristic) == 1
@property
def speed(self):
"""Return the current speed."""
if not self.is_on:
return SPEED_OFF
rotation_speed = self.service.value(CharacteristicsTypes.ROTATION_SPEED)
if rotation_speed > SPEED_TO_PCNT[SPEED_MEDIUM]:
return SPEED_HIGH
if rotation_speed > SPEED_TO_PCNT[SPEED_LOW]:
return SPEED_MEDIUM
if rotation_speed > SPEED_TO_PCNT[SPEED_OFF]:
return SPEED_LOW
return SPEED_OFF
@property
def speed_list(self):
"""Get the list of available speeds."""
if self.supported_features & SUPPORT_SET_SPEED:
return [SPEED_OFF, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH]
return []
@property
def current_direction(self):
"""Return the current direction of the fan."""
direction = self.service.value(CharacteristicsTypes.ROTATION_DIRECTION)
return HK_DIRECTION_TO_HA[direction]
@property
def oscillating(self):
"""Return whether or not the fan is currently oscillating."""
oscillating = self.service.value(CharacteristicsTypes.SWING_MODE)
return oscillating == 1
@property
def supported_features(self):
"""Flag supported features."""
features = 0
if self.service.has(CharacteristicsTypes.ROTATION_DIRECTION):
features |= SUPPORT_DIRECTION
if self.service.has(CharacteristicsTypes.ROTATION_SPEED):
features |= SUPPORT_SET_SPEED
if self.service.has(CharacteristicsTypes.SWING_MODE):
features |= SUPPORT_OSCILLATE
return features
async def async_set_direction(self, direction):
"""Set the direction of the fan."""
await self.async_put_characteristics(
{CharacteristicsTypes.ROTATION_DIRECTION: DIRECTION_TO_HK[direction]}
)
async def async_set_speed(self, speed):
"""Set the speed of the fan."""
if speed == SPEED_OFF:
return await self.async_turn_off()
await self.async_put_characteristics(
{CharacteristicsTypes.ROTATION_SPEED: SPEED_TO_PCNT[speed]}
)
async def async_oscillate(self, oscillating: bool):
"""Oscillate the fan."""
await self.async_put_characteristics(
{CharacteristicsTypes.SWING_MODE: 1 if oscillating else 0}
)
async def async_turn_on(self, speed=None, **kwargs):
"""Turn the specified fan on."""
characteristics = {}
if not self.is_on:
characteristics[self.on_characteristic] = True
if self.supported_features & SUPPORT_SET_SPEED and speed:
characteristics[CharacteristicsTypes.ROTATION_SPEED] = SPEED_TO_PCNT[speed]
if characteristics:
await self.async_put_characteristics(characteristics)
async def async_turn_off(self, **kwargs):
"""Turn the specified fan off."""
await self.async_put_characteristics({self.on_characteristic: False})
class HomeKitFanV1(BaseHomeKitFan):
"""Implement fan support for public.hap.service.fan."""
on_characteristic = CharacteristicsTypes.ON
class HomeKitFanV2(BaseHomeKitFan):
"""Implement fan support for public.hap.service.fanv2."""
on_characteristic = CharacteristicsTypes.ACTIVE
ENTITY_TYPES = {
"fan": HomeKitFanV1,
"fanv2": HomeKitFanV2,
}
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Homekit fans."""
hkid = config_entry.data["AccessoryPairingID"]
conn = hass.data[KNOWN_DEVICES][hkid]
@callback
def async_add_service(aid, service):
entity_class = ENTITY_TYPES.get(service["stype"])
if not entity_class:
return False
info = {"aid": aid, "iid": service["iid"]}
async_add_entities([entity_class(conn, info)], True)
return True
conn.add_listener(async_add_service)
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from diamond.collector import Collector
from chronyd import ChronydCollector
##########################################################################
class TestChronydCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('ChronydCollector', {
})
self.collector = ChronydCollector(config, {})
def test_import(self):
self.assertTrue(ChronydCollector)
@patch.object(Collector, 'publish')
def test_should_work_with_ip_addresses(self, publish_mock):
patch_collector = patch.object(
ChronydCollector,
'get_output',
Mock(return_value=self.getFixture(
'fedora').getvalue()))
patch_collector.start()
self.collector.collect()
patch_collector.stop()
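        # Expected metric names: the collector apparently sanitizes peer
        # addresses by replacing dots with underscores (e.g. 178.251.120.16
        # becomes 178_251_120_16) and publishes offsets in milliseconds.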
metrics = {
'178_251_120_16.offset_ms': -7e-05,
'85_12_29_43.offset_ms': -0.785,
'85_234_197_3.offset_ms': 0.08,
'85_255_214_66.offset_ms': 0.386,
}
self.setDocExample(
collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_should_work_with_fqdns(self, publish_mock):
patch_collector = patch.object(
ChronydCollector,
'get_output',
Mock(return_value=self.getFixture(
'fqdn').getvalue()))
patch_collector.start()
self.collector.collect()
patch_collector.stop()
metrics = {
'adm-dns-resolver-001.offset_ms': 0.000277,
'adm-dns-resolver-002.offset_ms': 0.456,
}
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_check_invalid_unit(self, publish_mock):
patch_collector = patch.object(
ChronydCollector,
'get_output',
Mock(return_value=self.getFixture(
'bad_unit').getvalue()))
patch_collector.start()
self.collector.collect()
patch_collector.stop()
metrics = {
'adm-dns-resolver-002.offset_ms': 0.456,
}
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_huge_values(self, publish_mock):
patch_collector = patch.object(
ChronydCollector,
'get_output',
Mock(return_value=self.getFixture(
'huge_vals').getvalue()))
patch_collector.start()
self.collector.collect()
patch_collector.stop()
metrics = {
'server1.offset_ms': 8735472000000,
'server2.offset_ms': -1009152000000,
}
self.assertPublishedMany(publish_mock, metrics)
##########################################################################
if __name__ == "__main__":
unittest.main()
|
import asyncio
import logging
import os
from urllib.parse import urlparse
from aiohttp import BasicAuth, FormData
from aiohttp.client_exceptions import ClientError
from slack import WebClient
from slack.errors import SlackApiError
import voluptuous as vol
from homeassistant.components.notify import (
ATTR_DATA,
ATTR_TARGET,
ATTR_TITLE,
PLATFORM_SCHEMA,
BaseNotificationService,
)
from homeassistant.const import CONF_API_KEY, CONF_ICON, CONF_USERNAME
from homeassistant.core import callback
from homeassistant.helpers import aiohttp_client, config_validation as cv
import homeassistant.helpers.template as template
_LOGGER = logging.getLogger(__name__)
ATTR_BLOCKS = "blocks"
ATTR_BLOCKS_TEMPLATE = "blocks_template"
ATTR_FILE = "file"
ATTR_ICON = "icon"
ATTR_PASSWORD = "password"
ATTR_PATH = "path"
ATTR_URL = "url"
ATTR_USERNAME = "username"
CONF_DEFAULT_CHANNEL = "default_channel"
DEFAULT_TIMEOUT_SECONDS = 15
FILE_PATH_SCHEMA = vol.Schema({vol.Required(ATTR_PATH): cv.isfile})
FILE_URL_SCHEMA = vol.Schema(
{
vol.Required(ATTR_URL): cv.url,
vol.Inclusive(ATTR_USERNAME, "credentials"): cv.string,
vol.Inclusive(ATTR_PASSWORD, "credentials"): cv.string,
}
)
DATA_FILE_SCHEMA = vol.Schema(
{vol.Required(ATTR_FILE): vol.Any(FILE_PATH_SCHEMA, FILE_URL_SCHEMA)}
)
DATA_TEXT_ONLY_SCHEMA = vol.Schema(
{
vol.Optional(ATTR_USERNAME): cv.string,
vol.Optional(ATTR_ICON): cv.string,
vol.Optional(ATTR_BLOCKS): list,
vol.Optional(ATTR_BLOCKS_TEMPLATE): list,
}
)
DATA_SCHEMA = vol.All(
cv.ensure_list, [vol.Any(DATA_FILE_SCHEMA, DATA_TEXT_ONLY_SCHEMA)]
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_DEFAULT_CHANNEL): cv.string,
vol.Optional(CONF_ICON): cv.string,
vol.Optional(CONF_USERNAME): cv.string,
}
)
async def async_get_service(hass, config, discovery_info=None):
"""Set up the Slack notification service."""
session = aiohttp_client.async_get_clientsession(hass)
client = WebClient(token=config[CONF_API_KEY], run_async=True, session=session)
try:
await client.auth_test()
except SlackApiError as err:
_LOGGER.error("Error while setting up integration: %s", err)
return
return SlackNotificationService(
hass,
client,
config[CONF_DEFAULT_CHANNEL],
username=config.get(CONF_USERNAME),
icon=config.get(CONF_ICON),
)
@callback
def _async_get_filename_from_url(url):
"""Return the filename of a passed URL."""
parsed_url = urlparse(url)
return os.path.basename(parsed_url.path)
@callback
def _async_sanitize_channel_names(channel_list):
"""Remove any # symbols from a channel list."""
return [channel.lstrip("#") for channel in channel_list]
@callback
def _async_templatize_blocks(hass, value):
"""Recursive template creator helper function."""
if isinstance(value, list):
return [_async_templatize_blocks(hass, item) for item in value]
if isinstance(value, dict):
return {
key: _async_templatize_blocks(hass, item) for key, item in value.items()
}
tmpl = template.Template(value, hass=hass)
return tmpl.async_render(parse_result=False)
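# Illustrative sketch (hypothetical input): given a blocks_template such as
#   [{"type": "section", "text": {"type": "mrkdwn", "text": "{{ 1 + 1 }}"}}]
# the helper above walks lists and dicts recursively and renders every string
# leaf as a Home Assistant template, yielding "2" for the text field.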
class SlackNotificationService(BaseNotificationService):
"""Define the Slack notification logic."""
def __init__(self, hass, client, default_channel, username, icon):
"""Initialize."""
self._client = client
self._default_channel = default_channel
self._hass = hass
self._icon = icon
self._username = username
async def _async_send_local_file_message(self, path, targets, message, title):
"""Upload a local file (with message) to Slack."""
if not self._hass.config.is_allowed_path(path):
_LOGGER.error("Path does not exist or is not allowed: %s", path)
return
parsed_url = urlparse(path)
filename = os.path.basename(parsed_url.path)
try:
await self._client.files_upload(
channels=",".join(targets),
file=path,
filename=filename,
initial_comment=message,
title=title or filename,
)
except SlackApiError as err:
_LOGGER.error("Error while uploading file-based message: %s", err)
async def _async_send_remote_file_message(
self, url, targets, message, title, *, username=None, password=None
):
"""Upload a remote file (with message) to Slack.
Note that we bypass the python-slackclient WebClient and use aiohttp directly,
as the former would require us to download the entire remote file into memory
first before uploading it to Slack.
"""
if not self._hass.config.is_allowed_external_url(url):
_LOGGER.error("URL is not allowed: %s", url)
return
filename = _async_get_filename_from_url(url)
session = aiohttp_client.async_get_clientsession(self.hass)
kwargs = {}
if username and password is not None:
kwargs = {"auth": BasicAuth(username, password=password)}
resp = await session.request("get", url, **kwargs)
try:
resp.raise_for_status()
except ClientError as err:
_LOGGER.error("Error while retrieving %s: %s", url, err)
return
data = FormData(
{
"channels": ",".join(targets),
"filename": filename,
"initial_comment": message,
"title": title or filename,
"token": self._client.token,
},
charset="utf-8",
)
data.add_field("file", resp.content, filename=filename)
try:
await session.post("https://slack.com/api/files.upload", data=data)
except ClientError as err:
_LOGGER.error("Error while uploading file message: %s", err)
async def _async_send_text_only_message(
self, targets, message, title, blocks, username, icon
):
"""Send a text-only message."""
message_dict = {
"blocks": blocks,
"link_names": True,
"text": message,
"username": username,
}
icon = icon or self._icon
if icon:
if icon.lower().startswith(("http://", "https://")):
icon_type = "url"
else:
icon_type = "emoji"
message_dict[f"icon_{icon_type}"] = icon
tasks = {
target: self._client.chat_postMessage(**message_dict, channel=target)
for target in targets
}
results = await asyncio.gather(*tasks.values(), return_exceptions=True)
for target, result in zip(tasks, results):
if isinstance(result, SlackApiError):
_LOGGER.error(
"There was a Slack API error while sending to %s: %s",
target,
result,
)
async def async_send_message(self, message, **kwargs):
"""Send a message to Slack."""
data = kwargs.get(ATTR_DATA)
if data is None:
data = {}
try:
DATA_SCHEMA(data)
except vol.Invalid as err:
_LOGGER.error("Invalid message data: %s", err)
data = {}
title = kwargs.get(ATTR_TITLE)
targets = _async_sanitize_channel_names(
kwargs.get(ATTR_TARGET, [self._default_channel])
)
# Message Type 1: A text-only message
if ATTR_FILE not in data:
if ATTR_BLOCKS_TEMPLATE in data:
blocks = _async_templatize_blocks(self.hass, data[ATTR_BLOCKS_TEMPLATE])
elif ATTR_BLOCKS in data:
blocks = data[ATTR_BLOCKS]
else:
                blocks = []  # DATA_TEXT_ONLY_SCHEMA declares blocks as a list
return await self._async_send_text_only_message(
targets,
message,
title,
blocks,
username=data.get(ATTR_USERNAME, self._username),
icon=data.get(ATTR_ICON, self._icon),
)
# Message Type 2: A message that uploads a remote file
if ATTR_URL in data[ATTR_FILE]:
return await self._async_send_remote_file_message(
data[ATTR_FILE][ATTR_URL],
targets,
message,
title,
username=data[ATTR_FILE].get(ATTR_USERNAME),
password=data[ATTR_FILE].get(ATTR_PASSWORD),
)
# Message Type 3: A message that uploads a local file
return await self._async_send_local_file_message(
data[ATTR_FILE][ATTR_PATH], targets, message, title
)
|
revision = "c05a8998b371"
down_revision = "ac483cfeb230"
from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils
def upgrade():
op.create_table(
"api_keys",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("name", sa.String(length=128), nullable=True),
sa.Column("user_id", sa.Integer(), nullable=False),
sa.Column("ttl", sa.BigInteger(), nullable=False),
sa.Column("issued_at", sa.BigInteger(), nullable=False),
sa.Column("revoked", sa.Boolean(), nullable=False),
sa.ForeignKeyConstraint(["user_id"], ["users.id"]),
sa.PrimaryKeyConstraint("id"),
)
def downgrade():
op.drop_table("api_keys")
|
from typing import List, Optional
import voluptuous as vol
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_DEVICE_ID,
CONF_DOMAIN,
CONF_ENTITY_ID,
CONF_TYPE,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
)
from homeassistant.core import Context, HomeAssistant
from homeassistant.helpers import entity_registry
import homeassistant.helpers.config_validation as cv
from . import DOMAIN
# TODO specify your supported action types.
ACTION_TYPES = {"turn_on", "turn_off"}
ACTION_SCHEMA = cv.DEVICE_ACTION_BASE_SCHEMA.extend(
{
vol.Required(CONF_TYPE): vol.In(ACTION_TYPES),
vol.Required(CONF_ENTITY_ID): cv.entity_domain(DOMAIN),
}
)
async def async_get_actions(hass: HomeAssistant, device_id: str) -> List[dict]:
"""List device actions for NEW_NAME devices."""
registry = await entity_registry.async_get_registry(hass)
actions = []
# TODO Read this comment and remove it.
# This example shows how to iterate over the entities of this device
# that match this integration. If your actions instead rely on
# calling services, do something like:
# zha_device = await _async_get_zha_device(hass, device_id)
# return zha_device.device_actions
    # Get all of this integration's entities for this device
for entry in entity_registry.async_entries_for_device(registry, device_id):
if entry.domain != DOMAIN:
continue
# Add actions for each entity that belongs to this integration
# TODO add your own actions.
actions.append(
{
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "turn_on",
}
)
actions.append(
{
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "turn_off",
}
)
return actions
async def async_call_action_from_config(
hass: HomeAssistant, config: dict, variables: dict, context: Optional[Context]
) -> None:
"""Execute a device action."""
config = ACTION_SCHEMA(config)
service_data = {ATTR_ENTITY_ID: config[CONF_ENTITY_ID]}
if config[CONF_TYPE] == "turn_on":
service = SERVICE_TURN_ON
elif config[CONF_TYPE] == "turn_off":
service = SERVICE_TURN_OFF
await hass.services.async_call(
DOMAIN, service, service_data, blocking=True, context=context
)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import datetime
import unittest
from absl import flags
import freezegun
import mock
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.providers.aws import aws_capacity_reservation
from perfkitbenchmarker.providers.aws import util
from tests import pkb_common_test_case
FLAGS = flags.FLAGS
TIMEOUT_UTC = '2019-05-23 03:56:22.703681'
FAKE_DATETIME_NOW = datetime.datetime(2010, 1, 1)
CREATE_STDOUT_SUCCESSFUL = """
{
"CapacityReservation": {
"CapacityReservationId": "cr-0b720f7e7f73b54e8",
"EndDateType": "unlimited",
"AvailabilityZone": "us-west-2a",
"InstanceMatchCriteria": "open",
"EphemeralStorage": false,
"CreateDate": "2019-01-14T23:34:14.000Z",
"AvailableInstanceCount": 3,
"InstancePlatform": "Linux/UNIX",
"TotalInstanceCount": 3,
"State": "active",
"Tenancy": "default",
"EbsOptimized": false,
"InstanceType": "m5.xlarge"
}
}
"""
class FakeAwsVirtualMachine(object):
def __init__(self):
self.zone = 'us-west-1'
self.region = 'us-west-1'
self.machine_type = 'fake_machine_type'
self.OS_TYPE = 'ubuntu1804' # pylint: disable=invalid-name
self.network = mock.MagicMock()
class AwsCapacityReservationTest(pkb_common_test_case.PkbCommonTestCase):
def _create_patch(self, target, return_val=None):
p = mock.patch(target, return_value=return_val)
mock_to_return = p.start()
self.addCleanup(p.stop)
return mock_to_return
def setUp(self):
super(AwsCapacityReservationTest, self).setUp()
FLAGS.timeout_minutes = 30
self._create_patch(
util.__name__ + '.GetZonesInRegion',
return_val=['us-west-1a', 'us-west-1b'])
@freezegun.freeze_time(FAKE_DATETIME_NOW)
def test_create(self):
vm_group = [FakeAwsVirtualMachine()]
capacity_reservation = aws_capacity_reservation.AwsCapacityReservation(
vm_group)
with mock.patch(
vm_util.__name__ + '.IssueCommand',
return_value=(CREATE_STDOUT_SUCCESSFUL, '', 0)) as issue_command:
capacity_reservation._Create()
command_string = ' '.join(issue_command.call_args[0][0])
expected_end_date = FAKE_DATETIME_NOW + datetime.timedelta(
minutes=FLAGS.timeout_minutes)
expected_command = (
'aws --output json ec2 create-capacity-reservation '
'--instance-type=fake_machine_type '
'--instance-platform=Linux/UNIX --availability-zone=us-west-1a '
'--instance-count=1 --instance-match-criteria=targeted '
'--region=us-west-1 --end-date-type=limited --end-date=%s' %
expected_end_date.isoformat())
self.assertEqual(issue_command.call_count, 1)
self.assertIn(expected_command, command_string)
def test_delete(self):
vm_group = [FakeAwsVirtualMachine()]
capacity_reservation = aws_capacity_reservation.AwsCapacityReservation(
vm_group)
capacity_reservation.capacity_reservation_id = 'foo'
with mock.patch(vm_util.__name__ + '.IssueCommand') as issue_command:
capacity_reservation._Delete()
command_string = ' '.join(issue_command.call_args[0][0])
expected_command = ('aws --output json ec2 cancel-capacity-reservation '
'--capacity-reservation-id=foo --region=us-west-1')
self.assertEqual(issue_command.call_count, 1)
self.assertIn(expected_command, command_string)
def test_update_vms_in_group(self):
vm_1 = FakeAwsVirtualMachine()
vm_2 = FakeAwsVirtualMachine()
vm_3 = FakeAwsVirtualMachine()
vm_group = [vm_1, vm_2, vm_3]
capacity_reservation = aws_capacity_reservation.AwsCapacityReservation(
vm_group)
capacity_reservation.capacity_reservation_id = 'foo'
capacity_reservation._UpdateVmsInGroup('foo', 'us-west-1z')
for vm in vm_group:
self.assertEqual('foo', vm.capacity_reservation_id)
self.assertEqual('us-west-1z', vm.zone)
self.assertEqual('us-west-1z', vm.network.zone)
if __name__ == '__main__':
unittest.main()
|
from django.contrib.auth.decorators import login_required
from django.core.exceptions import PermissionDenied
from django.http import Http404, JsonResponse
from django.shortcuts import get_object_or_404, redirect
from django.utils.translation import gettext as _
from django.views.decorators.cache import never_cache
from django.views.decorators.http import require_POST
from weblate.lang.models import Language
from weblate.trans.forms import (
AnnouncementForm,
ComponentDeleteForm,
ComponentMoveForm,
ComponentRenameForm,
ComponentSettingsForm,
ProjectDeleteForm,
ProjectLanguageDeleteForm,
ProjectRenameForm,
ProjectSettingsForm,
TranslationDeleteForm,
)
from weblate.trans.models import Announcement, Change, Component
from weblate.trans.tasks import component_removal, project_removal
from weblate.trans.util import redirect_param, render
from weblate.utils import messages
from weblate.utils.stats import ProjectLanguage
from weblate.utils.views import (
get_component,
get_project,
get_translation,
show_form_errors,
)
@never_cache
@login_required
def change_project(request, project):
obj = get_project(request, project)
if not request.user.has_perm("project.edit", obj):
raise Http404()
if request.method == "POST":
settings_form = ProjectSettingsForm(request, request.POST, instance=obj)
if settings_form.is_valid():
settings_form.save()
messages.success(request, _("Settings saved"))
return redirect("settings", project=obj.slug)
else:
messages.error(
request, _("Invalid settings, please check the form for errors!")
)
else:
settings_form = ProjectSettingsForm(request, instance=obj)
return render(
request,
"project-settings.html",
{"object": obj, "form": settings_form},
)
@never_cache
@login_required
def change_component(request, project, component):
obj = get_component(request, project, component)
if not request.user.has_perm("component.edit", obj):
raise Http404()
if request.method == "POST":
form = ComponentSettingsForm(request, request.POST, instance=obj)
if form.is_valid():
form.save()
messages.success(request, _("Settings saved"))
return redirect("settings", project=obj.project.slug, component=obj.slug)
else:
messages.error(
request, _("Invalid settings, please check the form for errors!")
)
# Get a fresh copy of object, otherwise it will use unsaved changes
# from the failed form
obj = Component.objects.get(pk=obj.pk)
else:
form = ComponentSettingsForm(request, instance=obj)
if obj.repo_needs_merge():
messages.warning(
request,
_(
"The repository is outdated, you might not get "
"expected results until you update it."
),
)
return render(
request,
"component-settings.html",
{"project": obj.project, "object": obj, "form": form},
)
@never_cache
@login_required
@require_POST
def dismiss_alert(request, project, component):
obj = get_component(request, project, component)
if not request.user.has_perm("component.edit", obj):
raise Http404()
alert = obj.alert_set.get(name=request.POST["dismiss"])
if alert.obj.dismissable:
alert.dismissed = True
alert.save(update_fields=["dismissed"])
return redirect_param(obj, "#alerts")
@login_required
@require_POST
def remove_translation(request, project, component, lang):
obj = get_translation(request, project, component, lang)
if not request.user.has_perm("translation.delete", obj):
raise PermissionDenied()
form = TranslationDeleteForm(obj, request.POST)
if not form.is_valid():
show_form_errors(request, form)
return redirect_param(obj, "#delete")
obj.remove(request.user)
messages.success(request, _("Translation has been removed."))
return redirect(obj.component)
@login_required
@require_POST
def remove_component(request, project, component):
obj = get_component(request, project, component)
if not request.user.has_perm("component.edit", obj):
raise PermissionDenied()
form = ComponentDeleteForm(obj, request.POST)
if not form.is_valid():
show_form_errors(request, form)
return redirect_param(obj, "#delete")
component_removal.delay(obj.pk, request.user.pk)
messages.success(request, _("Translation component was scheduled for removal."))
return redirect(obj.project)
@login_required
@require_POST
def remove_project(request, project):
obj = get_project(request, project)
if not request.user.has_perm("project.edit", obj):
raise PermissionDenied()
form = ProjectDeleteForm(obj, request.POST)
if not form.is_valid():
show_form_errors(request, form)
return redirect_param(obj, "#delete")
project_removal.delay(obj.pk, request.user.pk)
messages.success(request, _("Project was scheduled for removal."))
return redirect("home")
@login_required
@require_POST
def remove_project_language(request, project, lang):
project_object = get_project(request, project)
language_object = get_object_or_404(Language, code=lang)
obj = ProjectLanguage(project_object, language_object)
if not request.user.has_perm("translation.delete", obj):
raise PermissionDenied()
form = ProjectLanguageDeleteForm(obj, request.POST)
if not form.is_valid():
show_form_errors(request, form)
return redirect_param(obj, "#delete")
for translation in obj.translation_set:
translation.remove(request.user)
messages.success(request, _("Language of the project was removed."))
return redirect(project_object)
def perform_rename(form_cls, request, obj, perm: str):
if not request.user.has_perm(perm, obj):
raise PermissionDenied()
form = form_cls(request, request.POST, instance=obj)
if not form.is_valid():
show_form_errors(request, form)
# Reload the object from db to revert possible rejected change
obj.refresh_from_db()
return redirect_param(obj, "#rename")
# Invalidate old stats
obj.stats.invalidate()
obj = form.save()
# Invalidate new stats
obj.stats.invalidate()
return redirect(obj)
@login_required
@require_POST
def rename_component(request, project, component):
obj = get_component(request, project, component)
return perform_rename(ComponentRenameForm, request, obj, "component.edit")
@login_required
@require_POST
def move_component(request, project, component):
obj = get_component(request, project, component)
return perform_rename(ComponentMoveForm, request, obj, "project.edit")
@login_required
@require_POST
def rename_project(request, project):
obj = get_project(request, project)
return perform_rename(ProjectRenameForm, request, obj, "project.edit")
@login_required
@require_POST
def announcement_translation(request, project, component, lang):
obj = get_translation(request, project, component, lang)
if not request.user.has_perm("component.edit", obj):
raise PermissionDenied()
form = AnnouncementForm(request.POST)
if not form.is_valid():
show_form_errors(request, form)
return redirect_param(obj, "#announcement")
Announcement.objects.create(
user=request.user,
project=obj.component.project,
component=obj.component,
language=obj.language,
**form.cleaned_data
)
return redirect(obj)
@login_required
@require_POST
def announcement_component(request, project, component):
obj = get_component(request, project, component)
if not request.user.has_perm("component.edit", obj):
raise PermissionDenied()
form = AnnouncementForm(request.POST)
if not form.is_valid():
show_form_errors(request, form)
return redirect_param(obj, "#announcement")
Announcement.objects.create(
user=request.user, project=obj.project, component=obj, **form.cleaned_data
)
return redirect(obj)
@login_required
@require_POST
def announcement_project(request, project):
obj = get_project(request, project)
if not request.user.has_perm("project.edit", obj):
raise PermissionDenied()
form = AnnouncementForm(request.POST)
if not form.is_valid():
show_form_errors(request, form)
return redirect_param(obj, "#announcement")
Announcement.objects.create(user=request.user, project=obj, **form.cleaned_data)
return redirect(obj)
@login_required
@require_POST
def announcement_delete(request, pk):
announcement = get_object_or_404(Announcement, pk=pk)
if request.user.has_perm(
"component.edit", announcement.component
) or request.user.has_perm("project.edit", announcement.project):
announcement.delete()
return JsonResponse({"responseStatus": 200})
@login_required
def component_progress(request, project, component):
obj = get_component(request, project, component)
return_url = "component" if "info" in request.GET else "guide"
if not obj.in_progress():
return redirect(return_url, **obj.get_reverse_url_kwargs())
progress, log = obj.get_progress()
return render(
request,
"component-progress.html",
{
"object": obj,
"progress": progress,
"log": "\n".join(log),
"return_url": return_url,
},
)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.python.ops import control_flow_ops
def apply_with_random_selector(x, func, num_cases):
"""Computes func(x, sel), with sel sampled from [0...num_cases-1].
Args:
x: input Tensor.
func: Python function to apply.
num_cases: Python int32, number of cases to sample sel from.
Returns:
The result of func(x, sel), where func receives the value of the
selector as a python integer, but sel is sampled dynamically.
"""
sel = tf.random_uniform([], maxval=num_cases, dtype=tf.int32)
# Pass the real x only to one of the func calls.
return control_flow_ops.merge([
func(control_flow_ops.switch(x, tf.equal(sel, case))[1], case)
for case in range(num_cases)])[0]
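# Minimal usage sketch (hypothetical tensor `image`): apply one of two flips,
# feeding the real tensor only to the randomly selected branch:
#   maybe_flipped = apply_with_random_selector(
#       image,
#       lambda x, case: tf.image.flip_left_right(x) if case == 1 else x,
#       num_cases=2)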
def distort_color(image, color_ordering=0, fast_mode=True, scope=None):
"""Distort the color of a Tensor image.
Each color distortion is non-commutative and thus ordering of the color ops
matters. Ideally we would randomly permute the ordering of the color ops.
  Rather than adding that level of complication, we select a distinct ordering
of color ops for each preprocessing thread.
Args:
image: 3-D Tensor containing single image in [0, 1].
color_ordering: Python int, a type of distortion (valid values: 0-3).
fast_mode: Avoids slower ops (random_hue and random_contrast)
scope: Optional scope for name_scope.
Returns:
3-D Tensor color-distorted image on range [0, 1]
Raises:
ValueError: if color_ordering not in [0, 3]
"""
with tf.name_scope(scope, 'distort_color', [image]):
if fast_mode:
if color_ordering == 0:
image = tf.image.random_brightness(image, max_delta=32. / 255.)
image = tf.image.random_saturation(image, lower=0.5, upper=1.5)
else:
image = tf.image.random_saturation(image, lower=0.5, upper=1.5)
image = tf.image.random_brightness(image, max_delta=32. / 255.)
else:
if color_ordering == 0:
image = tf.image.random_brightness(image, max_delta=32. / 255.)
image = tf.image.random_saturation(image, lower=0.5, upper=1.5)
image = tf.image.random_hue(image, max_delta=0.2)
image = tf.image.random_contrast(image, lower=0.5, upper=1.5)
elif color_ordering == 1:
image = tf.image.random_saturation(image, lower=0.5, upper=1.5)
image = tf.image.random_brightness(image, max_delta=32. / 255.)
image = tf.image.random_contrast(image, lower=0.5, upper=1.5)
image = tf.image.random_hue(image, max_delta=0.2)
elif color_ordering == 2:
image = tf.image.random_contrast(image, lower=0.5, upper=1.5)
image = tf.image.random_hue(image, max_delta=0.2)
image = tf.image.random_brightness(image, max_delta=32. / 255.)
image = tf.image.random_saturation(image, lower=0.5, upper=1.5)
elif color_ordering == 3:
image = tf.image.random_hue(image, max_delta=0.2)
image = tf.image.random_saturation(image, lower=0.5, upper=1.5)
image = tf.image.random_contrast(image, lower=0.5, upper=1.5)
image = tf.image.random_brightness(image, max_delta=32. / 255.)
else:
raise ValueError('color_ordering must be in [0, 3]')
# The random_* ops do not necessarily clamp.
return tf.clip_by_value(image, 0.0, 1.0)
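# Example call (assuming `image` is a float32 tensor in [0, 1]): use the
# slower, fuller pipeline with the second ordering of color ops:
#   image = distort_color(image, color_ordering=1, fast_mode=False)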
def distorted_bounding_box_crop(image,
bbox,
min_object_covered=0.1,
aspect_ratio_range=(0.75, 1.33),
area_range=(0.05, 1.0),
max_attempts=100,
scope=None):
"""Generates cropped_image using a one of the bboxes randomly distorted.
See `tf.image.sample_distorted_bounding_box` for more documentation.
Args:
image: 3-D Tensor of image (it will be converted to floats in [0, 1]).
bbox: 3-D float Tensor of bounding boxes arranged [1, num_boxes, coords]
where each coordinate is [0, 1) and the coordinates are arranged
as [ymin, xmin, ymax, xmax]. If num_boxes is 0 then it would use the whole
image.
min_object_covered: An optional `float`. Defaults to `0.1`. The cropped
area of the image must contain at least this fraction of any bounding box
supplied.
aspect_ratio_range: An optional list of `floats`. The cropped area of the
image must have an aspect ratio = width / height within this range.
area_range: An optional list of `floats`. The cropped area of the image
      must contain a fraction of the supplied image within this range.
max_attempts: An optional `int`. Number of attempts at generating a cropped
region of the image of the specified constraints. After `max_attempts`
failures, return the entire image.
scope: Optional scope for name_scope.
Returns:
A tuple, a 3-D Tensor cropped_image and the distorted bbox
"""
with tf.name_scope(scope, 'distorted_bounding_box_crop', [image, bbox]):
# Each bounding box has shape [1, num_boxes, box coords] and
# the coordinates are ordered [ymin, xmin, ymax, xmax].
# A large fraction of image datasets contain a human-annotated bounding
# box delineating the region of the image containing the object of interest.
# We choose to create a new bounding box for the object which is a randomly
# distorted version of the human-annotated bounding box that obeys an
# allowed range of aspect ratios, sizes and overlap with the human-annotated
# bounding box. If no box is supplied, then we assume the bounding box is
# the entire image.
sample_distorted_bounding_box = tf.image.sample_distorted_bounding_box(
tf.shape(image),
bounding_boxes=bbox,
min_object_covered=min_object_covered,
aspect_ratio_range=aspect_ratio_range,
area_range=area_range,
max_attempts=max_attempts,
use_image_if_no_bounding_boxes=True)
bbox_begin, bbox_size, distort_bbox = sample_distorted_bounding_box
# Crop the image to the specified bounding box.
cropped_image = tf.slice(image, bbox_begin, bbox_size)
return cropped_image, distort_bbox
def preprocess_for_train(image, height, width, bbox,
fast_mode=True,
scope=None):
"""Distort one image for training a network.
Distorting images provides a useful technique for augmenting the data
set during training in order to make the network invariant to aspects
  of the image that do not affect the label.
  Additionally it creates image_summaries to display the different
  transformations applied to the image.
Args:
image: 3-D Tensor of image. If dtype is tf.float32 then the range should be
      [0, 1], otherwise it will be converted to tf.float32 assuming that the range
is [0, MAX], where MAX is largest positive representable number for
int(8/16/32) data type (see `tf.image.convert_image_dtype` for details).
height: integer
width: integer
bbox: 3-D float Tensor of bounding boxes arranged [1, num_boxes, coords]
where each coordinate is [0, 1) and the coordinates are arranged
as [ymin, xmin, ymax, xmax].
fast_mode: Optional boolean, if True avoids slower transformations (i.e.
bi-cubic resizing, random_hue or random_contrast).
scope: Optional scope for name_scope.
Returns:
3-D float Tensor of distorted image used for training with range [-1, 1].
"""
with tf.name_scope(scope, 'distort_image', [image, height, width, bbox]):
if bbox is None:
bbox = tf.constant([0.0, 0.0, 1.0, 1.0],
dtype=tf.float32,
shape=[1, 1, 4])
if image.dtype != tf.float32:
image = tf.image.convert_image_dtype(image, dtype=tf.float32)
# Each bounding box has shape [1, num_boxes, box coords] and
# the coordinates are ordered [ymin, xmin, ymax, xmax].
image_with_box = tf.image.draw_bounding_boxes(tf.expand_dims(image, 0),
bbox)
tf.summary.image('image_with_bounding_boxes', image_with_box)
distorted_image, distorted_bbox = distorted_bounding_box_crop(image, bbox)
# Restore the shape since the dynamic slice based upon the bbox_size loses
# the third dimension.
distorted_image.set_shape([None, None, 3])
image_with_distorted_box = tf.image.draw_bounding_boxes(
tf.expand_dims(image, 0), distorted_bbox)
tf.summary.image('images_with_distorted_bounding_box',
image_with_distorted_box)
# This resizing operation may distort the images because the aspect
# ratio is not respected. We select a resize method in a round robin
# fashion based on the thread number.
# Note that ResizeMethod contains 4 enumerated resizing methods.
# We select only 1 case for fast_mode bilinear.
num_resize_cases = 1 if fast_mode else 4
distorted_image = apply_with_random_selector(
distorted_image,
lambda x, method: tf.image.resize_images(x, [height, width], method=method),
num_cases=num_resize_cases)
tf.summary.image('cropped_resized_image',
tf.expand_dims(distorted_image, 0))
# Randomly flip the image horizontally.
distorted_image = tf.image.random_flip_left_right(distorted_image)
# Randomly distort the colors. There are 4 ways to do it.
distorted_image = apply_with_random_selector(
distorted_image,
lambda x, ordering: distort_color(x, ordering, fast_mode),
num_cases=4)
tf.summary.image('final_distorted_image',
tf.expand_dims(distorted_image, 0))
distorted_image = tf.subtract(distorted_image, 0.5)
distorted_image = tf.multiply(distorted_image, 2.0)
return distorted_image
def preprocess_for_eval(image, height, width,
central_fraction=0.875, scope=None):
"""Prepare one image for evaluation.
  If height and width are specified it will output an image of that size by
  applying resize_bilinear.
  If central_fraction is specified it will crop the central fraction of the
  input image.
Args:
image: 3-D Tensor of image. If dtype is tf.float32 then the range should be
      [0, 1], otherwise it will be converted to tf.float32 assuming that the range
is [0, MAX], where MAX is largest positive representable number for
int(8/16/32) data type (see `tf.image.convert_image_dtype` for details)
height: integer
width: integer
central_fraction: Optional Float, fraction of the image to crop.
scope: Optional scope for name_scope.
Returns:
3-D float Tensor of prepared image.
"""
with tf.name_scope(scope, 'eval_image', [image, height, width]):
if image.dtype != tf.float32:
image = tf.image.convert_image_dtype(image, dtype=tf.float32)
# Crop the central region of the image with an area containing 87.5% of
# the original image.
if central_fraction:
image = tf.image.central_crop(image, central_fraction=central_fraction)
if height and width:
# Resize the image to the specified height and width.
image = tf.expand_dims(image, 0)
image = tf.image.resize_bilinear(image, [height, width],
align_corners=False)
image = tf.squeeze(image, [0])
image = tf.subtract(image, 0.5)
image = tf.multiply(image, 2.0)
return image
def preprocess_image(image, height, width,
is_training=False,
bbox=None,
fast_mode=True):
"""Pre-process one image for training or evaluation.
Args:
image: 3-D Tensor [height, width, channels] with the image.
height: integer, image expected height.
width: integer, image expected width.
    is_training: Boolean. If true it will transform an image for training,
      otherwise it will transform it for evaluation.
bbox: 3-D float Tensor of bounding boxes arranged [1, num_boxes, coords]
where each coordinate is [0, 1) and the coordinates are arranged as
[ymin, xmin, ymax, xmax].
fast_mode: Optional boolean, if True avoids slower transformations.
Returns:
3-D float Tensor containing an appropriately scaled image
Raises:
ValueError: if user does not provide bounding box
"""
if is_training:
return preprocess_for_train(image, height, width, bbox, fast_mode)
else:
return preprocess_for_eval(image, height, width)
|
import pytest
import hyperopt.pyll.base
from matchzoo.engine import hyper_spaces
@pytest.fixture(scope='module', params=[
lambda x: x + 2,
lambda x: x - 2,
lambda x: x * 2,
lambda x: x / 2,
lambda x: x // 2,
lambda x: x ** 2,
lambda x: 2 + x,
lambda x: 2 - x,
lambda x: 2 * x,
lambda x: 2 / x,
lambda x: 2 // x,
lambda x: 2 ** x,
lambda x: -x
])
def op(request):
return request.param
@pytest.fixture(scope='module', params=[
hyper_spaces.choice(options=[0, 1]),
hyper_spaces.uniform(low=0, high=10),
hyper_spaces.quniform(low=0, high=10, q=2)
])
def proxy(request):
return request.param
def test_init(proxy):
assert isinstance(proxy.convert('label'), hyperopt.pyll.base.Apply)
def test_op(proxy, op):
assert isinstance(op(proxy).convert('label'), hyperopt.pyll.base.Apply)
def test_str(proxy):
assert isinstance(str(proxy), str)
|
import h5py
from tensornetwork.component_factory import get_component
from tensornetwork.network_components import Edge, AbstractNode, Node
from tensornetwork.network_operations import reachable, get_all_edges
from typing import List, Union, BinaryIO
import numpy as np
string_type = h5py.special_dtype(vlen=str)
def save_nodes(nodes: List[AbstractNode], path: Union[str, BinaryIO]) -> None:
"""Save an iterable of nodes into hdf5 format.
Args:
nodes: An iterable of connected nodes. All nodes have to connect within
`nodes`.
path: path to file where network is saved.
"""
if reachable(nodes) > set(nodes):
raise ValueError(
"Some nodes in `nodes` are connected to nodes not contained in `nodes`."
" Saving not possible.")
if len(set(nodes)) < len(list(nodes)):
raise ValueError(
'Some nodes in `nodes` appear more than once. This is not supported')
  # we need to iterate twice and order matters
edges = list(get_all_edges(nodes))
nodes = list(nodes)
old_edge_names = {n: edge.name for n, edge in enumerate(edges)}
old_node_names = {n: node.name for n, node in enumerate(nodes)}
  # generate unique names for nodes and edges before saving them
for n, node in enumerate(nodes):
node.set_name('node{}'.format(n))
for e, edge in enumerate(edges):
edge.set_name('edge{}'.format(e))
with h5py.File(path, 'w') as net_file:
nodes_group = net_file.create_group('nodes')
node_names_group = net_file.create_group('node_names')
node_names_group.create_dataset(
'names',
dtype=string_type,
data=np.array(list(old_node_names.values()), dtype=object))
edges_group = net_file.create_group('edges')
edge_names_group = net_file.create_group('edge_names')
edge_names_group.create_dataset(
'names',
dtype=string_type,
data=np.array(list(old_edge_names.values()), dtype=object))
    # Track edges still awaiting save in a separate set so that `edges` itself
    # stays intact; it is reused after the file is written to restore names.
    remaining_edges = set(edges)
    for n, node in enumerate(nodes):
      node_group = nodes_group.create_group(node.name)
      node._save_node(node_group)
      for edge in node.edges:
        if edge.node1 == node and edge in remaining_edges:
          edge_group = edges_group.create_group(edge.name)
          edge._save_edge(edge_group)
          remaining_edges.remove(edge)
  # rename edges and nodes back to their original names
for n, node in enumerate(nodes):
nodes[n].set_name(old_node_names[n])
for n, edge in enumerate(edges):
edges[n].set_name(old_edge_names[n])
def load_nodes(path: str) -> List[AbstractNode]:
"""Load a set of nodes from disk.
Args:
path: path to file where network is saved.
Returns:
An iterable of `Node` objects
"""
nodes_list = []
edges_list = []
with h5py.File(path, 'r') as net_file:
nodes = list(net_file["nodes"].keys())
node_names = {
'node{}'.format(n): v
for n, v in enumerate(net_file["node_names"]['names'][()])
}
edge_names = {
'edge{}'.format(n): v
for n, v in enumerate(net_file["edge_names"]['names'][()])
}
edges = list(net_file["edges"].keys())
for node_name in nodes:
node_data = net_file["nodes/" + node_name]
node_type = get_component(node_data['type'][()])
nodes_list.append(node_type._load_node(node_data=node_data))
nodes_dict = {node.name: node for node in nodes_list}
for edge in edges:
edge_data = net_file["edges/" + edge]
edges_list.append(Edge._load_edge(edge_data, nodes_dict))
for edge in edges_list:
edge.set_name(edge_names[edge.name])
for node in nodes_list:
node.set_name(node_names[node.name])
return nodes_list
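# Round-trip sketch (hypothetical two-node network, names are illustrative):
#   a = Node(np.ones((2, 2)), name='a')
#   b = Node(np.ones((2, 2)), name='b')
#   _ = a[1] ^ b[0]
#   save_nodes([a, b], 'net.h5')
#   a2, b2 = load_nodes('net.h5')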
def from_topology(topology, tensors, backend=None):
"""Create and connect new `tn.Node`s by the given einsum-like topology.
Example:
```
a, b, c = tn.from_topology("xy,yz,zx", [a, b, c])
```
Args:
topology: A string that defines the topology. Should be like
the left side of an einsum expression.
    tensors: The tensors needed to create the nodes.
    backend: Optional backend for the created nodes (passed through to `Node`).
Returns:
A list of Nodes.
"""
edge_dict = {}
nodes = []
split_list = topology.split(",")
if len(split_list) != len(tensors):
raise ValueError("topology and number of tensors is mismatched")
for local_axes, tensor in zip(split_list, tensors):
local_axes_list = list(local_axes)
if len(local_axes_list) != len(tensor.shape):
raise ValueError(f"{local_axes} does not match shape {tensor.shape}")
new_node = Node(tensor, axis_names=local_axes_list, backend=backend)
for c in local_axes:
if c in edge_dict:
edge_dict[c] = edge_dict[c] ^ new_node[c]
else:
edge_dict[c] = new_node[c]
nodes.append(new_node)
return nodes
|
from homeassistant.components import mysensors
from homeassistant.components.cover import ATTR_POSITION, DOMAIN, CoverEntity
from homeassistant.const import STATE_OFF, STATE_ON
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the mysensors platform for covers."""
mysensors.setup_mysensors_platform(
hass,
DOMAIN,
discovery_info,
MySensorsCover,
async_add_entities=async_add_entities,
)
class MySensorsCover(mysensors.device.MySensorsEntity, CoverEntity):
"""Representation of the value of a MySensors Cover child node."""
@property
def assumed_state(self):
"""Return True if unable to access real state of entity."""
return self.gateway.optimistic
@property
def is_closed(self):
"""Return True if cover is closed."""
set_req = self.gateway.const.SetReq
if set_req.V_DIMMER in self._values:
return self._values.get(set_req.V_DIMMER) == 0
return self._values.get(set_req.V_LIGHT) == STATE_OFF
@property
def current_cover_position(self):
"""Return current position of cover.
None is unknown, 0 is closed, 100 is fully open.
"""
set_req = self.gateway.const.SetReq
return self._values.get(set_req.V_DIMMER)
async def async_open_cover(self, **kwargs):
"""Move the cover up."""
set_req = self.gateway.const.SetReq
self.gateway.set_child_value(
self.node_id, self.child_id, set_req.V_UP, 1, ack=1
)
if self.gateway.optimistic:
# Optimistically assume that cover has changed state.
if set_req.V_DIMMER in self._values:
self._values[set_req.V_DIMMER] = 100
else:
self._values[set_req.V_LIGHT] = STATE_ON
self.async_write_ha_state()
async def async_close_cover(self, **kwargs):
"""Move the cover down."""
set_req = self.gateway.const.SetReq
self.gateway.set_child_value(
self.node_id, self.child_id, set_req.V_DOWN, 1, ack=1
)
if self.gateway.optimistic:
# Optimistically assume that cover has changed state.
if set_req.V_DIMMER in self._values:
self._values[set_req.V_DIMMER] = 0
else:
self._values[set_req.V_LIGHT] = STATE_OFF
self.async_write_ha_state()
async def async_set_cover_position(self, **kwargs):
"""Move the cover to a specific position."""
position = kwargs.get(ATTR_POSITION)
set_req = self.gateway.const.SetReq
self.gateway.set_child_value(
self.node_id, self.child_id, set_req.V_DIMMER, position, ack=1
)
if self.gateway.optimistic:
# Optimistically assume that cover has changed state.
self._values[set_req.V_DIMMER] = position
self.async_write_ha_state()
async def async_stop_cover(self, **kwargs):
"""Stop the device."""
set_req = self.gateway.const.SetReq
self.gateway.set_child_value(
self.node_id, self.child_id, set_req.V_STOP, 1, ack=1
)
|
import os
import re
import cherrypy
from cherrypy._cpcompat import ntob
from cherrypy.process import servers
from cherrypy.test import helper
curdir = os.path.join(os.getcwd(), os.path.dirname(__file__))
def read_process(cmd, args=''):
pipein, pipeout = os.popen4('%s %s' % (cmd, args))
try:
firstline = pipeout.readline()
if (re.search(r'(not recognized|No such file|not found)', firstline,
re.IGNORECASE)):
raise IOError('%s must be on your system path.' % cmd)
output = firstline + pipeout.read()
finally:
pipeout.close()
return output
APACHE_PATH = 'httpd'
CONF_PATH = 'fcgi.conf'
conf_fcgid = """
# Apache2 server conf file for testing CherryPy with mod_fastcgi.
DocumentRoot "%(root)s"
ServerName 127.0.0.1
Listen %(port)s
LoadModule fastcgi_module modules/mod_fastcgi.dll
LoadModule rewrite_module modules/mod_rewrite.so
Options ExecCGI
SetHandler fastcgi-script
RewriteEngine On
RewriteRule ^(.*)$ /fastcgi.pyc [L]
FastCgiExternalServer "%(server)s" -host 127.0.0.1:4000
"""
class ModFCGISupervisor(helper.LocalSupervisor):
using_apache = True
using_wsgi = True
template = conf_fcgid
def __str__(self):
return 'FCGI Server on %s:%s' % (self.host, self.port)
def start(self, modulename):
cherrypy.server.httpserver = servers.FlupFCGIServer(
application=cherrypy.tree, bindAddress=('127.0.0.1', 4000))
cherrypy.server.httpserver.bind_addr = ('127.0.0.1', 4000)
# For FCGI, we both start apache...
self.start_apache()
# ...and our local server
helper.LocalServer.start(self, modulename)
def start_apache(self):
fcgiconf = CONF_PATH
if not os.path.isabs(fcgiconf):
fcgiconf = os.path.join(curdir, fcgiconf)
# Write the Apache conf file.
f = open(fcgiconf, 'wb')
try:
server = repr(os.path.join(curdir, 'fastcgi.pyc'))[1:-1]
output = self.template % {'port': self.port, 'root': curdir,
'server': server}
output = ntob(output.replace('\r\n', '\n'))
f.write(output)
finally:
f.close()
result = read_process(APACHE_PATH, '-k start -f %s' % fcgiconf)
if result:
print(result)
def stop(self):
"""Gracefully shutdown a server that is serving forever."""
read_process(APACHE_PATH, '-k stop')
helper.LocalServer.stop(self)
def sync_apps(self):
cherrypy.server.httpserver.fcgiserver.application = self.get_app()
|
from bs4 import BeautifulSoup as bs
from urllib.parse import urlparse
from httpobs.scanner.analyzer.decorators import scored_test
from httpobs.scanner.analyzer.utils import is_hsts_preloaded
def __parse_acao_xml_get_domains(xml, type='crossdomain') -> list:
if xml is None:
return []
# Attempt to parse the XML file
try:
soup = bs(xml, 'html.parser')
    except Exception:
        raise KeyError
# Parse the files
if type == 'crossdomain':
return [domains.get('domain').strip()
for domains in soup.find_all('allow-access-from') if domains.get('domain')]
elif type == 'clientaccesspolicy':
return [domains.get('uri').strip() for domains in soup.find_all('domain') if domains.get('uri')]
@scored_test
def cross_origin_resource_sharing(reqs: dict, expectation='cross-origin-resource-sharing-not-implemented') -> dict:
"""
:param reqs: dictionary containing all the request and response objects
:param expectation: test expectation
cross-origin-resource-sharing-implemented-with-public-access: Allow origin *
cross-origin-resource-sharing-implemented-with-restricted-access: Allow a specific origin
cross-origin-resource-sharing-implemented-with-universal-access: Reflect Origin, or have open .XML files
cross-origin-resource-sharing-implemented: One of them does
xml-not-parsable: Cannot parse one of the .xml files
:return: dictionary with:
data: the ACAO header, clientaccesspolicy.xml file, and crossorigin.xml file
expectation: test expectation
pass: whether the site's configuration met its expectation
result: short string describing the result of the test
"""
output = {
'data': {
'acao': None,
'clientaccesspolicy': None,
'crossdomain': None
},
'expectation': expectation,
'pass': False,
'result': 'cross-origin-resource-sharing-not-implemented',
}
# TODO: Fix ACAO being null?
acao = reqs['responses']['cors']
if acao is not None:
if 'Access-Control-Allow-Origin' in acao.headers:
output['data']['acao'] = acao.headers['Access-Control-Allow-Origin'].strip()[0:256]
if output['data']['acao'] == '*':
output['result'] = 'cross-origin-resource-sharing-implemented-with-public-access'
elif (acao.request.headers.get('Origin') == acao.headers['Access-Control-Allow-Origin'] and
acao.headers.get('Access-Control-Allow-Credentials', '').lower().strip() == 'true'):
output['result'] = 'cross-origin-resource-sharing-implemented-with-universal-access'
else:
output['result'] = 'cross-origin-resource-sharing-implemented-with-restricted-access'
if reqs['resources']['/crossdomain.xml'] or reqs['resources']['/clientaccesspolicy.xml']:
# Get the domains from each
try:
cd = __parse_acao_xml_get_domains(reqs['resources']['/crossdomain.xml'], 'crossdomain')
cl = __parse_acao_xml_get_domains(reqs['resources']['/clientaccesspolicy.xml'], 'clientaccesspolicy')
domains = cd + cl
# Code defensively against infinitely sized xml files when storing their contents
if len(str(domains)) < 32768:
output['data']['clientaccesspolicy'] = cl if cl else None
output['data']['crossdomain'] = cd if cd else None
except KeyError:
domains = []
output['result'] = 'xml-not-parsable' # If we can't parse either of those xml files
if '*' in domains:
output['result'] = 'cross-origin-resource-sharing-implemented-with-universal-access'
# No downgrades from the ACAO result
elif domains and output['result'] != 'cross-origin-resource-sharing-implemented-with-universal-access':
output['result'] = 'cross-origin-resource-sharing-implemented-with-restricted-access'
# Check to see if the test passed or failed
if output['result'] in ('cross-origin-resource-sharing-implemented-with-public-access',
'cross-origin-resource-sharing-implemented-with-restricted-access',
expectation):
output['pass'] = True
return output
@scored_test
def redirection(reqs: dict, expectation='redirection-to-https') -> dict:
"""
:param reqs: dictionary containing all the request and response objects
:param expectation: test expectation
redirection-to-https: Redirects from http to https,
first redirection stays on host [default]
redirection-not-to-https: Redirection takes place, but to another HTTP address
redirection-not-to-https-on-initial-redirection: final destination HTTPS, but not initial redirection
redirection-missing: No redirection takes place, staying on HTTP
redirection-not-needed-no-http: Site doesn't listen for HTTP requests at all
redirection-off-host-from-http: Initial HTTP allowed to go from one host to another, still redirects to HTTPS
redirection-invalid-cert: Invalid certificate chain encountered
:return: dictionary with:
destination: final location of where GET / over HTTP ends
expectation: test expectation
pass: whether the site's configuration met its expectation
path: the URLs that the requests followed to get to destination
redirects: whether the site does any redirections at all
result: short string describing the result of the test
status-code: HTTP status code for the final redirection (typically 301 or 302)
"""
response = reqs['responses']['http']
output = {
'destination': response.url[0:2048] if response else None, # code defensively against long URLs
'expectation': expectation,
'pass': False,
'redirects': True,
'result': None,
'route': [],
'status_code': response.status_code if response else None,
}
if response is None:
output['result'] = 'redirection-not-needed-no-http'
# If we encountered an invalid certificate during the redirection process, that's a no-go
elif not response.verified:
output['result'] = 'redirection-invalid-cert'
else:
# Construct the route
output['route'] = [r.request.url for r in response.history] if response.history else []
output['route'] += [response.url]
# Internally, we just use the port-trimmed urlparsed versions
route = [urlparse(url) for url in output['route']]
# Check to see if every redirection was covered by the preload list
if all([is_hsts_preloaded(url.hostname) for url in route]):
output['result'] = 'redirection-all-redirects-preloaded'
# No redirection, so you just stayed on the http website
elif len(output['route']) == 1:
output['redirects'] = False
output['result'] = 'redirection-missing'
# Final destination wasn't an https website
elif route[-1].scheme != 'https':
output['result'] = 'redirection-not-to-https'
# http should never redirect to another http location -- should always go to https first
elif route[1].scheme == 'http':
output['result'] = 'redirection-not-to-https-on-initial-redirection'
# If it's an http -> https redirection, make sure it redirects to the same host. If that's not done, then
# HSTS cannot be properly set on the original host
# TODO: Check for redirections like: http://www.example.com -> https://example.com -> https://www.example.com
elif (route[0].scheme == 'http' and route[1].scheme == 'https' and
route[0].hostname != route[1].hostname):
output['result'] = 'redirection-off-host-from-http'
output['status_code'] = response.history[-1].status_code
else:
output['result'] = 'redirection-to-https'
# Code defensively against infinite routing loops and other shenanigans
output['route'] = output['route'] if len(str(output['route'])) < 8192 else []
output['status_code'] = output['status_code'] if len(str(output['status_code'])) < 5 else None
# Check to see if the test passed or failed
if output['result'] in ('redirection-not-needed-no-http',
'redirection-all-redirects-preloaded',
expectation):
output['pass'] = True
return output
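# Illustrative only: how the urlparse-based checks above classify a
# hypothetical http -> https chain that stays on-host for the first hop.
_example_route = [urlparse(url) for url in ('http://example.com/',
                                            'https://example.com/',
                                            'https://www.example.com/')]
assert _example_route[-1].scheme == 'https'                      # final destination is HTTPS
assert _example_route[0].hostname == _example_route[1].hostname  # first redirect stays on host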
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from diamond.collector import Collector
from soft import SoftInterruptCollector
##########################################################################
class TestSoftInterruptCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('SoftInterruptCollector', {
'interval': 1
})
self.collector = SoftInterruptCollector(config, None)
def test_import(self):
self.assertTrue(SoftInterruptCollector)
@patch('__builtin__.open')
@patch('os.access', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_should_open_proc_stat(self, publish_mock, open_mock):
open_mock.return_value = StringIO('')
self.collector.collect()
open_mock.assert_called_once_with('/proc/stat', 'r')
@patch.object(Collector, 'publish')
def test_should_work_with_synthetic_data(self, publish_mock):
patch_open = patch('__builtin__.open', Mock(return_value=StringIO(
'softirq 0 0 0 0 0 0 0 0 0 0 0'
)))
patch_open.start()
self.collector.collect()
patch_open.stop()
self.assertPublishedMany(publish_mock, {})
patch_open = patch('__builtin__.open', Mock(return_value=StringIO(
'softirq 55 1 2 3 4 5 6 7 8 9 10'
)))
patch_open.start()
self.collector.collect()
patch_open.stop()
self.assertPublishedMany(publish_mock, {
'total': 55.0,
'0': 1,
'1': 2,
'2': 3,
'3': 4,
'4': 5,
'5': 6,
'6': 7,
'7': 8,
'8': 9,
'9': 10,
})
@patch.object(Collector, 'publish')
def test_should_work_with_real_data(self, publish_mock):
SoftInterruptCollector.PROC = self.getFixturePath('proc_stat_1')
self.collector.collect()
self.assertPublishedMany(publish_mock, {})
SoftInterruptCollector.PROC = self.getFixturePath('proc_stat_2')
self.collector.collect()
metrics = {
'total': 4971,
'0': 0,
'1': 1729,
'2': 2,
'3': 240,
'4': 31,
'5': 0,
'6': 0,
'7': 1480,
'8': 0,
'9': 1489,
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
##########################################################################
if __name__ == "__main__":
unittest.main()
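# Illustrative only: the synthetic line fed to the collector above follows the
# /proc/stat 'softirq' format -- field 1 is the running total, the remaining
# fields are the per-softirq counters the test asserts on.
_parts = 'softirq 55 1 2 3 4 5 6 7 8 9 10'.split()
assert float(_parts[1]) == 55.0
assert [int(x) for x in _parts[2:]] == list(range(1, 11))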
|
import logging
from typing import Any, Callable, Dict, Optional
import attr
from homeassistant.components import mqtt
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.loader import bind_hass
from . import debug_info
from .const import DEFAULT_QOS
from .models import MessageCallbackType
_LOGGER = logging.getLogger(__name__)
@attr.s(slots=True)
class EntitySubscription:
"""Class to hold data about an active entity topic subscription."""
hass: HomeAssistantType = attr.ib()
topic: str = attr.ib()
message_callback: MessageCallbackType = attr.ib()
unsubscribe_callback: Optional[Callable[[], None]] = attr.ib()
qos: int = attr.ib(default=0)
encoding: str = attr.ib(default="utf-8")
async def resubscribe_if_necessary(self, hass, other):
"""Re-subscribe to the new topic if necessary."""
if not self._should_resubscribe(other):
return
if other is not None and other.unsubscribe_callback is not None:
other.unsubscribe_callback()
# Clear debug data if it exists
debug_info.remove_subscription(
self.hass, other.message_callback, other.topic
)
if self.topic is None:
# We were asked to remove the subscription or not to create it
return
# Prepare debug data
debug_info.add_subscription(self.hass, self.message_callback, self.topic)
self.unsubscribe_callback = await mqtt.async_subscribe(
hass, self.topic, self.message_callback, self.qos, self.encoding
)
def _should_resubscribe(self, other):
"""Check if we should re-subscribe to the topic using the old state."""
if other is None:
return True
return (self.topic, self.qos, self.encoding) != (
other.topic,
other.qos,
other.encoding,
)
@bind_hass
async def async_subscribe_topics(
hass: HomeAssistantType,
new_state: Optional[Dict[str, EntitySubscription]],
topics: Dict[str, Any],
):
"""(Re)Subscribe to a set of MQTT topics.
State is kept in sub_state and a dictionary mapping from the subscription
key to the subscription state.
Please note that the sub state must not be shared between multiple
sets of topics. Every call to async_subscribe_topics must always
contain _all_ the topics the subscription state should manage.
"""
current_subscriptions = new_state if new_state is not None else {}
new_state = {}
for key, value in topics.items():
# Extract the new requested subscription
requested = EntitySubscription(
topic=value.get("topic", None),
message_callback=value.get("msg_callback", None),
unsubscribe_callback=None,
qos=value.get("qos", DEFAULT_QOS),
encoding=value.get("encoding", "utf-8"),
hass=hass,
)
# Get the current subscription state
current = current_subscriptions.pop(key, None)
await requested.resubscribe_if_necessary(hass, current)
new_state[key] = requested
# Go through all remaining subscriptions and unsubscribe them
for remaining in current_subscriptions.values():
if remaining.unsubscribe_callback is not None:
remaining.unsubscribe_callback()
# Clear debug data if it exists
debug_info.remove_subscription(
hass, remaining.message_callback, remaining.topic
)
return new_state
@bind_hass
async def async_unsubscribe_topics(hass: HomeAssistantType, sub_state: dict):
"""Unsubscribe from all MQTT topics managed by async_subscribe_topics."""
return await async_subscribe_topics(hass, sub_state, {})
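# Hedged usage sketch: how a platform entity would typically thread its
# subscription state through async_subscribe_topics. The topic name and the
# callback are hypothetical, not part of this module.
async def _example_resubscribe(hass: HomeAssistantType, sub_state, msg_callback):
    """(Re)subscribe to a single hypothetical state topic, returning the new state."""
    return await async_subscribe_topics(
        hass,
        sub_state,
        {
            "state_topic": {
                "topic": "hypothetical/state",
                "msg_callback": msg_callback,
                "qos": DEFAULT_QOS,
            }
        },
    )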
|
class ArcticException(Exception):
pass
class NoDataFoundException(ArcticException):
pass
class UnhandledDtypeException(ArcticException):
pass
class LibraryNotFoundException(ArcticException):
pass
class DuplicateSnapshotException(ArcticException):
pass
class StoreNotInitializedException(ArcticException):
pass
class OptimisticLockException(ArcticException):
pass
class QuotaExceededException(ArcticException):
pass
class UnsupportedPickleStoreVersion(ArcticException):
pass
class DataIntegrityException(ArcticException):
"""
Base class for data integrity issues.
"""
pass
class ArcticSerializationException(ArcticException):
pass
class ConcurrentModificationException(DataIntegrityException):
pass
class UnorderedDataException(DataIntegrityException):
pass
class OverlappingDataException(DataIntegrityException):
pass
class AsyncArcticException(ArcticException):
pass
class RequestDurationException(AsyncArcticException):
pass
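# Minimal sketch of how this hierarchy is meant to be consumed: catching
# DataIntegrityException covers all of its subclasses at once. The raise is
# purely illustrative.
def _example_handle():
    try:
        raise OverlappingDataException('hypothetical overlap')
    except DataIntegrityException as exc:
        return exc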
|
import sys
from os.path import join
from logilab.common.testlib import TestCase, unittest_main
from logilab.common.pytest import *
class ModuleFunctionTC(TestCase):
def test_this_is_testdir(self):
self.assertTrue(this_is_a_testdir("test"))
self.assertTrue(this_is_a_testdir("tests"))
self.assertTrue(this_is_a_testdir("unittests"))
self.assertTrue(this_is_a_testdir("unittest"))
self.assertFalse(this_is_a_testdir("unit"))
self.assertFalse(this_is_a_testdir("units"))
self.assertFalse(this_is_a_testdir("undksjhqfl"))
self.assertFalse(this_is_a_testdir("this_is_not_a_dir_test"))
self.assertFalse(this_is_a_testdir("this_is_not_a_testdir"))
self.assertFalse(this_is_a_testdir("unittestsarenothere"))
self.assertTrue(this_is_a_testdir(join("coincoin", "unittests")))
self.assertFalse(this_is_a_testdir(join("unittests", "spongebob")))
def test_this_is_testfile(self):
self.assertTrue(this_is_a_testfile("test.py"))
self.assertTrue(this_is_a_testfile("testbabar.py"))
self.assertTrue(this_is_a_testfile("unittest_celestine.py"))
self.assertTrue(this_is_a_testfile("smoketest.py"))
self.assertFalse(this_is_a_testfile("test.pyc"))
self.assertFalse(this_is_a_testfile("zephir_test.py"))
self.assertFalse(this_is_a_testfile("smoketest.pl"))
self.assertFalse(this_is_a_testfile("unittest"))
self.assertTrue(this_is_a_testfile(join("coincoin", "unittest_bibi.py")))
self.assertFalse(this_is_a_testfile(join("unittest", "spongebob.py")))
def test_replace_trace(self):
def tracefn(frame, event, arg):
pass
oldtrace = sys.gettrace()
with replace_trace(tracefn):
self.assertIs(sys.gettrace(), tracefn)
self.assertIs(sys.gettrace(), oldtrace)
def test_pause_trace(self):
def tracefn(frame, event, arg):
pass
oldtrace = sys.gettrace()
sys.settrace(tracefn)
try:
self.assertIs(sys.gettrace(), tracefn)
with pause_trace():
self.assertIs(sys.gettrace(), None)
self.assertIs(sys.gettrace(), tracefn)
finally:
sys.settrace(oldtrace)
def test_nocoverage(self):
def tracefn(frame, event, arg):
pass
@nocoverage
def myfn():
self.assertIs(sys.gettrace(), None)
with replace_trace(tracefn):
myfn()
if __name__ == '__main__':
unittest_main()
|
import logging
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
HVAC_MODE_AUTO,
HVAC_MODE_COOL,
HVAC_MODE_DRY,
HVAC_MODE_FAN_ONLY,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
PRESET_AWAY,
PRESET_BOOST,
SUPPORT_FAN_MODE,
SUPPORT_PRESET_MODE,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS, TEMP_FAHRENHEIT
from . import FIBARO_DEVICES, FibaroDevice
PRESET_RESUME = "resume"
PRESET_MOIST = "moist"
PRESET_FURNACE = "furnace"
PRESET_CHANGEOVER = "changeover"
PRESET_ECO_HEAT = "eco_heat"
PRESET_ECO_COOL = "eco_cool"
PRESET_FORCE_OPEN = "force_open"
_LOGGER = logging.getLogger(__name__)
# SDS13781-10 Z-Wave Application Command Class Specification 2019-01-04
# Table 128, Thermostat Fan Mode Set version 4::Fan Mode encoding
FANMODES = {
0: "off",
1: "low",
2: "auto_high",
3: "medium",
4: "auto_medium",
5: "high",
6: "circulation",
7: "humidity_circulation",
8: "left_right",
9: "up_down",
10: "quiet",
128: "auto",
}
HA_FANMODES = {v: k for k, v in FANMODES.items()}
# SDS13781-10 Z-Wave Application Command Class Specification 2019-01-04
# Table 130, Thermostat Mode Set version 3::Mode encoding.
# 4 AUXILIARY
OPMODES_PRESET = {
5: PRESET_RESUME,
7: PRESET_FURNACE,
9: PRESET_MOIST,
10: PRESET_CHANGEOVER,
11: PRESET_ECO_HEAT,
12: PRESET_ECO_COOL,
13: PRESET_AWAY,
15: PRESET_BOOST,
31: PRESET_FORCE_OPEN,
}
HA_OPMODES_PRESET = {v: k for k, v in OPMODES_PRESET.items()}
OPMODES_HVAC = {
0: HVAC_MODE_OFF,
1: HVAC_MODE_HEAT,
2: HVAC_MODE_COOL,
3: HVAC_MODE_AUTO,
4: HVAC_MODE_HEAT,
5: HVAC_MODE_AUTO,
6: HVAC_MODE_FAN_ONLY,
7: HVAC_MODE_HEAT,
8: HVAC_MODE_DRY,
9: HVAC_MODE_DRY,
10: HVAC_MODE_AUTO,
11: HVAC_MODE_HEAT,
12: HVAC_MODE_COOL,
13: HVAC_MODE_AUTO,
15: HVAC_MODE_AUTO,
31: HVAC_MODE_HEAT,
}
HA_OPMODES_HVAC = {
HVAC_MODE_OFF: 0,
HVAC_MODE_HEAT: 1,
HVAC_MODE_COOL: 2,
HVAC_MODE_AUTO: 3,
HVAC_MODE_FAN_ONLY: 6,
}
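# Quick consistency check (illustrative, not platform code): every Home
# Assistant mode in the reverse table maps back to itself through the
# forward table above.
for _ha_mode, _zwave_mode in HA_OPMODES_HVAC.items():
    assert OPMODES_HVAC[_zwave_mode] == _ha_mode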
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Perform the setup for Fibaro controller devices."""
if discovery_info is None:
return
add_entities(
[FibaroThermostat(device) for device in hass.data[FIBARO_DEVICES]["climate"]],
True,
)
class FibaroThermostat(FibaroDevice, ClimateEntity):
"""Representation of a Fibaro Thermostat."""
def __init__(self, fibaro_device):
"""Initialize the Fibaro device."""
super().__init__(fibaro_device)
self._temp_sensor_device = None
self._target_temp_device = None
self._op_mode_device = None
self._fan_mode_device = None
self._support_flags = 0
self.entity_id = f"climate.{self.ha_id}"
self._hvac_support = []
self._preset_support = []
self._fan_support = []
siblings = fibaro_device.fibaro_controller.get_siblings(fibaro_device)
_LOGGER.debug("%s siblings: %s", fibaro_device.ha_id, siblings)
tempunit = "C"
for device in siblings:
            # There is one strong and one weak way of detecting the temperature
            # device, so we prefer the hard evidence if there is such.
if device.type == "com.fibaro.temperatureSensor":
self._temp_sensor_device = FibaroDevice(device)
tempunit = device.properties.unit
elif (
self._temp_sensor_device is None
and "unit" in device.properties
and (
"value" in device.properties
or "heatingThermostatSetpoint" in device.properties
)
and (device.properties.unit == "C" or device.properties.unit == "F")
):
self._temp_sensor_device = FibaroDevice(device)
tempunit = device.properties.unit
if (
"setTargetLevel" in device.actions
or "setThermostatSetpoint" in device.actions
or "setHeatingThermostatSetpoint" in device.actions
):
self._target_temp_device = FibaroDevice(device)
self._support_flags |= SUPPORT_TARGET_TEMPERATURE
tempunit = device.properties.unit
if "setMode" in device.actions or "setOperatingMode" in device.actions:
self._op_mode_device = FibaroDevice(device)
self._support_flags |= SUPPORT_PRESET_MODE
if "setFanMode" in device.actions:
self._fan_mode_device = FibaroDevice(device)
self._support_flags |= SUPPORT_FAN_MODE
if tempunit == "F":
self._unit_of_temp = TEMP_FAHRENHEIT
else:
self._unit_of_temp = TEMP_CELSIUS
if self._fan_mode_device:
fan_modes = (
self._fan_mode_device.fibaro_device.properties.supportedModes.split(",")
)
            for mode in fan_modes:
                mode = int(mode)
                if mode not in FANMODES:
                    _LOGGER.warning("Unknown fan mode: %d", mode)
                    continue
                self._fan_support.append(FANMODES[mode])
if self._op_mode_device:
prop = self._op_mode_device.fibaro_device.properties
if "supportedOperatingModes" in prop:
op_modes = prop.supportedOperatingModes.split(",")
elif "supportedModes" in prop:
op_modes = prop.supportedModes.split(",")
for mode in op_modes:
mode = int(mode)
if mode in OPMODES_HVAC:
mode_ha = OPMODES_HVAC[mode]
if mode_ha not in self._hvac_support:
self._hvac_support.append(mode_ha)
if mode in OPMODES_PRESET:
self._preset_support.append(OPMODES_PRESET[mode])
async def async_added_to_hass(self):
"""Call when entity is added to hass."""
_LOGGER.debug(
"Climate %s\n"
"- _temp_sensor_device %s\n"
"- _target_temp_device %s\n"
"- _op_mode_device %s\n"
"- _fan_mode_device %s",
self.ha_id,
self._temp_sensor_device.ha_id if self._temp_sensor_device else "None",
self._target_temp_device.ha_id if self._target_temp_device else "None",
self._op_mode_device.ha_id if self._op_mode_device else "None",
self._fan_mode_device.ha_id if self._fan_mode_device else "None",
)
await super().async_added_to_hass()
# Register update callback for child devices
siblings = self.fibaro_device.fibaro_controller.get_siblings(self.fibaro_device)
for device in siblings:
if device != self.fibaro_device:
self.controller.register(device.id, self._update_callback)
@property
def supported_features(self):
"""Return the list of supported features."""
return self._support_flags
@property
def fan_modes(self):
"""Return the list of available fan modes."""
if not self._fan_mode_device:
return None
return self._fan_support
@property
def fan_mode(self):
"""Return the fan setting."""
if not self._fan_mode_device:
return None
mode = int(self._fan_mode_device.fibaro_device.properties.mode)
return FANMODES[mode]
def set_fan_mode(self, fan_mode):
"""Set new target fan mode."""
if not self._fan_mode_device:
return
self._fan_mode_device.action("setFanMode", HA_FANMODES[fan_mode])
@property
def fibaro_op_mode(self):
"""Return the operating mode of the device."""
if not self._op_mode_device:
return 3 # Default to AUTO
if "operatingMode" in self._op_mode_device.fibaro_device.properties:
return int(self._op_mode_device.fibaro_device.properties.operatingMode)
return int(self._op_mode_device.fibaro_device.properties.mode)
@property
def hvac_mode(self):
"""Return current operation ie. heat, cool, idle."""
return OPMODES_HVAC[self.fibaro_op_mode]
@property
def hvac_modes(self):
"""Return the list of available operation modes."""
if not self._op_mode_device:
return [HVAC_MODE_AUTO] # Default to this
return self._hvac_support
def set_hvac_mode(self, hvac_mode):
"""Set new target operation mode."""
if not self._op_mode_device:
return
if self.preset_mode:
return
if "setOperatingMode" in self._op_mode_device.fibaro_device.actions:
self._op_mode_device.action("setOperatingMode", HA_OPMODES_HVAC[hvac_mode])
elif "setMode" in self._op_mode_device.fibaro_device.actions:
self._op_mode_device.action("setMode", HA_OPMODES_HVAC[hvac_mode])
@property
def preset_mode(self):
"""Return the current preset mode, e.g., home, away, temp.
Requires SUPPORT_PRESET_MODE.
"""
if not self._op_mode_device:
return None
if "operatingMode" in self._op_mode_device.fibaro_device.properties:
mode = int(self._op_mode_device.fibaro_device.properties.operatingMode)
else:
mode = int(self._op_mode_device.fibaro_device.properties.mode)
if mode not in OPMODES_PRESET:
return None
return OPMODES_PRESET[mode]
@property
def preset_modes(self):
"""Return a list of available preset modes.
Requires SUPPORT_PRESET_MODE.
"""
if not self._op_mode_device:
return None
return self._preset_support
def set_preset_mode(self, preset_mode: str) -> None:
"""Set new preset mode."""
if self._op_mode_device is None:
return
if "setOperatingMode" in self._op_mode_device.fibaro_device.actions:
self._op_mode_device.action(
"setOperatingMode", HA_OPMODES_PRESET[preset_mode]
)
elif "setMode" in self._op_mode_device.fibaro_device.actions:
self._op_mode_device.action("setMode", HA_OPMODES_PRESET[preset_mode])
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return self._unit_of_temp
@property
def current_temperature(self):
"""Return the current temperature."""
if self._temp_sensor_device:
device = self._temp_sensor_device.fibaro_device
if "heatingThermostatSetpoint" in device.properties:
return float(device.properties.heatingThermostatSetpoint)
return float(device.properties.value)
return None
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
if self._target_temp_device:
device = self._target_temp_device.fibaro_device
if "heatingThermostatSetpointFuture" in device.properties:
return float(device.properties.heatingThermostatSetpointFuture)
return float(device.properties.targetLevel)
return None
def set_temperature(self, **kwargs):
"""Set new target temperatures."""
temperature = kwargs.get(ATTR_TEMPERATURE)
target = self._target_temp_device
if temperature is not None:
if "setThermostatSetpoint" in target.fibaro_device.actions:
target.action("setThermostatSetpoint", self.fibaro_op_mode, temperature)
elif "setHeatingThermostatSetpoint" in target.fibaro_device.actions:
target.action("setHeatingThermostatSetpoint", temperature)
else:
target.action("setTargetLevel", temperature)
|
from molecule import logger
from molecule.driver import base
from molecule import util
LOG = logger.get_logger(__name__)
class Azure(base.Base):
"""
The class responsible for managing `Azure`_ instances. `Azure`_
is ``not`` the default driver used in Molecule.
Molecule leverages Ansible's `azure_module`_, by mapping variables
from ``molecule.yml`` into ``create.yml`` and ``destroy.yml``.
.. _`azure_module`: https://docs.ansible.com/ansible/latest/guide_azure.html
.. code-block:: yaml
driver:
name: azure
platforms:
- name: instance
.. code-block:: bash
$ pip install 'ansible[azure]'
Change the options passed to the ssh client.
.. code-block:: yaml
driver:
name: azure
ssh_connection_options:
-o ControlPath=~/.ansible/cp/%r@%h-%p
.. important::
        Molecule does not merge lists; when overriding, the developer must
        provide all options.
Provide a list of files Molecule will preserve, relative to the scenario
ephemeral directory, after any ``destroy`` subcommand execution.
.. code-block:: yaml
driver:
name: azure
safe_files:
- foo
.. _`Azure`: https://azure.microsoft.com
""" # noqa
def __init__(self, config):
super(Azure, self).__init__(config)
self._name = 'azure'
@property
def name(self):
return self._name
@name.setter
def name(self, value):
self._name = value
@property
def login_cmd_template(self):
connection_options = ' '.join(self.ssh_connection_options)
return ('ssh {{address}} '
'-l {{user}} '
'-p {{port}} '
'-i {{identity_file}} '
'{}').format(connection_options)
@property
def default_safe_files(self):
return [
self.instance_config,
]
@property
def default_ssh_connection_options(self):
return self._get_ssh_connection_options()
def login_options(self, instance_name):
d = {'instance': instance_name}
return util.merge_dicts(d, self._get_instance_config(instance_name))
def ansible_connection_options(self, instance_name):
try:
d = self._get_instance_config(instance_name)
return {
'ansible_user': d['user'],
'ansible_host': d['address'],
'ansible_port': d['port'],
'ansible_private_key_file': d['identity_file'],
'connection': 'ssh',
'ansible_ssh_common_args':
' '.join(self.ssh_connection_options),
}
except StopIteration:
return {}
except IOError:
            # Instance has yet to be provisioned, therefore the
            # instance_config is not on disk.
return {}
def _get_instance_config(self, instance_name):
instance_config_dict = util.safe_load_file(
self._config.driver.instance_config)
return next(item for item in instance_config_dict
if item['instance'] == instance_name)
def sanity_checks(self):
# FIXME(decentral1se): Implement sanity checks
pass
|
import base64
import logging
import os
import re
import zlib
from babelfish import Language, language_converters
from guessit import guessit
from six.moves.xmlrpc_client import ServerProxy
from . import Provider, TimeoutSafeTransport
from .. import __short_version__
from ..exceptions import (AuthenticationError, ConfigurationError, DownloadLimitExceeded, ProviderError,
ServiceUnavailable)
from ..matches import guess_matches
from ..subtitle import Subtitle, fix_line_ending
from ..video import Episode, Movie
logger = logging.getLogger(__name__)
class OpenSubtitlesSubtitle(Subtitle):
"""OpenSubtitles Subtitle."""
provider_name = 'opensubtitles'
series_re = re.compile(r'^"(?P<series_name>.*)" (?P<series_title>.*)$')
def __init__(self, language, hearing_impaired, page_link, subtitle_id, matched_by, movie_kind, hash, movie_name,
movie_release_name, movie_year, movie_imdb_id, series_season, series_episode, filename, encoding):
super(OpenSubtitlesSubtitle, self).__init__(language, hearing_impaired=hearing_impaired,
page_link=page_link, encoding=encoding)
self.subtitle_id = subtitle_id
self.matched_by = matched_by
self.movie_kind = movie_kind
self.hash = hash
self.movie_name = movie_name
self.movie_release_name = movie_release_name
self.movie_year = movie_year
self.movie_imdb_id = movie_imdb_id
self.series_season = series_season
self.series_episode = series_episode
self.filename = filename
@property
def id(self):
return str(self.subtitle_id)
@property
def info(self):
if not self.filename and not self.movie_release_name:
return self.subtitle_id
if self.movie_release_name and len(self.movie_release_name) > len(self.filename):
return self.movie_release_name
return self.filename
@property
def series_name(self):
return self.series_re.match(self.movie_name).group('series_name')
@property
def series_title(self):
return self.series_re.match(self.movie_name).group('series_title')
def get_matches(self, video):
if (isinstance(video, Episode) and self.movie_kind != 'episode') or (
isinstance(video, Movie) and self.movie_kind != 'movie'):
logger.info('%r is not a valid movie_kind', self.movie_kind)
return set()
matches = guess_matches(video, {
'title': self.series_name if self.movie_kind == 'episode' else self.movie_name,
'episode_title': self.series_title if self.movie_kind == 'episode' else None,
'year': self.movie_year,
'season': self.series_season,
'episode': self.series_episode
})
# tag
if self.matched_by == 'tag':
if not video.imdb_id or self.movie_imdb_id == video.imdb_id:
if self.movie_kind == 'episode':
matches |= {'series', 'year', 'season', 'episode'}
elif self.movie_kind == 'movie':
matches |= {'title', 'year'}
# guess
matches |= guess_matches(video, guessit(self.movie_release_name, {'type': self.movie_kind}))
matches |= guess_matches(video, guessit(self.filename, {'type': self.movie_kind}))
# hash
if 'opensubtitles' in video.hashes and self.hash == video.hashes['opensubtitles']:
if self.movie_kind == 'movie' and 'title' in matches:
matches.add('hash')
elif self.movie_kind == 'episode' and 'series' in matches and 'season' in matches and 'episode' in matches:
matches.add('hash')
else:
logger.debug('Match on hash discarded')
# imdb_id
if video.imdb_id and self.movie_imdb_id == video.imdb_id:
matches.add('imdb_id')
return matches
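# Illustrative only: how series_re above splits the combined movie_name field
# OpenSubtitles returns for episodes; the show and episode names are made up.
_example_match = OpenSubtitlesSubtitle.series_re.match('"Some Show" Some Episode')
assert _example_match.group('series_name') == 'Some Show'
assert _example_match.group('series_title') == 'Some Episode'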
class OpenSubtitlesProvider(Provider):
"""OpenSubtitles Provider.
:param str username: username.
:param str password: password.
"""
languages = {Language.fromopensubtitles(l) for l in language_converters['opensubtitles'].codes}
server_url = 'https://api.opensubtitles.org/xml-rpc'
subtitle_class = OpenSubtitlesSubtitle
user_agent = 'subliminal v%s' % __short_version__
def __init__(self, username=None, password=None):
self.server = ServerProxy(self.server_url, TimeoutSafeTransport(10))
if any((username, password)) and not all((username, password)):
raise ConfigurationError('Username and password must be specified')
        # None values are not allowed for logging in, so replace them with ''
self.username = username or ''
self.password = password or ''
self.token = None
def initialize(self):
logger.info('Logging in')
response = checked(self.server.LogIn(self.username, self.password, 'eng', self.user_agent))
self.token = response['token']
logger.debug('Logged in with token %r', self.token)
def terminate(self):
logger.info('Logging out')
checked(self.server.LogOut(self.token))
self.server.close()
self.token = None
logger.debug('Logged out')
def no_operation(self):
logger.debug('No operation')
checked(self.server.NoOperation(self.token))
def query(self, languages, hash=None, size=None, imdb_id=None, query=None, season=None, episode=None, tag=None):
# fill the search criteria
criteria = []
if hash and size:
criteria.append({'moviehash': hash, 'moviebytesize': str(size)})
if imdb_id:
if season and episode:
criteria.append({'imdbid': imdb_id[2:], 'season': season, 'episode': episode})
else:
criteria.append({'imdbid': imdb_id[2:]})
if tag:
criteria.append({'tag': tag})
if query and season and episode:
criteria.append({'query': query.replace('\'', ''), 'season': season, 'episode': episode})
elif query:
criteria.append({'query': query.replace('\'', '')})
if not criteria:
raise ValueError('Not enough information')
# add the language
for criterion in criteria:
criterion['sublanguageid'] = ','.join(sorted(l.opensubtitles for l in languages))
# query the server
logger.info('Searching subtitles %r', criteria)
response = checked(self.server.SearchSubtitles(self.token, criteria))
subtitles = []
# exit if no data
if not response['data']:
logger.debug('No subtitles found')
return subtitles
# loop over subtitle items
for subtitle_item in response['data']:
# read the item
language = Language.fromopensubtitles(subtitle_item['SubLanguageID'])
hearing_impaired = bool(int(subtitle_item['SubHearingImpaired']))
page_link = subtitle_item['SubtitlesLink']
subtitle_id = int(subtitle_item['IDSubtitleFile'])
matched_by = subtitle_item['MatchedBy']
movie_kind = subtitle_item['MovieKind']
hash = subtitle_item['MovieHash']
movie_name = subtitle_item['MovieName']
movie_release_name = subtitle_item['MovieReleaseName']
movie_year = int(subtitle_item['MovieYear']) if subtitle_item['MovieYear'] else None
movie_imdb_id = 'tt' + subtitle_item['IDMovieImdb']
series_season = int(subtitle_item['SeriesSeason']) if subtitle_item['SeriesSeason'] else None
series_episode = int(subtitle_item['SeriesEpisode']) if subtitle_item['SeriesEpisode'] else None
filename = subtitle_item['SubFileName']
encoding = subtitle_item.get('SubEncoding') or None
subtitle = self.subtitle_class(language, hearing_impaired, page_link, subtitle_id, matched_by, movie_kind,
hash, movie_name, movie_release_name, movie_year, movie_imdb_id,
series_season, series_episode, filename, encoding)
logger.debug('Found subtitle %r by %s', subtitle, matched_by)
subtitles.append(subtitle)
return subtitles
def list_subtitles(self, video, languages):
season = episode = None
if isinstance(video, Episode):
query = video.series
season = video.season
episode = video.episode
else:
query = video.title
return self.query(languages, hash=video.hashes.get('opensubtitles'), size=video.size, imdb_id=video.imdb_id,
query=query, season=season, episode=episode, tag=os.path.basename(video.name))
def download_subtitle(self, subtitle):
logger.info('Downloading subtitle %r', subtitle)
response = checked(self.server.DownloadSubtitles(self.token, [str(subtitle.subtitle_id)]))
subtitle.content = fix_line_ending(zlib.decompress(base64.b64decode(response['data'][0]['data']), 47))
class OpenSubtitlesVipSubtitle(OpenSubtitlesSubtitle):
"""OpenSubtitles Subtitle."""
provider_name = 'opensubtitlesvip'
class OpenSubtitlesVipProvider(OpenSubtitlesProvider):
"""OpenSubtitles Provider using VIP url."""
server_url = 'https://vip-api.opensubtitles.org/xml-rpc'
subtitle_class = OpenSubtitlesVipSubtitle
class OpenSubtitlesError(ProviderError):
"""Base class for non-generic :class:`OpenSubtitlesProvider` exceptions."""
pass
class Unauthorized(OpenSubtitlesError, AuthenticationError):
"""Exception raised when status is '401 Unauthorized'."""
pass
class NoSession(OpenSubtitlesError, AuthenticationError):
"""Exception raised when status is '406 No session'."""
pass
class DownloadLimitReached(OpenSubtitlesError, DownloadLimitExceeded):
"""Exception raised when status is '407 Download limit reached'."""
pass
class InvalidImdbid(OpenSubtitlesError):
"""Exception raised when status is '413 Invalid ImdbID'."""
pass
class UnknownUserAgent(OpenSubtitlesError, AuthenticationError):
"""Exception raised when status is '414 Unknown User Agent'."""
pass
class DisabledUserAgent(OpenSubtitlesError, AuthenticationError):
"""Exception raised when status is '415 Disabled user agent'."""
pass
def checked(response):
"""Check a response status before returning it.
:param response: a response from a XMLRPC call to OpenSubtitles.
:return: the response.
:raise: :class:`OpenSubtitlesError`
"""
status_code = int(response['status'][:3])
if status_code == 401:
raise Unauthorized
if status_code == 406:
raise NoSession
if status_code == 407:
raise DownloadLimitReached
if status_code == 413:
raise InvalidImdbid
if status_code == 414:
raise UnknownUserAgent
if status_code == 415:
raise DisabledUserAgent
if status_code == 503:
raise ServiceUnavailable
if status_code != 200:
raise OpenSubtitlesError(response['status'])
return response
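# Illustrative only: checked() returns the response untouched on '200 OK' and
# raises the matching exception otherwise; both dicts are fabricated stand-ins
# for real XML-RPC responses.
assert checked({'status': '200 OK'}) == {'status': '200 OK'}
try:
    checked({'status': '407 Download limit reached'})
except DownloadLimitReached:
    pass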
|
import sys, os, json, argparse
import console
def argue():
parser = argparse.ArgumentParser()
parser.add_argument('-v', '--verbose', action='store_true')
parser.add_argument('-s', '--symbolic', action='store_true')
parser.add_argument('-f', '--force', action='store_true')
parser.add_argument('lhs')
parser.add_argument('rhs')
args = parser.parse_args()
if args.verbose:
json.dump(vars(args),sys.stderr,indent=4)
return args
def ln(lhs,rhs,symbolic=False):
if not os.path.exists(lhs):
sys.stderr.write('%s not found\n'%lhs)
sys.exit(1)
if os.path.isdir(rhs):
rhs = '%s/%s'%(rhs,os.path.basename(lhs))
if os.path.isfile(rhs):
sys.stderr.write('%s already exists\n'%rhs)
sys.exit(1)
if os.path.islink(rhs):
sys.stderr.write('%s already linked\n'%rhs)
sys.exit(1)
if symbolic:
os.symlink(lhs,rhs)
else:
os.link(lhs,rhs)
return
def main():
console.clear()
args = argue()
ln(args.lhs,args.rhs,args.symbolic)
return
if __name__ == '__main__': main()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.linux_packages import fio
from perfkitbenchmarker.linux_packages import gluster
FLAGS = flags.FLAGS
flags.DEFINE_string('fill_disk_size', '4G',
'Amount to fill the disk before reading.')
flags.DEFINE_string('fill_disk_bs', '128k',
'Block size used to fill the disk before reading.')
flags.DEFINE_integer('fill_disk_iodepth', 64, 'iodepth used to fill the disk.')
flags.DEFINE_string('read_size', '4G', 'Size of the file to read.')
flags.DEFINE_string('read_bs', '512k', 'Block size of the file to read.')
flags.DEFINE_integer('read_iodepth', 1, 'iodepth used in reading the file.')
BENCHMARK_NAME = 'gluster_fio'
BENCHMARK_CONFIG = """
gluster_fio:
description: >
Runs fio against a remote gluster cluster.
vm_groups:
clients:
vm_spec: *default_single_core
vm_count: null
gluster_servers:
vm_spec: *default_single_core
disk_spec: *default_500_gb
vm_count: 1
"""
_VOLUME_NAME = 'vol01'
_MOUNT_POINT = '/glusterfs'
_NUM_SECTORS_READ_AHEAD = 16384
def GetConfig(user_config):
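  """Load and return the benchmark config."""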
return configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
def Prepare(benchmark_spec):
"""Set up GlusterFS and install fio.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
gluster_servers = benchmark_spec.vm_groups['gluster_servers']
clients = benchmark_spec.vm_groups['clients']
client_vm = clients[0]
vm_util.RunThreaded(lambda vm: vm.Install('fio'), gluster_servers + clients)
for vm in gluster_servers:
vm.SetReadAhead(_NUM_SECTORS_READ_AHEAD,
[d.GetDevicePath() for d in vm.scratch_disks])
# Set up Gluster
if gluster_servers:
gluster.ConfigureServers(gluster_servers, _VOLUME_NAME)
args = [((client, gluster_servers[0], _VOLUME_NAME, _MOUNT_POINT), {})
for client in clients]
vm_util.RunThreaded(gluster.MountGluster, args)
gluster_address = gluster_servers[0].internal_ip
client_vm.RemoteCommand('sudo mkdir -p /testdir')
client_vm.RemoteCommand('sudo mount %s:/vol01 /testdir -t glusterfs' %
gluster_address)
def _RunFio(vm, fio_params, metadata):
"""Run fio.
Args:
vm: Virtual machine to run fio on.
fio_params: fio parameters used to create the fio command to run.
metadata: Metadata to add to the results.
Returns:
A list of sample.Sample objects
"""
stdout, _ = vm.RemoteCommand('sudo {0} {1}'.format(fio.GetFioExec(),
fio_params))
job_file_contents = fio.FioParametersToJob(fio_params)
samples = fio.ParseResults(
job_file_contents,
json.loads(stdout),
base_metadata=metadata,
skip_latency_individual_stats=True)
return samples
def Run(benchmark_spec):
"""Run fio against gluster.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
Returns:
A list of sample.Sample objects.
"""
gluster_servers = benchmark_spec.vm_groups['gluster_servers']
clients = benchmark_spec.vm_groups['clients']
client_vm = clients[0]
results = []
metadata = {
'fill_disk_size': FLAGS.fill_disk_size,
'fill_disk_bs': FLAGS.fill_disk_bs,
'fill_disk_iodepth': FLAGS.fill_disk_iodepth,
'read_size': FLAGS.read_size,
'read_bs': FLAGS.read_bs,
'read_iodepth': FLAGS.read_iodepth,
}
fio_params = ' '.join([
'--output-format=json', '--name=fill_disk',
'--filename=/testdir/testfile',
'--filesize=%s' % FLAGS.fill_disk_size, '--ioengine=libaio', '--direct=1',
'--verify=0', '--randrepeat=0',
'--bs=%s' % FLAGS.fill_disk_bs,
'--iodepth=%s' % FLAGS.fill_disk_iodepth, '--rw=randwrite'
])
samples = _RunFio(client_vm, fio_params, metadata)
results += samples
# In addition to dropping caches, increase polling to potentially reduce
# variance in network operations
for vm in gluster_servers + clients:
vm.RemoteCommand('sudo /sbin/sysctl net.core.busy_poll=50')
vm.DropCaches()
fio_read_common_params = [
'--output-format=json', '--randrepeat=1', '--ioengine=libaio',
'--gtod_reduce=1', '--filename=/testdir/testfile',
'--bs=%s' % FLAGS.read_bs,
'--iodepth=%s' % FLAGS.read_iodepth,
'--size=%s' % FLAGS.read_size, '--readwrite=randread'
]
fio_params = '--name=first_read ' + ' '.join(fio_read_common_params)
samples = _RunFio(client_vm, fio_params, metadata)
results += samples
# Run the command again. This time, the file should be cached.
fio_params = '--name=second_read ' + ' '.join(fio_read_common_params)
samples = _RunFio(client_vm, fio_params, metadata)
results += samples
return results
def Cleanup(benchmark_spec):
"""Cleanup gluster.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
clients = benchmark_spec.vm_groups['clients']
gluster_servers = benchmark_spec.vm_groups['gluster_servers']
for client in clients:
client.RemoteCommand('sudo umount %s' % _MOUNT_POINT)
if gluster_servers:
gluster.DeleteVolume(gluster_servers[0], _VOLUME_NAME)
|
import logging
import os
import shutil
import subprocess
from tempfile import NamedTemporaryFile
from homeassistant.components.camera import Camera
from homeassistant.const import CONF_FILE_PATH, CONF_NAME, EVENT_HOMEASSISTANT_STOP
from .const import (
CONF_HORIZONTAL_FLIP,
CONF_IMAGE_HEIGHT,
CONF_IMAGE_QUALITY,
CONF_IMAGE_ROTATION,
CONF_IMAGE_WIDTH,
CONF_OVERLAY_METADATA,
CONF_OVERLAY_TIMESTAMP,
CONF_TIMELAPSE,
CONF_VERTICAL_FLIP,
DOMAIN,
)
_LOGGER = logging.getLogger(__name__)
def kill_raspistill(*args):
"""Kill any previously running raspistill process.."""
subprocess.Popen(
["killall", "raspistill"], stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Raspberry Camera."""
# We only want this platform to be set up via discovery.
    # This prevents initializing from an erroneous platform config section in the YAML config.
if discovery_info is None:
return
if shutil.which("raspistill") is None:
_LOGGER.error("'raspistill' was not found")
return
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, kill_raspistill)
setup_config = hass.data[DOMAIN]
file_path = setup_config[CONF_FILE_PATH]
def delete_temp_file(*args):
"""Delete the temporary file to prevent saving multiple temp images.
        Only used when no path is defined.
"""
os.remove(file_path)
# If no file path is defined, use a temporary file
if file_path is None:
temp_file = NamedTemporaryFile(suffix=".jpg", delete=False)
temp_file.close()
file_path = temp_file.name
setup_config[CONF_FILE_PATH] = file_path
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, delete_temp_file)
# Check whether the file path has been whitelisted
elif not hass.config.is_allowed_path(file_path):
_LOGGER.error("'%s' is not a whitelisted directory", file_path)
return
add_entities([RaspberryCamera(setup_config)])
class RaspberryCamera(Camera):
"""Representation of a Raspberry Pi camera."""
def __init__(self, device_info):
"""Initialize Raspberry Pi camera component."""
super().__init__()
self._name = device_info[CONF_NAME]
self._config = device_info
        # Kill any existing raspistill instance
kill_raspistill()
cmd_args = [
"raspistill",
"--nopreview",
"-o",
device_info[CONF_FILE_PATH],
"-t",
"0",
"-w",
str(device_info[CONF_IMAGE_WIDTH]),
"-h",
str(device_info[CONF_IMAGE_HEIGHT]),
"-tl",
str(device_info[CONF_TIMELAPSE]),
"-q",
str(device_info[CONF_IMAGE_QUALITY]),
"-rot",
str(device_info[CONF_IMAGE_ROTATION]),
]
if device_info[CONF_HORIZONTAL_FLIP]:
cmd_args.append("-hf")
if device_info[CONF_VERTICAL_FLIP]:
cmd_args.append("-vf")
if device_info[CONF_OVERLAY_METADATA]:
cmd_args.append("-a")
cmd_args.append(str(device_info[CONF_OVERLAY_METADATA]))
if device_info[CONF_OVERLAY_TIMESTAMP]:
cmd_args.append("-a")
cmd_args.append("4")
cmd_args.append("-a")
cmd_args.append(str(device_info[CONF_OVERLAY_TIMESTAMP]))
subprocess.Popen(cmd_args, stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT)
def camera_image(self):
"""Return raspistill image response."""
with open(self._config[CONF_FILE_PATH], "rb") as file:
return file.read()
@property
def name(self):
"""Return the name of this camera."""
return self._name
@property
def frame_interval(self):
"""Return the interval between frames of the stream."""
return self._config[CONF_TIMELAPSE] / 1000
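# Illustrative only: the raspistill argument list __init__ above assembles for
# a hypothetical 640x480 camera at quality 7 with a 1000 ms timelapse and no
# flips or overlays (the output path is made up as well).
_EXAMPLE_CMD_ARGS = [
    "raspistill", "--nopreview",
    "-o", "/tmp/example.jpg",
    "-t", "0",
    "-w", "640",
    "-h", "480",
    "-tl", "1000",
    "-q", "7",
    "-rot", "0",
]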
|
import asyncio
import json
import logging
import os
import pickle
import weakref
from collections import defaultdict
from pathlib import Path
from typing import Any, AsyncIterator, Dict, Optional, Tuple
from uuid import uuid4
from .. import data_manager, errors
from .base import BaseDriver, IdentifierData, ConfigCategory
__all__ = ["JsonDriver"]
_shared_datastore = {}
_driver_counts = {}
_finalizers = []
_locks = defaultdict(asyncio.Lock)
log = logging.getLogger("redbot.json_driver")
def finalize_driver(cog_name):
if cog_name not in _driver_counts:
return
_driver_counts[cog_name] -= 1
if _driver_counts[cog_name] == 0:
if cog_name in _shared_datastore:
del _shared_datastore[cog_name]
if cog_name in _locks:
del _locks[cog_name]
for f in _finalizers:
if not f.alive:
_finalizers.remove(f)
# noinspection PyProtectedMember
class JsonDriver(BaseDriver):
"""
Subclass of :py:class:`.BaseDriver`.
.. py:attribute:: file_name
The name of the file in which to store JSON data.
.. py:attribute:: data_path
The path in which to store the file indicated by :py:attr:`file_name`.
"""
def __init__(
self,
cog_name: str,
identifier: str,
*,
data_path_override: Optional[Path] = None,
file_name_override: str = "settings.json",
):
super().__init__(cog_name, identifier)
self.file_name = file_name_override
if data_path_override is not None:
self.data_path = data_path_override
elif cog_name == "Core" and identifier == "0":
self.data_path = data_manager.core_data_path()
else:
self.data_path = data_manager.cog_data_path(raw_name=cog_name)
self.data_path.mkdir(parents=True, exist_ok=True)
self.data_path = self.data_path / self.file_name
self._load_data()
@property
def _lock(self):
return _locks[self.cog_name]
@property
def data(self):
return _shared_datastore.get(self.cog_name)
@data.setter
def data(self, value):
_shared_datastore[self.cog_name] = value
@classmethod
async def initialize(cls, **storage_details) -> None:
# No initializing to do
return
@classmethod
async def teardown(cls) -> None:
# No tearing down to do
return
@staticmethod
def get_config_details() -> Dict[str, Any]:
# No driver-specific configuration needed
return {}
def _load_data(self):
if self.cog_name not in _driver_counts:
_driver_counts[self.cog_name] = 0
_driver_counts[self.cog_name] += 1
_finalizers.append(weakref.finalize(self, finalize_driver, self.cog_name))
if self.data is not None:
return
try:
with self.data_path.open("r", encoding="utf-8") as fs:
self.data = json.load(fs)
except FileNotFoundError:
self.data = {}
with self.data_path.open("w", encoding="utf-8") as fs:
json.dump(self.data, fs)
def migrate_identifier(self, raw_identifier: int):
if self.unique_cog_identifier in self.data:
# Data has already been migrated
return
poss_identifiers = [str(raw_identifier), str(hash(raw_identifier))]
for ident in poss_identifiers:
if ident in self.data:
self.data[self.unique_cog_identifier] = self.data[ident]
del self.data[ident]
_save_json(self.data_path, self.data)
break
async def get(self, identifier_data: IdentifierData):
partial = self.data
full_identifiers = identifier_data.to_tuple()[1:]
for i in full_identifiers:
partial = partial[i]
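        # Deep copy via a pickle round-trip so callers cannot mutate the shared store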
return pickle.loads(pickle.dumps(partial, -1))
async def set(self, identifier_data: IdentifierData, value=None):
partial = self.data
full_identifiers = identifier_data.to_tuple()[1:]
# This is both our deepcopy() and our way of making sure this value is actually JSON
# serializable.
value_copy = json.loads(json.dumps(value))
async with self._lock:
for i in full_identifiers[:-1]:
try:
partial = partial.setdefault(i, {})
except AttributeError:
# Tried to set sub-field of non-object
raise errors.CannotSetSubfield
partial[full_identifiers[-1]] = value_copy
await self._save()
async def clear(self, identifier_data: IdentifierData):
partial = self.data
full_identifiers = identifier_data.to_tuple()[1:]
try:
for i in full_identifiers[:-1]:
partial = partial[i]
except KeyError:
pass
else:
async with self._lock:
try:
del partial[full_identifiers[-1]]
except KeyError:
pass
else:
await self._save()
@classmethod
async def aiter_cogs(cls) -> AsyncIterator[Tuple[str, str]]:
yield "Core", "0"
for _dir in data_manager.cog_data_path().iterdir():
fpath = _dir / "settings.json"
if not fpath.exists():
continue
with fpath.open() as f:
try:
data = json.load(f)
except json.JSONDecodeError:
continue
if not isinstance(data, dict):
continue
cog_name = _dir.stem
for cog_id, inner in data.items():
if not isinstance(inner, dict):
continue
yield cog_name, cog_id
async def import_data(self, cog_data, custom_group_data):
def update_write_data(identifier_data: IdentifierData, _data):
partial = self.data
idents = identifier_data.to_tuple()[1:]
for ident in idents[:-1]:
partial = partial.setdefault(ident, {})
partial[idents[-1]] = _data
async with self._lock:
for category, all_data in cog_data:
splitted_pkey = self._split_primary_key(category, custom_group_data, all_data)
for pkey, data in splitted_pkey:
ident_data = IdentifierData(
self.cog_name,
self.unique_cog_identifier,
category,
pkey,
(),
*ConfigCategory.get_pkey_info(category, custom_group_data),
)
update_write_data(ident_data, data)
await self._save()
async def _save(self) -> None:
loop = asyncio.get_running_loop()
await loop.run_in_executor(None, _save_json, self.data_path, self.data)
def _save_json(path: Path, data: Dict[str, Any]) -> None:
"""
    The fsync calls here are entirely necessary.
    On Windows, they are not available in their entirety.
    If a Windows user ends up with tons of temp files, they should consider hosting on
    something POSIX compatible, or using a different backend instead.
    Most users won't encounter this issue, but with high write volumes,
    without an fsync on both the temp file and, after the replace, on the directory,
    there's no real durability or atomicity guarantee from the filesystem.
In depth overview of underlying reasons why this is needed:
https://lwn.net/Articles/457667/
Also see:
http://man7.org/linux/man-pages/man2/open.2.html#NOTES (synchronous I/O section)
And:
https://www.mjmwired.net/kernel/Documentation/filesystems/ext4.txt#310
"""
filename = path.stem
tmp_file = "{}-{}.tmp".format(filename, uuid4().fields[0])
tmp_path = path.parent / tmp_file
with tmp_path.open(encoding="utf-8", mode="w") as fs:
json.dump(data, fs)
fs.flush() # This does get closed on context exit, ...
os.fsync(fs.fileno()) # but that needs to happen prior to this line
tmp_path.replace(path)
try:
flag = os.O_DIRECTORY # pylint: disable=no-member
except AttributeError:
pass
else:
fd = os.open(path.parent, flag)
try:
os.fsync(fd)
finally:
os.close(fd)
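# Usage sketch: atomically persisting a hypothetical payload with the helper
# above. The write-to-temp-then-replace dance means a reader can never observe
# a half-written settings.json.
def _example_save(tmp_dir: Path) -> None:
    target = tmp_dir / "settings.json"
    _save_json(target, {"Core": {"0": {}}})
    assert json.loads(target.read_text(encoding="utf-8")) == {"Core": {"0": {}}}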
|