import logging
import os.path
import pytest
from PyQt5.QtCore import QUrl
from qutebrowser.browser import pdfjs
from qutebrowser.utils import urlmatch
pytestmark = [pytest.mark.usefixtures('data_tmpdir')]
@pytest.mark.parametrize('available, snippet', [
(True, '<title>PDF.js viewer</title>'),
(False, '<h1>No pdf.js installation found</h1>'),
('force', 'fake PDF.js'),
])
def test_generate_pdfjs_page(available, snippet, monkeypatch):
if available == 'force':
monkeypatch.setattr(pdfjs, 'is_available', lambda: True)
monkeypatch.setattr(pdfjs, 'get_pdfjs_res',
lambda filename: b'fake PDF.js')
elif available:
if not pdfjs.is_available():
pytest.skip("PDF.js unavailable")
monkeypatch.setattr(pdfjs, 'is_available', lambda: True)
else:
monkeypatch.setattr(pdfjs, 'is_available', lambda: False)
content = pdfjs.generate_pdfjs_page('example.pdf', QUrl())
print(content)
assert snippet in content
def test_broken_installation(data_tmpdir, monkeypatch):
"""Make sure we don't crash with a broken local installation."""
monkeypatch.setattr(pdfjs, '_SYSTEM_PATHS', [])
(data_tmpdir / 'pdfjs' / 'pdf.js').ensure() # But no viewer.html
content = pdfjs.generate_pdfjs_page('example.pdf', QUrl())
assert '<h1>No pdf.js installation found</h1>' in content
# Note that we get double protection: once because we use QUrl.FullyEncoded and
# once because we use qutebrowser.utils.javascript.to_js. Characters like " are
# already replaced by QUrl.
@pytest.mark.parametrize('filename, expected', [
('foo.bar', "foo.bar"),
('foo"bar', "foo%22bar"),
('foo\0bar', 'foo%00bar'),
('foobar");alert("attack!");',
'foobar%22);alert(%22attack!%22);'),
])
def test_generate_pdfjs_script(filename, expected):
expected_open = 'open("qute://pdfjs/file?filename={}");'.format(expected)
actual = pdfjs._generate_pdfjs_script(filename)
assert expected_open in actual
assert 'PDFView' in actual
class TestResources:
@pytest.fixture
def read_system_mock(self, mocker):
return mocker.patch.object(pdfjs, '_read_from_system', autospec=True)
@pytest.fixture
def read_file_mock(self, mocker):
return mocker.patch.object(pdfjs.utils, 'read_file', autospec=True)
def test_get_pdfjs_res_system(self, read_system_mock):
read_system_mock.return_value = (b'content', 'path')
assert pdfjs.get_pdfjs_res_and_path('web/test') == (b'content', 'path')
assert pdfjs.get_pdfjs_res('web/test') == b'content'
read_system_mock.assert_called_with('/usr/share/pdf.js/',
['web/test', 'test'])
def test_get_pdfjs_res_bundled(self, read_system_mock, read_file_mock,
tmpdir):
read_system_mock.return_value = (None, None)
read_file_mock.return_value = b'content'
assert pdfjs.get_pdfjs_res_and_path('web/test') == (b'content', None)
assert pdfjs.get_pdfjs_res('web/test') == b'content'
for path in ['/usr/share/pdf.js/',
str(tmpdir / 'data' / 'pdfjs'),
# hardcoded for --temp-basedir
os.path.expanduser('~/.local/share/qutebrowser/pdfjs/')]:
read_system_mock.assert_any_call(path, ['web/test', 'test'])
def test_get_pdfjs_res_not_found(self, read_system_mock, read_file_mock,
caplog):
read_system_mock.return_value = (None, None)
read_file_mock.side_effect = FileNotFoundError
with pytest.raises(pdfjs.PDFJSNotFound,
match="Path 'web/test' not found"):
pdfjs.get_pdfjs_res_and_path('web/test')
assert not caplog.records
def test_get_pdfjs_res_oserror(self, read_system_mock, read_file_mock,
caplog):
read_system_mock.return_value = (None, None)
read_file_mock.side_effect = OSError("Message")
with caplog.at_level(logging.WARNING):
with pytest.raises(pdfjs.PDFJSNotFound,
match="Path 'web/test' not found"):
pdfjs.get_pdfjs_res_and_path('web/test')
expected = 'OSError while reading PDF.js file: Message'
assert caplog.messages == [expected]
@pytest.mark.parametrize('path, expected', [
('web/viewer.js', 'viewer.js'),
('build/locale/foo.bar', 'locale/foo.bar'),
('viewer.js', 'viewer.js'),
('foo/viewer.css', 'foo/viewer.css'),
])
def test_remove_prefix(path, expected):
assert pdfjs._remove_prefix(path) == expected
@pytest.mark.parametrize('names, expected_name', [
(['one'], 'one'),
(['doesnotexist', 'two'], 'two'),
(['one', 'two'], 'one'),
(['does', 'not', 'onexist'], None),
])
def test_read_from_system(names, expected_name, tmpdir):
file1 = tmpdir / 'one'
file1.write_text('text1', encoding='ascii')
file2 = tmpdir / 'two'
file2.write_text('text2', encoding='ascii')
if expected_name == 'one':
expected = (b'text1', str(file1))
elif expected_name == 'two':
expected = (b'text2', str(file2))
elif expected_name is None:
expected = (None, None)
assert pdfjs._read_from_system(str(tmpdir), names) == expected
@pytest.fixture
def unreadable_file(tmpdir):
unreadable_file = tmpdir / 'unreadable'
unreadable_file.ensure()
unreadable_file.chmod(0)
if os.access(str(unreadable_file), os.R_OK):
# Docker container or similar
pytest.skip("File was still readable")
yield unreadable_file
unreadable_file.chmod(0o755)
def test_read_from_system_oserror(tmpdir, caplog, unreadable_file):
expected = (None, None)
with caplog.at_level(logging.WARNING):
assert pdfjs._read_from_system(str(tmpdir), ['unreadable']) == expected
assert len(caplog.records) == 1
message = caplog.messages[0]
assert message.startswith('OSError while reading PDF.js file:')
@pytest.mark.parametrize('available', [True, False])
def test_is_available(available, mocker):
mock = mocker.patch.object(pdfjs, 'get_pdfjs_res', autospec=True)
if available:
mock.return_value = b'foo'
else:
mock.side_effect = pdfjs.PDFJSNotFound('build/pdf.js')
assert pdfjs.is_available() == available
@pytest.mark.parametrize('mimetype, url, enabled, expected', [
# PDF files
('application/pdf', 'http://www.example.com', True, True),
('application/x-pdf', 'http://www.example.com', True, True),
# Not a PDF
('application/octet-stream', 'http://www.example.com', True, False),
# PDF.js disabled
('application/pdf', 'http://www.example.com', False, False),
# Download button in PDF.js
('application/pdf', 'blob:qute%3A///b45250b3', True, False),
])
def test_should_use_pdfjs(mimetype, url, enabled, expected, config_stub):
config_stub.val.content.pdfjs = enabled
assert pdfjs.should_use_pdfjs(mimetype, QUrl(url)) == expected
@pytest.mark.parametrize('url, expected', [
('http://example.com', True),
('http://example.org', False),
])
def test_should_use_pdfjs_url_pattern(config_stub, url, expected):
config_stub.val.content.pdfjs = False
pattern = urlmatch.UrlPattern('http://example.com')
config_stub.set_obj('content.pdfjs', True, pattern=pattern)
assert pdfjs.should_use_pdfjs('application/pdf', QUrl(url)) == expected
def test_get_main_url():
expected = QUrl('qute://pdfjs/web/viewer.html?filename=hello?world.pdf&'
'file=&source=http://a.com/hello?world.pdf')
original_url = QUrl('http://a.com/hello?world.pdf')
assert pdfjs.get_main_url('hello?world.pdf', original_url) == expected
|
from sqlalchemy.orm import relationship
from sqlalchemy import Boolean, Column, Integer, String, Text, ForeignKey
from lemur.database import db
from lemur.utils import Vault
from lemur.models import (
roles_users,
roles_authorities,
roles_certificates,
pending_cert_role_associations,
)
class Role(db.Model):
__tablename__ = "roles"
id = Column(Integer, primary_key=True)
name = Column(String(128), unique=True)
username = Column(String(128))
password = Column(Vault)
description = Column(Text)
authority_id = Column(Integer, ForeignKey("authorities.id"))
authorities = relationship(
"Authority",
secondary=roles_authorities,
passive_deletes=True,
backref="role",
cascade="all,delete",
)
user_id = Column(Integer, ForeignKey("users.id"))
third_party = Column(Boolean)
users = relationship(
"User", secondary=roles_users, passive_deletes=True, backref="role"
)
certificates = relationship(
"Certificate", secondary=roles_certificates, backref="role"
)
pending_certificates = relationship(
"PendingCertificate", secondary=pending_cert_role_associations, backref="role"
)
sensitive_fields = ("password",)
def __repr__(self):
return "Role(name={name})".format(name=self.name)
|
from __future__ import division
import math
import numpy as np
import unittest
from chainercv.utils import testing
from chainercv.utils import tile_images
@testing.parameterize(*testing.product({
'fill': [128, (104, 117, 123), np.random.uniform(255, size=(3, 1, 1))],
'pad': [0, 1, 2, 3, (3, 5), (5, 2)]
}))
class TestTileImages(unittest.TestCase):
def test_tile_images(self):
B = np.random.randint(10, 20)
n_col = np.random.randint(2, 5)
H = 30
W = 40
imgs = np.random.uniform(255, size=(B, 3, H, W))
tile = tile_images(imgs, n_col, self.pad, fill=self.fill)
if isinstance(self.pad, int):
pad_y = self.pad
pad_x = self.pad
else:
pad_y, pad_x = self.pad
n_row = int(math.ceil(B / n_col))
self.assertTrue(n_col >= 1 and n_row >= 1)
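# The checks below assume tile_images lays images out row-major on a grid of
# (H + pad_y) x (W + pad_x) cells, with each image offset by pad // 2 inside its
# cell; grid position (row=1, col=1) therefore starts at
# (H + pad_y + pad_y // 2, W + pad_x + pad_x // 2) and holds imgs[1 * n_col + 1].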
start_y_11 = H + pad_y + pad_y // 2
start_x_11 = W + pad_x + pad_x // 2
tile_11 = tile[:,
start_y_11:start_y_11 + H,
start_x_11:start_x_11 + W]
np.testing.assert_equal(tile_11, imgs[n_col + 1])
testing.run_module(__name__, __file__)
|
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN, SwitchEntity
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import SIGNAL_ADD_ENTITIES
from .insteon_entity import InsteonEntity
from .utils import async_add_insteon_entities
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Insteon switches from a config entry."""
def add_entities(discovery_info=None):
"""Add the Insteon entities for the platform."""
async_add_insteon_entities(
hass, SWITCH_DOMAIN, InsteonSwitchEntity, async_add_entities, discovery_info
)
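# The unconditional add_entities() call below adds devices already known when
# the config entry is set up; the dispatcher connection handles devices
# discovered later (the usual SIGNAL_ADD_ENTITIES pattern in this integration).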
signal = f"{SIGNAL_ADD_ENTITIES}_{SWITCH_DOMAIN}"
async_dispatcher_connect(hass, signal, add_entities)
add_entities()
class InsteonSwitchEntity(InsteonEntity, SwitchEntity):
"""A Class for an Insteon switch entity."""
@property
def is_on(self):
"""Return the boolean response if the node is on."""
return bool(self._insteon_device_group.value)
async def async_turn_on(self, **kwargs):
"""Turn switch on."""
await self._insteon_device.async_on(group=self._insteon_device_group.group)
async def async_turn_off(self, **kwargs):
"""Turn switch off."""
await self._insteon_device.async_off(group=self._insteon_device_group.group)
|
import glob
colour_file = glob.glob('/sys/bus/hid/drivers/razerkbd/0*/matrix_custom_frame')[0]
custom_mode = glob.glob('/sys/bus/hid/drivers/razerkbd/0*/matrix_effect_custom')[0]
def clear_row(row_num):
result = bytes([row_num, 0x00, 0x15]) # Results in b'\x00', b'\x01' ...
for i in range(0, 22):
result += b'\x00\x00\x00'
return result
def gen_row(row_num):
result = bytes([row_num, 0, 0x15]) # Results in b'\x00', b'\x01' ...
for i in range(0, 22):
for j in range(0, 22):
if j == i:
result += b'\x00\xFF\x00'
else:
result += b'\x00\x00\x00'
yield result
# Reset result
result = bytes([row_num, 0x00, 0x15])
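# Frame layout assumed by this script (inferred from the byte strings above):
# each write to matrix_custom_frame is [row, first_col, last_col] followed by
# one RGB triplet per key, here 22 keys (columns 0x00..0x15), so 3 + 22 * 3 bytes.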
def write_binarystr(filename, binary_str):
with open(filename, 'wb') as bin_file:
bin_file.write(binary_str)
for i in range(0, 6):
write_binarystr(colour_file, clear_row(i))
write_binarystr(custom_mode, bytes('1', 'ascii'))
for i in range(0, 6):
for key_id, byte_str in enumerate(gen_row(i)):
write_binarystr(colour_file, byte_str)
write_binarystr(custom_mode, bytes('1', 'ascii'))
print("ROW {0}:{1}".format(i, key_id))
input()
write_binarystr(colour_file, clear_row(i))
write_binarystr(custom_mode, bytes('1', 'ascii'))
|
import os
import os.path
import sys
import subprocess
sys.path.insert(0, os.path.join(os.path.dirname(__file__), os.pardir,
os.pardir))
from scripts import utils
def main():
"""Main entry point.
Return:
The pylint exit status.
"""
utils.change_cwd()
files = []
for dirpath, _dirnames, filenames in os.walk('tests'):
for fn in filenames:
if os.path.splitext(fn)[1] == '.py':
files.append(os.path.join(dirpath, fn))
disabled = [
# pytest fixtures
'redefined-outer-name',
'unused-argument',
'too-many-arguments',
# things which are okay in tests
'missing-docstring',
'protected-access',
'len-as-condition',
'compare-to-empty-string',
# directories without __init__.py...
'import-error',
]
toxinidir = sys.argv[1]
pythonpath = os.environ.get('PYTHONPATH', '').split(os.pathsep) + [
toxinidir,
]
args = [
'--disable={}'.format(','.join(disabled)),
'--ignored-modules=helpers,pytest,PyQt5',
r'--ignore-long-lines=(<?https?://|^# Copyright 201\d)|^ *def [a-z]',
r'--method-rgx=[a-z_][A-Za-z0-9_]{1,100}$',
] + sys.argv[2:] + files
env = os.environ.copy()
env['PYTHONPATH'] = os.pathsep.join(pythonpath)
ret = subprocess.run(['pylint'] + args, env=env, check=False).returncode
return ret
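# Invocation sketch (the script name shown here is an assumption):
#   python run_pylint_on_tests.py {toxinidir} --output-format=colorized
# argv[1] is appended to PYTHONPATH and all remaining arguments are forwarded
# to pylint together with the collected test files.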
if __name__ == '__main__':
sys.exit(main())
|
import unittest
import numpy as np
import PIL
from chainer import testing
from chainercv.transforms import scale
@testing.parameterize(*testing.product_dict(
[
{'in_shape': (3, 24, 16), 'size': 8,
'fit_short': True, 'out_shape': (3, 12, 8)},
{'in_shape': (3, 16, 24), 'size': 8,
'fit_short': True, 'out_shape': (3, 8, 12)},
{'in_shape': (3, 16, 24), 'size': 24,
'fit_short': True, 'out_shape': (3, 24, 36)},
{'in_shape': (3, 24, 16), 'size': 36,
'fit_short': False, 'out_shape': (3, 36, 24)},
{'in_shape': (3, 16, 24), 'size': 36,
'fit_short': False, 'out_shape': (3, 24, 36)},
{'in_shape': (3, 24, 12), 'size': 12,
'fit_short': False, 'out_shape': (3, 12, 6)},
# grayscale
{'in_shape': (1, 16, 24), 'size': 8,
'fit_short': True, 'out_shape': (1, 8, 12)},
{'in_shape': (1, 16, 24), 'size': 36,
'fit_short': False, 'out_shape': (1, 24, 36)},
],
[
{'interpolation': PIL.Image.NEAREST},
{'interpolation': PIL.Image.BILINEAR},
{'interpolation': PIL.Image.BICUBIC},
{'interpolation': PIL.Image.LANCZOS},
]
))
class TestScale(unittest.TestCase):
def test_scale(self):
img = np.random.uniform(size=self.in_shape)
out = scale(img, self.size, fit_short=self.fit_short,
interpolation=self.interpolation)
self.assertEqual(out.shape, self.out_shape)
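# In every case below, the reference side of the input already equals `size`
# (the short side for fit_short=True, the long side for fit_short=False), so
# scale() is expected to skip resizing and return the input array itself; the
# test verifies this with assertIs.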
@testing.parameterize(*testing.product_dict(
[
{'in_shape': (3, 24, 16), 'size': 16, 'fit_short': True},
{'in_shape': (3, 16, 24), 'size': 16, 'fit_short': True},
{'in_shape': (3, 24, 16), 'size': 24, 'fit_short': False},
{'in_shape': (3, 16, 24), 'size': 24, 'fit_short': False},
# grayscale
{'in_shape': (1, 16, 24), 'size': 24, 'fit_short': False},
],
[
{'interpolation': PIL.Image.NEAREST},
{'interpolation': PIL.Image.BILINEAR},
{'interpolation': PIL.Image.BICUBIC},
{'interpolation': PIL.Image.LANCZOS},
]
))
class TestScaleNoResize(unittest.TestCase):
def test_scale_no_resize(self):
img = np.random.uniform(size=self.in_shape)
out = scale(img, self.size, fit_short=self.fit_short,
interpolation=self.interpolation)
self.assertIs(img, out)
testing.run_module(__name__, __file__)
|
import asyncio
import json
from pathlib import Path
import pytest
from smart_meter_texas.const import (
AUTH_ENDPOINT,
BASE_ENDPOINT,
BASE_URL,
LATEST_OD_READ_ENDPOINT,
METER_ENDPOINT,
OD_READ_ENDPOINT,
)
from homeassistant.components.homeassistant import (
DOMAIN as HA_DOMAIN,
SERVICE_UPDATE_ENTITY,
)
from homeassistant.components.smart_meter_texas.const import DOMAIN
from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.setup import async_setup_component
from tests.common import MockConfigEntry, load_fixture
TEST_ENTITY_ID = "sensor.electric_meter_123456789"
def load_smt_fixture(name):
"""Return a dict of the json fixture."""
json_fixture = load_fixture(Path() / DOMAIN / f"{name}.json")
return json.loads(json_fixture)
async def setup_integration(hass, config_entry, aioclient_mock, **kwargs):
"""Initialize the Smart Meter Texas integration for testing."""
mock_connection(aioclient_mock, **kwargs)
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
async def refresh_data(hass, config_entry, aioclient_mock):
"""Request a DataUpdateCoordinator refresh."""
mock_connection(aioclient_mock)
await async_setup_component(hass, HA_DOMAIN, {})
await hass.services.async_call(
HA_DOMAIN,
SERVICE_UPDATE_ENTITY,
{ATTR_ENTITY_ID: TEST_ENTITY_ID},
blocking=True,
)
await hass.async_block_till_done()
def mock_connection(
aioclient_mock, auth_fail=False, auth_timeout=False, bad_reading=False
):
"""Mock all calls to the API."""
aioclient_mock.get(BASE_URL)
auth_endpoint = f"{BASE_ENDPOINT}{AUTH_ENDPOINT}"
if not auth_fail and not auth_timeout:
aioclient_mock.post(
auth_endpoint,
json={"token": "token123"},
)
elif auth_fail:
aioclient_mock.post(
auth_endpoint,
status=400,
json={"errormessage": "ERR-USR-INVALIDPASSWORDERROR"},
)
else: # auth_timeout
aioclient_mock.post(auth_endpoint, exc=asyncio.TimeoutError)
aioclient_mock.post(
f"{BASE_ENDPOINT}{METER_ENDPOINT}",
json=load_smt_fixture("meter"),
)
aioclient_mock.post(f"{BASE_ENDPOINT}{OD_READ_ENDPOINT}", json={"data": None})
if not bad_reading:
aioclient_mock.post(
f"{BASE_ENDPOINT}{LATEST_OD_READ_ENDPOINT}",
json=load_smt_fixture("latestodrread"),
)
else:
aioclient_mock.post(
f"{BASE_ENDPOINT}{LATEST_OD_READ_ENDPOINT}",
json={},
)
@pytest.fixture(name="config_entry")
def mock_config_entry(hass):
"""Return a mock config entry."""
config_entry = MockConfigEntry(
domain=DOMAIN,
unique_id="user123",
data={"username": "user123", "password": "password123"},
)
config_entry.add_to_hass(hass)
return config_entry
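# Minimal usage sketch for these helpers (hypothetical test, not part of this
# conftest):
#
# async def test_meter_sensor(hass, config_entry, aioclient_mock):
#     await setup_integration(hass, config_entry, aioclient_mock)
#     assert hass.states.get(TEST_ENTITY_ID) is not None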
|
import logging
import requests
import voluptuous as vol
from homeassistant.components.notify import (
ATTR_MESSAGE,
ATTR_TARGET,
ATTR_TITLE,
ATTR_TITLE_DEFAULT,
PLATFORM_SCHEMA,
BaseNotificationService,
)
from homeassistant.const import (
CONF_AUTHENTICATION,
CONF_HEADERS,
CONF_METHOD,
CONF_NAME,
CONF_PASSWORD,
CONF_RESOURCE,
CONF_USERNAME,
CONF_VERIFY_SSL,
HTTP_BAD_REQUEST,
HTTP_BASIC_AUTHENTICATION,
HTTP_DIGEST_AUTHENTICATION,
HTTP_INTERNAL_SERVER_ERROR,
HTTP_OK,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.reload import setup_reload_service
from . import DOMAIN, PLATFORMS
CONF_DATA = "data"
CONF_DATA_TEMPLATE = "data_template"
CONF_MESSAGE_PARAMETER_NAME = "message_param_name"
CONF_TARGET_PARAMETER_NAME = "target_param_name"
CONF_TITLE_PARAMETER_NAME = "title_param_name"
DEFAULT_MESSAGE_PARAM_NAME = "message"
DEFAULT_METHOD = "GET"
DEFAULT_VERIFY_SSL = True
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_RESOURCE): cv.url,
vol.Optional(
CONF_MESSAGE_PARAMETER_NAME, default=DEFAULT_MESSAGE_PARAM_NAME
): cv.string,
vol.Optional(CONF_METHOD, default=DEFAULT_METHOD): vol.In(
["POST", "GET", "POST_JSON"]
),
vol.Optional(CONF_HEADERS): vol.Schema({cv.string: cv.string}),
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_TARGET_PARAMETER_NAME): cv.string,
vol.Optional(CONF_TITLE_PARAMETER_NAME): cv.string,
vol.Optional(CONF_DATA): dict,
vol.Optional(CONF_DATA_TEMPLATE): {cv.match_all: cv.template_complex},
vol.Optional(CONF_AUTHENTICATION): vol.In(
[HTTP_BASIC_AUTHENTICATION, HTTP_DIGEST_AUTHENTICATION]
),
vol.Optional(CONF_PASSWORD): cv.string,
vol.Optional(CONF_USERNAME): cv.string,
vol.Optional(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): cv.boolean,
}
)
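# Illustrative configuration.yaml snippet (keys mirror the schema above; the
# values are placeholders):
#
# notify:
#   - platform: rest
#     name: my_rest_service
#     resource: http://192.168.1.10/notify
#     method: POST_JSON
#     message_param_name: message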
_LOGGER = logging.getLogger(__name__)
def get_service(hass, config, discovery_info=None):
"""Get the RESTful notification service."""
setup_reload_service(hass, DOMAIN, PLATFORMS)
resource = config.get(CONF_RESOURCE)
method = config.get(CONF_METHOD)
headers = config.get(CONF_HEADERS)
message_param_name = config.get(CONF_MESSAGE_PARAMETER_NAME)
title_param_name = config.get(CONF_TITLE_PARAMETER_NAME)
target_param_name = config.get(CONF_TARGET_PARAMETER_NAME)
data = config.get(CONF_DATA)
data_template = config.get(CONF_DATA_TEMPLATE)
username = config.get(CONF_USERNAME)
password = config.get(CONF_PASSWORD)
verify_ssl = config.get(CONF_VERIFY_SSL)
if username and password:
if config.get(CONF_AUTHENTICATION) == HTTP_DIGEST_AUTHENTICATION:
auth = requests.auth.HTTPDigestAuth(username, password)
else:
auth = requests.auth.HTTPBasicAuth(username, password)
else:
auth = None
return RestNotificationService(
hass,
resource,
method,
headers,
message_param_name,
title_param_name,
target_param_name,
data,
data_template,
auth,
verify_ssl,
)
class RestNotificationService(BaseNotificationService):
"""Implementation of a notification service for REST."""
def __init__(
self,
hass,
resource,
method,
headers,
message_param_name,
title_param_name,
target_param_name,
data,
data_template,
auth,
verify_ssl,
):
"""Initialize the service."""
self._resource = resource
self._hass = hass
self._method = method.upper()
self._headers = headers
self._message_param_name = message_param_name
self._title_param_name = title_param_name
self._target_param_name = target_param_name
self._data = data
self._data_template = data_template
self._auth = auth
self._verify_ssl = verify_ssl
def send_message(self, message="", **kwargs):
"""Send a message to a user."""
data = {self._message_param_name: message}
if self._title_param_name is not None:
data[self._title_param_name] = kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT)
if self._target_param_name is not None and ATTR_TARGET in kwargs:
# Target is a list as of 0.29 and we don't want to break existing
# integrations, so just return the first target in the list.
data[self._target_param_name] = kwargs[ATTR_TARGET][0]
if self._data:
data.update(self._data)
elif self._data_template:
kwargs[ATTR_MESSAGE] = message
def _data_template_creator(value):
"""Recursive template creator helper function."""
if isinstance(value, list):
return [_data_template_creator(item) for item in value]
if isinstance(value, dict):
return {
key: _data_template_creator(item) for key, item in value.items()
}
value.hass = self._hass
return value.async_render(kwargs, parse_result=False)
data.update(_data_template_creator(self._data_template))
if self._method == "POST":
response = requests.post(
self._resource,
headers=self._headers,
data=data,
timeout=10,
auth=self._auth,
verify=self._verify_ssl,
)
elif self._method == "POST_JSON":
response = requests.post(
self._resource,
headers=self._headers,
json=data,
timeout=10,
auth=self._auth,
verify=self._verify_ssl,
)
else: # default GET
response = requests.get(
self._resource,
headers=self._headers,
params=data,
timeout=10,
auth=self._auth,
verify=self._verify_ssl,
)
if (
response.status_code >= HTTP_INTERNAL_SERVER_ERROR
and response.status_code < 600
):
_LOGGER.exception(
"Server error. Response %d: %s:", response.status_code, response.reason
)
elif (
response.status_code >= HTTP_BAD_REQUEST
and response.status_code < HTTP_INTERNAL_SERVER_ERROR
):
_LOGGER.exception(
"Client error. Response %d: %s:", response.status_code, response.reason
)
elif response.status_code >= HTTP_OK and response.status_code < 300:
_LOGGER.debug(
"Success. Response %d: %s:", response.status_code, response.reason
)
else:
_LOGGER.debug("Response %d: %s:", response.status_code, response.reason)
|
from __future__ import division
import numpy as np
import chainer
import chainer.functions as F
from chainer import initializers
import chainer.links as L
from chainercv.links import Conv2DBNActiv
from chainercv.links.model.resnet import ResBlock
from chainercv.links import PickableSequentialChain
from chainercv import utils
# RGB order
# This is channel wise mean of mean image distributed at
# https://github.com/KaimingHe/deep-residual-networks
_imagenet_mean = np.array(
[123.15163084, 115.90288257, 103.0626238],
dtype=np.float32)[:, np.newaxis, np.newaxis]
class SEResNeXt(PickableSequentialChain):
"""Base class for SE-ResNeXt architecture.
ResNeXt is a ResNet-based architecture, where grouped convolution is
adopted to the second convolution layer of each bottleneck block.
In addition, a squeeze-and-excitation block is applied at the end of
each non-identity branch of residual block. Please refer to `Aggregated
Residual Transformations for Deep Neural Networks
<https://arxiv.org/pdf/1611.05431.pdf>`_ and `Squeeze-and-Excitation
Networks <https://arxiv.org/pdf/1709.01507.pdf>`_ for detailed
description of network architecture.
Similar to :class:`chainercv.links.model.resnet.ResNet`, ImageNet
pretrained weights are downloaded when :obj:`pretrained_model` argument
is :obj:`imagenet`, originally distributed at `the Github repository by
one of the paper authors of SENet <https://github.com/hujie-frank/SENet>`_.
.. seealso::
:class:`chainercv.links.model.resnet.ResNet`
:class:`chainercv.links.model.senet.SEResNet`
:class:`chainercv.links.connection.SEBlock`
Args:
n_layer (int): The number of layers.
n_class (int): The number of classes. If :obj:`None`,
the default values are used.
If a supported pretrained model is used,
the number of classes used to train the pretrained model
is used. Otherwise, the number of classes in ILSVRC 2012 dataset
is used.
pretrained_model (string): The destination of the pre-trained
chainer model serialized as a :obj:`.npz` file.
If this is one of the strings described
above, it automatically loads weights stored under a directory
:obj:`$CHAINER_DATASET_ROOT/pfnet/chainercv/models/`,
where :obj:`$CHAINER_DATASET_ROOT` is set as
:obj:`$HOME/.chainer/dataset` unless you specify another value
by modifying the environment variable.
mean (numpy.ndarray): A mean value. If :obj:`None`,
the default values are used.
If a supported pretrained model is used,
the mean value used to train the pretrained model is used.
Otherwise, the mean value calculated from ILSVRC 2012 dataset
is used.
initialW (callable): Initializer for the weights of
convolution kernels.
fc_kwargs (dict): Keyword arguments passed to initialize
the :class:`chainer.links.Linear`.
"""
_blocks = {
50: [3, 4, 6, 3],
101: [3, 4, 23, 3],
}
_models = {
50: {
'imagenet': {
'param': {'n_class': 1000, 'mean': _imagenet_mean},
'overwritable': {'mean'},
'url': 'https://chainercv-models.preferred.jp/'
'se_resnext50_imagenet_converted_2018_06_28.npz'
},
},
101: {
'imagenet': {
'param': {'n_class': 1000, 'mean': _imagenet_mean},
'overwritable': {'mean'},
'url': 'https://chainercv-models.preferred.jp/'
'se_resnext101_imagenet_converted_2018_06_28.npz'
},
},
}
def __init__(self, n_layer,
n_class=None,
pretrained_model=None,
mean=None, initialW=None, fc_kwargs={}):
blocks = self._blocks[n_layer]
param, path = utils.prepare_pretrained_model(
{'n_class': n_class, 'mean': mean},
pretrained_model, self._models[n_layer],
{'n_class': 1000, 'mean': _imagenet_mean})
self.mean = param['mean']
if initialW is None:
initialW = initializers.HeNormal(scale=1., fan_option='fan_out')
if 'initialW' not in fc_kwargs:
fc_kwargs['initialW'] = initializers.Normal(scale=0.01)
if pretrained_model:
# As a sampling process is time-consuming,
# we employ a zero initializer for faster computation.
initialW = initializers.constant.Zero()
fc_kwargs['initialW'] = initializers.constant.Zero()
kwargs = {
'groups': 32, 'initialW': initialW, 'stride_first': False,
'add_seblock': True}
super(SEResNeXt, self).__init__()
with self.init_scope():
self.conv1 = Conv2DBNActiv(None, 64, 7, 2, 3, nobias=True,
initialW=initialW)
self.pool1 = lambda x: F.max_pooling_2d(x, ksize=3, stride=2)
self.res2 = ResBlock(blocks[0], None, 128, 256, 1, **kwargs)
self.res3 = ResBlock(blocks[1], None, 256, 512, 2, **kwargs)
self.res4 = ResBlock(blocks[2], None, 512, 1024, 2, **kwargs)
self.res5 = ResBlock(blocks[3], None, 1024, 2048, 2, **kwargs)
self.pool5 = lambda x: F.average(x, axis=(2, 3))
self.fc6 = L.Linear(None, param['n_class'], **fc_kwargs)
self.prob = F.softmax
if path:
chainer.serializers.load_npz(path, self)
class SEResNeXt50(SEResNeXt):
"""SE-ResNeXt-50 Network
Please consult the documentation for :class:`SEResNeXt`.
.. seealso::
:class:`chainercv.links.model.senet.SEResNeXt`
"""
def __init__(self, n_class=None, pretrained_model=None,
mean=None, initialW=None, fc_kwargs={}):
super(SEResNeXt50, self).__init__(
50, n_class, pretrained_model,
mean, initialW, fc_kwargs)
class SEResNeXt101(SEResNeXt):
"""SE-ResNeXt-101 Network
Please consult the documentation for :class:`SEResNeXt`.
.. seealso::
:class:`chainercv.links.model.senet.SEResNeXt`
"""
def __init__(self, n_class=None, pretrained_model=None,
mean=None, initialW=None, fc_kwargs={}):
super(SEResNeXt101, self).__init__(
101, n_class, pretrained_model,
mean, initialW, fc_kwargs)
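# Illustrative usage sketch (assumes the 'imagenet' weights described in the
# SEResNeXt docstring; input must be preprocessed, e.g. mean-subtracted):
#
# model = SEResNeXt50(pretrained_model='imagenet')
# x = np.zeros((1, 3, 224, 224), dtype=np.float32)  # placeholder batch
# probs = model(x)  # output of the final `prob` (softmax) layer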
|
import pytest
from homeassistant.components.shopping_list import intent as sl_intent
from tests.async_mock import patch
from tests.common import MockConfigEntry
@pytest.fixture(autouse=True)
def mock_shopping_list_io():
"""Stub out the persistence."""
with patch("homeassistant.components.shopping_list.ShoppingData.save"), patch(
"homeassistant.components.shopping_list.ShoppingData.async_load"
):
yield
@pytest.fixture
async def sl_setup(hass):
"""Set up the shopping list."""
entry = MockConfigEntry(domain="shopping_list")
entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(entry.entry_id)
await sl_intent.async_setup_intents(hass)
|
from collections import namedtuple
import itertools
import networkx as nx
import numpy as np
from tqdm import tqdm
from pgmpy.factors import factor_product
from pgmpy.inference import Inference
from pgmpy.models import BayesianModel, MarkovChain, MarkovModel
from pgmpy.utils.mathext import sample_discrete
from pgmpy.sampling import _return_samples
from pgmpy.global_vars import SHOW_PROGRESS
State = namedtuple("State", ["var", "state"])
class BayesianModelSampling(Inference):
"""
Class for sampling methods specific to Bayesian Models
Parameters
----------
model: instance of BayesianModel
model on which inference queries will be computed
"""
def __init__(self, model):
if not isinstance(model, BayesianModel):
raise TypeError(
"Model expected type: BayesianModel, got type: ", type(model)
)
self.topological_order = list(nx.topological_sort(model))
super(BayesianModelSampling, self).__init__(model)
def forward_sample(
self, size=1, return_type="dataframe", seed=None, show_progress=True
):
"""
Generates sample(s) from joint distribution of the bayesian network.
Parameters
----------
size: int
size of sample to be generated
return_type: string (dataframe | recarray)
Return type for samples, either of 'dataframe' or 'recarray'.
Defaults to 'dataframe'
Returns
-------
sampled: A pandas.DataFrame or a numpy.recarray object depending upon return_type argument
the generated samples
Examples
--------
>>> from pgmpy.models.BayesianModel import BayesianModel
>>> from pgmpy.factors.discrete import TabularCPD
>>> from pgmpy.sampling import BayesianModelSampling
>>> student = BayesianModel([('diff', 'grade'), ('intel', 'grade')])
>>> cpd_d = TabularCPD('diff', 2, [[0.6], [0.4]])
>>> cpd_i = TabularCPD('intel', 2, [[0.7], [0.3]])
>>> cpd_g = TabularCPD('grade', 3, [[0.3, 0.05, 0.9, 0.5], [0.4, 0.25,
... 0.08, 0.3], [0.3, 0.7, 0.02, 0.2]],
... ['intel', 'diff'], [2, 2])
>>> student.add_cpds(cpd_d, cpd_i, cpd_g)
>>> inference = BayesianModelSampling(student)
>>> inference.forward_sample(size=2, return_type='recarray')
rec.array([(0, 0, 1), (1, 0, 2)], dtype=
[('diff', '<i8'), ('intel', '<i8'), ('grade', '<i8')])
"""
types = [(var_name, "int") for var_name in self.topological_order]
sampled = np.zeros(size, dtype=types).view(np.recarray)
if show_progress and SHOW_PROGRESS:
pbar = tqdm(self.topological_order)
else:
pbar = self.topological_order
for node in pbar:
if show_progress and SHOW_PROGRESS:
pbar.set_description(f"Generating for node: {node}")
cpd = self.model.get_cpds(node)
states = range(self.cardinality[node])
evidence = cpd.variables[:0:-1]
if evidence:
cached_values = self.pre_compute_reduce(variable=node)
evidence = np.vstack([sampled[i] for i in evidence])
weights = list(map(lambda t: cached_values[tuple(t)], evidence.T))
else:
weights = cpd.values
sampled[node] = sample_discrete(states, weights, size, seed=seed)
return _return_samples(return_type, sampled, self.state_names_map)
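# pre_compute_reduce caches, for every joint assignment of a node's parents,
# the CPD reduced to that assignment; the samplers then look up per-sample
# weights from this cache instead of reducing the CPD once per sample.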
def pre_compute_reduce(self, variable):
variable_cpd = self.model.get_cpds(variable)
variable_evid = variable_cpd.variables[:0:-1]
cached_values = {}
for state_combination in itertools.product(
*[range(self.cardinality[var]) for var in variable_evid]
):
states = list(zip(variable_evid, state_combination))
cached_values[state_combination] = variable_cpd.reduce(
states, inplace=False
).values
return cached_values
def rejection_sample(
self,
evidence=[],
size=1,
return_type="dataframe",
seed=None,
show_progress=True,
):
"""
Generates sample(s) from joint distribution of the bayesian network,
given the evidence.
Parameters
----------
evidence: list of `pgmpy.factor.State` namedtuples
None if no evidence
size: int
size of sample to be generated
return_type: string (dataframe | recarray)
Return type for samples, either of 'dataframe' or 'recarray'.
Defaults to 'dataframe'
Returns
-------
sampled: A pandas.DataFrame or a numpy.recarray object depending upon return_type argument
the generated samples
Examples
--------
>>> from pgmpy.models.BayesianModel import BayesianModel
>>> from pgmpy.factors.discrete import TabularCPD
>>> from pgmpy.factors.discrete import State
>>> from pgmpy.sampling import BayesianModelSampling
>>> student = BayesianModel([('diff', 'grade'), ('intel', 'grade')])
>>> cpd_d = TabularCPD('diff', 2, [[0.6], [0.4]])
>>> cpd_i = TabularCPD('intel', 2, [[0.7], [0.3]])
>>> cpd_g = TabularCPD('grade', 3, [[0.3, 0.05, 0.9, 0.5], [0.4, 0.25,
... 0.08, 0.3], [0.3, 0.7, 0.02, 0.2]],
... ['intel', 'diff'], [2, 2])
>>> student.add_cpds(cpd_d, cpd_i, cpd_g)
>>> inference = BayesianModelSampling(student)
>>> evidence = [State(var='diff', state=0)]
>>> inference.rejection_sample(evidence=evidence, size=2, return_type='dataframe')
intel diff grade
0 0 0 1
1 0 0 1
"""
# Convert evidence state names to numbers
evidence = [
(var, self.model.get_cpds(var).get_state_no(var, state))
for var, state in evidence
]
# If no evidence is given, it is equivalent to forward sampling.
if len(evidence) == 0:
return self.forward_sample(size=size, return_type=return_type,
                           seed=seed, show_progress=show_progress)
# Setup array to be returned
types = [(var_name, "int") for var_name in self.topological_order]
sampled = np.zeros(0, dtype=types).view(np.recarray)
prob = 1
i = 0
# Do the sampling by generating samples from forward sampling and rejecting the
# samples which do not match our evidence. Keep doing until we have enough
# samples.
if show_progress and SHOW_PROGRESS:
pbar = tqdm(total=size)
while i < size:
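# Oversample by the inverse of the acceptance rate seen so far (with a 1.5x
# margin) so that, in expectation, enough samples survive the rejection step.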
_size = int(((size - i) / prob) * 1.5)
_sampled = self.forward_sample(_size, "recarray", seed=seed)
for evid in evidence:
_sampled = _sampled[_sampled[evid[0]] == evid[1]]
prob = max(len(_sampled) / _size, 0.01)
sampled = np.append(sampled, _sampled)[:size]
i += len(_sampled)
if show_progress and SHOW_PROGRESS:
pbar.update(len(_sampled))
if show_progress and SHOW_PROGRESS:
pbar.close()
# Post process: Correct return type and replace state numbers with names.
return _return_samples(return_type, sampled, self.state_names_map)
def likelihood_weighted_sample(
self, evidence=[], size=1, return_type="dataframe", seed=None
):
"""
Generates weighted sample(s) from joint distribution of the bayesian
network, that comply with the given evidence.
'Probabilistic Graphical Model Principles and Techniques', Koller and
Friedman, Algorithm 12.2 pp 493.
Parameters
----------
evidence: list of `pgmpy.factor.State` namedtuples
None if no evidence
size: int
size of sample to be generated
return_type: string (dataframe | recarray)
Return type for samples, either of 'dataframe' or 'recarray'.
Defaults to 'dataframe'
Returns
-------
sampled: A pandas.DataFrame or a numpy.recarray object depending upon return_type argument
the generated samples with corresponding weights
Examples
--------
>>> from pgmpy.factors.discrete import State
>>> from pgmpy.models.BayesianModel import BayesianModel
>>> from pgmpy.factors.discrete import TabularCPD
>>> from pgmpy.sampling import BayesianModelSampling
>>> student = BayesianModel([('diff', 'grade'), ('intel', 'grade')])
>>> cpd_d = TabularCPD('diff', 2, [[0.6], [0.4]])
>>> cpd_i = TabularCPD('intel', 2, [[0.7], [0.3]])
>>> cpd_g = TabularCPD('grade', 3, [[0.3, 0.05, 0.9, 0.5], [0.4, 0.25,
... 0.08, 0.3], [0.3, 0.7, 0.02, 0.2]],
... ['intel', 'diff'], [2, 2])
>>> student.add_cpds(cpd_d, cpd_i, cpd_g)
>>> inference = BayesianModelSampling(student)
>>> evidence = [State('diff', 0)]
>>> inference.likelihood_weighted_sample(evidence=evidence, size=2, return_type='recarray')
rec.array([(0, 0, 1, 0.6), (0, 0, 2, 0.6)], dtype=
[('diff', '<i8'), ('intel', '<i8'), ('grade', '<i8'), ('_weight', '<f8')])
"""
# Convert evidence state names to numbers
evidence = [
(var, self.model.get_cpds(var).get_state_no(var, state))
for var, state in evidence
]
# Prepare the return array
types = [(var_name, "int") for var_name in self.topological_order]
types.append(("_weight", "float"))
sampled = np.zeros(size, dtype=types).view(np.recarray)
sampled["_weight"] = np.ones(size)
evidence_dict = {var: st for var, st in evidence}
# Do the sampling
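# For each node in topological order: evidence nodes keep their observed state
# and scale the sample weight by P(observed state | sampled parents); all other
# nodes are sampled from their CPD conditioned on the already-sampled parents.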
for node in self.topological_order:
cpd = self.model.get_cpds(node)
states = range(self.cardinality[node])
evidence = cpd.get_evidence()
if evidence:
evidence_values = np.vstack([sampled[i] for i in evidence])
cached_values = self.pre_compute_reduce(node)
weights = list(
map(lambda t: cached_values[tuple(t)], evidence_values.T)
)
if node in evidence_dict:
sampled[node] = evidence_dict[node]
for i in range(size):
sampled["_weight"][i] *= weights[i][evidence_dict[node]]
else:
sampled[node] = sample_discrete(states, weights, size, seed=seed)
else:
if node in evidence_dict:
sampled[node] = evidence_dict[node]
for i in range(size):
sampled["_weight"][i] *= cpd.values[evidence_dict[node]]
else:
sampled[node] = sample_discrete(states, cpd.values, size, seed=seed)
# Postprocess the samples: Correct return type and change state numbers to names
return _return_samples(return_type, sampled, self.state_names_map)
class GibbsSampling(MarkovChain):
"""
Class for performing Gibbs sampling.
Parameters
----------
model: BayesianModel or MarkovModel
Model from which variables are inherited and transition probabilities computed.
Examples
--------
Initialization from a BayesianModel object:
>>> from pgmpy.factors.discrete import TabularCPD
>>> from pgmpy.models import BayesianModel
>>> intel_cpd = TabularCPD('intel', 2, [[0.7], [0.3]])
>>> sat_cpd = TabularCPD('sat', 2, [[0.95, 0.2], [0.05, 0.8]], evidence=['intel'], evidence_card=[2])
>>> student = BayesianModel()
>>> student.add_nodes_from(['intel', 'sat'])
>>> student.add_edge('intel', 'sat')
>>> student.add_cpds(intel_cpd, sat_cpd)
>>> from pgmpy.sampling import GibbsSampling
>>> gibbs_chain = GibbsSampling(student)
>>> gibbs_chain.sample(size=3)
intel sat
0 0 0
1 0 0
2 1 1
"""
def __init__(self, model=None):
super(GibbsSampling, self).__init__()
if isinstance(model, BayesianModel):
self._get_kernel_from_bayesian_model(model)
elif isinstance(model, MarkovModel):
self._get_kernel_from_markov_model(model)
def _get_kernel_from_bayesian_model(self, model):
"""
Computes the Gibbs transition models from a Bayesian Network.
'Probabilistic Graphical Model Principles and Techniques', Koller and
Friedman, Section 12.3.3 pp 512-513.
Parameters
----------
model: BayesianModel
The model from which probabilities will be computed.
"""
self.variables = np.array(model.nodes())
self.cardinalities = {
var: model.get_cpds(var).variable_card for var in self.variables
}
for var in self.variables:
other_vars = [v for v in self.variables if var != v]
other_cards = [self.cardinalities[v] for v in other_vars]
cpds = [cpd for cpd in model.cpds if var in cpd.scope()]
prod_cpd = factor_product(*cpds)
kernel = {}
scope = set(prod_cpd.scope())
for tup in itertools.product(*[range(card) for card in other_cards]):
states = [State(v, s) for v, s in zip(other_vars, tup) if v in scope]
prod_cpd_reduced = prod_cpd.to_factor().reduce(states, inplace=False)
kernel[tup] = prod_cpd_reduced.values / sum(prod_cpd_reduced.values)
self.transition_models[var] = kernel
def _get_kernel_from_markov_model(self, model):
"""
Computes the Gibbs transition models from a Markov Network.
'Probabilistic Graphical Model Principles and Techniques', Koller and
Friedman, Section 12.3.3 pp 512-513.
Parameters
----------
model: MarkovModel
The model from which probabilities will be computed.
"""
self.variables = np.array(model.nodes())
factors_dict = {var: [] for var in self.variables}
for factor in model.get_factors():
for var in factor.scope():
factors_dict[var].append(factor)
# Take factor product
factors_dict = {
var: factor_product(*factors) if len(factors) > 1 else factors[0]
for var, factors in factors_dict.items()
}
self.cardinalities = {
var: factors_dict[var].get_cardinality([var])[var] for var in self.variables
}
for var in self.variables:
other_vars = [v for v in self.variables if var != v]
other_cards = [self.cardinalities[v] for v in other_vars]
kernel = {}
factor = factors_dict[var]
scope = set(factor.scope())
for tup in itertools.product(*[range(card) for card in other_cards]):
states = [
State(first_var, s)
for first_var, s in zip(other_vars, tup)
if first_var in scope
]
reduced_factor = factor.reduce(states, inplace=False)
kernel[tup] = reduced_factor.values / sum(reduced_factor.values)
self.transition_models[var] = kernel
def sample(self, start_state=None, size=1, return_type="dataframe", seed=None):
"""
Sample from the Markov Chain.
Parameters
----------
start_state: dict or array-like iterable
Representing the starting states of the variables. If None is passed, a random start_state is chosen.
size: int
Number of samples to be generated.
return_type: string (dataframe | recarray)
Return type for samples, either of 'dataframe' or 'recarray'.
Defaults to 'dataframe'
Returns
-------
sampled: A pandas.DataFrame or a numpy.recarray object depending upon return_type argument
the generated samples
Examples
--------
>>> from pgmpy.factors import DiscreteFactor
>>> from pgmpy.sampling import GibbsSampling
>>> from pgmpy.models import MarkovModel
>>> model = MarkovModel([('A', 'B'), ('C', 'B')])
>>> factor_ab = DiscreteFactor(['A', 'B'], [2, 2], [1, 2, 3, 4])
>>> factor_cb = DiscreteFactor(['C', 'B'], [2, 2], [5, 6, 7, 8])
>>> model.add_factors(factor_ab, factor_cb)
>>> gibbs = GibbsSampling(model)
>>> gibbs.sample(size=4, return_type='dataframe')
A B C
0 0 1 1
1 1 0 0
2 1 1 0
3 1 1 1
"""
if start_state is None and self.state is None:
self.state = self.random_state()
elif start_state is not None:
self.set_start_state(start_state)
types = [(var_name, "int") for var_name in self.variables]
sampled = np.zeros(size, dtype=types).view(np.recarray)
sampled[0] = tuple(st for var, st in self.state)
for i in tqdm(range(size - 1)):
for j, (var, st) in enumerate(self.state):
other_st = tuple(st for v, st in self.state if var != v)
next_st = sample_discrete(
list(range(self.cardinalities[var])),
self.transition_models[var][other_st],
seed=seed,
)[0]
self.state[j] = State(var, next_st)
sampled[i + 1] = tuple(st for var, st in self.state)
return _return_samples(return_type, sampled)
def generate_sample(self, start_state=None, size=1, seed=None):
"""
Generator version of self.sample
Returns
-------
List of State namedtuples, representing the assignment to all variables of the model.
Examples
--------
>>> from pgmpy.factors.discrete import DiscreteFactor
>>> from pgmpy.sampling import GibbsSampling
>>> from pgmpy.models import MarkovModel
>>> model = MarkovModel([('A', 'B'), ('C', 'B')])
>>> factor_ab = DiscreteFactor(['A', 'B'], [2, 2], [1, 2, 3, 4])
>>> factor_cb = DiscreteFactor(['C', 'B'], [2, 2], [5, 6, 7, 8])
>>> model.add_factors(factor_ab, factor_cb)
>>> gibbs = GibbsSampling(model)
>>> gen = gibbs.generate_sample(size=2)
>>> [sample for sample in gen]
[[State(var='C', state=1), State(var='B', state=1), State(var='A', state=0)],
[State(var='C', state=0), State(var='B', state=1), State(var='A', state=1)]]
"""
if start_state is None and self.state is None:
self.state = self.random_state()
elif start_state is not None:
self.set_start_state(start_state)
for i in range(size):
for j, (var, st) in enumerate(self.state):
other_st = tuple(st for v, st in self.state if var != v)
next_st = sample_discrete(
list(range(self.cardinalities[var])),
self.transition_models[var][other_st],
seed=seed,
)[0]
self.state[j] = State(var, next_st)
yield self.state[:]
|
import os.path as op
from copy import deepcopy
from functools import partial
import pytest
import numpy as np
from scipy.io import savemat
from numpy.testing import assert_array_equal, assert_equal, assert_allclose
from mne.channels import (rename_channels, read_ch_adjacency, combine_channels,
find_ch_adjacency, make_1020_channel_selections,
read_custom_montage, equalize_channels)
from mne.channels.channels import (_ch_neighbor_adjacency,
_compute_ch_adjacency)
from mne.io import (read_info, read_raw_fif, read_raw_ctf, read_raw_bti,
read_raw_eeglab, read_raw_kit, RawArray)
from mne.io.constants import FIFF
from mne.utils import _TempDir, run_tests_if_main
from mne import (pick_types, pick_channels, EpochsArray, EvokedArray,
make_ad_hoc_cov, create_info, read_events, Epochs)
from mne.datasets import testing
io_dir = op.join(op.dirname(__file__), '..', '..', 'io')
base_dir = op.join(io_dir, 'tests', 'data')
raw_fname = op.join(base_dir, 'test_raw.fif')
eve_fname = op.join(base_dir, 'test-eve.fif')
fname_kit_157 = op.join(io_dir, 'kit', 'tests', 'data', 'test.sqd')
@pytest.mark.parametrize('preload', (True, False))
@pytest.mark.parametrize('proj', (True, False))
def test_reorder_channels(preload, proj):
"""Test reordering of channels."""
raw = read_raw_fif(raw_fname).crop(0, 0.1).del_proj()
if proj: # a no-op but should test it
raw._projector = np.eye(len(raw.ch_names))
if preload:
raw.load_data()
# with .reorder_channels
if proj and not preload:
with pytest.raises(RuntimeError, match='load data'):
raw.copy().reorder_channels(raw.ch_names[::-1])
return
raw_new = raw.copy().reorder_channels(raw.ch_names[::-1])
assert raw_new.ch_names == raw.ch_names[::-1]
if proj:
assert_allclose(raw_new._projector, raw._projector, atol=1e-12)
else:
assert raw._projector is None
assert raw_new._projector is None
assert_array_equal(raw[:][0], raw_new[:][0][::-1])
raw_new.reorder_channels(raw_new.ch_names[::-1][1:-1])
raw.drop_channels(raw.ch_names[:1] + raw.ch_names[-1:])
assert_array_equal(raw[:][0], raw_new[:][0])
with pytest.raises(ValueError, match='repeated'):
raw.reorder_channels(raw.ch_names[:1] + raw.ch_names[:1])
# and with .pick
reord = [1, 0] + list(range(2, len(raw.ch_names)))
rev = np.argsort(reord)
raw_new = raw.copy().pick(reord)
assert_array_equal(raw[:][0], raw_new[rev][0])
def test_rename_channels():
"""Test rename channels."""
info = read_info(raw_fname)
# Error Tests
# Test channel name exists in ch_names
mapping = {'EEG 160': 'EEG060'}
pytest.raises(ValueError, rename_channels, info, mapping)
# Test improper mapping configuration
mapping = {'MEG 2641': 1.0}
pytest.raises(TypeError, rename_channels, info, mapping)
# Test non-unique mapping configuration
mapping = {'MEG 2641': 'MEG 2642'}
pytest.raises(ValueError, rename_channels, info, mapping)
# Test bad input
pytest.raises(ValueError, rename_channels, info, 1.)
pytest.raises(ValueError, rename_channels, info, 1.)
# Test name too long (channel names must be less than 15 characters)
A16 = 'A' * 16
mapping = {'MEG 2641': A16}
pytest.raises(ValueError, rename_channels, info, mapping)
# Test successful changes
# Test ch_name and ch_names are changed
info2 = deepcopy(info) # for consistency at the start of each test
info2['bads'] = ['EEG 060', 'EOG 061']
mapping = {'EEG 060': 'EEG060', 'EOG 061': 'EOG061'}
rename_channels(info2, mapping)
assert info2['chs'][374]['ch_name'] == 'EEG060'
assert info2['ch_names'][374] == 'EEG060'
assert info2['chs'][375]['ch_name'] == 'EOG061'
assert info2['ch_names'][375] == 'EOG061'
assert_array_equal(['EEG060', 'EOG061'], info2['bads'])
info2 = deepcopy(info)
rename_channels(info2, lambda x: x.replace(' ', ''))
assert info2['chs'][373]['ch_name'] == 'EEG059'
info2 = deepcopy(info)
info2['bads'] = ['EEG 060', 'EEG 060']
rename_channels(info2, mapping)
assert_array_equal(['EEG060', 'EEG060'], info2['bads'])
def test_set_channel_types():
"""Test set_channel_types."""
raw = read_raw_fif(raw_fname)
# Error Tests
# Test channel name exists in ch_names
mapping = {'EEG 160': 'EEG060'}
with pytest.raises(ValueError, match=r"name \(EEG 160\) doesn't exist"):
raw.set_channel_types(mapping)
# Test change to illegal channel type
mapping = {'EOG 061': 'xxx'}
with pytest.raises(ValueError, match='cannot change to this channel type'):
raw.set_channel_types(mapping)
# Test changing type if in proj
mapping = {'EEG 058': 'ecog', 'EEG 059': 'ecg', 'EEG 060': 'eog',
'EOG 061': 'seeg', 'MEG 2441': 'eeg', 'MEG 2443': 'eeg',
'MEG 2442': 'hbo'}
raw2 = read_raw_fif(raw_fname)
raw2.info['bads'] = ['EEG 059', 'EEG 060', 'EOG 061']
with pytest.raises(RuntimeError, match='type .* in projector "PCA-v1"'):
raw2.set_channel_types(mapping) # has prj
raw2.add_proj([], remove_existing=True)
with pytest.warns(RuntimeWarning, match='unit for channel.* has changed'):
raw2 = raw2.set_channel_types(mapping)
info = raw2.info
assert info['chs'][372]['ch_name'] == 'EEG 058'
assert info['chs'][372]['kind'] == FIFF.FIFFV_ECOG_CH
assert info['chs'][372]['unit'] == FIFF.FIFF_UNIT_V
assert info['chs'][372]['coil_type'] == FIFF.FIFFV_COIL_EEG
assert info['chs'][373]['ch_name'] == 'EEG 059'
assert info['chs'][373]['kind'] == FIFF.FIFFV_ECG_CH
assert info['chs'][373]['unit'] == FIFF.FIFF_UNIT_V
assert info['chs'][373]['coil_type'] == FIFF.FIFFV_COIL_NONE
assert info['chs'][374]['ch_name'] == 'EEG 060'
assert info['chs'][374]['kind'] == FIFF.FIFFV_EOG_CH
assert info['chs'][374]['unit'] == FIFF.FIFF_UNIT_V
assert info['chs'][374]['coil_type'] == FIFF.FIFFV_COIL_NONE
assert info['chs'][375]['ch_name'] == 'EOG 061'
assert info['chs'][375]['kind'] == FIFF.FIFFV_SEEG_CH
assert info['chs'][375]['unit'] == FIFF.FIFF_UNIT_V
assert info['chs'][375]['coil_type'] == FIFF.FIFFV_COIL_EEG
for idx in pick_channels(raw.ch_names, ['MEG 2441', 'MEG 2443']):
assert info['chs'][idx]['kind'] == FIFF.FIFFV_EEG_CH
assert info['chs'][idx]['unit'] == FIFF.FIFF_UNIT_V
assert info['chs'][idx]['coil_type'] == FIFF.FIFFV_COIL_EEG
idx = pick_channels(raw.ch_names, ['MEG 2442'])[0]
assert info['chs'][idx]['kind'] == FIFF.FIFFV_FNIRS_CH
assert info['chs'][idx]['unit'] == FIFF.FIFF_UNIT_MOL
assert info['chs'][idx]['coil_type'] == FIFF.FIFFV_COIL_FNIRS_HBO
# Test meaningful error when setting channel type with unknown unit
raw.info['chs'][0]['unit'] = 0.
ch_types = {raw.ch_names[0]: 'misc'}
pytest.raises(ValueError, raw.set_channel_types, ch_types)
def test_read_ch_adjacency():
"""Test reading channel adjacency templates."""
tempdir = _TempDir()
a = partial(np.array, dtype='<U7')
# no pep8
nbh = np.array([[(['MEG0111'], [[a(['MEG0131'])]]),
(['MEG0121'], [[a(['MEG0111'])],
[a(['MEG0131'])]]),
(['MEG0131'], [[a(['MEG0111'])],
[a(['MEG0121'])]])]],
dtype=[('label', 'O'), ('neighblabel', 'O')])
mat = dict(neighbours=nbh)
mat_fname = op.join(tempdir, 'test_mat.mat')
savemat(mat_fname, mat, oned_as='row')
ch_adjacency, ch_names = read_ch_adjacency(mat_fname)
x = ch_adjacency
assert_equal(x.shape[0], len(ch_names))
assert_equal(x.shape, (3, 3))
assert_equal(x[0, 1], False)
assert_equal(x[0, 2], True)
assert np.all(x.diagonal())
pytest.raises(ValueError, read_ch_adjacency, mat_fname, [0, 3])
ch_adjacency, ch_names = read_ch_adjacency(mat_fname, picks=[0, 2])
assert_equal(ch_adjacency.shape[0], 2)
assert_equal(len(ch_names), 2)
ch_names = ['EEG01', 'EEG02', 'EEG03']
neighbors = [['EEG02'], ['EEG04'], ['EEG02']]
pytest.raises(ValueError, _ch_neighbor_adjacency, ch_names, neighbors)
neighbors = [['EEG02'], ['EEG01', 'EEG03'], ['EEG 02']]
pytest.raises(ValueError, _ch_neighbor_adjacency, ch_names[:2],
neighbors)
neighbors = [['EEG02'], 'EEG01', ['EEG 02']]
pytest.raises(ValueError, _ch_neighbor_adjacency, ch_names, neighbors)
adjacency, ch_names = read_ch_adjacency('neuromag306mag')
assert_equal(adjacency.shape, (102, 102))
assert_equal(len(ch_names), 102)
pytest.raises(ValueError, read_ch_adjacency, 'bananas!')
# In EGI 256, E31 sensor has no neighbour
a = partial(np.array)
nbh = np.array([[(['E31'], []),
(['E1'], [[a(['E2'])],
[a(['E3'])]]),
(['E2'], [[a(['E1'])],
[a(['E3'])]]),
(['E3'], [[a(['E1'])],
[a(['E2'])]])]],
dtype=[('label', 'O'), ('neighblabel', 'O')])
mat = dict(neighbours=nbh)
mat_fname = op.join(tempdir, 'test_isolated_mat.mat')
savemat(mat_fname, mat, oned_as='row')
ch_adjacency, ch_names = read_ch_adjacency(mat_fname)
x = ch_adjacency.todense()
assert_equal(x.shape[0], len(ch_names))
assert_equal(x.shape, (4, 4))
assert np.all(x.diagonal())
assert not np.any(x[0, 1:])
assert not np.any(x[1:, 0])
# Check for neighbours consistency. If a sensor is marked as a neighbour,
# then it should also have its neighbours defined.
a = partial(np.array)
nbh = np.array([[(['E31'], []),
(['E1'], [[a(['E8'])],
[a(['E3'])]]),
(['E2'], [[a(['E1'])],
[a(['E3'])]]),
(['E3'], [[a(['E1'])],
[a(['E2'])]])]],
dtype=[('label', 'O'), ('neighblabel', 'O')])
mat = dict(neighbours=nbh)
mat_fname = op.join(tempdir, 'test_error_mat.mat')
savemat(mat_fname, mat, oned_as='row')
pytest.raises(ValueError, read_ch_adjacency, mat_fname)
def test_get_set_sensor_positions():
"""Test get/set functions for sensor positions."""
raw1 = read_raw_fif(raw_fname)
picks = pick_types(raw1.info, meg=False, eeg=True)
pos = np.array([ch['loc'][:3] for ch in raw1.info['chs']])[picks]
raw_pos = raw1._get_channel_positions(picks=picks)
assert_array_equal(raw_pos, pos)
ch_name = raw1.info['ch_names'][13]
pytest.raises(ValueError, raw1._set_channel_positions, [1, 2], ['name'])
raw2 = read_raw_fif(raw_fname)
raw2.info['chs'][13]['loc'][:3] = np.array([1, 2, 3])
raw1._set_channel_positions([[1, 2, 3]], [ch_name])
assert_array_equal(raw1.info['chs'][13]['loc'],
raw2.info['chs'][13]['loc'])
@testing.requires_testing_data
def test_1020_selection():
"""Test making a 10/20 selection dict."""
base_dir = op.join(testing.data_path(download=False), 'EEGLAB')
raw_fname = op.join(base_dir, 'test_raw.set')
loc_fname = op.join(base_dir, 'test_chans.locs')
raw = read_raw_eeglab(raw_fname, preload=True)
montage = read_custom_montage(loc_fname)
raw = raw.rename_channels(dict(zip(raw.ch_names, montage.ch_names)))
raw.set_montage(montage)
for input in ("a_string", 100, raw, [1, 2]):
pytest.raises(TypeError, make_1020_channel_selections, input)
sels = make_1020_channel_selections(raw.info)
# are all frontal channels placed before all occipital channels?
for name, picks in sels.items():
fs = min([ii for ii, pick in enumerate(picks)
if raw.ch_names[pick].startswith("F")])
ps = max([ii for ii, pick in enumerate(picks)
if raw.ch_names[pick].startswith("O")])
assert fs > ps
# are channels in the correct selection?
fz_c3_c4 = [raw.ch_names.index(ch) for ch in ("Fz", "C3", "C4")]
for channel, roi in zip(fz_c3_c4, ("Midline", "Left", "Right")):
assert channel in sels[roi]
@testing.requires_testing_data
def test_find_ch_adjacency():
"""Test computing the adjacency matrix."""
data_path = testing.data_path()
raw = read_raw_fif(raw_fname, preload=True)
sizes = {'mag': 828, 'grad': 1700, 'eeg': 384}
nchans = {'mag': 102, 'grad': 204, 'eeg': 60}
for ch_type in ['mag', 'grad', 'eeg']:
conn, ch_names = find_ch_adjacency(raw.info, ch_type)
# Silly test for checking the number of neighbors.
assert_equal(conn.getnnz(), sizes[ch_type])
assert_equal(len(ch_names), nchans[ch_type])
pytest.raises(ValueError, find_ch_adjacency, raw.info, None)
# Test computing the conn matrix with gradiometers.
conn, ch_names = _compute_ch_adjacency(raw.info, 'grad')
assert_equal(conn.getnnz(), 2680)
# Test ch_type=None.
raw.pick_types(meg='mag')
find_ch_adjacency(raw.info, None)
bti_fname = op.join(data_path, 'BTi', 'erm_HFH', 'c,rfDC')
bti_config_name = op.join(data_path, 'BTi', 'erm_HFH', 'config')
raw = read_raw_bti(bti_fname, bti_config_name, None)
_, ch_names = find_ch_adjacency(raw.info, 'mag')
assert 'A1' in ch_names
ctf_fname = op.join(data_path, 'CTF', 'testdata_ctf_short.ds')
raw = read_raw_ctf(ctf_fname)
_, ch_names = find_ch_adjacency(raw.info, 'mag')
assert 'MLC11' in ch_names
pytest.raises(ValueError, find_ch_adjacency, raw.info, 'eog')
raw_kit = read_raw_kit(fname_kit_157)
neighb, ch_names = find_ch_adjacency(raw_kit.info, 'mag')
assert neighb.data.size == 1329
assert ch_names[0] == 'MEG 001'
def test_drop_channels():
"""Test if dropping channels works with various arguments."""
raw = read_raw_fif(raw_fname, preload=True).crop(0, 0.1)
raw.drop_channels(["MEG 0111"]) # list argument
raw.drop_channels("MEG 0112") # str argument
raw.drop_channels({"MEG 0132", "MEG 0133"}) # set argument
pytest.raises(ValueError, raw.drop_channels, ["MEG 0111", 5])
pytest.raises(ValueError, raw.drop_channels, 5) # must be list or str
def test_equalize_channels():
"""Test equalizing channels and their ordering."""
# This function only tests the generic functionality of equalize_channels.
# Additional tests for each instance type are included in the accompanying
# test suite for each type.
with pytest.raises(TypeError,
                   match='Instances to be modified must be an instance of'):
    equalize_channels(['foo', 'bar'])
raw = RawArray([[1.], [2.], [3.], [4.]],
create_info(['CH1', 'CH2', 'CH3', 'CH4'], sfreq=1.))
epochs = EpochsArray([[[1.], [2.], [3.]]],
create_info(['CH5', 'CH2', 'CH1'], sfreq=1.))
cov = make_ad_hoc_cov(create_info(['CH2', 'CH1', 'CH8'], sfreq=1.,
ch_types='eeg'))
cov['bads'] = ['CH1']
ave = EvokedArray([[1.], [2.]], create_info(['CH1', 'CH2'], sfreq=1.))
raw2, epochs2, cov2, ave2 = equalize_channels([raw, epochs, cov, ave],
copy=True)
# The Raw object was the first in the list, so should have been used as
# template for the ordering of the channels. No bad channels should have
# been dropped.
assert raw2.ch_names == ['CH1', 'CH2']
assert_array_equal(raw2.get_data(), [[1.], [2.]])
assert epochs2.ch_names == ['CH1', 'CH2']
assert_array_equal(epochs2.get_data(), [[[3.], [2.]]])
assert cov2.ch_names == ['CH1', 'CH2']
assert cov2['bads'] == cov['bads']
assert ave2.ch_names == ave.ch_names
assert_array_equal(ave2.data, ave.data)
# All objects should have been copied, except for the Evoked object which
# did not have to be touched.
assert raw is not raw2
assert epochs is not epochs2
assert cov is not cov2
assert ave is ave2
# Test in-place operation
raw2, epochs2 = equalize_channels([raw, epochs], copy=False)
assert raw is raw2
assert epochs is epochs2
def test_combine_channels():
"""Test channel combination on Raw, Epochs, and Evoked."""
raw = read_raw_fif(raw_fname, preload=True)
raw_ch_bad = read_raw_fif(raw_fname, preload=True)
raw_ch_bad.info['bads'] = ['MEG 0113', 'MEG 0112']
epochs = Epochs(raw, read_events(eve_fname))
evoked = epochs.average()
good = dict(foo=[0, 1, 3, 4], bar=[5, 2]) # good grad and mag
# Test good cases
combine_channels(raw, good)
combined_epochs = combine_channels(epochs, good)
assert_array_equal(combined_epochs.events, epochs.events)
combine_channels(evoked, good)
combine_channels(raw, good, drop_bad=True)
combine_channels(raw_ch_bad, good, drop_bad=True)
# Test with stimulus channels
combine_stim = combine_channels(raw, good, keep_stim=True)
target_nchan = len(good) + len(pick_types(raw.info, meg=False, stim=True))
assert combine_stim.info['nchan'] == target_nchan
# Test results with one ROI
good_single = dict(foo=[0, 1, 3, 4]) # good grad
combined_mean = combine_channels(raw, good_single, method='mean')
combined_median = combine_channels(raw, good_single, method='median')
combined_std = combine_channels(raw, good_single, method='std')
foo_mean = np.mean(raw.get_data()[good_single['foo']], axis=0)
foo_median = np.median(raw.get_data()[good_single['foo']], axis=0)
foo_std = np.std(raw.get_data()[good_single['foo']], axis=0)
assert_array_equal(combined_mean.get_data(),
np.expand_dims(foo_mean, axis=0))
assert_array_equal(combined_median.get_data(),
np.expand_dims(foo_median, axis=0))
assert_array_equal(combined_std.get_data(),
np.expand_dims(foo_std, axis=0))
# Test bad cases
bad1 = dict(foo=[0, 376], bar=[5, 2]) # out of bounds
bad2 = dict(foo=[0, 2], bar=[5, 2]) # type mix in same group
with pytest.raises(ValueError, match='"method" must be a callable, or'):
combine_channels(raw, good, method='bad_method')
with pytest.raises(TypeError, match='"keep_stim" must be of type bool'):
combine_channels(raw, good, keep_stim='bad_type')
with pytest.raises(TypeError, match='"drop_bad" must be of type bool'):
combine_channels(raw, good, drop_bad='bad_type')
with pytest.raises(ValueError, match='Some channel indices are out of'):
combine_channels(raw, bad1)
with pytest.raises(ValueError, match='Cannot combine sensors of diff'):
combine_channels(raw, bad2)
# Test warnings
raw_no_stim = read_raw_fif(raw_fname, preload=True)
raw_no_stim.pick_types(meg=True, stim=False)
warn1 = dict(foo=[375, 375], bar=[5, 2]) # same channel in same group
warn2 = dict(foo=[375], bar=[5, 2]) # one channel (last channel)
warn3 = dict(foo=[0, 4], bar=[5, 2]) # one good channel left
with pytest.warns(RuntimeWarning, match='Could not find stimulus'):
combine_channels(raw_no_stim, good, keep_stim=True)
with pytest.warns(RuntimeWarning, match='Less than 2 channels') as record:
combine_channels(raw, warn1)
combine_channels(raw, warn2)
combine_channels(raw_ch_bad, warn3, drop_bad=True)
assert len(record) == 3
run_tests_if_main()
|
from datetime import timedelta
import json
from homeassistant.components.airly.sensor import ATTRIBUTION
from homeassistant.const import (
ATTR_ATTRIBUTION,
ATTR_DEVICE_CLASS,
ATTR_ENTITY_ID,
ATTR_ICON,
ATTR_UNIT_OF_MEASUREMENT,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_PRESSURE,
DEVICE_CLASS_TEMPERATURE,
PERCENTAGE,
PRESSURE_HPA,
STATE_UNAVAILABLE,
TEMP_CELSIUS,
)
from homeassistant.setup import async_setup_component
from homeassistant.util.dt import utcnow
from tests.async_mock import patch
from tests.common import async_fire_time_changed, load_fixture
from tests.components.airly import init_integration
async def test_sensor(hass):
"""Test states of the sensor."""
await init_integration(hass)
registry = await hass.helpers.entity_registry.async_get_registry()
state = hass.states.get("sensor.home_humidity")
assert state
assert state.state == "92.8"
assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE
assert state.attributes.get(ATTR_DEVICE_CLASS) == DEVICE_CLASS_HUMIDITY
entry = registry.async_get("sensor.home_humidity")
assert entry
assert entry.unique_id == "55.55-122.12-humidity"
state = hass.states.get("sensor.home_pm1")
assert state
assert state.state == "9"
assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
assert (
state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
== CONCENTRATION_MICROGRAMS_PER_CUBIC_METER
)
assert state.attributes.get(ATTR_ICON) == "mdi:blur"
entry = registry.async_get("sensor.home_pm1")
assert entry
assert entry.unique_id == "55.55-122.12-pm1"
state = hass.states.get("sensor.home_pressure")
assert state
assert state.state == "1001"
assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PRESSURE_HPA
assert state.attributes.get(ATTR_DEVICE_CLASS) == DEVICE_CLASS_PRESSURE
entry = registry.async_get("sensor.home_pressure")
assert entry
assert entry.unique_id == "55.55-122.12-pressure"
state = hass.states.get("sensor.home_temperature")
assert state
assert state.state == "14.2"
assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == TEMP_CELSIUS
assert state.attributes.get(ATTR_DEVICE_CLASS) == DEVICE_CLASS_TEMPERATURE
entry = registry.async_get("sensor.home_temperature")
assert entry
assert entry.unique_id == "55.55-122.12-temperature"
async def test_availability(hass):
"""Ensure that we mark the entities unavailable correctly when service is offline."""
await init_integration(hass)
state = hass.states.get("sensor.home_humidity")
assert state
assert state.state != STATE_UNAVAILABLE
assert state.state == "92.8"
future = utcnow() + timedelta(minutes=60)
with patch("airly._private._RequestsHandler.get", side_effect=ConnectionError()):
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get("sensor.home_humidity")
assert state
assert state.state == STATE_UNAVAILABLE
future = utcnow() + timedelta(minutes=120)
with patch(
"airly._private._RequestsHandler.get",
return_value=json.loads(load_fixture("airly_valid_station.json")),
):
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get("sensor.home_humidity")
assert state
assert state.state != STATE_UNAVAILABLE
assert state.state == "92.8"
async def test_manual_update_entity(hass):
"""Test manual update entity via service homeasasistant/update_entity."""
await init_integration(hass)
await async_setup_component(hass, "homeassistant", {})
with patch(
"homeassistant.components.airly.AirlyDataUpdateCoordinator._async_update_data"
) as mock_update:
await hass.services.async_call(
"homeassistant",
"update_entity",
{ATTR_ENTITY_ID: ["sensor.home_humidity"]},
blocking=True,
)
assert mock_update.call_count == 1
|
import pytest
from tuyaha.tuyaapi import TuyaAPIException, TuyaNetException
from homeassistant import config_entries, data_entry_flow, setup
from homeassistant.components.tuya.const import CONF_COUNTRYCODE, DOMAIN
from homeassistant.const import CONF_PASSWORD, CONF_PLATFORM, CONF_USERNAME
from tests.async_mock import Mock, patch
from tests.common import MockConfigEntry
USERNAME = "myUsername"
PASSWORD = "myPassword"
COUNTRY_CODE = "1"
TUYA_PLATFORM = "tuya"
TUYA_USER_DATA = {
CONF_USERNAME: USERNAME,
CONF_PASSWORD: PASSWORD,
CONF_COUNTRYCODE: COUNTRY_CODE,
CONF_PLATFORM: TUYA_PLATFORM,
}
@pytest.fixture(name="tuya")
def tuya_fixture() -> Mock:
"""Patch libraries."""
with patch("homeassistant.components.tuya.config_flow.TuyaApi") as tuya:
yield tuya
async def test_user(hass, tuya):
"""Test user config."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
with patch(
"homeassistant.components.tuya.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.tuya.async_setup_entry", return_value=True
) as mock_setup_entry:
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input=TUYA_USER_DATA
)
await hass.async_block_till_done()
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == USERNAME
assert result["data"][CONF_USERNAME] == USERNAME
assert result["data"][CONF_PASSWORD] == PASSWORD
assert result["data"][CONF_COUNTRYCODE] == COUNTRY_CODE
assert result["data"][CONF_PLATFORM] == TUYA_PLATFORM
assert not result["result"].unique_id
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_import(hass, tuya):
"""Test import step."""
await setup.async_setup_component(hass, "persistent_notification", {})
with patch(
"homeassistant.components.tuya.async_setup",
return_value=True,
) as mock_setup, patch(
"homeassistant.components.tuya.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data=TUYA_USER_DATA,
)
await hass.async_block_till_done()
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == USERNAME
assert result["data"][CONF_USERNAME] == USERNAME
assert result["data"][CONF_PASSWORD] == PASSWORD
assert result["data"][CONF_COUNTRYCODE] == COUNTRY_CODE
assert result["data"][CONF_PLATFORM] == TUYA_PLATFORM
assert not result["result"].unique_id
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_abort_if_already_setup(hass, tuya):
"""Test we abort if Tuya is already setup."""
MockConfigEntry(domain=DOMAIN, data=TUYA_USER_DATA).add_to_hass(hass)
    # Should fail, config exists (user flow)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}, data=TUYA_USER_DATA
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "single_instance_allowed"
    # Should fail, config exists (import)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TUYA_USER_DATA
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "single_instance_allowed"
async def test_abort_on_invalid_credentials(hass, tuya):
"""Test when we have invalid credentials."""
tuya().init.side_effect = TuyaAPIException("Boom")
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}, data=TUYA_USER_DATA
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {"base": "invalid_auth"}
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TUYA_USER_DATA
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "invalid_auth"
async def test_abort_on_connection_error(hass, tuya):
"""Test when we have a network error."""
tuya().init.side_effect = TuyaNetException("Boom")
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}, data=TUYA_USER_DATA
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "cannot_connect"
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=TUYA_USER_DATA
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "cannot_connect"
|
revision = "131ec6accff5"
down_revision = "e3691fc396e9"
from alembic import op
import sqlalchemy as sa
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column(
"certificates",
sa.Column("rotation", sa.Boolean(), nullable=False, server_default=sa.false()),
)
op.add_column(
"endpoints",
sa.Column(
"last_updated",
sa.DateTime(),
server_default=sa.text("now()"),
nullable=False,
),
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column("endpoints", "last_updated")
op.drop_column("certificates", "rotation")
# ### end Alembic commands ###
|
import unittest
import numpy as np
from chainer import testing
from chainercv.transforms import pca_lighting
class TestPCALighting(unittest.TestCase):
def test_pca_lighting(self):
img = np.random.uniform(size=(3, 48, 32))
out = pca_lighting(img, 0.1)
self.assertEqual(img.shape, out.shape)
self.assertEqual(img.dtype, out.dtype)
out = pca_lighting(img, 0)
self.assertEqual(img.shape, out.shape)
self.assertEqual(img.dtype, out.dtype)
np.testing.assert_equal(out, img)
testing.run_module(__name__, __file__)
|
from typing import Dict, List, Set, cast
from .types import CategoryType, PolicyType
def merge_policies(policies: List[PolicyType]) -> PolicyType:
"""Merge policies."""
new_policy: Dict[str, CategoryType] = {}
seen: Set[str] = set()
for policy in policies:
for category in policy:
if category in seen:
continue
seen.add(category)
new_policy[category] = _merge_policies(
[policy.get(category) for policy in policies]
)
    return cast(PolicyType, new_policy)
def _merge_policies(sources: List[CategoryType]) -> CategoryType:
"""Merge a policy."""
# When merging policies, the most permissive wins.
# This means we order it like this:
# True > Dict > None
#
# True: allow everything
# Dict: specify more granular permissions
# None: no opinion
#
# If there are multiple sources with a dict as policy, we recursively
# merge each key in the source.
policy: CategoryType = None
seen: Set[str] = set()
for source in sources:
if source is None:
continue
# A source that's True will always win. Shortcut return.
if source is True:
return True
assert isinstance(source, dict)
if policy is None:
policy = cast(CategoryType, {})
assert isinstance(policy, dict)
for key in source:
if key in seen:
continue
seen.add(key)
key_sources = []
for src in sources:
if isinstance(src, dict):
key_sources.append(src.get(key))
policy[key] = _merge_policies(key_sources)
return policy
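# --- Illustrative sketch (not part of the original module) ---
# A small, hypothetical example of the "most permissive wins" ordering
# described above (True > dict > None). The policy contents are invented
# purely for illustration.
if __name__ == "__main__":
    merged = merge_policies(
        [
            {"entities": {"entity_ids": {"light.kitchen": True}}},
            {"entities": True},  # True outranks the dict in the first policy
            {"config": None},  # None expresses no opinion
        ]
    )
    print(merged)  # expected: {'entities': True, 'config': None}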
|
from aiohomekit.model.characteristics import CharacteristicsTypes
from aiohomekit.model.services import ServicesTypes
from tests.components.homekit_controller.common import setup_test_component
LOCK_CURRENT_STATE = ("lock-mechanism", "lock-mechanism.current-state")
LOCK_TARGET_STATE = ("lock-mechanism", "lock-mechanism.target-state")
def create_lock_service(accessory):
"""Define a lock characteristics as per page 219 of HAP spec."""
service = accessory.add_service(ServicesTypes.LOCK_MECHANISM)
cur_state = service.add_char(CharacteristicsTypes.LOCK_MECHANISM_CURRENT_STATE)
cur_state.value = 0
targ_state = service.add_char(CharacteristicsTypes.LOCK_MECHANISM_TARGET_STATE)
targ_state.value = 0
# According to the spec, a battery-level characteristic is normally
# part of a separate service. However as the code was written (which
# predates this test) the battery level would have to be part of the lock
# service as it is here.
targ_state = service.add_char(CharacteristicsTypes.BATTERY_LEVEL)
targ_state.value = 50
return service
async def test_switch_change_lock_state(hass, utcnow):
"""Test that we can turn a HomeKit lock on and off again."""
helper = await setup_test_component(hass, create_lock_service)
await hass.services.async_call(
"lock", "lock", {"entity_id": "lock.testdevice"}, blocking=True
)
assert helper.characteristics[LOCK_TARGET_STATE].value == 1
await hass.services.async_call(
"lock", "unlock", {"entity_id": "lock.testdevice"}, blocking=True
)
assert helper.characteristics[LOCK_TARGET_STATE].value == 0
async def test_switch_read_lock_state(hass, utcnow):
"""Test that we can read the state of a HomeKit lock accessory."""
helper = await setup_test_component(hass, create_lock_service)
helper.characteristics[LOCK_CURRENT_STATE].value = 0
helper.characteristics[LOCK_TARGET_STATE].value = 0
state = await helper.poll_and_get_state()
assert state.state == "unlocked"
assert state.attributes["battery_level"] == 50
helper.characteristics[LOCK_CURRENT_STATE].value = 1
helper.characteristics[LOCK_TARGET_STATE].value = 1
state = await helper.poll_and_get_state()
assert state.state == "locked"
|
import diamond.collector
from diamond.collector import str_to_bool
import diamond.convertor
import os
import re
try:
import psutil
except ImportError:
psutil = None
class NetworkCollector(diamond.collector.Collector):
PROC = '/proc/net/dev'
def get_default_config_help(self):
config_help = super(NetworkCollector, self).get_default_config_help()
config_help.update({
'interfaces': 'List of interface types to collect',
'greedy': 'Greedy match interfaces',
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(NetworkCollector, self).get_default_config()
config.update({
'path': 'network',
'interfaces': ['eth', 'bond', 'em', 'p1p', 'eno', 'enp', 'ens',
'enx'],
'byte_unit': ['bit', 'byte'],
'greedy': 'true',
})
return config
def collect(self):
"""
Collect network interface stats.
"""
# Initialize results
results = {}
if os.access(self.PROC, os.R_OK):
# Open File
file = open(self.PROC)
# Build Regular Expression
greed = ''
if str_to_bool(self.config['greedy']):
                greed = r'\S*'
            exp = ((r'^(?:\s*)((?:%s)%s):(?:\s*)' +
                    r'(?P<rx_bytes>\d+)(?:\s*)' +
                    r'(?P<rx_packets>\w+)(?:\s*)' +
                    r'(?P<rx_errors>\d+)(?:\s*)' +
                    r'(?P<rx_drop>\d+)(?:\s*)' +
                    r'(?P<rx_fifo>\d+)(?:\s*)' +
                    r'(?P<rx_frame>\d+)(?:\s*)' +
                    r'(?P<rx_compressed>\d+)(?:\s*)' +
                    r'(?P<rx_multicast>\d+)(?:\s*)' +
                    r'(?P<tx_bytes>\d+)(?:\s*)' +
                    r'(?P<tx_packets>\w+)(?:\s*)' +
                    r'(?P<tx_errors>\d+)(?:\s*)' +
                    r'(?P<tx_drop>\d+)(?:\s*)' +
                    r'(?P<tx_fifo>\d+)(?:\s*)' +
                    r'(?P<tx_colls>\d+)(?:\s*)' +
                    r'(?P<tx_carrier>\d+)(?:\s*)' +
                    r'(?P<tx_compressed>\d+)(?:.*)$') %
(('|'.join(self.config['interfaces'])), greed))
reg = re.compile(exp)
# Match Interfaces
for line in file:
match = reg.match(line)
if match:
device = match.group(1)
results[device] = match.groupdict()
# Close File
file.close()
else:
if not psutil:
self.log.error('Unable to import psutil')
self.log.error('No network metrics retrieved')
return None
network_stats = psutil.network_io_counters(True)
for device in network_stats.keys():
network_stat = network_stats[device]
results[device] = {}
results[device]['rx_bytes'] = network_stat.bytes_recv
results[device]['tx_bytes'] = network_stat.bytes_sent
results[device]['rx_packets'] = network_stat.packets_recv
results[device]['tx_packets'] = network_stat.packets_sent
for device in results:
stats = results[device]
for s, v in stats.items():
# Get Metric Name
metric_name = '.'.join([device, s])
# Get Metric Value
metric_value = self.derivative(metric_name,
long(v),
diamond.collector.MAX_COUNTER)
# Convert rx_bytes and tx_bytes
if s == 'rx_bytes' or s == 'tx_bytes':
convertor = diamond.convertor.binary(value=metric_value,
unit='byte')
for u in self.config['byte_unit']:
                        # Publish Converted Metric
self.publish(metric_name.replace('bytes', u),
convertor.get(unit=u), 2)
else:
# Publish Metric Derivative
self.publish(metric_name, metric_value)
return None
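# --- Illustrative sketch (not part of the original module) ---
# What a single /proc/net/dev line looks like and how the named groups in
# the expression built in collect() pick counters out of it. The sample
# line, interface name and trimmed-down pattern are made up for
# illustration only.
if __name__ == "__main__":
    sample = "  eth0: 184530 1448 0 0 0 0 0 0 934712 1519 0 0 0 0 0 0"
    exp = (r'^(?:\s*)((?:eth)\S*):(?:\s*)(?P<rx_bytes>\d+)(?:\s*)'
           r'(?P<rx_packets>\w+)(?:\s*).*$')
    m = re.match(exp, sample)
    print(m.group(1), m.groupdict())  # -> eth0 {'rx_bytes': '184530', 'rx_packets': '1448'}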
|
class Conf(object):
_render_url = 'http://127.0.0.1:9009/render'
_render = True
# Indicates that we should rely on Django's settings as the
# canonical reference and use the above defaults as fallbacks.
# Proxying to django.conf.settings allows us to swap out Django's
    # settings during tests
_PROXY_DJANGO_SETTINGS = False
@property
def RENDER_URL(self):
if not self._PROXY_DJANGO_SETTINGS:
return self._render_url
from django.conf import settings
if hasattr(settings, 'REACT'):
return settings.REACT.get('RENDER_URL', self._render_url)
return self._render_url
@property
def RENDER(self):
if not self._PROXY_DJANGO_SETTINGS:
return self._render
from django.conf import settings
if hasattr(settings, 'REACT'):
return settings.REACT.get('RENDER', self._render)
return self._render
def configure(self, RENDER_URL=None, RENDER=None):
if RENDER_URL is not None:
self._render_url = RENDER_URL
if RENDER is not None:
self._render = RENDER
settings = Conf()
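# --- Illustrative sketch (not part of the original module) ---
# Hypothetical use of the module-level `settings` object defined above.
# With _PROXY_DJANGO_SETTINGS left at False, the properties simply return
# the values stored by configure(); the URL below is made up.
if __name__ == "__main__":
    settings.configure(RENDER_URL="http://localhost:9009/render", RENDER=False)
    print(settings.RENDER_URL)  # -> http://localhost:9009/render
    print(settings.RENDER)  # -> False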
|
import logging
from velbus.util import VelbusException
from homeassistant.components.cover import (
ATTR_POSITION,
SUPPORT_CLOSE,
SUPPORT_OPEN,
SUPPORT_SET_POSITION,
SUPPORT_STOP,
CoverEntity,
)
from . import VelbusEntity
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up Velbus cover based on config_entry."""
cntrl = hass.data[DOMAIN][entry.entry_id]["cntrl"]
modules_data = hass.data[DOMAIN][entry.entry_id]["cover"]
entities = []
for address, channel in modules_data:
module = cntrl.get_module(address)
entities.append(VelbusCover(module, channel))
async_add_entities(entities)
class VelbusCover(VelbusEntity, CoverEntity):
"""Representation a Velbus cover."""
@property
def supported_features(self):
"""Flag supported features."""
if self._module.support_position():
return SUPPORT_OPEN | SUPPORT_CLOSE | SUPPORT_STOP | SUPPORT_SET_POSITION
return SUPPORT_OPEN | SUPPORT_CLOSE | SUPPORT_STOP
@property
def is_closed(self):
"""Return if the cover is closed."""
if self._module.get_position(self._channel) == 100:
return True
return False
@property
def current_cover_position(self):
"""Return current position of cover.
None is unknown, 0 is closed, 100 is fully open
Velbus: 100 = closed, 0 = open
"""
pos = self._module.get_position(self._channel)
return 100 - pos
def open_cover(self, **kwargs):
"""Open the cover."""
try:
self._module.open(self._channel)
except VelbusException as err:
_LOGGER.error("A Velbus error occurred: %s", err)
def close_cover(self, **kwargs):
"""Close the cover."""
try:
self._module.close(self._channel)
except VelbusException as err:
_LOGGER.error("A Velbus error occurred: %s", err)
def stop_cover(self, **kwargs):
"""Stop the cover."""
try:
self._module.stop(self._channel)
except VelbusException as err:
_LOGGER.error("A Velbus error occurred: %s", err)
def set_cover_position(self, **kwargs):
"""Move the cover to a specific position."""
try:
self._module.set(self._channel, (100 - kwargs[ATTR_POSITION]))
except VelbusException as err:
_LOGGER.error("A Velbus error occurred: %s", err)
|
import logging
from aip import AipSpeech
import voluptuous as vol
from homeassistant.components.tts import CONF_LANG, PLATFORM_SCHEMA, Provider
from homeassistant.const import CONF_API_KEY
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
SUPPORTED_LANGUAGES = ["zh"]
DEFAULT_LANG = "zh"
SUPPORTED_PERSON = [0, 1, 3, 4, 5, 103, 106, 110, 111, 5003, 5118]
CONF_APP_ID = "app_id"
CONF_SECRET_KEY = "secret_key"
CONF_SPEED = "speed"
CONF_PITCH = "pitch"
CONF_VOLUME = "volume"
CONF_PERSON = "person"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_LANG, default=DEFAULT_LANG): vol.In(SUPPORTED_LANGUAGES),
vol.Required(CONF_APP_ID): cv.string,
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_SECRET_KEY): cv.string,
vol.Optional(CONF_SPEED, default=5): vol.All(
vol.Coerce(int), vol.Range(min=0, max=9)
),
vol.Optional(CONF_PITCH, default=5): vol.All(
vol.Coerce(int), vol.Range(min=0, max=9)
),
vol.Optional(CONF_VOLUME, default=5): vol.All(
vol.Coerce(int), vol.Range(min=0, max=15)
),
vol.Optional(CONF_PERSON, default=0): vol.In(SUPPORTED_PERSON),
}
)
# Keys are options in the config file, and Values are options
# required by Baidu TTS API.
_OPTIONS = {
CONF_PERSON: "per",
CONF_PITCH: "pit",
CONF_SPEED: "spd",
CONF_VOLUME: "vol",
}
SUPPORTED_OPTIONS = [CONF_PERSON, CONF_PITCH, CONF_SPEED, CONF_VOLUME]
def get_engine(hass, config, discovery_info=None):
"""Set up Baidu TTS component."""
return BaiduTTSProvider(hass, config)
class BaiduTTSProvider(Provider):
"""Baidu TTS speech api provider."""
def __init__(self, hass, conf):
"""Init Baidu TTS service."""
self.hass = hass
self._lang = conf[CONF_LANG]
self._codec = "mp3"
self.name = "BaiduTTS"
self._app_data = {
"appid": conf[CONF_APP_ID],
"apikey": conf[CONF_API_KEY],
"secretkey": conf[CONF_SECRET_KEY],
}
self._speech_conf_data = {
_OPTIONS[CONF_PERSON]: conf[CONF_PERSON],
_OPTIONS[CONF_PITCH]: conf[CONF_PITCH],
_OPTIONS[CONF_SPEED]: conf[CONF_SPEED],
_OPTIONS[CONF_VOLUME]: conf[CONF_VOLUME],
}
@property
def default_language(self):
"""Return the default language."""
return self._lang
@property
def supported_languages(self):
"""Return a list of supported languages."""
return SUPPORTED_LANGUAGES
@property
def default_options(self):
"""Return a dict including default options."""
return {
CONF_PERSON: self._speech_conf_data[_OPTIONS[CONF_PERSON]],
CONF_PITCH: self._speech_conf_data[_OPTIONS[CONF_PITCH]],
CONF_SPEED: self._speech_conf_data[_OPTIONS[CONF_SPEED]],
CONF_VOLUME: self._speech_conf_data[_OPTIONS[CONF_VOLUME]],
}
@property
def supported_options(self):
"""Return a list of supported options."""
return SUPPORTED_OPTIONS
def get_tts_audio(self, message, language, options=None):
"""Load TTS from BaiduTTS."""
aip_speech = AipSpeech(
self._app_data["appid"],
self._app_data["apikey"],
self._app_data["secretkey"],
)
if options is None:
result = aip_speech.synthesis(message, language, 1, self._speech_conf_data)
else:
speech_data = self._speech_conf_data.copy()
for key, value in options.items():
speech_data[_OPTIONS[key]] = value
result = aip_speech.synthesis(message, language, 1, speech_data)
if isinstance(result, dict):
_LOGGER.error(
"Baidu TTS error-- err_no:%d; err_msg:%s; err_detail:%s",
result["err_no"],
result["err_msg"],
result["err_detail"],
)
return None, None
return self._codec, result
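# --- Illustrative sketch (not part of the original module) ---
# How per-call options are folded into the Baidu request parameters in
# get_tts_audio above: the config-file keys are translated through _OPTIONS
# into the short names the Baidu API expects. The numbers are arbitrary.
if __name__ == "__main__":
    defaults = {"per": 0, "pit": 5, "spd": 5, "vol": 5}
    options = {CONF_PITCH: 7, CONF_VOLUME: 9}
    speech_data = defaults.copy()
    for key, value in options.items():
        speech_data[_OPTIONS[key]] = value
    print(speech_data)  # -> {'per': 0, 'pit': 7, 'spd': 5, 'vol': 9}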
|
from django import forms
from weblate.fonts.models import Font, FontGroup, FontOverride
class FontForm(forms.ModelForm):
class Meta:
model = Font
fields = ("font",)
class FontGroupForm(forms.ModelForm):
class Meta:
model = FontGroup
fields = ("name", "font")
def __init__(self, data=None, project=None, **kwargs):
super().__init__(data, **kwargs)
self.fields["font"].queryset = self.fields["font"].queryset.filter(
project=project
)
class FontOverrideForm(forms.ModelForm):
class Meta:
model = FontOverride
fields = ("language", "font")
|
import argparse
import logging
from paasta_tools.autoscaling.autoscaling_service_lib import autoscale_services
from paasta_tools.marathon_tools import DEFAULT_SOA_DIR
def parse_args():
parser = argparse.ArgumentParser(description="Autoscales marathon jobs")
parser.add_argument(
"-d",
"--soa-dir",
dest="soa_dir",
metavar="SOA_DIR",
default=DEFAULT_SOA_DIR,
help="define a different soa config directory",
)
parser.add_argument(
"-v", "--verbose", action="store_true", help="Increase logging verboseness"
)
parser.add_argument(
"services",
type=str,
nargs="*",
help="name of services to scale (optional defaults to all autoscaling enabled services)",
)
args = parser.parse_args()
return args
def main():
args = parse_args()
if args.verbose:
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.WARNING)
autoscale_services(soa_dir=args.soa_dir, services=args.services)
if __name__ == "__main__":
main()
|
import logging
import os
import subprocess
logging.basicConfig()
logger = logging.getLogger("kalliope")
MPLAYER_EXEC_PATH = "/usr/bin/mplayer"
class Mplayer(object):
"""
    This class represents the MPlayer object used to play all the sounds of the system.
"""
def __init__(self, **kwargs):
logger.debug("[Mplayer.__init__] instance")
logger.debug("[Mplayer.__init__] args : %s " % str(kwargs))
@classmethod
def play(cls, filepath):
"""
Play the sound located in the provided filepath
:param filepath: The file path of the sound to play
:type filepath: str
:Example:
Mplayer.play(self.file_path)
.. seealso:: TTS
.. raises::
.. warnings:: Class Method and Public
"""
mplayer_exec_path = [MPLAYER_EXEC_PATH]
mplayer_options = ['-slave', '-quiet']
mplayer_command = list()
mplayer_command.extend(mplayer_exec_path)
mplayer_command.extend(mplayer_options)
mplayer_command.append(filepath)
logger.debug("Mplayer cmd: %s" % str(mplayer_command))
fnull = open(os.devnull, 'w')
subprocess.call(mplayer_command, stdout=fnull, stderr=fnull)
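# --- Illustrative sketch (not part of the original module) ---
# Minimal hypothetical invocation of the class method documented above.
# Assumes mplayer is installed at MPLAYER_EXEC_PATH and that the sample
# file exists; both paths are placeholders.
if __name__ == "__main__":
    Mplayer.play("/tmp/example.wav")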
|
import keras
from matchzoo.engine.base_model import BaseModel
from matchzoo.engine import hyper_spaces
class Naive(BaseModel):
"""
Naive model with a simplest structure for testing purposes.
Bare minimum functioning model. The best choice to get things rolling.
The worst choice to fit and evaluate performance.
"""
@classmethod
def get_default_params(cls):
"""Default parameters."""
params = super().get_default_params()
params.get('optimizer').hyper_space = \
hyper_spaces.choice(['adam', 'adagrad', 'rmsprop'])
return params
def build(self):
"""Build."""
x_in = self._make_inputs()
x = keras.layers.concatenate(x_in)
x_out = self._make_output_layer()(x)
self._backend = keras.models.Model(inputs=x_in, outputs=x_out)
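# --- Illustrative sketch (not part of the original module) ---
# A hypothetical way to build the model through the usual MatchZoo parameter
# flow. The `matchzoo` import, the Ranking task and
# guess_and_fill_missing_params() are assumptions about the surrounding API
# and are not verified against a specific version.
if __name__ == "__main__":
    import matchzoo as mz
    model = Naive()
    model.params['task'] = mz.tasks.Ranking()
    model.guess_and_fill_missing_params()
    model.build()
    model.compile()
    model.backend.summary()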
|
from collections import defaultdict
from ..tree import Tree
from ..exceptions import UnexpectedCharacters
from ..lexer import Token
from ..grammar import Terminal
from .earley import Parser as BaseParser
from .earley_forest import SymbolNode
class Parser(BaseParser):
def __init__(self, parser_conf, term_matcher, resolve_ambiguity=True, ignore = (), complete_lex = False, debug=False, tree_class=Tree):
BaseParser.__init__(self, parser_conf, term_matcher, resolve_ambiguity, debug, tree_class)
self.ignore = [Terminal(t) for t in ignore]
self.complete_lex = complete_lex
def _parse(self, stream, columns, to_scan, start_symbol=None):
def scan(i, to_scan):
"""The core Earley Scanner.
This is a custom implementation of the scanner that uses the
Lark lexer to match tokens. The scan list is built by the
Earley predictor, based on the previously completed tokens.
This ensures that at each phase of the parse we have a custom
lexer context, allowing for more complex ambiguities."""
node_cache = {}
# 1) Loop the expectations and ask the lexer to match.
# Since regexp is forward looking on the input stream, and we only
# want to process tokens when we hit the point in the stream at which
# they complete, we push all tokens into a buffer (delayed_matches), to
# be held possibly for a later parse step when we reach the point in the
# input stream at which they complete.
for item in set(to_scan):
m = match(item.expect, stream, i)
if m:
t = Token(item.expect.name, m.group(0), i, text_line, text_column)
delayed_matches[m.end()].append( (item, i, t) )
if self.complete_lex:
s = m.group(0)
for j in range(1, len(s)):
m = match(item.expect, s[:-j])
if m:
t = Token(item.expect.name, m.group(0), i, text_line, text_column)
delayed_matches[i+m.end()].append( (item, i, t) )
# XXX The following 3 lines were commented out for causing a bug. See issue #768
# # Remove any items that successfully matched in this pass from the to_scan buffer.
# # This ensures we don't carry over tokens that already matched, if we're ignoring below.
# to_scan.remove(item)
# 3) Process any ignores. This is typically used for e.g. whitespace.
# We carry over any unmatched items from the to_scan buffer to be matched again after
# the ignore. This should allow us to use ignored symbols in non-terminals to implement
# e.g. mandatory spacing.
for x in self.ignore:
m = match(x, stream, i)
if m:
# Carry over any items still in the scan buffer, to past the end of the ignored items.
delayed_matches[m.end()].extend([(item, i, None) for item in to_scan ])
                    # If we're ignoring up to the end of the file, carry over the start symbol if it already completed.
delayed_matches[m.end()].extend([(item, i, None) for item in columns[i] if item.is_complete and item.s == start_symbol])
next_to_scan = set()
next_set = set()
columns.append(next_set)
transitives.append({})
## 4) Process Tokens from delayed_matches.
# This is the core of the Earley scanner. Create an SPPF node for each Token,
# and create the symbol node in the SPPF tree. Advance the item that completed,
# and add the resulting new item to either the Earley set (for processing by the
# completer/predictor) or the to_scan buffer for the next parse step.
for item, start, token in delayed_matches[i+1]:
if token is not None:
token.end_line = text_line
token.end_column = text_column + 1
token.end_pos = i + 1
new_item = item.advance()
label = (new_item.s, new_item.start, i)
new_item.node = node_cache[label] if label in node_cache else node_cache.setdefault(label, SymbolNode(*label))
new_item.node.add_family(new_item.s, item.rule, new_item.start, item.node, token)
else:
new_item = item
if new_item.expect in self.TERMINALS:
# add (B ::= Aai+1.B, h, y) to Q'
next_to_scan.add(new_item)
else:
# add (B ::= Aa+1.B, h, y) to Ei+1
next_set.add(new_item)
del delayed_matches[i+1] # No longer needed, so unburden memory
if not next_set and not delayed_matches and not next_to_scan:
raise UnexpectedCharacters(stream, i, text_line, text_column, {item.expect.name for item in to_scan},
set(to_scan), state=frozenset(i.s for i in to_scan))
return next_to_scan
delayed_matches = defaultdict(list)
match = self.term_matcher
# Cache for nodes & tokens created in a particular parse step.
transitives = [{}]
text_line = 1
text_column = 1
## The main Earley loop.
# Run the Prediction/Completion cycle for any Items in the current Earley set.
# Completions will be added to the SPPF tree, and predictions will be recursively
# processed down to terminals/empty nodes to be added to the scanner for the next
# step.
i = 0
for token in stream:
self.predict_and_complete(i, to_scan, columns, transitives)
to_scan = scan(i, to_scan)
if token == '\n':
text_line += 1
text_column = 1
else:
text_column += 1
i += 1
self.predict_and_complete(i, to_scan, columns, transitives)
## Column is now the final column in the parse.
assert i == len(columns)-1
return to_scan
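# --- Illustrative sketch (not part of the original module) ---
# This scanner is normally driven through Lark's public API rather than
# instantiated directly. A hypothetical end-to-end run, assuming a lark
# installation where parser="earley" with a dynamic lexer routes through
# this implementation:
if __name__ == "__main__":
    from lark import Lark
    grammar = r"""
    start: WORD ("," WORD)*
    WORD: /[a-z]+/
    %import common.WS
    %ignore WS
    """
    earley_parser = Lark(grammar, parser="earley", lexer="dynamic")
    print(earley_parser.parse("foo, bar, baz").pretty())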
|
from django.core.exceptions import ValidationError
from django.test import SimpleTestCase
from weblate.checks.flags import TYPED_FLAGS, TYPED_FLAGS_ARGS, Flags
class FlagTest(SimpleTestCase):
def test_parse(self):
self.assertEqual(Flags("foo, bar").items(), {"foo", "bar"})
def test_parse_blank(self):
self.assertEqual(Flags("foo, bar, ").items(), {"foo", "bar"})
def test_parse_alias(self):
self.assertEqual(
Flags("foo, md-text, bar, markdown-text").items(), {"foo", "bar", "md-text"}
)
def test_iter(self):
self.assertEqual(sorted(Flags("foo, bar")), ["bar", "foo"])
def test_parse_empty(self):
self.assertEqual(Flags("").items(), set())
def test_merge(self):
self.assertEqual(Flags({"foo"}, {"bar"}).items(), {"foo", "bar"})
def test_merge_prefix(self):
self.assertEqual(Flags({("foo", "1")}, {("foo", "2")}).items(), {("foo", "2")})
def test_values(self):
flags = Flags("placeholders:bar:baz")
self.assertEqual(flags.get_value("placeholders"), ["bar", "baz"])
def test_quoted_values(self):
flags = Flags(r"""placeholders:"bar: \"value\"":'baz \'value\''""")
self.assertEqual(
flags.get_value("placeholders"), ['bar: "value"', "baz 'value'"]
)
self.assertEqual(
flags.format(), r'''placeholders:"bar: \"value\"":"baz 'value'"'''
)
flags = Flags(r'regex:"((?:@:\(|\{)[^\)\}]+(?:\)|\}))"')
self.assertEqual(flags.format(), r'regex:"((?:@:\(|\{)[^\)\}]+(?:\)|\}))"')
def test_validate_value(self):
with self.assertRaises(ValidationError):
Flags("max-length:x").validate()
Flags("max-length:30").validate()
def test_validate_name(self):
with self.assertRaises(ValidationError):
Flags("invalid-check-name").validate()
with self.assertRaises(ValidationError):
Flags("invalid-check-name:1").validate()
Flags("ignore-max-length").validate()
def test_typed(self):
self.assertEqual(TYPED_FLAGS.keys(), TYPED_FLAGS_ARGS.keys())
def test_remove(self):
flags = Flags("placeholders:bar:baz, foo:1, bar")
flags.remove("foo")
self.assertEqual(flags.items(), {("placeholders", "bar", "baz"), "bar"})
flags.remove("bar")
self.assertEqual(flags.items(), {("placeholders", "bar", "baz")})
def test_empty_value(self):
flags = Flags("regex:")
regex = flags.get_value("regex")
self.assertEqual(regex.pattern, "")
flags = Flags("regex:,bar")
regex = flags.get_value("regex")
self.assertEqual(regex.pattern, "")
def test_regex(self):
flags = Flags("regex:.*")
regex = flags.get_value("regex")
self.assertEqual(regex.pattern, ".*")
flags = Flags('regex:r".*"')
regex = flags.get_value("regex")
self.assertEqual(regex.pattern, ".*")
def test_regex_value(self):
flags = Flags("placeholders:r")
self.assertEqual(flags.get_value("placeholders"), ["r"])
flags = Flags("placeholders:r:r")
self.assertEqual(flags.get_value("placeholders"), ["r", "r"])
flags = Flags("placeholders:r,r")
self.assertEqual(flags.get_value("placeholders"), ["r"])
flags = Flags('placeholders:r".*"')
values = flags.get_value("placeholders")
self.assertEqual(len(values), 1)
self.assertEqual(values[0].pattern, ".*")
def test_whitespace(self):
self.assertEqual(Flags(" foo , bar ").items(), {"foo", "bar"})
flags = Flags(
"max-size:120:2,font-family:DIN next pro,font-spacing:2, priority:140"
)
self.assertEqual(
flags.items(),
{
("font-family", "DIN next pro"),
("priority", "140"),
("max-size", "120", "2"),
("font-spacing", "2"),
},
)
def test_unicode(self):
self.assertEqual(
Flags("zkouška, Memóriakártya").items(), {"zkouška", "Memóriakártya"}
)
self.assertEqual(
Flags("placeholder:'zkouška sirén'").items(),
{("placeholder", "zkouška sirén")},
)
def test_replacements(
self, text='replacements:{COLOR-GREY}:"":{COLOR-GARNET}:"":{VARIABLE-01}:99'
):
flags = Flags(text)
self.assertEqual(
flags.items(),
{
(
"replacements",
"{COLOR-GREY}",
"",
"{COLOR-GARNET}",
"",
"{VARIABLE-01}",
"99",
)
},
)
self.assertEqual(
flags.get_value("replacements"),
["{COLOR-GREY}", "", "{COLOR-GARNET}", "", "{VARIABLE-01}", "99"],
)
def test_empty_params(self):
self.test_replacements(
"replacements:{COLOR-GREY}::{COLOR-GARNET}::{VARIABLE-01}:99"
)
|
import numpy as np
from scattertext import CorpusDF
from scattertext.CSRMatrixTools import CSRMatrixFactory
from scattertext.ParsedCorpus import ParsedCorpus
from scattertext.indexstore.IndexStore import IndexStore
class CorpusFromFeatureDict(object):
def __init__(self,
df,
category_col,
text_col,
feature_col,
metadata_col=None,
parsed_col=None):
'''
Parameters
----------
df : pd.DataFrame
            contains category_col and parsed_col, where parsed_col consists entirely of spaCy docs
category_col : str
name of category column in convention_df
text_col : str
The name of the column which contains each document's raw text.
feature_col : str
name of column in convention_df with a feature dictionary
metadata_col : str, optional
            name of column in convention_df with a metadata dictionary
parsed_col : str, optional
name of column in convention_df with parsed strings
'''
self._df = df.reset_index()
self._category_col = category_col
self._text_col = text_col
self._feature_col = feature_col
self._parsed_col = parsed_col
self._metadata_col = metadata_col
self._category_idx_store = IndexStore()
self._X_factory = CSRMatrixFactory()
self._mX_factory = CSRMatrixFactory()
self._term_idx_store = IndexStore()
self._metadata_idx_store = IndexStore()
def build(self):
'''Constructs the term doc matrix.
Returns
-------
scattertext.ParsedCorpus.ParsedCorpus
'''
self._y = self._get_y_and_populate_category_idx_store()
self._df.apply(self._add_to_x_factory, axis=1)
self._X = self._X_factory.set_last_row_idx(len(self._y) - 1).get_csr_matrix()
self._mX = self._mX_factory.set_last_row_idx(len(self._y) - 1).get_csr_matrix()
if self._parsed_col is not None and self._parsed_col in self._df:
return ParsedCorpus(self._df,
self._X,
self._mX,
self._y,
self._term_idx_store,
self._category_idx_store,
self._metadata_idx_store,
self._parsed_col,
self._category_col)
else:
return CorpusDF(self._df,
self._X,
self._mX,
self._y,
self._text_col,
self._term_idx_store,
self._category_idx_store,
self._metadata_idx_store)
def _get_y_and_populate_category_idx_store(self):
return np.array(self._df[self._category_col].apply(self._category_idx_store.getidx))
def _add_to_x_factory(self, row):
for feat, count in row[self._feature_col].items():
feat_idx = self._term_idx_store.getidx(feat)
self._X_factory[row.name, feat_idx] = count
if self._metadata_col in self._df:
for meta, count in row[self._metadata_col].items():
meta_idx = self._metadata_idx_store.getidx(meta)
self._mX_factory[row.name, meta_idx] = count
def _make_new_term_doc_matrix(self,
new_X,
new_mX,
new_y,
new_term_idx_store,
new_category_idx_store,
new_metadata_idx_store,
new_y_mask):
if self._parsed_col is not None and self._parsed_col in self._df:
return ParsedCorpus(self._df[new_y_mask],
new_X,
new_mX,
new_y,
new_term_idx_store,
new_category_idx_store,
new_metadata_idx_store,
self._parsed_col,
self._category_col)
else:
return CorpusDF(self._df[new_y_mask],
new_X,
new_mX,
new_y,
self._text_col,
new_term_idx_store,
new_category_idx_store,
new_metadata_idx_store,
self._df[self._text_col][new_y_mask])
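# --- Illustrative sketch (not part of the original module) ---
# A tiny, hypothetical DataFrame with one feature dictionary per document,
# fed through the builder documented above. Column names and contents are
# invented for illustration.
if __name__ == "__main__":
    import pandas as pd
    df = pd.DataFrame({
        'text': ['the quick brown fox', 'a very lazy dog'],
        'category': ['speed', 'sloth'],
        'features': [{'quick': 1, 'fox': 1}, {'lazy': 1, 'dog': 1}],
    })
    corpus = CorpusFromFeatureDict(
        df,
        category_col='category',
        text_col='text',
        feature_col='features',
    ).build()
    print(type(corpus).__name__)  # -> CorpusDF, since no parsed_col was given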
|
from copy import deepcopy, copy
from ..exceptions import UnexpectedInput, UnexpectedToken
from ..lexer import Token
from ..utils import Serialize
from .lalr_analysis import LALR_Analyzer, Shift, Reduce, IntParseTable
from .lalr_puppet import ParserPuppet
###{standalone
class LALR_Parser(Serialize):
def __init__(self, parser_conf, debug=False):
analysis = LALR_Analyzer(parser_conf, debug=debug)
analysis.compute_lalr()
callbacks = parser_conf.callbacks
self._parse_table = analysis.parse_table
self.parser_conf = parser_conf
self.parser = _Parser(analysis.parse_table, callbacks, debug)
@classmethod
def deserialize(cls, data, memo, callbacks, debug=False):
inst = cls.__new__(cls)
inst._parse_table = IntParseTable.deserialize(data, memo)
inst.parser = _Parser(inst._parse_table, callbacks, debug)
return inst
def serialize(self, memo):
return self._parse_table.serialize(memo)
def parse(self, *args):
return self.parser.parse(*args)
class ParseConf(object):
__slots__ = 'parse_table', 'callbacks', 'start', 'start_state', 'end_state', 'states'
def __init__(self, parse_table, callbacks, start):
self.parse_table = parse_table
self.start_state = self.parse_table.start_states[start]
self.end_state = self.parse_table.end_states[start]
self.states = self.parse_table.states
self.callbacks = callbacks
self.start = start
class ParserState(object):
__slots__ = 'parse_conf', 'lexer', 'state_stack', 'value_stack'
def __init__(self, parse_conf, lexer, state_stack=None, value_stack=None):
self.parse_conf = parse_conf
self.lexer = lexer
self.state_stack = state_stack or [self.parse_conf.start_state]
self.value_stack = value_stack or []
@property
def position(self):
return self.state_stack[-1]
# Necessary for match_examples() to work
def __eq__(self, other):
if not isinstance(other, ParserState):
return False
return self.position == other.position
def __copy__(self):
return type(self)(
self.parse_conf,
self.lexer, # XXX copy
copy(self.state_stack),
deepcopy(self.value_stack),
)
def copy(self):
return copy(self)
def feed_token(self, token, is_end=False):
state_stack = self.state_stack
value_stack = self.value_stack
states = self.parse_conf.states
end_state = self.parse_conf.end_state
callbacks = self.parse_conf.callbacks
while True:
state = state_stack[-1]
try:
action, arg = states[state][token.type]
except KeyError:
expected = {s for s in states[state].keys() if s.isupper()}
raise UnexpectedToken(token, expected, state=self, puppet=None)
assert arg != end_state
if action is Shift:
# shift once and return
assert not is_end
state_stack.append(arg)
value_stack.append(token)
return
else:
# reduce+shift as many times as necessary
rule = arg
size = len(rule.expansion)
if size:
s = value_stack[-size:]
del state_stack[-size:]
del value_stack[-size:]
else:
s = []
value = callbacks[rule](s)
_action, new_state = states[state_stack[-1]][rule.origin.name]
assert _action is Shift
state_stack.append(new_state)
value_stack.append(value)
if is_end and state_stack[-1] == end_state:
return value_stack[-1]
class _Parser(object):
def __init__(self, parse_table, callbacks, debug=False):
self.parse_table = parse_table
self.callbacks = callbacks
self.debug = debug
def parse(self, lexer, start, value_stack=None, state_stack=None):
parse_conf = ParseConf(self.parse_table, self.callbacks, start)
parser_state = ParserState(parse_conf, lexer, state_stack, value_stack)
return self.parse_from_state(parser_state)
def parse_from_state(self, state):
# Main LALR-parser loop
try:
token = None
for token in state.lexer.lex(state):
state.feed_token(token)
token = Token.new_borrow_pos('$END', '', token) if token else Token('$END', '', 0, 1, 1)
return state.feed_token(token, True)
except UnexpectedInput as e:
try:
e.puppet = ParserPuppet(self, state, state.lexer)
except NameError:
pass
raise e
except Exception as e:
if self.debug:
print("")
print("STATE STACK DUMP")
print("----------------")
for i, s in enumerate(state.state_stack):
print('%d)' % i , s)
print("")
raise
###}
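# --- Illustrative sketch (not part of the original module) ---
# The shift/reduce loop in ParserState.feed_token above is normally
# exercised through Lark's public API. A hypothetical end-to-end run,
# assuming an installed lark where parser="lalr" routes through this module:
if __name__ == "__main__":
    from lark import Lark
    calc = Lark(r"""
    start: NUMBER ("+" NUMBER)*
    %import common.NUMBER
    %import common.WS
    %ignore WS
    """, parser="lalr")
    print(calc.parse("1 + 2 + 3").pretty())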
|
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse, JsonResponse
from django.views.decorators.http import require_POST
from weblate.lang.models import Language
from weblate.trans.forms import ReportsForm
from weblate.trans.models.change import Change
from weblate.trans.util import redirect_param
from weblate.utils.views import get_component, get_project, show_form_errors
# Header, two longer fields for name and email, shorter fields for numbers
RST_HEADING = " ".join(["=" * 40] * 2 + ["=" * 24] * 20)
HTML_HEADING = "<table>\n<tr>{0}</tr>"
def generate_credits(user, start_date, end_date, **kwargs):
"""Generate credits data for given component."""
result = []
base = Change.objects.content()
if user:
base = base.filter(author=user)
for language in Language.objects.filter(**kwargs).distinct().iterator():
authors = base.filter(language=language, **kwargs).authors_list(
(start_date, end_date)
)
if not authors:
continue
result.append({language.name: sorted(authors, key=lambda item: item[2])})
return result
@login_required
@require_POST
def get_credits(request, project=None, component=None):
"""View for credits."""
if project is None:
obj = None
kwargs = {"translation__isnull": False}
elif component is None:
obj = get_project(request, project)
kwargs = {"translation__component__project": obj}
else:
obj = get_component(request, project, component)
kwargs = {"translation__component": obj}
form = ReportsForm(request.POST)
if not form.is_valid():
show_form_errors(request, form)
return redirect_param(obj or "home", "#reports")
data = generate_credits(
None if request.user.has_perm("reports.view", obj) else request.user,
form.cleaned_data["start_date"],
form.cleaned_data["end_date"],
**kwargs,
)
if form.cleaned_data["style"] == "json":
return JsonResponse(data=data, safe=False)
if form.cleaned_data["style"] == "html":
start = "<table>"
row_start = "<tr>"
language_format = "<th>{0}</th>"
translator_start = "<td><ul>"
translator_format = '<li><a href="mailto:{0}">{1}</a> ({2})</li>'
translator_end = "</ul></td>"
row_end = "</tr>"
mime = "text/html"
end = "</table>"
else:
start = ""
row_start = ""
language_format = "* {0}\n"
translator_start = ""
translator_format = " * {1} <{0}> ({2})"
translator_end = ""
row_end = ""
mime = "text/plain"
end = ""
result = []
result.append(start)
for language in data:
name, translators = language.popitem()
result.append(row_start)
result.append(language_format.format(name))
result.append(
translator_start
+ "\n".join(translator_format.format(*t) for t in translators)
+ translator_end
)
result.append(row_end)
result.append(end)
return HttpResponse("\n".join(result), content_type=f"{mime}; charset=utf-8")
COUNT_DEFAULTS = {
field: 0
for field in (
"t_chars",
"t_words",
"chars",
"words",
"edits",
"count",
"t_chars_new",
"t_words_new",
"chars_new",
"words_new",
"edits_new",
"count_new",
"t_chars_approve",
"t_words_approve",
"chars_approve",
"words_approve",
"edits_approve",
"count_approve",
"t_chars_edit",
"t_words_edit",
"chars_edit",
"words_edit",
"edits_edit",
"count_edit",
)
}
def generate_counts(user, start_date, end_date, **kwargs):
"""Generate credits data for given component."""
result = {}
action_map = {Change.ACTION_NEW: "new", Change.ACTION_APPROVE: "approve"}
base = Change.objects.content().filter(unit__isnull=False)
if user:
base = base.filter(author=user)
else:
base = base.filter(author__isnull=False)
changes = base.filter(
timestamp__range=(start_date, end_date), **kwargs
).prefetch_related("author", "unit")
for change in changes:
email = change.author.email
if email not in result:
result[email] = current = {"name": change.author.full_name, "email": email}
current.update(COUNT_DEFAULTS)
else:
current = result[email]
src_chars = len(change.unit.source)
src_words = change.unit.num_words
tgt_chars = len(change.target)
tgt_words = len(change.target.split())
edits = change.get_distance()
current["chars"] += src_chars
current["words"] += src_words
current["t_chars"] += tgt_chars
current["t_words"] += tgt_words
current["edits"] += edits
current["count"] += 1
suffix = action_map.get(change.action, "edit")
current["t_chars_" + suffix] += tgt_chars
current["t_words_" + suffix] += tgt_words
current["chars_" + suffix] += src_chars
current["words_" + suffix] += src_words
current["edits_" + suffix] += edits
current["count_" + suffix] += 1
return list(result.values())
@login_required
@require_POST
def get_counts(request, project=None, component=None):
"""View for work counts."""
if project is None:
obj = None
kwargs = {}
elif component is None:
obj = get_project(request, project)
kwargs = {"project": obj}
else:
obj = get_component(request, project, component)
kwargs = {"component": obj}
form = ReportsForm(request.POST)
if not form.is_valid():
show_form_errors(request, form)
return redirect_param(obj or "home", "#reports")
data = generate_counts(
None if request.user.has_perm("reports.view", obj) else request.user,
form.cleaned_data["start_date"],
form.cleaned_data["end_date"],
**kwargs,
)
if form.cleaned_data["style"] == "json":
return JsonResponse(data=data, safe=False)
headers = (
"Name",
"Email",
"Count total",
"Edits total",
"Source words total",
"Source chars total",
"Target words total",
"Target chars total",
"Count new",
"Edits new",
"Source words new",
"Source chars new",
"Target words new",
"Target chars new",
"Count approved",
"Edits approved",
"Source words approved",
"Source chars approved",
"Target words approved",
"Target chars approved",
"Count edited",
"Edits edited",
"Source words edited",
"Source chars edited",
"Target words edited",
"Target chars edited",
)
if form.cleaned_data["style"] == "html":
start = HTML_HEADING.format("".join(f"<th>{h}</th>" for h in headers))
row_start = "<tr>"
cell_name = cell_count = "<td>{0}</td>\n"
row_end = "</tr>"
mime = "text/html"
end = "</table>"
else:
start = "{0}\n{1} {2}\n{0}".format(
RST_HEADING,
" ".join(f"{h:40}" for h in headers[:2]),
" ".join(f"{h:24}" for h in headers[2:]),
)
row_start = ""
cell_name = "{0:40} "
cell_count = "{0:24} "
row_end = ""
mime = "text/plain"
end = RST_HEADING
result = []
result.append(start)
for item in data:
if row_start:
result.append(row_start)
result.append(
"".join(
(
cell_name.format(item["name"] or "Anonymous"),
cell_name.format(item["email"] or ""),
cell_count.format(item["count"]),
cell_count.format(item["edits"]),
cell_count.format(item["words"]),
cell_count.format(item["chars"]),
cell_count.format(item["t_words"]),
cell_count.format(item["t_chars"]),
cell_count.format(item["count_new"]),
cell_count.format(item["edits_new"]),
cell_count.format(item["words_new"]),
cell_count.format(item["chars_new"]),
cell_count.format(item["t_words_new"]),
cell_count.format(item["t_chars_new"]),
cell_count.format(item["count_approve"]),
cell_count.format(item["edits_approve"]),
cell_count.format(item["words_approve"]),
cell_count.format(item["chars_approve"]),
cell_count.format(item["t_words_approve"]),
cell_count.format(item["t_chars_approve"]),
cell_count.format(item["count_edit"]),
cell_count.format(item["edits_edit"]),
cell_count.format(item["words_edit"]),
cell_count.format(item["chars_edit"]),
cell_count.format(item["t_words_edit"]),
cell_count.format(item["t_chars_edit"]),
)
)
)
if row_end:
result.append(row_end)
result.append(end)
return HttpResponse("\n".join(result), content_type=f"{mime}; charset=utf-8")
|
import asyncio
from datetime import timedelta
import logging
from typing import Any, Dict
from sonarr import Sonarr, SonarrAccessRestricted, SonarrError
from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntry
from homeassistant.const import (
ATTR_NAME,
CONF_API_KEY,
CONF_HOST,
CONF_PORT,
CONF_SOURCE,
CONF_SSL,
CONF_VERIFY_SSL,
)
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.typing import HomeAssistantType
from .const import (
ATTR_IDENTIFIERS,
ATTR_MANUFACTURER,
ATTR_SOFTWARE_VERSION,
CONF_BASE_PATH,
CONF_UPCOMING_DAYS,
CONF_WANTED_MAX_ITEMS,
DATA_SONARR,
DATA_UNDO_UPDATE_LISTENER,
DEFAULT_UPCOMING_DAYS,
DEFAULT_WANTED_MAX_ITEMS,
DOMAIN,
)
PLATFORMS = ["sensor"]
SCAN_INTERVAL = timedelta(seconds=30)
_LOGGER = logging.getLogger(__name__)
async def async_setup(hass: HomeAssistantType, config: Dict) -> bool:
"""Set up the Sonarr component."""
hass.data.setdefault(DOMAIN, {})
return True
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool:
"""Set up Sonarr from a config entry."""
if not entry.options:
options = {
CONF_UPCOMING_DAYS: entry.data.get(
CONF_UPCOMING_DAYS, DEFAULT_UPCOMING_DAYS
),
CONF_WANTED_MAX_ITEMS: entry.data.get(
CONF_WANTED_MAX_ITEMS, DEFAULT_WANTED_MAX_ITEMS
),
}
hass.config_entries.async_update_entry(entry, options=options)
sonarr = Sonarr(
host=entry.data[CONF_HOST],
port=entry.data[CONF_PORT],
api_key=entry.data[CONF_API_KEY],
base_path=entry.data[CONF_BASE_PATH],
session=async_get_clientsession(hass),
tls=entry.data[CONF_SSL],
verify_ssl=entry.data[CONF_VERIFY_SSL],
)
try:
await sonarr.update()
except SonarrAccessRestricted:
_async_start_reauth(hass, entry)
return False
except SonarrError as err:
raise ConfigEntryNotReady from err
undo_listener = entry.add_update_listener(_async_update_listener)
hass.data[DOMAIN][entry.entry_id] = {
DATA_SONARR: sonarr,
DATA_UNDO_UPDATE_LISTENER: undo_listener,
}
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
return True
async def async_unload_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
hass.data[DOMAIN][entry.entry_id][DATA_UNDO_UPDATE_LISTENER]()
if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
def _async_start_reauth(hass: HomeAssistantType, entry: ConfigEntry):
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={CONF_SOURCE: SOURCE_REAUTH},
data={"config_entry_id": entry.entry_id, **entry.data},
)
)
_LOGGER.error("API Key is no longer valid. Please reauthenticate")
async def _async_update_listener(hass: HomeAssistantType, entry: ConfigEntry) -> None:
"""Handle options update."""
async_dispatcher_send(
hass, f"sonarr.{entry.entry_id}.entry_options_update", entry.options
)
class SonarrEntity(Entity):
"""Defines a base Sonarr entity."""
def __init__(
self,
*,
sonarr: Sonarr,
entry_id: str,
device_id: str,
name: str,
icon: str,
enabled_default: bool = True,
) -> None:
"""Initialize the Sonar entity."""
self._entry_id = entry_id
self._device_id = device_id
self._enabled_default = enabled_default
self._icon = icon
self._name = name
self.sonarr = sonarr
@property
def name(self) -> str:
"""Return the name of the entity."""
return self._name
@property
def icon(self) -> str:
"""Return the mdi icon of the entity."""
return self._icon
@property
def entity_registry_enabled_default(self) -> bool:
"""Return if the entity should be enabled when first added to the entity registry."""
return self._enabled_default
@property
def device_info(self) -> Dict[str, Any]:
"""Return device information about the application."""
if self._device_id is None:
return None
return {
ATTR_IDENTIFIERS: {(DOMAIN, self._device_id)},
ATTR_NAME: "Activity Sensor",
ATTR_MANUFACTURER: "Sonarr",
ATTR_SOFTWARE_VERSION: self.sonarr.app.info.version,
"entry_type": "service",
}
|
import logging
import select
import socket
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PAYLOAD,
CONF_PORT,
CONF_TIMEOUT,
CONF_UNIT_OF_MEASUREMENT,
CONF_VALUE_TEMPLATE,
)
from homeassistant.exceptions import TemplateError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
CONF_BUFFER_SIZE = "buffer_size"
CONF_VALUE_ON = "value_on"
DEFAULT_BUFFER_SIZE = 1024
DEFAULT_NAME = "TCP Sensor"
DEFAULT_TIMEOUT = 10
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PORT): cv.port,
vol.Required(CONF_PAYLOAD): cv.string,
vol.Optional(CONF_BUFFER_SIZE, default=DEFAULT_BUFFER_SIZE): cv.positive_int,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
vol.Optional(CONF_VALUE_ON): cv.string,
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
}
)
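# Example configuration.yaml entry for this platform (illustrative only; the
# host, port and payload values below are placeholders, not defaults):
#
# sensor:
#   - platform: tcp
#     name: My TCP sensor
#     host: 10.0.0.2
#     port: 8888
#     payload: "status\n"
#     timeout: 5
#     value_template: "{{ value.split(';')[0] }}"
#     unit_of_measurement: "°C"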
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the TCP Sensor."""
add_entities([TcpSensor(hass, config)])
class TcpSensor(Entity):
"""Implementation of a TCP socket based sensor."""
required = ()
def __init__(self, hass, config):
"""Set all the config values if they exist and get initial state."""
value_template = config.get(CONF_VALUE_TEMPLATE)
if value_template is not None:
value_template.hass = hass
self._hass = hass
self._config = {
CONF_NAME: config.get(CONF_NAME),
CONF_HOST: config.get(CONF_HOST),
CONF_PORT: config.get(CONF_PORT),
CONF_TIMEOUT: config.get(CONF_TIMEOUT),
CONF_PAYLOAD: config.get(CONF_PAYLOAD),
CONF_UNIT_OF_MEASUREMENT: config.get(CONF_UNIT_OF_MEASUREMENT),
CONF_VALUE_TEMPLATE: value_template,
CONF_VALUE_ON: config.get(CONF_VALUE_ON),
CONF_BUFFER_SIZE: config.get(CONF_BUFFER_SIZE),
}
self._state = None
self.update()
@property
def name(self):
"""Return the name of this sensor."""
name = self._config[CONF_NAME]
if name is not None:
return name
return super().name
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity."""
return self._config[CONF_UNIT_OF_MEASUREMENT]
def update(self):
"""Get the latest value for this sensor."""
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
sock.settimeout(self._config[CONF_TIMEOUT])
try:
sock.connect((self._config[CONF_HOST], self._config[CONF_PORT]))
except OSError as err:
_LOGGER.error(
"Unable to connect to %s on port %s: %s",
self._config[CONF_HOST],
self._config[CONF_PORT],
err,
)
return
try:
sock.send(self._config[CONF_PAYLOAD].encode())
except OSError as err:
_LOGGER.error(
"Unable to send payload %r to %s on port %s: %s",
self._config[CONF_PAYLOAD],
self._config[CONF_HOST],
self._config[CONF_PORT],
err,
)
return
readable, _, _ = select.select([sock], [], [], self._config[CONF_TIMEOUT])
if not readable:
_LOGGER.warning(
"Timeout (%s second(s)) waiting for a response after "
"sending %r to %s on port %s",
self._config[CONF_TIMEOUT],
self._config[CONF_PAYLOAD],
self._config[CONF_HOST],
self._config[CONF_PORT],
)
return
value = sock.recv(self._config[CONF_BUFFER_SIZE]).decode()
if self._config[CONF_VALUE_TEMPLATE] is not None:
try:
self._state = self._config[CONF_VALUE_TEMPLATE].render(
parse_result=False, value=value
)
return
except TemplateError:
_LOGGER.error(
"Unable to render template of %r with value: %r",
self._config[CONF_VALUE_TEMPLATE],
value,
)
return
self._state = value
|
from ipaddress import ip_address
import os
from aiohttp import web
from aiohttp.web_exceptions import HTTPUnauthorized
from aiohttp.web_middlewares import middleware
import pytest
import homeassistant.components.http as http
from homeassistant.components.http import KEY_AUTHENTICATED
from homeassistant.components.http.ban import (
IP_BANS_FILE,
KEY_BANNED_IPS,
KEY_FAILED_LOGIN_ATTEMPTS,
IpBan,
setup_bans,
)
from homeassistant.components.http.view import request_handler_factory
from homeassistant.const import HTTP_FORBIDDEN
from homeassistant.setup import async_setup_component
from . import mock_real_ip
from tests.async_mock import Mock, mock_open, patch
from tests.common import async_mock_service
SUPERVISOR_IP = "1.2.3.4"
BANNED_IPS = ["200.201.202.203", "100.64.0.2"]
BANNED_IPS_WITH_SUPERVISOR = BANNED_IPS + [SUPERVISOR_IP]
@pytest.fixture(name="hassio_env")
def hassio_env_fixture():
"""Fixture to inject hassio env."""
with patch.dict(os.environ, {"HASSIO": "127.0.0.1"}), patch(
"homeassistant.components.hassio.HassIO.is_connected",
return_value={"result": "ok", "data": {}},
), patch.dict(os.environ, {"HASSIO_TOKEN": "123456"}):
yield
@pytest.fixture(autouse=True)
def gethostbyaddr_mock():
"""Fixture to mock out I/O on getting host by address."""
with patch(
"homeassistant.components.http.ban.gethostbyaddr",
return_value=("example.com", ["0.0.0.0.in-addr.arpa"], ["0.0.0.0"]),
):
yield
async def test_access_from_banned_ip(hass, aiohttp_client):
"""Test accessing to server from banned IP. Both trusted and not."""
app = web.Application()
app["hass"] = hass
setup_bans(hass, app, 5)
set_real_ip = mock_real_ip(app)
with patch(
"homeassistant.components.http.ban.async_load_ip_bans_config",
return_value=[IpBan(banned_ip) for banned_ip in BANNED_IPS],
):
client = await aiohttp_client(app)
for remote_addr in BANNED_IPS:
set_real_ip(remote_addr)
resp = await client.get("/")
assert resp.status == HTTP_FORBIDDEN
@pytest.mark.parametrize(
"remote_addr, bans, status",
list(
zip(
BANNED_IPS_WITH_SUPERVISOR, [1, 1, 0], [HTTP_FORBIDDEN, HTTP_FORBIDDEN, 401]
)
),
)
async def test_access_from_supervisor_ip(
remote_addr, bans, status, hass, aiohttp_client, hassio_env
):
"""Test accessing to server from supervisor IP."""
app = web.Application()
app["hass"] = hass
async def unauth_handler(request):
"""Return a mock web response."""
raise HTTPUnauthorized
app.router.add_get("/", unauth_handler)
setup_bans(hass, app, 1)
mock_real_ip(app)(remote_addr)
with patch(
"homeassistant.components.http.ban.async_load_ip_bans_config", return_value=[]
):
client = await aiohttp_client(app)
assert await async_setup_component(hass, "hassio", {"hassio": {}})
m_open = mock_open()
with patch.dict(os.environ, {"SUPERVISOR": SUPERVISOR_IP}), patch(
"homeassistant.components.http.ban.open", m_open, create=True
):
resp = await client.get("/")
assert resp.status == 401
assert len(app[KEY_BANNED_IPS]) == bans
assert m_open.call_count == bans
# second request should be forbidden if banned
resp = await client.get("/")
assert resp.status == status
assert len(app[KEY_BANNED_IPS]) == bans
async def test_ban_middleware_not_loaded_by_config(hass):
"""Test accessing to server from banned IP when feature is off."""
with patch("homeassistant.components.http.setup_bans") as mock_setup:
await async_setup_component(
hass, "http", {"http": {http.CONF_IP_BAN_ENABLED: False}}
)
assert len(mock_setup.mock_calls) == 0
async def test_ban_middleware_loaded_by_default(hass):
"""Test accessing to server from banned IP when feature is off."""
with patch("homeassistant.components.http.setup_bans") as mock_setup:
await async_setup_component(hass, "http", {"http": {}})
assert len(mock_setup.mock_calls) == 1
async def test_ip_bans_file_creation(hass, aiohttp_client):
"""Testing if banned IP file created."""
notification_calls = async_mock_service(hass, "persistent_notification", "create")
app = web.Application()
app["hass"] = hass
async def unauth_handler(request):
"""Return a mock web response."""
raise HTTPUnauthorized
app.router.add_get("/", unauth_handler)
setup_bans(hass, app, 2)
mock_real_ip(app)("200.201.202.204")
with patch(
"homeassistant.components.http.ban.async_load_ip_bans_config",
return_value=[IpBan(banned_ip) for banned_ip in BANNED_IPS],
):
client = await aiohttp_client(app)
m_open = mock_open()
with patch("homeassistant.components.http.ban.open", m_open, create=True):
resp = await client.get("/")
assert resp.status == 401
assert len(app[KEY_BANNED_IPS]) == len(BANNED_IPS)
assert m_open.call_count == 0
resp = await client.get("/")
assert resp.status == 401
assert len(app[KEY_BANNED_IPS]) == len(BANNED_IPS) + 1
m_open.assert_called_once_with(hass.config.path(IP_BANS_FILE), "a")
resp = await client.get("/")
assert resp.status == HTTP_FORBIDDEN
assert m_open.call_count == 1
assert len(notification_calls) == 3
assert (
"Login attempt or request with invalid authentication from example.com (200.201.202.204) (Python"
in notification_calls[0].data["message"]
)
async def test_failed_login_attempts_counter(hass, aiohttp_client):
"""Testing if failed login attempts counter increased."""
app = web.Application()
app["hass"] = hass
async def auth_handler(request):
"""Return 200 status code."""
return None, 200
app.router.add_get(
"/auth_true", request_handler_factory(Mock(requires_auth=True), auth_handler)
)
app.router.add_get(
"/auth_false", request_handler_factory(Mock(requires_auth=True), auth_handler)
)
app.router.add_get(
"/", request_handler_factory(Mock(requires_auth=False), auth_handler)
)
setup_bans(hass, app, 5)
remote_ip = ip_address("200.201.202.204")
mock_real_ip(app)("200.201.202.204")
@middleware
async def mock_auth(request, handler):
"""Mock auth middleware."""
if "auth_true" in request.path:
request[KEY_AUTHENTICATED] = True
else:
request[KEY_AUTHENTICATED] = False
return await handler(request)
app.middlewares.append(mock_auth)
client = await aiohttp_client(app)
resp = await client.get("/auth_false")
assert resp.status == 401
assert app[KEY_FAILED_LOGIN_ATTEMPTS][remote_ip] == 1
resp = await client.get("/auth_false")
assert resp.status == 401
assert app[KEY_FAILED_LOGIN_ATTEMPTS][remote_ip] == 2
resp = await client.get("/")
assert resp.status == 200
assert app[KEY_FAILED_LOGIN_ATTEMPTS][remote_ip] == 2
# This used to check that with trusted networks we reset login attempts
# We no longer support trusted networks.
resp = await client.get("/auth_true")
assert resp.status == 200
assert app[KEY_FAILED_LOGIN_ATTEMPTS][remote_ip] == 2
|
import numpy as np
import pandas as pd
import pytest
from mock import patch, Mock, sentinel
from numpy.testing import assert_array_equal
from pandas import Timestamp
import arctic.serialization.numpy_records as anr
class FastCheckSerializable(object):
def __init__(self, enabled):
self.enabled = bool(enabled)
self.orig_setting = None
def __enter__(self):
self.orig_setting = anr.FAST_CHECK_DF_SERIALIZABLE
anr.set_fast_check_df_serializable(self.enabled)
def __exit__(self, *args):
anr.set_fast_check_df_serializable(self.orig_setting)
def test_to_primitive_timestamps():
arr = anr._to_primitive(np.array([Timestamp('2010-11-12 00:00:00')]))
assert_array_equal(arr, np.array([Timestamp('2010-11-12 00:00:00').value], dtype='datetime64[ns]'))
def test_to_primitive_fixed_length_strings():
mydf = pd.DataFrame({'a': ['abc', u'xyz', '']})
primitives_arr = anr._to_primitive(np.array(mydf.a.values), string_max_len=32)
assert_array_equal(primitives_arr, np.array([u'abc', u'xyz', u''], dtype='U32'))
assert primitives_arr.dtype == np.dtype('U32')
@pytest.mark.parametrize("fast_serializable_check", (True, False))
def test_can_convert_to_records_without_objects_returns_false_on_exception_in_to_records(fast_serializable_check):
with FastCheckSerializable(fast_serializable_check):
store = anr.PandasSerializer()
mymock = Mock(side_effect=TypeError('uhoh'))
if fast_serializable_check:
store.fast_check_serializable = mymock
else:
store._to_records = mymock
with patch('arctic.serialization.numpy_records.log') as mock_log:
assert store.can_convert_to_records_without_objects(sentinel.df, 'my_symbol') is False
assert 'Pandas dataframe my_symbol caused exception' in str(mock_log.warning.call_args)
if fast_serializable_check:
store.fast_check_serializable.assert_called_once_with(sentinel.df)
else:
store._to_records.assert_called_once_with(sentinel.df)
@pytest.mark.parametrize("fast_serializable_check", (True, False))
def test_can_convert_to_records_without_objects_returns_false_when_records_have_object_dtype(fast_serializable_check):
with FastCheckSerializable(fast_serializable_check):
store = anr.PandasSerializer()
mymock = Mock(return_value=(np.array(['a', 'b', None, 'd']), None))
if fast_serializable_check:
store.fast_check_serializable = mymock
else:
store._to_records = mymock
with patch('arctic.serialization.numpy_records.log') as mock_log:
assert store.can_convert_to_records_without_objects(sentinel.df, 'my_symbol') is False
mock_log.warning.assert_called_once_with('Pandas dataframe my_symbol contains Objects, saving as Blob')
if fast_serializable_check:
store.fast_check_serializable.assert_called_once_with(sentinel.df)
else:
store._to_records.assert_called_once_with(sentinel.df)
@pytest.mark.parametrize("fast_serializable_check", (True, False))
def test_can_convert_to_records_without_objects_returns_false_when_records_have_arrays_in_them(fast_serializable_check):
with FastCheckSerializable(fast_serializable_check):
store = anr.PandasSerializer()
mymock = Mock(return_value=(np.rec.array([(1356998400000000000, ['A', 'BC'])], dtype=[('index', '<M8[ns]'), ('values', 'S2', (2,))]), None))
if fast_serializable_check:
store.fast_check_serializable = mymock
else:
store._to_records = mymock
with patch('arctic.serialization.numpy_records.log') as mock_log:
assert store.can_convert_to_records_without_objects(sentinel.df, 'my_symbol') is False
mock_log.warning.assert_called_once_with('Pandas dataframe my_symbol contains >1 dimensional arrays, saving as Blob')
if fast_serializable_check:
store.fast_check_serializable.assert_called_once_with(sentinel.df)
else:
store._to_records.assert_called_once_with(sentinel.df)
@pytest.mark.parametrize("fast_serializable_check", (True, False))
def test_can_convert_to_records_without_objects_returns_true_otherwise(fast_serializable_check):
with FastCheckSerializable(fast_serializable_check):
store = anr.PandasSerializer()
mymock = Mock(return_value=(np.rec.array([(1356998400000000000, 'a')], dtype=[('index', '<M8[ns]'), ('values', 'S2')]), None))
if fast_serializable_check:
store.fast_check_serializable = mymock
else:
store._to_records = mymock
with patch('arctic.serialization.numpy_records.log') as mock_log:
assert store.can_convert_to_records_without_objects(sentinel.df, 'my_symbol') is True
assert mock_log.warning.call_count == 0
if fast_serializable_check:
store.fast_check_serializable.assert_called_once_with(sentinel.df)
else:
store._to_records.assert_called_once_with(sentinel.df)
@pytest.mark.parametrize("fast_serializable_check", (False, True))
def test_can_convert_to_records_mixed_object_column_string_nan(fast_serializable_check):
with FastCheckSerializable(fast_serializable_check):
serializer = anr.DataFrameSerializer()
df = pd.DataFrame({'a': [1, 3, 4], 'b': [1.2, 8.0, 0.2]})
assert serializer.can_convert_to_records_without_objects(df, 'my_symbol')
df = pd.DataFrame({'a': [1, 3, 4], 'b': [1, 8.0, 2]})
assert serializer.can_convert_to_records_without_objects(df, 'my_symbol')
df = pd.DataFrame({'a': [1, 3, 4], 'b': [1.2, 8.0, np.NaN]})
assert serializer.can_convert_to_records_without_objects(df, 'my_symbol')
df = pd.DataFrame({'a': ['abc', 'cde', 'def'], 'b': [1.2, 8.0, np.NaN]})
assert serializer.can_convert_to_records_without_objects(df, 'my_symbol')
df = pd.DataFrame({'a': [u'abc', u'cde', 'def'], 'b': [1.2, 8.0, np.NaN]})
assert serializer.can_convert_to_records_without_objects(df, 'my_symbol')
df = pd.DataFrame({'a': [u'abc', u'cde', 'def'], 'b': [1.2, '8.0', np.NaN]})
assert not serializer.can_convert_to_records_without_objects(df, 'my_symbol')
# Do not serialize and force-stringify None
df = pd.DataFrame({'a': ['abc', None, 'def'], 'b': [1.2, 8.0, np.NaN]})
assert not serializer.can_convert_to_records_without_objects(df, 'my_symbol')
# Do not serialize and force-stringify np.NaN among strings, rather pickle
df = pd.DataFrame({'a': ['abc', np.NaN, 'def'], 'b': [1.2, 8.0, np.NaN]})
assert not serializer.can_convert_to_records_without_objects(df, 'my_symbol')
|
from datetime import timedelta
import logging
import requests
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_API_KEY,
CONF_BASE,
CONF_NAME,
CONF_QUOTE,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
_RESOURCE = "http://apilayer.net/api/live"
ATTRIBUTION = "Data provided by currencylayer.com"
DEFAULT_BASE = "USD"
DEFAULT_NAME = "CurrencyLayer Sensor"
ICON = "mdi:currency"
SCAN_INTERVAL = timedelta(hours=4)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_QUOTE): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_BASE, default=DEFAULT_BASE): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
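# Example configuration.yaml entry for this platform (illustrative only; the
# API key is a placeholder):
#
# sensor:
#   - platform: currencylayer
#     api_key: YOUR_ACCESS_KEY
#     base: USD
#     quote:
#       - EUR
#       - GBP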
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Currencylayer sensor."""
base = config[CONF_BASE]
api_key = config[CONF_API_KEY]
parameters = {"source": base, "access_key": api_key, "format": 1}
rest = CurrencylayerData(_RESOURCE, parameters)
response = requests.get(_RESOURCE, params=parameters, timeout=10)
sensors = []
for variable in config[CONF_QUOTE]:
sensors.append(CurrencylayerSensor(rest, base, variable))
if "error" in response.json():
return False
add_entities(sensors, True)
class CurrencylayerSensor(Entity):
"""Implementing the Currencylayer sensor."""
def __init__(self, rest, base, quote):
"""Initialize the sensor."""
self.rest = rest
self._quote = quote
self._base = base
self._state = None
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._quote
@property
def name(self):
"""Return the name of the sensor."""
return self._base
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return ICON
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def device_state_attributes(self):
"""Return the state attributes of the sensor."""
return {ATTR_ATTRIBUTION: ATTRIBUTION}
def update(self):
"""Update current date."""
self.rest.update()
value = self.rest.data
if value is not None:
self._state = round(value[f"{self._base}{self._quote}"], 4)
class CurrencylayerData:
"""Get data from Currencylayer.org."""
def __init__(self, resource, parameters):
"""Initialize the data object."""
self._resource = resource
self._parameters = parameters
self.data = None
def update(self):
"""Get the latest data from Currencylayer."""
try:
result = requests.get(self._resource, params=self._parameters, timeout=10)
if "error" in result.json():
raise ValueError(result.json()["error"]["info"])
self.data = result.json()["quotes"]
_LOGGER.debug("Currencylayer data updated: %s", result.json()["timestamp"])
except ValueError as err:
_LOGGER.error("Check Currencylayer API %s", err.args)
self.data = None
|
import chainer
import chainer.functions as F
from chainer import initializers
import chainer.links as L
class Multibox(chainer.Chain):
"""Multibox head of Single Shot Multibox Detector.
This is a head part of Single Shot Multibox Detector [#]_.
This link computes :obj:`mb_locs` and :obj:`mb_confs` from feature maps.
:obj:`mb_locs` contains information of the coordinates of bounding boxes
    and :obj:`mb_confs` contains confidence scores for each class.
.. [#] Wei Liu, Dragomir Anguelov, Dumitru Erhan,
Christian Szegedy, Scott Reed, Cheng-Yang Fu, Alexander C. Berg.
SSD: Single Shot MultiBox Detector. ECCV 2016.
Args:
n_class (int): The number of classes possibly including the background.
aspect_ratios (iterable of tuple or int): The aspect ratios of
default bounding boxes for each feature map.
initialW: An initializer used in
            :meth:`chainer.links.Convolution2D.__init__`.
The default value is :class:`chainer.initializers.LeCunUniform`.
initial_bias: An initializer used in
            :meth:`chainer.links.Convolution2D.__init__`.
The default value is :class:`chainer.initializers.Zero`.
"""
def __init__(
self, n_class, aspect_ratios,
initialW=None, initial_bias=None):
self.n_class = n_class
self.aspect_ratios = aspect_ratios
super(Multibox, self).__init__()
with self.init_scope():
self.loc = chainer.ChainList()
self.conf = chainer.ChainList()
if initialW is None:
initialW = initializers.LeCunUniform()
if initial_bias is None:
initial_bias = initializers.Zero()
init = {'initialW': initialW, 'initial_bias': initial_bias}
for ar in aspect_ratios:
n = (len(ar) + 1) * 2
self.loc.add_link(L.Convolution2D(n * 4, 3, pad=1, **init))
self.conf.add_link(L.Convolution2D(
n * self.n_class, 3, pad=1, **init))
def forward(self, xs):
"""Compute loc and conf from feature maps
This method computes :obj:`mb_locs` and :obj:`mb_confs`
from given feature maps.
Args:
xs (iterable of chainer.Variable): An iterable of feature maps.
                The number of feature maps must be the same as the number of
:obj:`aspect_ratios`.
Returns:
tuple of chainer.Variable:
This method returns two :obj:`chainer.Variable`: :obj:`mb_locs` and
:obj:`mb_confs`.
* **mb_locs**: A variable of float arrays of shape \
:math:`(B, K, 4)`, \
where :math:`B` is the number of samples in the batch and \
:math:`K` is the number of default bounding boxes.
* **mb_confs**: A variable of float arrays of shape \
:math:`(B, K, n\_fg\_class + 1)`.
"""
mb_locs = []
mb_confs = []
for i, x in enumerate(xs):
mb_loc = self.loc[i](x)
mb_loc = F.transpose(mb_loc, (0, 2, 3, 1))
mb_loc = F.reshape(mb_loc, (mb_loc.shape[0], -1, 4))
mb_locs.append(mb_loc)
mb_conf = self.conf[i](x)
mb_conf = F.transpose(mb_conf, (0, 2, 3, 1))
mb_conf = F.reshape(
mb_conf, (mb_conf.shape[0], -1, self.n_class))
mb_confs.append(mb_conf)
mb_locs = F.concat(mb_locs, axis=1)
mb_confs = F.concat(mb_confs, axis=1)
return mb_locs, mb_confs
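if __name__ == '__main__':
    # Minimal usage sketch (not part of the original module). The channel
    # count (16) and the six feature-map sizes below are placeholders; the
    # convolutions above leave in_channels unspecified, so any channel count
    # works. aspect_ratios follows the common SSD300-style configuration.
    import numpy as np

    multibox = Multibox(
        n_class=21,
        aspect_ratios=((2,), (2, 3), (2, 3), (2, 3), (2,), (2,)))
    xs = [np.zeros((1, 16, size, size), dtype=np.float32)
          for size in (38, 19, 10, 5, 3, 1)]
    mb_locs, mb_confs = multibox.forward(xs)
    # mb_locs: (1, K, 4), mb_confs: (1, K, 21), where K is the total number
    # of default bounding boxes over all feature maps.
    print(mb_locs.shape, mb_confs.shape)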
|
from lark import Lark, UnexpectedInput
from _json_parser import json_grammar # Using the grammar from the json_parser example
json_parser = Lark(json_grammar)
class JsonSyntaxError(SyntaxError):
def __str__(self):
context, line, column = self.args
return '%s at line %s, column %s.\n\n%s' % (self.label, line, column, context)
class JsonMissingValue(JsonSyntaxError):
label = 'Missing Value'
class JsonMissingOpening(JsonSyntaxError):
label = 'Missing Opening'
class JsonMissingClosing(JsonSyntaxError):
label = 'Missing Closing'
class JsonMissingComma(JsonSyntaxError):
label = 'Missing Comma'
class JsonTrailingComma(JsonSyntaxError):
label = 'Trailing Comma'
def parse(json_text):
try:
j = json_parser.parse(json_text)
except UnexpectedInput as u:
exc_class = u.match_examples(json_parser.parse, {
JsonMissingOpening: ['{"foo": ]}',
'{"foor": }}',
'{"foo": }'],
JsonMissingClosing: ['{"foo": [}',
'{',
'{"a": 1',
'[1'],
JsonMissingComma: ['[1 2]',
'[false 1]',
'["b" 1]',
'{"a":true 1:4}',
'{"a":1 1:4}',
'{"a":"b" 1:4}'],
JsonTrailingComma: ['[,]',
'[1,]',
'[1,2,]',
'{"foo":1,}',
'{"foo":false,"bar":true,}']
}, use_accepts=True)
if not exc_class:
raise
raise exc_class(u.get_context(json_text), u.line, u.column)
def test():
try:
parse('{"example1": "value"')
except JsonMissingClosing as e:
print(e)
try:
parse('{"example2": ] ')
except JsonMissingOpening as e:
print(e)
if __name__ == '__main__':
test()
|
import os
import sys
import unittest
import uuid
import random
from nacl import utils, public
from autobahn import util
@unittest.skipIf(not ('AUTOBAHN_CI_ENABLE_RNG_DEPLETION_TESTS' in os.environ and os.environ['AUTOBAHN_CI_ENABLE_RNG_DEPLETION_TESTS']), 'entropy depletion tests not enabled (env var AUTOBAHN_CI_ENABLE_RNG_DEPLETION_TESTS not set)')
@unittest.skipIf(not sys.platform.startswith('linux'), 'entropy depletion tests only available on Linux')
class TestEntropy(unittest.TestCase):
def test_non_depleting(self):
res = {}
with open('/dev/urandom', 'rb') as rng:
for i in range(1000):
for j in range(100):
# "reseed" (seems pointless, but ..)
random.seed()
# random UUIDs
v1 = uuid.uuid4() # noqa
# stdlib random
v2 = random.random() # noqa
v3 = random.getrandbits(32) # noqa
v4 = random.randint(0, 9007199254740992) # noqa
v5 = random.normalvariate(10, 100) # noqa
v6 = random.choice(range(100)) # noqa
# PyNaCl
v7 = utils.random(public.Box.NONCE_SIZE) # noqa
# Autobahn utils
v8 = util.generate_token(4, 4) # noqa
v9 = util.id() # noqa
v10 = util.rid() # noqa
v11 = util.newid() # noqa
# direct procfs access to PRNG
d = rng.read(1000) # noqa
# check available entropy
with open('/proc/sys/kernel/random/entropy_avail', 'r') as ent:
ea = int(ent.read()) // 100
if ea not in res:
res[ea] = 0
res[ea] += 1
skeys = sorted(res.keys())
print('\nsystem entropy depletion stats:')
for k in skeys:
print('{}: {}'.format(k, res[k]))
self.assertTrue(skeys[0] > 0)
def test_depleting(self):
res = {}
with open('/dev/random', 'rb') as rng:
for i in range(10000):
# direct procfs access to "real" RNG
d = rng.read(1000) # noqa
# check available entropy
with open('/proc/sys/kernel/random/entropy_avail', 'r') as ent:
ea = int(ent.read()) // 100
if ea not in res:
res[ea] = 0
res[ea] += 1
skeys = sorted(res.keys())
print('\nsystem entropy depletion stats:')
for k in skeys:
print('{}: {}'.format(k, res[k]))
self.assertTrue(skeys[0] == 0)
|
import subprocess
from distutils.version import LooseVersion
from unittest import SkipTest
from django.core.cache import cache
from django.test import TestCase
from django.test.utils import override_settings
import weblate.vcs.gpg
from weblate.utils.checks import check_data_writable
from weblate.utils.unittest import tempdir_setting
from weblate.vcs.gpg import (
generate_gpg_key,
get_gpg_key,
get_gpg_public_key,
get_gpg_sign_key,
)
class GPGTest(TestCase):
gpg_error = None
@classmethod
def setUpClass(cls):
"""Check whether we can use gpg."""
super().setUpClass()
try:
result = subprocess.run(
["gpg", "--version"],
check=True,
universal_newlines=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
version = result.stdout.splitlines()[0].strip().rsplit(None, 1)[-1]
if LooseVersion(version) < LooseVersion("2.1"):
cls.gpg_error = "gpg too old"
except (subprocess.CalledProcessError, OSError):
cls.gpg_error = "gpg not found"
def setUp(self):
if self.gpg_error:
raise SkipTest(self.gpg_error)
def check_errors(self):
self.assertEqual(weblate.vcs.gpg.GPG_ERRORS, {})
@tempdir_setting("DATA_DIR")
@override_settings(
WEBLATE_GPG_IDENTITY="Weblate <[email protected]>", WEBLATE_GPG_ALGO="rsa512"
)
def test_generate(self):
self.assertEqual(check_data_writable(), [])
self.assertIsNone(get_gpg_key(silent=True))
key = generate_gpg_key()
self.check_errors()
self.assertIsNotNone(key)
self.assertEqual(key, get_gpg_key())
@tempdir_setting("DATA_DIR")
@override_settings(
WEBLATE_GPG_IDENTITY="Weblate <[email protected]>", WEBLATE_GPG_ALGO="rsa512"
)
def test_get(self):
self.assertEqual(check_data_writable(), [])
# This will generate new key
key = get_gpg_sign_key()
self.check_errors()
self.assertIsNotNone(key)
# Check cache access
self.assertEqual(key, get_gpg_sign_key())
# Check empty cache
cache.delete("gpg-key-id")
self.assertEqual(key, get_gpg_sign_key())
@tempdir_setting("DATA_DIR")
@override_settings(
WEBLATE_GPG_IDENTITY="Weblate <[email protected]>", WEBLATE_GPG_ALGO="rsa512"
)
def test_public(self):
self.assertEqual(check_data_writable(), [])
# This will generate new key
key = get_gpg_public_key()
self.check_errors()
self.assertIsNotNone(key)
# Check cache access
self.assertEqual(key, get_gpg_public_key())
|
import logging
from typing import Any, Dict, Optional
from sonarr import Sonarr, SonarrAccessRestricted, SonarrError
import voluptuous as vol
from homeassistant.config_entries import CONN_CLASS_LOCAL_POLL, ConfigFlow, OptionsFlow
from homeassistant.const import (
CONF_API_KEY,
CONF_HOST,
CONF_PORT,
CONF_SSL,
CONF_VERIFY_SSL,
)
from homeassistant.core import callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from .const import (
CONF_BASE_PATH,
CONF_UPCOMING_DAYS,
CONF_WANTED_MAX_ITEMS,
DEFAULT_BASE_PATH,
DEFAULT_PORT,
DEFAULT_SSL,
DEFAULT_UPCOMING_DAYS,
DEFAULT_VERIFY_SSL,
DEFAULT_WANTED_MAX_ITEMS,
)
from .const import DOMAIN # pylint: disable=unused-import
_LOGGER = logging.getLogger(__name__)
async def validate_input(hass: HomeAssistantType, data: dict) -> Dict[str, Any]:
"""Validate the user input allows us to connect.
Data has the keys from DATA_SCHEMA with values provided by the user.
"""
session = async_get_clientsession(hass)
sonarr = Sonarr(
host=data[CONF_HOST],
port=data[CONF_PORT],
api_key=data[CONF_API_KEY],
base_path=data[CONF_BASE_PATH],
tls=data[CONF_SSL],
verify_ssl=data[CONF_VERIFY_SSL],
session=session,
)
await sonarr.update()
return True
class SonarrConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Sonarr."""
VERSION = 1
CONNECTION_CLASS = CONN_CLASS_LOCAL_POLL
def __init__(self):
"""Initialize the flow."""
self._reauth = False
self._entry_id = None
self._entry_data = {}
@staticmethod
@callback
def async_get_options_flow(config_entry):
"""Get the options flow for this handler."""
return SonarrOptionsFlowHandler(config_entry)
async def async_step_reauth(
self, data: Optional[ConfigType] = None
) -> Dict[str, Any]:
"""Handle configuration by re-auth."""
self._reauth = True
self._entry_data = dict(data)
self._entry_id = self._entry_data.pop("config_entry_id")
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: Optional[ConfigType] = None
) -> Dict[str, Any]:
"""Confirm reauth dialog."""
if user_input is None:
return self.async_show_form(
step_id="reauth_confirm",
description_placeholders={"host": self._entry_data[CONF_HOST]},
data_schema=vol.Schema({}),
errors={},
)
return await self.async_step_user()
async def async_step_user(
self, user_input: Optional[ConfigType] = None
) -> Dict[str, Any]:
"""Handle a flow initiated by the user."""
errors = {}
if user_input is not None:
if self._reauth:
user_input = {**self._entry_data, **user_input}
if CONF_VERIFY_SSL not in user_input:
user_input[CONF_VERIFY_SSL] = DEFAULT_VERIFY_SSL
try:
await validate_input(self.hass, user_input)
except SonarrAccessRestricted:
errors = {"base": "invalid_auth"}
except SonarrError:
errors = {"base": "cannot_connect"}
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
return self.async_abort(reason="unknown")
else:
if self._reauth:
return await self._async_reauth_update_entry(
self._entry_id, user_input
)
return self.async_create_entry(
title=user_input[CONF_HOST], data=user_input
)
data_schema = self._get_user_data_schema()
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(data_schema),
errors=errors,
)
async def _async_reauth_update_entry(
self, entry_id: str, data: dict
) -> Dict[str, Any]:
"""Update existing config entry."""
entry = self.hass.config_entries.async_get_entry(entry_id)
self.hass.config_entries.async_update_entry(entry, data=data)
await self.hass.config_entries.async_reload(entry.entry_id)
return self.async_abort(reason="reauth_successful")
def _get_user_data_schema(self) -> Dict[str, Any]:
"""Get the data schema to display user form."""
if self._reauth:
return {vol.Required(CONF_API_KEY): str}
data_schema = {
vol.Required(CONF_HOST): str,
vol.Required(CONF_API_KEY): str,
vol.Optional(CONF_BASE_PATH, default=DEFAULT_BASE_PATH): str,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): int,
vol.Optional(CONF_SSL, default=DEFAULT_SSL): bool,
}
if self.show_advanced_options:
data_schema[
vol.Optional(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL)
] = bool
return data_schema
class SonarrOptionsFlowHandler(OptionsFlow):
"""Handle Sonarr client options."""
def __init__(self, config_entry):
"""Initialize options flow."""
self.config_entry = config_entry
async def async_step_init(self, user_input: Optional[ConfigType] = None):
"""Manage Sonarr options."""
if user_input is not None:
return self.async_create_entry(title="", data=user_input)
options = {
vol.Optional(
CONF_UPCOMING_DAYS,
default=self.config_entry.options.get(
CONF_UPCOMING_DAYS, DEFAULT_UPCOMING_DAYS
),
): int,
vol.Optional(
CONF_WANTED_MAX_ITEMS,
default=self.config_entry.options.get(
CONF_WANTED_MAX_ITEMS, DEFAULT_WANTED_MAX_ITEMS
),
): int,
}
return self.async_show_form(step_id="init", data_schema=vol.Schema(options))
|
import argparse
import pandas as pd
from scattertext import CorpusFromPandas, produce_scattertext_explorer, Common
from scattertext.WhitespaceNLP import whitespace_nlp_with_sentences
from scattertext.termranking import OncePerDocFrequencyRanker
def main():
parser = argparse.ArgumentParser(description="A primitive, incomplete commandline interface to Scattertext.")
parser.add_argument('--datafile', action='store', dest='datafile', required=True,
help="Path (or URL) of a CSV file with at least two columns."
"Text and category column names are indicated by the --text_column"
"and --category_column arguments. By default, they are 'text', and 'category'. "
"Optionally, a metadata "
"column (named in the --metadata argument) can be present. ")
parser.add_argument('--outputfile', action='store', dest='outputfile', default="-",
help="Path of HTML file on which to store visualization. Pass in - (default) for stdout.")
parser.add_argument('--text_column', action='store', dest='text_column', default="text",
help="Name of the text column.")
parser.add_argument('--category_column', action='store', dest='category_column', default="category",
help="Name of the category column.")
parser.add_argument('--metadata_column', action='store', dest='metadata_column', default=None,
help="Name of the category column.")
parser.add_argument('--positive_category', action='store', required=True,
dest='positive_category',
help="Postive category. A value in category_column to be considered the positive class. "
"All others will be considered negative.")
parser.add_argument('--category_display_name', action='store',
dest='category_display_name', default=None,
help="Positive category name which will "
"be used on the visualization. By default, it will just be the"
"postive category value.")
parser.add_argument('--not_category_display_name', action='store', default=None,
dest='not_category_display_name',
help="Positive category name which will "
"be used on the visualization. By default, it will just be the word 'not' "
"in front of the positive value.")
parser.add_argument('--pmi_threshold', action='store',
dest='pmi_threshold', type=int,
help="2 * minimum allowable PMI value. Default 6.")
parser.add_argument('--width_in_pixels', action='store',
dest='width_in_pixels', type=int, default=1000,
help="Width of the visualization in pixels.")
parser.add_argument('--minimum_term_frequency', action='store',
dest='minimum_term_frequency', type=int, default=3,
help="Minimum number of times a term needs to appear. Default 3")
parser.add_argument('--regex_parser', action='store_true',
dest='regex_parser', default=False,
help="If present, don't use spaCy for preprocessing. Instead, "
"use a simple, dumb, regex.")
parser.add_argument('--spacy_language_model', action='store',
dest='spacy_language_model', default='en',
help="If present, pick the spaCy language model to use. Default is 'en'. "
"Other valid values include 'de' and 'fr'. --regex_parser will override."
"Please see https://spacy.io/docs/api/language-models for moredetails")
parser.add_argument('--one_use_per_doc', action='store_true',
dest='one_use_per_doc', default=False,
help="Only count one use per document.")
args = parser.parse_args()
df = pd.read_csv(args.datafile)
if args.category_column not in df.columns:
raise Exception("category_column (%s) must be a column name in csv. Must be one of %s"
% (args.category_column, ', '.join(df.columns)))
if args.text_column not in df.columns:
raise Exception("text_column (%s) must be a column name in csv. Must be one of %s"
% (args.text_column, ', '.join(df.columns)))
if args.metadata_column is not None and args.metadata_column not in df.columns:
raise Exception("metadata_column (%s) must be a column name in csv. Must be one of %s"
% (args.metadata_column, ', '.join(df.columns)))
if args.positive_category not in df[args.category_column].unique():
raise Exception("positive_category (%s) must be in the column ""%s"", with a case-sensitive match." %
(args.positive_category, args.category_column))
if args.regex_parser:
nlp = whitespace_nlp_with_sentences
else:
import spacy
nlp = spacy.load(args.spacy_language_model)
term_ranker = None
if args.one_use_per_doc is True:
term_ranker = OncePerDocFrequencyRanker
category_display_name = args.category_display_name
if category_display_name is None:
category_display_name = args.positive_category
not_category_display_name = args.not_category_display_name
if not_category_display_name is None:
not_category_display_name = 'Not ' + category_display_name
corpus = CorpusFromPandas(df,
category_col=args.category_column,
text_col=args.text_column,
nlp=nlp).build()
html = produce_scattertext_explorer(corpus,
category=args.positive_category,
category_name=category_display_name,
not_category_name=not_category_display_name,
minimum_term_frequency=args.minimum_term_frequency,
pmi_filter_thresold=args.pmi_threshold,
width_in_pixels=args.width_in_pixels,
term_ranker=term_ranker,
metadata=None if args.metadata_column is None \
else df[args.metadata_column]
)
if args.outputfile == '-':
print(html)
else:
with open(args.outputfile, 'wb') as o:
o.write(html.encode('utf-8'))
if __name__ == '__main__':
main()
|
import asyncio
from typing import Any, Dict, Union
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.components.cover import DEVICE_CLASSES_SCHEMA
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT, CONF_TYPE
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv
# Loading the config flow file will register the flow
from .bridge import DynaliteBridge
from .const import (
ACTIVE_INIT,
ACTIVE_OFF,
ACTIVE_ON,
ATTR_AREA,
ATTR_CHANNEL,
ATTR_HOST,
CONF_ACTIVE,
CONF_AREA,
CONF_AUTO_DISCOVER,
CONF_BRIDGES,
CONF_CHANNEL,
CONF_CHANNEL_COVER,
CONF_CLOSE_PRESET,
CONF_DEFAULT,
CONF_DEVICE_CLASS,
CONF_DURATION,
CONF_FADE,
CONF_LEVEL,
CONF_NO_DEFAULT,
CONF_OPEN_PRESET,
CONF_POLL_TIMER,
CONF_PRESET,
CONF_ROOM_OFF,
CONF_ROOM_ON,
CONF_STOP_PRESET,
CONF_TEMPLATE,
CONF_TILT_TIME,
DEFAULT_CHANNEL_TYPE,
DEFAULT_NAME,
DEFAULT_PORT,
DEFAULT_TEMPLATES,
DOMAIN,
ENTITY_PLATFORMS,
LOGGER,
SERVICE_REQUEST_AREA_PRESET,
SERVICE_REQUEST_CHANNEL_LEVEL,
)
def num_string(value: Union[int, str]) -> str:
"""Test if value is a string of digits, aka an integer."""
new_value = str(value)
if new_value.isdigit():
return new_value
raise vol.Invalid("Not a string with numbers")
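# For example (illustrative): num_string(7) -> "7" and num_string("12") -> "12",
# while num_string("abc") raises vol.Invalid.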
CHANNEL_DATA_SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_FADE): vol.Coerce(float),
vol.Optional(CONF_TYPE, default=DEFAULT_CHANNEL_TYPE): vol.Any(
"light", "switch"
),
}
)
CHANNEL_SCHEMA = vol.Schema({num_string: CHANNEL_DATA_SCHEMA})
PRESET_DATA_SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_FADE): vol.Coerce(float),
vol.Optional(CONF_LEVEL): vol.Coerce(float),
}
)
PRESET_SCHEMA = vol.Schema({num_string: vol.Any(PRESET_DATA_SCHEMA, None)})
TEMPLATE_ROOM_SCHEMA = vol.Schema(
{vol.Optional(CONF_ROOM_ON): num_string, vol.Optional(CONF_ROOM_OFF): num_string}
)
TEMPLATE_TIMECOVER_SCHEMA = vol.Schema(
{
vol.Optional(CONF_CHANNEL_COVER): num_string,
vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_OPEN_PRESET): num_string,
vol.Optional(CONF_CLOSE_PRESET): num_string,
vol.Optional(CONF_STOP_PRESET): num_string,
vol.Optional(CONF_DURATION): vol.Coerce(float),
vol.Optional(CONF_TILT_TIME): vol.Coerce(float),
}
)
TEMPLATE_DATA_SCHEMA = vol.Any(TEMPLATE_ROOM_SCHEMA, TEMPLATE_TIMECOVER_SCHEMA)
TEMPLATE_SCHEMA = vol.Schema({str: TEMPLATE_DATA_SCHEMA})
def validate_area(config: Dict[str, Any]) -> Dict[str, Any]:
"""Validate that template parameters are only used if area is using the relevant template."""
conf_set = set()
for template in DEFAULT_TEMPLATES:
for conf in DEFAULT_TEMPLATES[template]:
conf_set.add(conf)
if config.get(CONF_TEMPLATE):
for conf in DEFAULT_TEMPLATES[config[CONF_TEMPLATE]]:
conf_set.remove(conf)
for conf in conf_set:
if config.get(conf):
raise vol.Invalid(
f"{conf} should not be part of area {config[CONF_NAME]} config"
)
return config
AREA_DATA_SCHEMA = vol.Schema(
vol.All(
{
vol.Required(CONF_NAME): cv.string,
vol.Optional(CONF_TEMPLATE): vol.In(DEFAULT_TEMPLATES),
vol.Optional(CONF_FADE): vol.Coerce(float),
vol.Optional(CONF_NO_DEFAULT): cv.boolean,
vol.Optional(CONF_CHANNEL): CHANNEL_SCHEMA,
vol.Optional(CONF_PRESET): PRESET_SCHEMA,
# the next ones can be part of the templates
vol.Optional(CONF_ROOM_ON): num_string,
vol.Optional(CONF_ROOM_OFF): num_string,
vol.Optional(CONF_CHANNEL_COVER): num_string,
vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_OPEN_PRESET): num_string,
vol.Optional(CONF_CLOSE_PRESET): num_string,
vol.Optional(CONF_STOP_PRESET): num_string,
vol.Optional(CONF_DURATION): vol.Coerce(float),
vol.Optional(CONF_TILT_TIME): vol.Coerce(float),
},
validate_area,
)
)
AREA_SCHEMA = vol.Schema({num_string: vol.Any(AREA_DATA_SCHEMA, None)})
PLATFORM_DEFAULTS_SCHEMA = vol.Schema({vol.Optional(CONF_FADE): vol.Coerce(float)})
BRIDGE_SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): int,
vol.Optional(CONF_AUTO_DISCOVER, default=False): vol.Coerce(bool),
vol.Optional(CONF_POLL_TIMER, default=1.0): vol.Coerce(float),
vol.Optional(CONF_AREA): AREA_SCHEMA,
vol.Optional(CONF_DEFAULT): PLATFORM_DEFAULTS_SCHEMA,
vol.Optional(CONF_ACTIVE, default=False): vol.Any(
ACTIVE_ON, ACTIVE_OFF, ACTIVE_INIT, cv.boolean
),
vol.Optional(CONF_PRESET): PRESET_SCHEMA,
vol.Optional(CONF_TEMPLATE): TEMPLATE_SCHEMA,
}
)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{vol.Optional(CONF_BRIDGES): vol.All(cv.ensure_list, [BRIDGE_SCHEMA])}
)
},
extra=vol.ALLOW_EXTRA,
)
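# Example configuration.yaml entry matching CONFIG_SCHEMA above (illustrative
# only; the host, area and channel values are placeholders):
#
# dynalite:
#   bridges:
#     - host: 192.168.1.42
#       port: 12345
#       area:
#         "1":
#           name: Living room
#           channel:
#             "4":
#               name: Ceiling light
#               type: light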
async def async_setup(hass: HomeAssistant, config: Dict[str, Any]) -> bool:
"""Set up the Dynalite platform."""
conf = config.get(DOMAIN)
LOGGER.debug("Setting up dynalite component config = %s", conf)
if conf is None:
conf = {}
hass.data[DOMAIN] = {}
# User has configured bridges
if CONF_BRIDGES not in conf:
return True
bridges = conf[CONF_BRIDGES]
for bridge_conf in bridges:
host = bridge_conf[CONF_HOST]
LOGGER.debug("Starting config entry flow host=%s conf=%s", host, bridge_conf)
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data=bridge_conf,
)
)
async def dynalite_service(service_call: ServiceCall):
data = service_call.data
host = data.get(ATTR_HOST, "")
bridges = []
for cur_bridge in hass.data[DOMAIN].values():
if not host or cur_bridge.host == host:
bridges.append(cur_bridge)
LOGGER.debug("Selected bridged for service call: %s", bridges)
if service_call.service == SERVICE_REQUEST_AREA_PRESET:
bridge_attr = "request_area_preset"
elif service_call.service == SERVICE_REQUEST_CHANNEL_LEVEL:
bridge_attr = "request_channel_level"
for bridge in bridges:
getattr(bridge.dynalite_devices, bridge_attr)(
data[ATTR_AREA], data.get(ATTR_CHANNEL)
)
hass.services.async_register(
DOMAIN,
SERVICE_REQUEST_AREA_PRESET,
dynalite_service,
vol.Schema(
{
vol.Optional(ATTR_HOST): cv.string,
vol.Required(ATTR_AREA): int,
vol.Optional(ATTR_CHANNEL): int,
}
),
)
hass.services.async_register(
DOMAIN,
SERVICE_REQUEST_CHANNEL_LEVEL,
dynalite_service,
vol.Schema(
{
vol.Optional(ATTR_HOST): cv.string,
vol.Required(ATTR_AREA): int,
vol.Required(ATTR_CHANNEL): int,
}
),
)
return True
async def async_entry_changed(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Reload entry since the data has changed."""
LOGGER.debug("Reconfiguring entry %s", entry.data)
bridge = hass.data[DOMAIN][entry.entry_id]
bridge.reload_config(entry.data)
LOGGER.debug("Reconfiguring entry finished %s", entry.data)
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up a bridge from a config entry."""
LOGGER.debug("Setting up entry %s", entry.data)
bridge = DynaliteBridge(hass, entry.data)
# need to do it before the listener
hass.data[DOMAIN][entry.entry_id] = bridge
entry.add_update_listener(async_entry_changed)
if not await bridge.async_setup():
LOGGER.error("Could not set up bridge for entry %s", entry.data)
hass.data[DOMAIN][entry.entry_id] = None
raise ConfigEntryNotReady
for platform in ENTITY_PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, platform)
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
LOGGER.debug("Unloading entry %s", entry.data)
hass.data[DOMAIN].pop(entry.entry_id)
tasks = [
hass.config_entries.async_forward_entry_unload(entry, platform)
for platform in ENTITY_PLATFORMS
]
results = await asyncio.gather(*tasks)
return False not in results
|
from pygal.adapters import none_to_zero
from pygal.graph.bar import Bar
class StackedBar(Bar):
"""Stacked Bar graph class"""
_adapters = [none_to_zero]
def _get_separated_values(self, secondary=False):
"""Separate values between positives and negatives stacked"""
series = self.secondary_series if secondary else self.series
transposed = list(zip(*[serie.values for serie in series]))
positive_vals = [
sum([val for val in vals if val is not None and val >= self.zero])
for vals in transposed
]
negative_vals = [
sum([val for val in vals if val is not None and val < self.zero])
for vals in transposed
]
return positive_vals, negative_vals
def _compute_box(self, positive_vals, negative_vals):
"""Compute Y min and max"""
if self.range and self.range[0] is not None:
self._box.ymin = self.range[0]
else:
self._box.ymin = negative_vals and min(
min(negative_vals), self.zero
) or self.zero
if self.range and self.range[1] is not None:
self._box.ymax = self.range[1]
else:
self._box.ymax = positive_vals and max(
max(positive_vals), self.zero
) or self.zero
def _compute(self):
"""Compute y min and max and y scale and set labels"""
positive_vals, negative_vals = self._get_separated_values()
if self.logarithmic:
positive_vals = list(
filter(lambda x: x > self.zero, positive_vals)
)
negative_vals = list(
filter(lambda x: x > self.zero, negative_vals)
)
self._compute_box(positive_vals, negative_vals)
positive_vals = positive_vals or [self.zero]
negative_vals = negative_vals or [self.zero]
self._x_pos = [
x / self._len for x in range(self._len + 1)
] if self._len > 1 else [0, 1] # Center if only one value
self._points(self._x_pos)
self.negative_cumulation = [0] * self._len
self.positive_cumulation = [0] * self._len
if self.secondary_series:
positive_vals, negative_vals = self._get_separated_values(True)
positive_vals = positive_vals or [self.zero]
negative_vals = negative_vals or [self.zero]
self.secondary_negative_cumulation = [0] * self._len
self.secondary_positive_cumulation = [0] * self._len
self._pre_compute_secondary(positive_vals, negative_vals)
self._x_pos = [(i + .5) / self._len for i in range(self._len)]
def _pre_compute_secondary(self, positive_vals, negative_vals):
"""Compute secondary y min and max"""
self._secondary_min = (
negative_vals and min(min(negative_vals), self.zero)
) or self.zero
self._secondary_max = (
positive_vals and max(max(positive_vals), self.zero)
) or self.zero
def _bar(self, serie, parent, x, y, i, zero, secondary=False):
"""Internal stacking bar drawing function"""
if secondary:
cumulation = (
self.secondary_negative_cumulation
if y < self.zero else self.secondary_positive_cumulation
)
else:
cumulation = (
self.negative_cumulation
if y < self.zero else self.positive_cumulation
)
zero = cumulation[i]
cumulation[i] = zero + y
if zero == 0:
zero = self.zero
y -= self.zero
y += zero
width = (self.view.x(1) - self.view.x(0)) / self._len
x, y = self.view((x, y))
y = y or 0
series_margin = width * self._series_margin
x += series_margin
width -= 2 * series_margin
if self.secondary_series:
width /= 2
x += int(secondary) * width
serie_margin = width * self._serie_margin
x += serie_margin
width -= 2 * serie_margin
height = self.view.y(zero) - y
r = serie.rounded_bars * 1 if serie.rounded_bars else 0
self.svg.transposable_node(
parent,
'rect',
x=x,
y=y,
rx=r,
ry=r,
width=width,
height=height,
class_='rect reactive tooltip-trigger'
)
return x, y, width, height
def _plot(self):
"""Draw bars for series and secondary series"""
for serie in self.series[::-1 if self.stack_from_top else 1]:
self.bar(serie)
for serie in self.secondary_series[::-1 if self.stack_from_top else 1]:
self.bar(serie, True)
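if __name__ == '__main__':
    # Minimal usage sketch (not part of the original module); it assumes
    # pygal's public Graph API, where config options such as the title can be
    # passed to the constructor. The series values are arbitrary sample data.
    chart = StackedBar(title='Stacked bars (sample data)')
    chart.add('Series A', [1, 3, 5, 16, 13, 3, 7])
    chart.add('Series B', [5, 2, 3, 2, 5, 7, 17])
    chart.render_to_file('stacked_bar.svg')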
|
from . import units
from .dssm_preprocessor import DSSMPreprocessor
from .naive_preprocessor import NaivePreprocessor
from .basic_preprocessor import BasicPreprocessor
from .cdssm_preprocessor import CDSSMPreprocessor
from .diin_preprocessor import DIINPreprocessor
from .bert_preprocessor import BertPreprocessor
def list_available() -> list:
from matchzoo.engine.base_preprocessor import BasePreprocessor
from matchzoo.utils import list_recursive_concrete_subclasses
return list_recursive_concrete_subclasses(BasePreprocessor)
|
from unittest.mock import PropertyMock, patch
import homeassistant.components.image_processing as ip
import homeassistant.components.microsoft_face as mf
from homeassistant.const import ATTR_ENTITY_PICTURE
from homeassistant.core import callback
from homeassistant.setup import setup_component
from tests.common import (
assert_setup_component,
get_test_home_assistant,
load_fixture,
mock_coro,
)
from tests.components.image_processing import common
class TestMicrosoftFaceDetectSetup:
"""Test class for image processing."""
def setup_method(self):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
def teardown_method(self):
"""Stop everything that was started."""
self.hass.stop()
@patch(
"homeassistant.components.microsoft_face.MicrosoftFace.update_store",
return_value=mock_coro(),
)
def test_setup_platform(self, store_mock):
"""Set up platform with one entity."""
config = {
ip.DOMAIN: {
"platform": "microsoft_face_detect",
"source": {"entity_id": "camera.demo_camera"},
"attributes": ["age", "gender"],
},
"camera": {"platform": "demo"},
mf.DOMAIN: {"api_key": "12345678abcdef6"},
}
with assert_setup_component(1, ip.DOMAIN):
setup_component(self.hass, ip.DOMAIN, config)
self.hass.block_till_done()
assert self.hass.states.get("image_processing.microsoftface_demo_camera")
@patch(
"homeassistant.components.microsoft_face.MicrosoftFace.update_store",
return_value=mock_coro(),
)
def test_setup_platform_name(self, store_mock):
"""Set up platform with one entity and set name."""
config = {
ip.DOMAIN: {
"platform": "microsoft_face_detect",
"source": {"entity_id": "camera.demo_camera", "name": "test local"},
},
"camera": {"platform": "demo"},
mf.DOMAIN: {"api_key": "12345678abcdef6"},
}
with assert_setup_component(1, ip.DOMAIN):
setup_component(self.hass, ip.DOMAIN, config)
self.hass.block_till_done()
assert self.hass.states.get("image_processing.test_local")
class TestMicrosoftFaceDetect:
"""Test class for image processing."""
def setup_method(self):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
self.config = {
ip.DOMAIN: {
"platform": "microsoft_face_detect",
"source": {"entity_id": "camera.demo_camera", "name": "test local"},
"attributes": ["age", "gender"],
},
"camera": {"platform": "demo"},
mf.DOMAIN: {"api_key": "12345678abcdef6"},
}
self.endpoint_url = f"https://westus.{mf.FACE_API_URL}"
def teardown_method(self):
"""Stop everything that was started."""
self.hass.stop()
@patch(
"homeassistant.components.microsoft_face_detect.image_processing."
"MicrosoftFaceDetectEntity.should_poll",
new_callable=PropertyMock(return_value=False),
)
def test_ms_detect_process_image(self, poll_mock, aioclient_mock):
"""Set up and scan a picture and test plates from event."""
aioclient_mock.get(
self.endpoint_url.format("persongroups"),
text=load_fixture("microsoft_face_persongroups.json"),
)
aioclient_mock.get(
self.endpoint_url.format("persongroups/test_group1/persons"),
text=load_fixture("microsoft_face_persons.json"),
)
aioclient_mock.get(
self.endpoint_url.format("persongroups/test_group2/persons"),
text=load_fixture("microsoft_face_persons.json"),
)
setup_component(self.hass, ip.DOMAIN, self.config)
self.hass.block_till_done()
state = self.hass.states.get("camera.demo_camera")
url = f"{self.hass.config.internal_url}{state.attributes.get(ATTR_ENTITY_PICTURE)}"
face_events = []
@callback
def mock_face_event(event):
"""Mock event."""
face_events.append(event)
self.hass.bus.listen("image_processing.detect_face", mock_face_event)
aioclient_mock.get(url, content=b"image")
aioclient_mock.post(
self.endpoint_url.format("detect"),
text=load_fixture("microsoft_face_detect.json"),
params={"returnFaceAttributes": "age,gender"},
)
common.scan(self.hass, entity_id="image_processing.test_local")
self.hass.block_till_done()
state = self.hass.states.get("image_processing.test_local")
assert len(face_events) == 1
assert state.attributes.get("total_faces") == 1
assert state.state == "1"
assert face_events[0].data["age"] == 71.0
assert face_events[0].data["gender"] == "male"
assert face_events[0].data["entity_id"] == "image_processing.test_local"
# Test that later, if a request is made that results in no face
# being detected, that this is reflected in the state object
aioclient_mock.clear_requests()
aioclient_mock.post(
self.endpoint_url.format("detect"),
text="[]",
params={"returnFaceAttributes": "age,gender"},
)
common.scan(self.hass, entity_id="image_processing.test_local")
self.hass.block_till_done()
state = self.hass.states.get("image_processing.test_local")
# No more face events were fired
assert len(face_events) == 1
# Total faces and actual qualified number of faces reset to zero
assert state.attributes.get("total_faces") == 0
assert state.state == "0"
|
from copy import deepcopy
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_MOTION,
DEVICE_CLASS_VIBRATION,
DOMAIN as BINARY_SENSOR_DOMAIN,
)
from homeassistant.components.deconz.const import (
CONF_ALLOW_CLIP_SENSOR,
CONF_ALLOW_NEW_DEVICES,
DOMAIN as DECONZ_DOMAIN,
)
from homeassistant.components.deconz.gateway import get_gateway_from_config_entry
from homeassistant.const import STATE_OFF, STATE_ON
from homeassistant.helpers.entity_registry import async_entries_for_config_entry
from homeassistant.setup import async_setup_component
from .test_gateway import DECONZ_WEB_REQUEST, setup_deconz_integration
SENSORS = {
"1": {
"id": "Presence sensor id",
"name": "Presence sensor",
"type": "ZHAPresence",
"state": {"dark": False, "presence": False},
"config": {"on": True, "reachable": True, "temperature": 10},
"uniqueid": "00:00:00:00:00:00:00:00-00",
},
"2": {
"id": "Temperature sensor id",
"name": "Temperature sensor",
"type": "ZHATemperature",
"state": {"temperature": False},
"config": {},
"uniqueid": "00:00:00:00:00:00:00:01-00",
},
"3": {
"id": "CLIP presence sensor id",
"name": "CLIP presence sensor",
"type": "CLIPPresence",
"state": {},
"config": {},
"uniqueid": "00:00:00:00:00:00:00:02-00",
},
"4": {
"id": "Vibration sensor id",
"name": "Vibration sensor",
"type": "ZHAVibration",
"state": {
"orientation": [1, 2, 3],
"tiltangle": 36,
"vibration": True,
"vibrationstrength": 10,
},
"config": {"on": True, "reachable": True, "temperature": 10},
"uniqueid": "00:00:00:00:00:00:00:03-00",
},
}
async def test_platform_manually_configured(hass):
"""Test that we do not discover anything or try to set up a gateway."""
assert (
await async_setup_component(
hass, BINARY_SENSOR_DOMAIN, {"binary_sensor": {"platform": DECONZ_DOMAIN}}
)
is True
)
assert DECONZ_DOMAIN not in hass.data
async def test_no_binary_sensors(hass):
"""Test that no sensors in deconz results in no sensor entities."""
await setup_deconz_integration(hass)
assert len(hass.states.async_all()) == 0
async def test_binary_sensors(hass):
"""Test successful creation of binary sensor entities."""
data = deepcopy(DECONZ_WEB_REQUEST)
data["sensors"] = deepcopy(SENSORS)
config_entry = await setup_deconz_integration(hass, get_state_response=data)
gateway = get_gateway_from_config_entry(hass, config_entry)
assert len(hass.states.async_all()) == 3
presence_sensor = hass.states.get("binary_sensor.presence_sensor")
assert presence_sensor.state == STATE_OFF
assert presence_sensor.attributes["device_class"] == DEVICE_CLASS_MOTION
assert hass.states.get("binary_sensor.temperature_sensor") is None
assert hass.states.get("binary_sensor.clip_presence_sensor") is None
vibration_sensor = hass.states.get("binary_sensor.vibration_sensor")
assert vibration_sensor.state == STATE_ON
assert vibration_sensor.attributes["device_class"] == DEVICE_CLASS_VIBRATION
state_changed_event = {
"t": "event",
"e": "changed",
"r": "sensors",
"id": "1",
"state": {"presence": True},
}
gateway.api.event_handler(state_changed_event)
await hass.async_block_till_done()
assert hass.states.get("binary_sensor.presence_sensor").state == STATE_ON
await hass.config_entries.async_unload(config_entry.entry_id)
assert len(hass.states.async_all()) == 0
async def test_allow_clip_sensor(hass):
"""Test that CLIP sensors can be allowed."""
data = deepcopy(DECONZ_WEB_REQUEST)
data["sensors"] = deepcopy(SENSORS)
config_entry = await setup_deconz_integration(
hass,
options={CONF_ALLOW_CLIP_SENSOR: True},
get_state_response=data,
)
assert len(hass.states.async_all()) == 4
assert hass.states.get("binary_sensor.presence_sensor").state == STATE_OFF
assert hass.states.get("binary_sensor.temperature_sensor") is None
assert hass.states.get("binary_sensor.clip_presence_sensor").state == STATE_OFF
assert hass.states.get("binary_sensor.vibration_sensor").state == STATE_ON
# Disallow clip sensors
hass.config_entries.async_update_entry(
config_entry, options={CONF_ALLOW_CLIP_SENSOR: False}
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 3
assert hass.states.get("binary_sensor.clip_presence_sensor") is None
# Allow clip sensors
hass.config_entries.async_update_entry(
config_entry, options={CONF_ALLOW_CLIP_SENSOR: True}
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 4
assert hass.states.get("binary_sensor.clip_presence_sensor").state == STATE_OFF
async def test_add_new_binary_sensor(hass):
"""Test that adding a new binary sensor works."""
config_entry = await setup_deconz_integration(hass)
gateway = get_gateway_from_config_entry(hass, config_entry)
assert len(hass.states.async_all()) == 0
state_added_event = {
"t": "event",
"e": "added",
"r": "sensors",
"id": "1",
"sensor": deepcopy(SENSORS["1"]),
}
gateway.api.event_handler(state_added_event)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
assert hass.states.get("binary_sensor.presence_sensor").state == STATE_OFF
async def test_add_new_binary_sensor_ignored(hass):
"""Test that adding a new binary sensor is not allowed."""
config_entry = await setup_deconz_integration(
hass,
options={CONF_ALLOW_NEW_DEVICES: False},
)
gateway = get_gateway_from_config_entry(hass, config_entry)
assert len(hass.states.async_all()) == 0
state_added_event = {
"t": "event",
"e": "added",
"r": "sensors",
"id": "1",
"sensor": deepcopy(SENSORS["1"]),
}
gateway.api.event_handler(state_added_event)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 0
entity_registry = await hass.helpers.entity_registry.async_get_registry()
assert (
len(async_entries_for_config_entry(entity_registry, config_entry.entry_id)) == 0
)
|
from .fs import FileSystemReader
from .trash import version
from .trash import TopTrashDirRules
from .trash import TrashDirs
from .trash import Harvester
from .trash import Parser
from .trash import PrintHelp
from .trash import PrintVersion
from .trash import parse_deletion_date
from .trash import ParseError
from .trash import parse_path
from .trash import unknown_date
def main():
import sys
import os
from trashcli.list_mount_points import os_mount_points
ListCmd(
out = sys.stdout,
err = sys.stderr,
environ = os.environ,
getuid = os.getuid,
list_volumes = os_mount_points,
).run(*sys.argv)
class ListCmd:
def __init__(self, out,
err,
environ,
list_volumes,
getuid,
file_reader = FileSystemReader(),
version = version):
self.output = ListCmdOutput(out, err)
self.err = self.output.err
self.environ = environ
self.list_volumes = list_volumes
self.getuid = getuid
self.file_reader = file_reader
self.contents_of = file_reader.contents_of
self.version = version
def run(self, *argv):
        parse = Parser()
parse.on_help(PrintHelp(self.description, self.output.println))
parse.on_version(PrintVersion(self.output.println, self.version))
parse.as_default(self.list_trash)
parse(argv)
def list_trash(self):
harvester = Harvester(self.file_reader)
harvester.on_volume = self.output.set_volume_path
harvester.on_trashinfo_found = self._print_trashinfo
trashdirs = TrashDirs(self.environ,
self.getuid,
self.list_volumes,
TopTrashDirRules(self.file_reader))
trashdirs.on_trashdir_skipped_because_parent_not_sticky = self.output.top_trashdir_skipped_because_parent_not_sticky
trashdirs.on_trashdir_skipped_because_parent_is_symlink = self.output.top_trashdir_skipped_because_parent_is_symlink
trashdirs.on_trash_dir_found = harvester.analize_trash_directory
trashdirs.list_trashdirs()
def _print_trashinfo(self, path):
try:
contents = self.contents_of(path)
        except IOError as e:
self.output.print_read_error(e)
else:
deletion_date = parse_deletion_date(contents) or unknown_date()
try:
path = parse_path(contents)
except ParseError:
self.output.print_parse_path_error(path)
else:
self.output.print_entry(deletion_date, path)
def description(self, program_name, printer):
printer.usage('Usage: %s [OPTIONS...]' % program_name)
printer.summary('List trashed files')
printer.options(
" --version show program's version number and exit",
" -h, --help show this help message and exit")
printer.bug_reporting()
class ListCmdOutput:
def __init__(self, out, err):
self.out = out
self.err = err
def println(self, line):
self.out.write(line+'\n')
def error(self, line):
self.err.write(line+'\n')
def print_read_error(self, error):
self.error(str(error))
def print_parse_path_error(self, offending_file):
self.error("Parse Error: %s: Unable to parse Path." % (offending_file))
def top_trashdir_skipped_because_parent_not_sticky(self, trashdir):
self.error("TrashDir skipped because parent not sticky: %s"
% trashdir)
def top_trashdir_skipped_because_parent_is_symlink(self, trashdir):
self.error("TrashDir skipped because parent is symlink: %s"
% trashdir)
def set_volume_path(self, volume_path):
self.volume_path = volume_path
def print_entry(self, maybe_deletion_date, relative_location):
import os
original_location = os.path.join(self.volume_path, relative_location)
        self.println("%s %s" % (maybe_deletion_date, original_location))
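# A minimal usage sketch (illustrative only; this helper is hypothetical and
# not part of trash-cli). It wires ListCmd to in-memory streams and asks for
# the version, exercising the Parser/PrintVersion path without reading any
# real trash directories.
def _example_list_cmd_usage():
    import os
    from io import StringIO
    from trashcli.list_mount_points import os_mount_points
    out, err = StringIO(), StringIO()
    ListCmd(out=out,
            err=err,
            environ=os.environ,
            getuid=os.getuid,
            list_volumes=os_mount_points).run('trash-list', '--version')
    return out.getvalue()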
|
from __future__ import division, absolute_import
# import compatibility functions and utilities
import sys
from .utils import _range
# to inherit from the tqdm class
from .std import tqdm as std_tqdm
if True: # pragma: no cover
# import IPython/Jupyter base widget and display utilities
IPY = 0
IPYW = 0
try: # IPython 4.x
import ipywidgets
IPY = 4
try:
IPYW = int(ipywidgets.__version__.split('.')[0])
except AttributeError: # __version__ may not exist in old versions
pass
except ImportError: # IPython 3.x / 2.x
IPY = 32
import warnings
with warnings.catch_warnings():
warnings.filterwarnings(
'ignore',
message=".*The `IPython.html` package has been deprecated.*")
try:
import IPython.html.widgets as ipywidgets
except ImportError:
pass
try: # IPython 4.x / 3.x
if IPY == 32:
from IPython.html.widgets import FloatProgress as IProgress
from IPython.html.widgets import HBox, HTML
IPY = 3
else:
from ipywidgets import FloatProgress as IProgress
from ipywidgets import HBox, HTML
except ImportError:
try: # IPython 2.x
from IPython.html.widgets import FloatProgressWidget as IProgress
from IPython.html.widgets import ContainerWidget as HBox
from IPython.html.widgets import HTML
IPY = 2
except ImportError:
IPY = 0
try:
from IPython.display import display # , clear_output
except ImportError:
pass
# HTML encoding
try: # Py3
from html import escape
except ImportError: # Py2
from cgi import escape
__author__ = {"github.com/": ["lrq3000", "casperdcl", "alexanderkuk"]}
__all__ = ['tqdm_notebook', 'tnrange', 'tqdm', 'trange']
class tqdm_notebook(std_tqdm):
"""
Experimental IPython/Jupyter Notebook widget using tqdm!
"""
@staticmethod
def status_printer(_, total=None, desc=None, ncols=None):
"""
Manage the printing of an IPython/Jupyter Notebook progress bar widget.
"""
# Fallback to text bar if there's no total
# DEPRECATED: replaced with an 'info' style bar
# if not total:
# return super(tqdm_notebook, tqdm_notebook).status_printer(file)
# fp = file
# Prepare IPython progress bar
try:
if total:
pbar = IProgress(min=0, max=total)
            else:  # No total? Show an info-style bar with no progress status
pbar = IProgress(min=0, max=1)
pbar.value = 1
pbar.bar_style = 'info'
except NameError:
# #187 #451 #558
raise ImportError(
"FloatProgress not found. Please update jupyter and ipywidgets."
" See https://ipywidgets.readthedocs.io/en/stable"
"/user_install.html")
if desc:
pbar.description = desc
if IPYW >= 7:
pbar.style.description_width = 'initial'
# Prepare status text
ptext = HTML()
# Only way to place text to the right of the bar is to use a container
container = HBox(children=[pbar, ptext])
# Prepare layout
if ncols is not None: # use default style of ipywidgets
# ncols could be 100, "100px", "100%"
ncols = str(ncols) # ipywidgets only accepts string
try:
if int(ncols) > 0: # isnumeric and positive
ncols += 'px'
except ValueError:
pass
pbar.layout.flex = '2'
container.layout.width = ncols
container.layout.display = 'inline-flex'
container.layout.flex_flow = 'row wrap'
display(container)
return container
def display(self, msg=None, pos=None,
# additional signals
close=False, bar_style=None):
# Note: contrary to native tqdm, msg='' does NOT clear bar
        # the goal is to keep all info if an error happens, so the user knows
        # at which iteration the loop failed.
# Clear previous output (really necessary?)
# clear_output(wait=1)
if not msg and not close:
msg = self.__repr__()
pbar, ptext = self.container.children
pbar.value = self.n
if msg:
# html escape special characters (like '&')
if '<bar/>' in msg:
left, right = map(escape, msg.split('<bar/>', 1))
else:
left, right = '', escape(msg)
            # remove unsightly pipes
if left and left[-1] == '|':
left = left[:-1]
if right and right[0] == '|':
right = right[1:]
# Update description
pbar.description = left
if IPYW >= 7:
pbar.style.description_width = 'initial'
# never clear the bar (signal: msg='')
if right:
ptext.value = right
# Change bar style
if bar_style:
            # Hack-ish way to avoid the 'danger' bar_style being overridden by
            # 'success', because the bar gets closed after the error...
if not (pbar.bar_style == 'danger' and bar_style == 'success'):
pbar.bar_style = bar_style
# Special signal to close the bar
if close and pbar.bar_style != 'danger': # hide only if no error
try:
self.container.close()
except AttributeError:
self.container.visible = False
def __init__(self, *args, **kwargs):
# Setup default output
file_kwarg = kwargs.get('file', sys.stderr)
if file_kwarg is sys.stderr or file_kwarg is None:
kwargs['file'] = sys.stdout # avoid the red block in IPython
# Initialize parent class + avoid printing by using gui=True
kwargs['gui'] = True
kwargs.setdefault('bar_format', '{l_bar}{bar}{r_bar}')
kwargs['bar_format'] = kwargs['bar_format'].replace('{bar}', '<bar/>')
super(tqdm_notebook, self).__init__(*args, **kwargs)
if self.disable or not kwargs['gui']:
return
# Get bar width
self.ncols = '100%' if self.dynamic_ncols else kwargs.get("ncols", None)
# Replace with IPython progress bar display (with correct total)
unit_scale = 1 if self.unit_scale is True else self.unit_scale or 1
total = self.total * unit_scale if self.total else self.total
self.container = self.status_printer(
self.fp, total, self.desc, self.ncols)
self.sp = self.display
# Print initial bar state
if not self.disable:
self.display()
def __iter__(self, *args, **kwargs):
try:
for obj in super(tqdm_notebook, self).__iter__(*args, **kwargs):
# return super(tqdm...) will not catch exception
yield obj
# NB: except ... [ as ...] breaks IPython async KeyboardInterrupt
except: # NOQA
self.sp(bar_style='danger')
raise
def update(self, *args, **kwargs):
try:
super(tqdm_notebook, self).update(*args, **kwargs)
except Exception as exc:
# cannot catch KeyboardInterrupt when using manual tqdm
# as the interrupt will most likely happen on another statement
self.sp(bar_style='danger')
raise exc
def close(self, *args, **kwargs):
super(tqdm_notebook, self).close(*args, **kwargs)
# If it was not run in a notebook, sp is not assigned, check for it
if hasattr(self, 'sp'):
# Try to detect if there was an error or KeyboardInterrupt
            # in manual mode: if n < total, things probably went wrong
if self.total and self.n < self.total:
self.sp(bar_style='danger')
else:
if self.leave:
self.sp(bar_style='success')
else:
self.sp(close=True)
def moveto(self, *args, **kwargs):
# void -> avoid extraneous `\n` in IPython output cell
return
def tnrange(*args, **kwargs):
"""
A shortcut for `tqdm.notebook.tqdm(xrange(*args), **kwargs)`.
On Python3+, `range` is used instead of `xrange`.
"""
return tqdm_notebook(_range(*args), **kwargs)
# Aliases
tqdm = tqdm_notebook
trange = tnrange
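# Illustrative usage only (assumes a Jupyter environment with ipywidgets
# installed); outside a notebook, status_printer() raises ImportError instead
# of rendering the HBox(FloatProgress, HTML) widget.
if __name__ == '__main__':  # pragma: no cover
    for _ in tnrange(10, desc='demo'):
        pass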
|
from functools import partial
from typing import Dict, List
from homeassistant.components.binary_sensor import BinarySensorEntity
from homeassistant.core import callback
from homeassistant.helpers.entity_registry import (
async_get_registry as async_get_entity_registry,
)
from homeassistant.helpers.typing import HomeAssistantType
from . import XboxUpdateCoordinator
from .base_sensor import XboxBaseSensorEntity
from .const import DOMAIN
PRESENCE_ATTRIBUTES = ["online", "in_party", "in_game", "in_multiplayer"]
async def async_setup_entry(hass: HomeAssistantType, config_entry, async_add_entities):
"""Set up Xbox Live friends."""
coordinator: XboxUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id][
"coordinator"
]
update_friends = partial(async_update_friends, coordinator, {}, async_add_entities)
unsub = coordinator.async_add_listener(update_friends)
hass.data[DOMAIN][config_entry.entry_id]["binary_sensor_unsub"] = unsub
update_friends()
class XboxBinarySensorEntity(XboxBaseSensorEntity, BinarySensorEntity):
"""Representation of a Xbox presence state."""
@property
def is_on(self) -> bool:
"""Return the status of the requested attribute."""
if not self.coordinator.last_update_success:
return False
return getattr(self.data, self.attribute, False)
@callback
def async_update_friends(
coordinator: XboxUpdateCoordinator,
current: Dict[str, List[XboxBinarySensorEntity]],
async_add_entities,
) -> None:
"""Update friends."""
new_ids = set(coordinator.data.presence)
current_ids = set(current)
# Process new favorites, add them to Home Assistant
new_entities = []
for xuid in new_ids - current_ids:
current[xuid] = [
XboxBinarySensorEntity(coordinator, xuid, attribute)
for attribute in PRESENCE_ATTRIBUTES
]
new_entities = new_entities + current[xuid]
if new_entities:
async_add_entities(new_entities)
# Process deleted favorites, remove them from Home Assistant
for xuid in current_ids - new_ids:
coordinator.hass.async_create_task(
async_remove_entities(xuid, coordinator, current)
)
async def async_remove_entities(
xuid: str,
coordinator: XboxUpdateCoordinator,
current: Dict[str, XboxBinarySensorEntity],
) -> None:
"""Remove friend sensors from Home Assistant."""
registry = await async_get_entity_registry(coordinator.hass)
entities = current[xuid]
for entity in entities:
if entity.entity_id in registry.entities:
registry.async_remove(entity.entity_id)
del current[xuid]
|
import asyncio
import logging
from uuid import uuid4
from aiohttp import ClientError, web_exceptions
from async_timeout import timeout
from pydaikin.daikin_base import Appliance
from pydaikin.discovery import Discovery
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_PASSWORD
from .const import CONF_UUID, KEY_IP, KEY_MAC, TIMEOUT
_LOGGER = logging.getLogger(__name__)
@config_entries.HANDLERS.register("daikin")
class FlowHandler(config_entries.ConfigFlow):
"""Handle a config flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL
def __init__(self):
"""Initialize the Daikin config flow."""
self.host = None
@property
def schema(self):
"""Return current schema."""
return vol.Schema(
{
vol.Required(CONF_HOST, default=self.host): str,
vol.Optional(CONF_API_KEY): str,
vol.Optional(CONF_PASSWORD): str,
}
)
async def _create_entry(self, host, mac, key=None, uuid=None, password=None):
"""Register new entry."""
if not self.unique_id:
await self.async_set_unique_id(mac)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=host,
data={
CONF_HOST: host,
KEY_MAC: mac,
CONF_API_KEY: key,
CONF_UUID: uuid,
CONF_PASSWORD: password,
},
)
async def _create_device(self, host, key=None, password=None):
"""Create device."""
        # BRP07Cxx devices need a uuid together with the key
if key:
uuid = str(uuid4())
else:
uuid = None
key = None
if not password:
password = None
try:
with timeout(TIMEOUT):
device = await Appliance.factory(
host,
self.hass.helpers.aiohttp_client.async_get_clientsession(),
key=key,
uuid=uuid,
password=password,
)
except asyncio.TimeoutError:
return self.async_show_form(
step_id="user",
data_schema=self.schema,
errors={"base": "cannot_connect"},
)
except web_exceptions.HTTPForbidden:
return self.async_show_form(
step_id="user",
data_schema=self.schema,
errors={"base": "invalid_auth"},
)
except ClientError:
_LOGGER.exception("ClientError")
return self.async_show_form(
step_id="user",
data_schema=self.schema,
errors={"base": "unknown"},
)
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected error creating device")
return self.async_show_form(
step_id="user",
data_schema=self.schema,
errors={"base": "unknown"},
)
mac = device.mac
return await self._create_entry(host, mac, key, uuid, password)
async def async_step_user(self, user_input=None):
"""User initiated config flow."""
if user_input is None:
return self.async_show_form(step_id="user", data_schema=self.schema)
return await self._create_device(
user_input[CONF_HOST],
user_input.get(CONF_API_KEY),
user_input.get(CONF_PASSWORD),
)
async def async_step_import(self, user_input):
"""Import a config entry."""
host = user_input.get(CONF_HOST)
if not host:
return await self.async_step_user()
return await self._create_device(host)
async def async_step_discovery(self, discovery_info):
"""Initialize step from discovery."""
_LOGGER.debug("Discovered device: %s", discovery_info)
await self.async_set_unique_id(discovery_info[KEY_MAC])
self._abort_if_unique_id_configured()
self.host = discovery_info[KEY_IP]
return await self.async_step_user()
async def async_step_zeroconf(self, discovery_info):
"""Prepare configuration for a discovered Daikin device."""
_LOGGER.debug("Zeroconf user_input: %s", discovery_info)
devices = Discovery().poll(ip=discovery_info[CONF_HOST])
if not devices:
_LOGGER.debug(
"Could not find MAC-address for %s,"
" make sure the required UDP ports are open (see integration documentation)",
discovery_info[CONF_HOST],
)
return self.async_abort(reason="cannot_connect")
await self.async_set_unique_id(next(iter(devices))[KEY_MAC])
self._abort_if_unique_id_configured()
self.host = discovery_info[CONF_HOST]
return await self.async_step_user()
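# Illustrative sketch (a hypothetical helper, not part of the integration):
# the user step validates its input against the schema above, where only the
# host is required and the API key/password remain optional.
def _example_validate_user_input():
    flow = FlowHandler()
    return flow.schema({CONF_HOST: "192.168.1.2", CONF_API_KEY: "0123456789"})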
|
import os.path
import contextlib
from typing import TYPE_CHECKING, Iterator, List, Optional
from PyQt5.QtCore import QUrl
from qutebrowser.api import cmdutils
from qutebrowser.completion.models import configmodel
from qutebrowser.utils import objreg, message, standarddir, urlmatch
from qutebrowser.config import configtypes, configexc, configfiles, configdata
from qutebrowser.misc import editor
from qutebrowser.keyinput import keyutils
if TYPE_CHECKING:
from qutebrowser.config.config import Config, KeyConfig
class ConfigCommands:
"""qutebrowser commands related to the configuration."""
def __init__(self,
config: 'Config',
keyconfig: 'KeyConfig') -> None:
self._config = config
self._keyconfig = keyconfig
@contextlib.contextmanager
def _handle_config_error(self) -> Iterator[None]:
"""Catch errors in set_command and raise CommandError."""
try:
yield
except configexc.Error as e:
raise cmdutils.CommandError(str(e))
def _parse_pattern(self, pattern: Optional[str]) -> Optional[urlmatch.UrlPattern]:
"""Parse a pattern string argument to a pattern."""
if pattern is None:
return None
try:
return urlmatch.UrlPattern(pattern)
except urlmatch.ParseError as e:
raise cmdutils.CommandError("Error while parsing {}: {}"
.format(pattern, str(e)))
def _parse_key(self, key: str) -> keyutils.KeySequence:
"""Parse a key argument."""
try:
return keyutils.KeySequence.parse(key)
except keyutils.KeyParseError as e:
raise cmdutils.CommandError(str(e))
def _print_value(self, option: str, pattern: Optional[urlmatch.UrlPattern]) -> None:
"""Print the value of the given option."""
with self._handle_config_error():
value = self._config.get_str(option, pattern=pattern)
text = "{} = {}".format(option, value)
if pattern is not None:
text += " for {}".format(pattern)
message.info(text)
@cmdutils.register(instance='config-commands')
@cmdutils.argument('option', completion=configmodel.option)
@cmdutils.argument('value', completion=configmodel.value)
@cmdutils.argument('win_id', value=cmdutils.Value.win_id)
@cmdutils.argument('pattern', flag='u')
def set(self, win_id: int, option: str = None, value: str = None,
temp: bool = False, print_: bool = False,
*, pattern: str = None) -> None:
"""Set an option.
If the option name ends with '?' or no value is provided, the
value of the option is shown instead.
Using :set without any arguments opens a page where settings can be
changed interactively.
Args:
option: The name of the option.
value: The value to set.
pattern: The URL pattern to use.
temp: Set value temporarily until qutebrowser is closed.
print_: Print the value after setting.
"""
if option is None:
tabbed_browser = objreg.get('tabbed-browser', scope='window',
window=win_id)
tabbed_browser.load_url(QUrl('qute://settings'), newtab=False)
return
if option.endswith('!'):
raise cmdutils.CommandError("Toggling values was moved to the "
":config-cycle command")
parsed_pattern = self._parse_pattern(pattern)
if option.endswith('?') and option != '?':
self._print_value(option[:-1], pattern=parsed_pattern)
return
with self._handle_config_error():
if value is None:
self._print_value(option, pattern=parsed_pattern)
else:
self._config.set_str(option, value, pattern=parsed_pattern,
save_yaml=not temp)
if print_:
self._print_value(option, pattern=parsed_pattern)
@cmdutils.register(instance='config-commands', maxsplit=1,
no_cmd_split=True, no_replace_variables=True)
@cmdutils.argument('command', completion=configmodel.bind)
@cmdutils.argument('win_id', value=cmdutils.Value.win_id)
def bind(self, win_id: str, key: str = None, command: str = None, *,
mode: str = 'normal', default: bool = False) -> None:
"""Bind a key to a command.
If no command is given, show the current binding for the given key.
Using :bind without any arguments opens a page showing all keybindings.
Args:
key: The keychain to bind. Examples of valid keychains are `gC`,
`<Ctrl-X>` or `<Ctrl-C>a`.
command: The command to execute, with optional args.
mode: The mode to bind the key in (default: `normal`). See `:help
bindings.commands` for the available modes.
default: If given, restore a default binding.
"""
if key is None:
tabbed_browser = objreg.get('tabbed-browser', scope='window',
window=win_id)
tabbed_browser.load_url(QUrl('qute://bindings'), newtab=True)
return
seq = self._parse_key(key)
if command is None:
if default:
# :bind --default: Restore default
with self._handle_config_error():
self._keyconfig.bind_default(seq, mode=mode,
save_yaml=True)
return
# No --default -> print binding
with self._handle_config_error():
cmd = self._keyconfig.get_command(seq, mode)
if cmd is None:
message.info("{} is unbound in {} mode".format(seq, mode))
else:
message.info("{} is bound to '{}' in {} mode".format(
seq, cmd, mode))
return
with self._handle_config_error():
self._keyconfig.bind(seq, command, mode=mode, save_yaml=True)
@cmdutils.register(instance='config-commands')
def unbind(self, key: str, *, mode: str = 'normal') -> None:
"""Unbind a keychain.
Args:
key: The keychain to unbind. See the help for `:bind` for the
correct syntax for keychains.
mode: The mode to unbind the key in (default: `normal`).
See `:help bindings.commands` for the available modes.
"""
with self._handle_config_error():
self._keyconfig.unbind(self._parse_key(key), mode=mode,
save_yaml=True)
@cmdutils.register(instance='config-commands', star_args_optional=True)
@cmdutils.argument('option', completion=configmodel.option)
@cmdutils.argument('values', completion=configmodel.value)
@cmdutils.argument('pattern', flag='u')
def config_cycle(self, option: str, *values: str,
pattern: str = None,
temp: bool = False, print_: bool = False) -> None:
"""Cycle an option between multiple values.
Args:
option: The name of the option.
values: The values to cycle through.
pattern: The URL pattern to use.
temp: Set value temporarily until qutebrowser is closed.
print_: Print the value after setting.
"""
parsed_pattern = self._parse_pattern(pattern)
with self._handle_config_error():
opt = self._config.get_opt(option)
old_value = self._config.get_obj_for_pattern(
option, pattern=parsed_pattern)
if not values and isinstance(opt.typ, configtypes.Bool):
values = ('true', 'false')
if len(values) < 2:
raise cmdutils.CommandError("Need at least two values for "
"non-boolean settings.")
# Use the next valid value from values, or the first if the current
# value does not appear in the list
with self._handle_config_error():
cycle_values = [opt.typ.from_str(val) for val in values]
try:
idx = cycle_values.index(old_value)
idx = (idx + 1) % len(cycle_values)
value = cycle_values[idx]
except ValueError:
value = cycle_values[0]
with self._handle_config_error():
self._config.set_obj(option, value, pattern=parsed_pattern,
save_yaml=not temp)
if print_:
self._print_value(option, pattern=parsed_pattern)
@cmdutils.register(instance='config-commands')
@cmdutils.argument('option', completion=configmodel.customized_option)
def config_unset(self, option: str, temp: bool = False) -> None:
"""Unset an option.
This sets an option back to its default and removes it from
autoconfig.yml.
Args:
option: The name of the option.
temp: Set value temporarily until qutebrowser is closed.
"""
with self._handle_config_error():
self._config.unset(option, save_yaml=not temp)
@cmdutils.register(instance='config-commands')
@cmdutils.argument('win_id', value=cmdutils.Value.win_id)
def config_diff(self, win_id: int, old: bool = False) -> None:
"""Show all customized options.
Args:
old: Show difference for the pre-v1.0 files
(qutebrowser.conf/keys.conf).
"""
url = QUrl('qute://configdiff')
if old:
url.setPath('/old')
tabbed_browser = objreg.get('tabbed-browser',
scope='window', window=win_id)
tabbed_browser.load_url(url, newtab=False)
@cmdutils.register(instance='config-commands')
@cmdutils.argument('option', completion=configmodel.list_option)
def config_list_add(self, option: str, value: str,
temp: bool = False) -> None:
"""Append a value to a config option that is a list.
Args:
option: The name of the option.
value: The value to append to the end of the list.
temp: Add value temporarily until qutebrowser is closed.
"""
with self._handle_config_error():
opt = self._config.get_opt(option)
valid_list_types = (configtypes.List, configtypes.ListOrValue)
if not isinstance(opt.typ, valid_list_types):
raise cmdutils.CommandError(":config-list-add can only be used "
"for lists")
with self._handle_config_error():
option_value = self._config.get_mutable_obj(option)
option_value.append(value)
self._config.update_mutables(save_yaml=not temp)
@cmdutils.register(instance='config-commands')
@cmdutils.argument('option', completion=configmodel.dict_option)
def config_dict_add(self, option: str, key: str, value: str,
temp: bool = False, replace: bool = False) -> None:
"""Add a key/value pair to a dictionary option.
Args:
option: The name of the option.
key: The key to use.
value: The value to place in the dictionary.
temp: Add value temporarily until qutebrowser is closed.
replace: Replace existing values. By default, existing values are
not overwritten.
"""
with self._handle_config_error():
opt = self._config.get_opt(option)
if not isinstance(opt.typ, configtypes.Dict):
raise cmdutils.CommandError(":config-dict-add can only be used "
"for dicts")
with self._handle_config_error():
option_value = self._config.get_mutable_obj(option)
if key in option_value and not replace:
raise cmdutils.CommandError("{} already exists in {} - use "
"--replace to overwrite!"
.format(key, option))
option_value[key] = value
self._config.update_mutables(save_yaml=not temp)
@cmdutils.register(instance='config-commands')
@cmdutils.argument('option', completion=configmodel.list_option)
def config_list_remove(self, option: str, value: str,
temp: bool = False) -> None:
"""Remove a value from a list.
Args:
option: The name of the option.
value: The value to remove from the list.
temp: Remove value temporarily until qutebrowser is closed.
"""
with self._handle_config_error():
opt = self._config.get_opt(option)
valid_list_types = (configtypes.List, configtypes.ListOrValue)
if not isinstance(opt.typ, valid_list_types):
raise cmdutils.CommandError(":config-list-remove can only be used "
"for lists")
with self._handle_config_error():
option_value = self._config.get_mutable_obj(option)
if value not in option_value:
raise cmdutils.CommandError("{} is not in {}!".format(
value, option))
option_value.remove(value)
self._config.update_mutables(save_yaml=not temp)
@cmdutils.register(instance='config-commands')
@cmdutils.argument('option', completion=configmodel.dict_option)
def config_dict_remove(self, option: str, key: str,
temp: bool = False) -> None:
"""Remove a key from a dict.
Args:
option: The name of the option.
key: The key to remove from the dict.
temp: Remove value temporarily until qutebrowser is closed.
"""
with self._handle_config_error():
opt = self._config.get_opt(option)
if not isinstance(opt.typ, configtypes.Dict):
raise cmdutils.CommandError(":config-dict-remove can only be used "
"for dicts")
with self._handle_config_error():
option_value = self._config.get_mutable_obj(option)
if key not in option_value:
raise cmdutils.CommandError("{} is not in {}!".format(
key, option))
del option_value[key]
self._config.update_mutables(save_yaml=not temp)
@cmdutils.register(instance='config-commands')
def config_clear(self, save: bool = False) -> None:
"""Set all settings back to their default.
Args:
save: If given, all configuration in autoconfig.yml is also
removed.
"""
self._config.clear(save_yaml=save)
@cmdutils.register(instance='config-commands')
def config_source(self, filename: str = None, clear: bool = False) -> None:
"""Read a config.py file.
Args:
filename: The file to load. If not given, loads the default
config.py.
clear: Clear current settings first.
"""
if filename is None:
filename = standarddir.config_py()
else:
filename = os.path.expanduser(filename)
if not os.path.isabs(filename):
filename = os.path.join(standarddir.config(), filename)
if clear:
self.config_clear()
try:
configfiles.read_config_py(filename)
except configexc.ConfigFileErrors as e:
raise cmdutils.CommandError(e)
@cmdutils.register(instance='config-commands')
def config_edit(self, no_source: bool = False) -> None:
"""Open the config.py file in the editor.
Args:
no_source: Don't re-source the config file after editing.
"""
def on_file_updated() -> None:
"""Source the new config when editing finished.
This can't use cmdutils.CommandError as it's run async.
"""
try:
configfiles.read_config_py(filename)
except configexc.ConfigFileErrors as e:
message.error(str(e))
ed = editor.ExternalEditor(watch=True, parent=self._config)
if not no_source:
ed.file_updated.connect(on_file_updated)
filename = standarddir.config_py()
ed.edit_file(filename)
@cmdutils.register(instance='config-commands')
def config_write_py(self, filename: str = None,
force: bool = False, defaults: bool = False) -> None:
"""Write the current configuration to a config.py file.
Args:
filename: The file to write to, or None for the default config.py.
force: Force overwriting existing files.
defaults: Write the defaults instead of values configured via :set.
"""
if filename is None:
filename = standarddir.config_py()
else:
filename = os.path.expanduser(filename)
if not os.path.isabs(filename):
filename = os.path.join(standarddir.config(), filename)
if os.path.exists(filename) and not force:
raise cmdutils.CommandError("{} already exists - use --force to "
"overwrite!".format(filename))
options: List = []
if defaults:
options = [(None, opt, opt.default)
for _name, opt in sorted(configdata.DATA.items())]
bindings = dict(configdata.DATA['bindings.default'].default)
commented = True
else:
for values in self._config:
for scoped in values:
options.append((scoped.pattern, values.opt, scoped.value))
bindings = dict(self._config.get_mutable_obj('bindings.commands'))
commented = False
writer = configfiles.ConfigPyWriter(options, bindings,
commented=commented)
try:
writer.write(filename)
except OSError as e:
raise cmdutils.CommandError(str(e))
|
import logging
import os
from gi.repository import Gdk, Gio, GLib, Gtk
# Import support module to get all builder-constructed widgets in the namespace
import meld.ui.gladesupport # noqa: F401
import meld.ui.util
from meld.conf import PROFILE, _
from meld.const import (
FILE_FILTER_ACTION_FORMAT,
TEXT_FILTER_ACTION_FORMAT,
FileComparisonMode,
)
from meld.dirdiff import DirDiff
from meld.filediff import FileDiff
from meld.melddoc import ComparisonState, MeldDoc
from meld.menuhelpers import replace_menu_section
from meld.newdifftab import NewDiffTab
from meld.recent import RecentType, recent_comparisons
from meld.settings import get_meld_settings
from meld.task import LifoScheduler
from meld.ui.notebooklabel import NotebookLabel
from meld.vcview import VcView
from meld.windowstate import SavedWindowState
try:
from Cocoa import NSApp
from meld.macwindow import MacWindow
except Exception as ex:
print(ex)
class MacWindow:
is_quartz = False
log = logging.getLogger(__name__)
@Gtk.Template(resource_path='/org/gnome/meld/ui/appwindow.ui')
class MeldWindow(Gtk.ApplicationWindow, MacWindow):
__gtype_name__ = 'MeldWindow'
appvbox = Gtk.Template.Child()
folder_filter_button = Gtk.Template.Child()
text_filter_button = Gtk.Template.Child()
gear_menu_button = Gtk.Template.Child()
notebook = Gtk.Template.Child()
spinner = Gtk.Template.Child()
vc_filter_button = Gtk.Template.Child()
view_toolbar = Gtk.Template.Child()
def __init__(self):
super(Gtk.ApplicationWindow, self).__init__()
if self.is_quartz:
header_bar = self.get_titlebar()
self.set_titlebar(None)
self.appvbox.pack_start(header_bar, False, True, 0)
# Manually handle GAction additions
actions = (
("close", self.action_close),
("new-tab", self.action_new_tab),
("stop", self.action_stop),
)
for name, callback in actions:
action = Gio.SimpleAction.new(name, None)
action.connect('activate', callback)
self.add_action(action)
state_actions = (
(
"fullscreen", self.action_fullscreen_change,
GLib.Variant.new_boolean(False)
),
)
for (name, callback, state) in state_actions:
action = Gio.SimpleAction.new_stateful(name, None, state)
action.connect('change-state', callback)
self.add_action(action)
# Initialise sensitivity for important actions
self.lookup_action('stop').set_enabled(False)
# Fake out the spinner on Windows. See Gitlab issue #133.
if os.name == 'nt':
for attr in ('stop', 'hide', 'show', 'start'):
setattr(self.spinner, attr, lambda *args: True)
self.drag_dest_set(
Gtk.DestDefaults.MOTION | Gtk.DestDefaults.HIGHLIGHT |
Gtk.DestDefaults.DROP,
None, Gdk.DragAction.COPY)
self.drag_dest_add_uri_targets()
self.connect(
"drag_data_received", self.on_widget_drag_data_received)
self.window_state = SavedWindowState()
self.window_state.bind(self)
self.should_close = False
self.idle_hooked = 0
self.scheduler = LifoScheduler()
self.scheduler.connect("runnable", self.on_scheduler_runnable)
if PROFILE != '':
style_context = self.get_style_context()
style_context.add_class("devel")
def do_realize(self):
Gtk.ApplicationWindow.do_realize(self)
app = self.get_application()
menu = app.get_menu_by_id("gear-menu")
self.gear_menu_button.set_popover(
Gtk.Popover.new_from_model(self.gear_menu_button, menu))
filter_model = app.get_menu_by_id("text-filter-menu")
self.text_filter_button.set_popover(
Gtk.Popover.new_from_model(self.text_filter_button, filter_model))
filter_menu = app.get_menu_by_id("folder-status-filter-menu")
self.folder_filter_button.set_popover(
Gtk.Popover.new_from_model(self.folder_filter_button, filter_menu))
vc_filter_model = app.get_menu_by_id('vc-status-filter-menu')
self.vc_filter_button.set_popover(
Gtk.Popover.new_from_model(self.vc_filter_button, vc_filter_model))
meld_settings = get_meld_settings()
self.update_text_filters(meld_settings)
self.update_filename_filters(meld_settings)
self.settings_handlers = [
meld_settings.connect(
"text-filters-changed", self.update_text_filters),
meld_settings.connect(
"file-filters-changed", self.update_filename_filters),
]
meld.ui.util.extract_accels_from_menu(menu, self.get_application())
if self.is_quartz:
self.install_mac_additions()
def do_show(self):
Gtk.ApplicationWindow.do_show(self)
def _on_recentmenu_map(self, recentmenu):
for imagemenuitem in recentmenu.get_children():
imagemenuitem.set_tooltip_text(imagemenuitem.get_label())
def update_filename_filters(self, settings):
filter_items_model = Gio.Menu()
for i, filt in enumerate(settings.file_filters):
name = FILE_FILTER_ACTION_FORMAT.format(i)
filter_items_model.append(
label=filt.label, detailed_action=f'view.{name}')
section = Gio.MenuItem.new_section(_("Filename"), filter_items_model)
section.set_attribute([("id", "s", "custom-filter-section")])
app = self.get_application()
filter_model = app.get_menu_by_id("folder-status-filter-menu")
replace_menu_section(filter_model, section)
def update_text_filters(self, settings):
filter_items_model = Gio.Menu()
for i, filt in enumerate(settings.text_filters):
name = TEXT_FILTER_ACTION_FORMAT.format(i)
filter_items_model.append(
label=filt.label, detailed_action=f'view.{name}')
section = Gio.MenuItem.new_section(None, filter_items_model)
section.set_attribute([("id", "s", "custom-filter-section")])
app = self.get_application()
filter_model = app.get_menu_by_id("text-filter-menu")
replace_menu_section(filter_model, section)
def on_widget_drag_data_received(
self, wid, context, x, y, selection_data, info, time):
uris = selection_data.get_uris()
if uris:
self.open_paths([Gio.File.new_for_uri(uri) for uri in uris])
return True
def on_idle(self):
ret = self.scheduler.iteration()
if ret and isinstance(ret, str):
self.spinner.set_tooltip_text(ret)
pending = self.scheduler.tasks_pending()
if not pending:
self.spinner.stop()
self.spinner.hide()
self.spinner.set_tooltip_text("")
self.idle_hooked = None
if self.is_quartz:
self.osx_dock_bounce()
# On window close, this idle loop races widget destruction,
# and so actions may already be gone at this point.
stop_action = self.lookup_action('stop')
if stop_action:
stop_action.set_enabled(False)
return pending
def on_scheduler_runnable(self, sched):
if not self.idle_hooked:
self.spinner.show()
self.spinner.start()
self.lookup_action('stop').set_enabled(True)
self.idle_hooked = GLib.idle_add(self.on_idle)
@Gtk.Template.Callback()
def on_delete_event(self, *extra):
should_cancel = False
# Delete pages from right-to-left. This ensures that if a version
# control page is open in the far left page, it will be closed last.
for page in reversed(self.notebook.get_children()):
self.notebook.set_current_page(self.notebook.page_num(page))
response = page.on_delete_event()
if response == Gtk.ResponseType.CANCEL:
should_cancel = True
should_cancel = should_cancel or self.has_pages()
if should_cancel:
self.should_close = True
return should_cancel
def has_pages(self):
return self.notebook.get_n_pages() > 0
def handle_current_doc_switch(self, page):
page.on_container_switch_out_event(self)
@Gtk.Template.Callback()
def on_switch_page(self, notebook, page, which):
oldidx = notebook.get_current_page()
if oldidx >= 0:
olddoc = notebook.get_nth_page(oldidx)
self.handle_current_doc_switch(olddoc)
newdoc = notebook.get_nth_page(which) if which >= 0 else None
self.lookup_action('close').set_enabled(bool(newdoc))
if newdoc:
nbl = self.notebook.get_tab_label(newdoc)
self.set_title(nbl.props.label_text)
else:
self.set_title("Meld")
if hasattr(newdoc, 'scheduler'):
self.scheduler.add_task(newdoc.scheduler)
self.view_toolbar.foreach(self.view_toolbar.remove)
if hasattr(newdoc, 'toolbar_actions'):
self.view_toolbar.add(newdoc.toolbar_actions)
@Gtk.Template.Callback()
def after_switch_page(self, notebook, page, which):
newdoc = notebook.get_nth_page(which)
newdoc.on_container_switch_in_event(self)
@Gtk.Template.Callback()
def on_page_label_changed(self, notebook, label_text):
self.set_title(label_text)
def action_new_tab(self, action, parameter):
self.append_new_comparison()
def action_close(self, *extra):
i = self.notebook.get_current_page()
if i >= 0:
page = self.notebook.get_nth_page(i)
page.on_delete_event()
def action_fullscreen_change(self, action, state):
if self.is_quartz:
self.osx_toggle_fullscreen()
return
window_state = self.get_window().get_state()
is_full = window_state & Gdk.WindowState.FULLSCREEN
action.set_state(state)
if state and not is_full:
self.fullscreen()
elif is_full:
self.unfullscreen()
def action_stop(self, *args):
# TODO: This is the only window-level action we have that still
# works on the "current" document like this.
self.current_doc().action_stop()
def page_removed(self, page, status):
if hasattr(page, 'scheduler'):
self.scheduler.remove_scheduler(page.scheduler)
page_num = self.notebook.page_num(page)
if self.notebook.get_current_page() == page_num:
self.handle_current_doc_switch(page)
self.notebook.remove_page(page_num)
# Normal switch-page handlers don't get run for removing the
# last page from a notebook.
if not self.has_pages():
self.on_switch_page(self.notebook, page, -1)
if self.should_close:
cancelled = self.emit(
'delete-event', Gdk.Event.new(Gdk.EventType.DELETE))
if not cancelled:
self.destroy()
def on_page_state_changed(self, page, old_state, new_state):
if self.should_close and old_state == ComparisonState.Closing:
# Cancel closing if one of our tabs does
self.should_close = False
def on_file_changed(self, srcpage, filename):
for page in self.notebook.get_children():
if page != srcpage:
page.on_file_changed(filename)
@Gtk.Template.Callback()
def on_open_recent(self, recent_selector, uri):
try:
self.append_recent(uri)
except (IOError, ValueError):
# FIXME: Need error handling, but no sensible display location
log.exception(f'Error opening recent file {uri}')
def _append_page(self, page):
nbl = NotebookLabel(page=page)
self.notebook.append_page(page, nbl)
self.notebook.child_set_property(page, 'tab-expand', True)
# Change focus to the newly created page only if the user is on a
# DirDiff or VcView page, or if it's a new tab page. This prevents
# cycling through X pages when X diffs are initiated.
if isinstance(self.current_doc(), DirDiff) or \
isinstance(self.current_doc(), VcView) or \
isinstance(page, NewDiffTab):
self.notebook.set_current_page(self.notebook.page_num(page))
if hasattr(page, 'scheduler'):
self.scheduler.add_scheduler(page.scheduler)
if isinstance(page, MeldDoc):
page.file_changed_signal.connect(self.on_file_changed)
page.create_diff_signal.connect(
lambda obj, arg, kwargs: self.append_diff(arg, **kwargs))
page.tab_state_changed.connect(self.on_page_state_changed)
page.close_signal.connect(self.page_removed)
self.notebook.set_tab_reorderable(page, True)
def append_new_comparison(self):
doc = NewDiffTab(self)
self._append_page(doc)
self.notebook.on_label_changed(doc, _("New comparison"), None)
def diff_created_cb(doc, newdoc):
doc.on_delete_event()
idx = self.notebook.page_num(newdoc)
self.notebook.set_current_page(idx)
doc.connect("diff-created", diff_created_cb)
return doc
def append_dirdiff(self, gfiles, auto_compare=False):
dirs = [d.get_path() if d else None for d in gfiles]
assert len(dirs) in (1, 2, 3)
doc = DirDiff(len(dirs))
self._append_page(doc)
doc.set_locations(dirs)
if auto_compare:
doc.scheduler.add_task(doc.auto_compare)
return doc
def append_filediff(
self, gfiles, *, encodings=None, merge_output=None, meta=None):
assert len(gfiles) in (1, 2, 3)
doc = FileDiff(len(gfiles))
self._append_page(doc)
doc.set_files(gfiles, encodings)
if merge_output is not None:
doc.set_merge_output_file(merge_output)
if meta is not None:
doc.set_meta(meta)
return doc
def append_filemerge(self, gfiles, merge_output=None):
if len(gfiles) != 3:
raise ValueError(
_("Need three files to auto-merge, got: %r") %
[f.get_parse_name() for f in gfiles])
doc = FileDiff(
len(gfiles), comparison_mode=FileComparisonMode.AutoMerge)
self._append_page(doc)
doc.set_files(gfiles)
if merge_output is not None:
doc.set_merge_output_file(merge_output)
return doc
def append_diff(self, gfiles, auto_compare=False, auto_merge=False,
merge_output=None, meta=None):
have_directories = False
have_files = False
for f in gfiles:
if f.query_file_type(
Gio.FileQueryInfoFlags.NONE, None) == Gio.FileType.DIRECTORY:
have_directories = True
else:
have_files = True
if have_directories and have_files:
raise ValueError(
_("Cannot compare a mixture of files and directories"))
elif have_directories:
return self.append_dirdiff(gfiles, auto_compare)
elif auto_merge:
return self.append_filemerge(gfiles, merge_output=merge_output)
else:
return self.append_filediff(
gfiles, merge_output=merge_output, meta=meta)
def append_vcview(self, location, auto_compare=False):
doc = VcView()
self._append_page(doc)
if isinstance(location, (list, tuple)):
location = location[0]
doc.set_location(location.get_path())
if auto_compare:
doc.scheduler.add_task(doc.auto_compare)
return doc
def append_recent(self, uri):
comparison_type, gfiles = recent_comparisons.read(uri)
comparison_method = {
RecentType.File: self.append_filediff,
RecentType.Folder: self.append_dirdiff,
RecentType.Merge: self.append_filemerge,
RecentType.VersionControl: self.append_vcview,
}
tab = comparison_method[comparison_type](gfiles)
self.notebook.set_current_page(self.notebook.page_num(tab))
recent_comparisons.add(tab)
return tab
def _single_file_open(self, gfile):
doc = VcView()
def cleanup():
self.scheduler.remove_scheduler(doc.scheduler)
self.scheduler.add_task(cleanup)
self.scheduler.add_scheduler(doc.scheduler)
path = gfile.get_path()
doc.set_location(path)
doc.create_diff_signal.connect(
lambda obj, arg, kwargs: self.append_diff(arg, **kwargs))
doc.run_diff(path)
def open_paths(self, gfiles, auto_compare=False, auto_merge=False,
focus=False):
tab = None
if len(gfiles) == 1:
a = gfiles[0]
if a.query_file_type(Gio.FileQueryInfoFlags.NONE, None) == \
Gio.FileType.DIRECTORY:
tab = self.append_vcview(a, auto_compare)
else:
self._single_file_open(a)
elif len(gfiles) in (2, 3):
tab = self.append_diff(gfiles, auto_compare=auto_compare,
auto_merge=auto_merge)
if tab:
recent_comparisons.add(tab)
if focus:
self.notebook.set_current_page(self.notebook.page_num(tab))
return tab
def current_doc(self):
"Get the current doc or a dummy object if there is no current"
index = self.notebook.get_current_page()
if index >= 0:
page = self.notebook.get_nth_page(index)
if isinstance(page, MeldDoc):
return page
class DummyDoc:
def __getattr__(self, a):
return lambda *x: None
return DummyDoc()
|
import asyncio
from functools import partial
import logging
from typing import Any, Dict, Optional
import voluptuous as vol
import homeassistant.components.persistent_notification as pn
from homeassistant.const import CONF_NAME, CONF_PLATFORM
from homeassistant.core import ServiceCall
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_per_platform, discovery
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.loader import bind_hass
from homeassistant.setup import async_prepare_setup_platform
from homeassistant.util import slugify
# mypy: allow-untyped-defs, no-check-untyped-defs
_LOGGER = logging.getLogger(__name__)
# Platform specific data
ATTR_DATA = "data"
# Text to notify user of
ATTR_MESSAGE = "message"
# Target of the notification (user, device, etc)
ATTR_TARGET = "target"
# Title of notification
ATTR_TITLE = "title"
ATTR_TITLE_DEFAULT = "Home Assistant"
DOMAIN = "notify"
SERVICE_NOTIFY = "notify"
SERVICE_PERSISTENT_NOTIFICATION = "persistent_notification"
NOTIFY_SERVICES = "notify_services"
PLATFORM_SCHEMA = vol.Schema(
{vol.Required(CONF_PLATFORM): cv.string, vol.Optional(CONF_NAME): cv.string},
extra=vol.ALLOW_EXTRA,
)
NOTIFY_SERVICE_SCHEMA = vol.Schema(
{
vol.Required(ATTR_MESSAGE): cv.template,
vol.Optional(ATTR_TITLE): cv.template,
vol.Optional(ATTR_TARGET): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(ATTR_DATA): dict,
}
)
PERSISTENT_NOTIFICATION_SERVICE_SCHEMA = vol.Schema(
{
vol.Required(ATTR_MESSAGE): cv.template,
vol.Optional(ATTR_TITLE): cv.template,
}
)
@bind_hass
async def async_reload(hass: HomeAssistantType, integration_name: str) -> None:
"""Register notify services for an integration."""
if not _async_integration_has_notify_services(hass, integration_name):
return
tasks = [
notify_service.async_register_services()
for notify_service in hass.data[NOTIFY_SERVICES][integration_name]
]
await asyncio.gather(*tasks)
@bind_hass
async def async_reset_platform(hass: HomeAssistantType, integration_name: str) -> None:
"""Unregister notify services for an integration."""
if not _async_integration_has_notify_services(hass, integration_name):
return
tasks = [
notify_service.async_unregister_services()
for notify_service in hass.data[NOTIFY_SERVICES][integration_name]
]
await asyncio.gather(*tasks)
del hass.data[NOTIFY_SERVICES][integration_name]
def _async_integration_has_notify_services(
hass: HomeAssistantType, integration_name: str
) -> bool:
"""Determine if an integration has notify services registered."""
if (
NOTIFY_SERVICES not in hass.data
or integration_name not in hass.data[NOTIFY_SERVICES]
):
return False
return True
class BaseNotificationService:
"""An abstract class for notification services."""
hass: Optional[HomeAssistantType] = None
def send_message(self, message, **kwargs):
"""Send a message.
kwargs can contain ATTR_TITLE to specify a title.
"""
raise NotImplementedError()
async def async_send_message(self, message: Any, **kwargs: Any) -> None:
"""Send a message.
kwargs can contain ATTR_TITLE to specify a title.
"""
await self.hass.async_add_executor_job(partial(self.send_message, message, **kwargs)) # type: ignore
async def _async_notify_message_service(self, service: ServiceCall) -> None:
"""Handle sending notification message service calls."""
kwargs = {}
message = service.data[ATTR_MESSAGE]
title = service.data.get(ATTR_TITLE)
if title:
title.hass = self.hass
kwargs[ATTR_TITLE] = title.async_render(parse_result=False)
if self._registered_targets.get(service.service) is not None:
kwargs[ATTR_TARGET] = [self._registered_targets[service.service]]
elif service.data.get(ATTR_TARGET) is not None:
kwargs[ATTR_TARGET] = service.data.get(ATTR_TARGET)
message.hass = self.hass
kwargs[ATTR_MESSAGE] = message.async_render(parse_result=False)
kwargs[ATTR_DATA] = service.data.get(ATTR_DATA)
await self.async_send_message(**kwargs)
async def async_setup(
self,
hass: HomeAssistantType,
service_name: str,
target_service_name_prefix: str,
) -> None:
"""Store the data for the notify service."""
# pylint: disable=attribute-defined-outside-init
self.hass = hass
self._service_name = service_name
self._target_service_name_prefix = target_service_name_prefix
self._registered_targets: Dict = {}
async def async_register_services(self) -> None:
"""Create or update the notify services."""
assert self.hass
if hasattr(self, "targets"):
stale_targets = set(self._registered_targets)
# pylint: disable=no-member
for name, target in self.targets.items(): # type: ignore
target_name = slugify(f"{self._target_service_name_prefix}_{name}")
if target_name in stale_targets:
stale_targets.remove(target_name)
if target_name in self._registered_targets:
continue
self._registered_targets[target_name] = target
self.hass.services.async_register(
DOMAIN,
target_name,
self._async_notify_message_service,
schema=NOTIFY_SERVICE_SCHEMA,
)
for stale_target_name in stale_targets:
del self._registered_targets[stale_target_name]
self.hass.services.async_remove(
DOMAIN,
stale_target_name,
)
if self.hass.services.has_service(DOMAIN, self._service_name):
return
self.hass.services.async_register(
DOMAIN,
self._service_name,
self._async_notify_message_service,
schema=NOTIFY_SERVICE_SCHEMA,
)
async def async_unregister_services(self) -> None:
"""Unregister the notify services."""
assert self.hass
if self._registered_targets:
remove_targets = set(self._registered_targets)
for remove_target_name in remove_targets:
del self._registered_targets[remove_target_name]
self.hass.services.async_remove(
DOMAIN,
remove_target_name,
)
if not self.hass.services.has_service(DOMAIN, self._service_name):
return
self.hass.services.async_remove(
DOMAIN,
self._service_name,
)
async def async_setup(hass, config):
"""Set up the notify services."""
hass.data.setdefault(NOTIFY_SERVICES, {})
async def persistent_notification(service: ServiceCall) -> None:
"""Send notification via the built-in persistsent_notify integration."""
payload = {}
message = service.data[ATTR_MESSAGE]
message.hass = hass
payload[ATTR_MESSAGE] = message.async_render(parse_result=False)
title = service.data.get(ATTR_TITLE)
if title:
title.hass = hass
payload[ATTR_TITLE] = title.async_render(parse_result=False)
await hass.services.async_call(
pn.DOMAIN, pn.SERVICE_CREATE, payload, blocking=True
)
async def async_setup_platform(
integration_name, p_config=None, discovery_info=None
):
"""Set up a notify platform."""
if p_config is None:
p_config = {}
platform = await async_prepare_setup_platform(
hass, config, DOMAIN, integration_name
)
if platform is None:
_LOGGER.error("Unknown notification service specified")
return
_LOGGER.info("Setting up %s.%s", DOMAIN, integration_name)
notify_service = None
try:
if hasattr(platform, "async_get_service"):
notify_service = await platform.async_get_service(
hass, p_config, discovery_info
)
elif hasattr(platform, "get_service"):
notify_service = await hass.async_add_executor_job(
platform.get_service, hass, p_config, discovery_info
)
else:
raise HomeAssistantError("Invalid notify platform.")
if notify_service is None:
# Platforms can decide not to create a service based
# on discovery data.
if discovery_info is None:
_LOGGER.error(
"Failed to initialize notification service %s", integration_name
)
return
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Error setting up platform %s", integration_name)
return
if discovery_info is None:
discovery_info = {}
conf_name = p_config.get(CONF_NAME) or discovery_info.get(CONF_NAME)
target_service_name_prefix = conf_name or integration_name
service_name = slugify(conf_name or SERVICE_NOTIFY)
await notify_service.async_setup(hass, service_name, target_service_name_prefix)
await notify_service.async_register_services()
hass.data[NOTIFY_SERVICES].setdefault(integration_name, []).append(
notify_service
)
hass.config.components.add(f"{DOMAIN}.{integration_name}")
return True
hass.services.async_register(
DOMAIN,
SERVICE_PERSISTENT_NOTIFICATION,
persistent_notification,
schema=PERSISTENT_NOTIFICATION_SERVICE_SCHEMA,
)
setup_tasks = [
async_setup_platform(integration_name, p_config)
for integration_name, p_config in config_per_platform(config, DOMAIN)
]
if setup_tasks:
await asyncio.wait(setup_tasks)
async def async_platform_discovered(platform, info):
"""Handle for discovered platform."""
await async_setup_platform(platform, discovery_info=info)
discovery.async_listen_platform(hass, DOMAIN, async_platform_discovered)
return True
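# Illustrative sketch (the class below is hypothetical, not part of this
# module): a notify platform typically exposes get_service()/async_get_service()
# returning a BaseNotificationService subclass that only has to implement
# send_message().
class _ExampleNotificationService(BaseNotificationService):
    """Log-only notification service, shown purely as a documentation example."""
    def send_message(self, message, **kwargs):
        """Log the message instead of delivering it anywhere."""
        _LOGGER.info(
            "Example notification: %s (title=%s)", message, kwargs.get(ATTR_TITLE)
        )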
|
from __future__ import absolute_import
import sys
from unittest import TestCase, main
from lark import Lark, Tree
from lark.lexer import Lexer, Token
import lark.lark as lark_module
try:
from StringIO import StringIO
except ImportError:
from io import BytesIO as StringIO
import tempfile, os
class MockFile(StringIO):
def close(self):
pass
def __enter__(self):
return self
def __exit__(self, *args):
pass
class MockFS:
def __init__(self):
self.files = {}
def open(self, name, mode=None):
if name not in self.files:
f = self.files[name] = MockFile()
else:
f = self.files[name]
f.seek(0)
return f
def exists(self, name):
return name in self.files
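# MockFile/MockFS stand in for lark's module-level FS helper (lark.lark.FS) so
# the cache tests below can capture cache reads and writes in memory instead
# of touching the real filesystem.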
class CustomLexer(Lexer):
def __init__(self, lexer_conf):
pass
def lex(self, data):
for obj in data:
yield Token('A', obj)
class TestCache(TestCase):
def setUp(self):
pass
def test_simple(self):
g = '''start: "a"'''
fn = "bla"
fs = lark_module.FS
mock_fs = MockFS()
try:
lark_module.FS = mock_fs
Lark(g, parser='lalr', cache=fn)
assert fn in mock_fs.files
parser = Lark(g, parser='lalr', cache=fn)
assert parser.parse('a') == Tree('start', [])
mock_fs.files = {}
assert len(mock_fs.files) == 0
Lark(g, parser='lalr', cache=True)
assert len(mock_fs.files) == 1
parser = Lark(g, parser='lalr', cache=True)
assert parser.parse('a') == Tree('start', [])
parser = Lark(g+' "b"', parser='lalr', cache=True)
assert len(mock_fs.files) == 2
assert parser.parse('ab') == Tree('start', [])
parser = Lark(g, parser='lalr', cache=True)
assert parser.parse('a') == Tree('start', [])
# Test with custom lexer
mock_fs.files = {}
parser = Lark(g, parser='lalr', lexer=CustomLexer, cache=True)
parser = Lark(g, parser='lalr', lexer=CustomLexer, cache=True)
assert len(mock_fs.files) == 1
assert parser.parse('a') == Tree('start', [])
# Test options persistence
mock_fs.files = {}
Lark(g, parser="lalr", debug=True, cache=True)
parser = Lark(g, parser="lalr", debug=True, cache=True)
assert parser.options.options['debug']
finally:
lark_module.FS = fs
if __name__ == '__main__':
main()
|
from pygal.test import make_data
def test_cubic(Chart, datas):
"""Test cubic interpolation"""
chart = Chart(interpolate='cubic')
chart = make_data(chart, datas)
assert chart.render()
def test_cubic_prec(Chart, datas):
"""Test cubic interpolation precision"""
chart = Chart(interpolate='cubic', interpolation_precision=200)
chart = make_data(chart, datas)
chart_low = Chart(interpolate='cubic', interpolation_precision=5)
    chart_low = make_data(chart_low, datas)
assert len(chart.render()) >= len(chart_low.render())
def test_quadratic(Chart, datas):
"""Test quadratic interpolation"""
chart = Chart(interpolate='quadratic')
chart = make_data(chart, datas)
assert chart.render()
def test_lagrange(Chart, datas):
"""Test lagrange interpolation"""
chart = Chart(interpolate='lagrange')
chart = make_data(chart, datas)
assert chart.render()
def test_trigonometric(Chart, datas):
"""Test trigonometric interpolation"""
chart = Chart(interpolate='trigonometric')
chart = make_data(chart, datas)
assert chart.render()
def test_hermite(Chart, datas):
"""Test hermite interpolation"""
chart = Chart(interpolate='hermite')
chart = make_data(chart, datas)
assert chart.render()
def test_hermite_finite(Chart, datas):
"""Test hermite finite difference interpolation"""
chart = Chart(
interpolate='hermite',
interpolation_parameters={'type': 'finite_difference'}
)
chart = make_data(chart, datas)
assert chart.render()
def test_hermite_cardinal(Chart, datas):
"""Test hermite cardinal interpolation"""
chart = Chart(
interpolate='hermite',
interpolation_parameters={
'type': 'cardinal',
'c': .75
}
)
chart = make_data(chart, datas)
assert chart.render()
def test_hermite_catmull_rom(Chart, datas):
"""Test hermite catmull rom interpolation"""
chart = Chart(
interpolate='hermite',
interpolation_parameters={'type': 'catmull_rom'}
)
chart = make_data(chart, datas)
assert chart.render()
def test_hermite_kochanek_bartels(Chart, datas):
"""Test hermite kochanek bartels interpolation"""
chart = Chart(
interpolate='hermite',
interpolation_parameters={
'type': 'kochanek_bartels',
'b': -1,
'c': 1,
't': 1
}
)
chart = make_data(chart, datas)
assert chart.render()
chart = Chart(
interpolate='hermite',
interpolation_parameters={
'type': 'kochanek_bartels',
'b': -1,
'c': -8,
't': 0
}
)
chart = make_data(chart, datas)
assert chart.render()
chart = Chart(
interpolate='hermite',
interpolation_parameters={
'type': 'kochanek_bartels',
'b': 0,
'c': 10,
't': -1
}
)
chart = make_data(chart, datas)
assert chart.render()
|
import os
from django.template.defaultfilters import slugify
def append_position(path, position, separator=''):
"""
Concatenate a path and a position,
between the filename and the extension.
"""
filename, extension = os.path.splitext(path)
return ''.join([filename, separator, str(position), extension])
def loop_template_list(loop_positions, instance, instance_type,
default_template, registry):
"""
Build a list of templates from a position within a loop
and a registry of templates.
"""
templates = []
local_loop_position = loop_positions[1]
global_loop_position = loop_positions[0]
instance_string = slugify(str(instance))
for key in ['%s-%s' % (instance_type, instance_string),
instance_string,
instance_type,
'default']:
try:
templates.append(registry[key][global_loop_position])
except KeyError:
pass
templates.append(
append_position(default_template, global_loop_position, '-'))
templates.append(
append_position(default_template, local_loop_position, '_'))
templates.append(default_template)
return templates
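# Illustrative usage (not part of the original module); the registry keys and
# template names below are made-up values:
#
#     registry = {'entry-my-post': {0: 'custom.html'}}
#     loop_template_list((0, 1), 'my post', 'entry',
#                        'zinnia/entry.html', registry)
#     # -> ['custom.html', 'zinnia/entry-0.html',
#     #     'zinnia/entry_1.html', 'zinnia/entry.html']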
|
import unittest
from lxml.tests.common_imports import make_doctest
import lxml.html
from lxml.html.clean import Cleaner, clean_html
class CleanerTest(unittest.TestCase):
def test_allow_tags(self):
html = """
<html>
<head>
</head>
<body>
<p>some text</p>
<table>
<tr>
<td>hello</td><td>world</td>
</tr>
<tr>
<td>hello</td><td>world</td>
</tr>
</table>
<img>
</body>
</html>
"""
html_root = lxml.html.document_fromstring(html)
cleaner = Cleaner(
remove_unknown_tags = False,
allow_tags = ['table', 'tr', 'td'])
result = cleaner.clean_html(html_root)
self.assertEqual(12-5+1, len(list(result.iter())))
def test_allow_and_remove(self):
with self.assertRaises(ValueError):
Cleaner(allow_tags=['a'], remove_unknown_tags=True)
def test_remove_unknown_tags(self):
html = """<div><bun>lettuce, tomato, veggie patty</bun></div>"""
clean_html = """<div>lettuce, tomato, veggie patty</div>"""
cleaner = Cleaner(remove_unknown_tags=True)
result = cleaner.clean_html(html)
self.assertEqual(
result,
clean_html,
msg="Unknown tags not removed. Got: %s" % result,
)
def test_safe_attrs_included(self):
html = """<p><span style="color: #00ffff;">Cyan</span></p>"""
safe_attrs=set(lxml.html.defs.safe_attrs)
safe_attrs.add('style')
cleaner = Cleaner(
safe_attrs_only=True,
safe_attrs=safe_attrs)
result = cleaner.clean_html(html)
self.assertEqual(html, result)
def test_safe_attrs_excluded(self):
html = """<p><span style="color: #00ffff;">Cyan</span></p>"""
expected = """<p><span>Cyan</span></p>"""
safe_attrs=set()
cleaner = Cleaner(
safe_attrs_only=True,
safe_attrs=safe_attrs)
result = cleaner.clean_html(html)
self.assertEqual(expected, result)
def test_clean_invalid_root_tag(self):
# only testing that cleaning with invalid root tags works at all
s = lxml.html.fromstring('parent <invalid tag>child</another>')
self.assertEqual('parent child', clean_html(s).text_content())
s = lxml.html.fromstring('<invalid tag>child</another>')
self.assertEqual('child', clean_html(s).text_content())
def test_clean_with_comments(self):
html = """<p><span style="color: #00ffff;">Cy<!-- xx -->an</span><!-- XXX --></p>"""
s = lxml.html.fragment_fromstring(html)
self.assertEqual(
b'<p><span>Cyan</span></p>',
lxml.html.tostring(clean_html(s)))
self.assertEqual(
'<p><span>Cyan</span></p>',
clean_html(html))
cleaner = Cleaner(comments=False)
result = cleaner.clean_html(s)
self.assertEqual(
b'<p><span>Cy<!-- xx -->an</span><!-- XXX --></p>',
lxml.html.tostring(result))
self.assertEqual(
'<p><span>Cy<!-- xx -->an</span><!-- XXX --></p>',
cleaner.clean_html(html))
def test_sneaky_noscript_in_style(self):
# This gets parsed as <noscript> -> <style>"...</noscript>..."</style>
# thus passing the </noscript> through into the output.
html = '<noscript><style><a title="</noscript><img src=x onerror=alert(1)>">'
s = lxml.html.fragment_fromstring(html)
self.assertEqual(
b'<noscript><style>/* deleted */</style></noscript>',
lxml.html.tostring(clean_html(s)))
def test_sneaky_js_in_math_style(self):
# This gets parsed as <math> -> <style>"..."</style>
# thus passing any tag/script/whatever content through into the output.
html = '<math><style><img src=x onerror=alert(1)></style></math>'
s = lxml.html.fragment_fromstring(html)
self.assertEqual(
b'<math><style>/* deleted */</style></math>',
lxml.html.tostring(clean_html(s)))
def test_suite():
suite = unittest.TestSuite()
suite.addTests([make_doctest('test_clean.txt')])
suite.addTests([make_doctest('test_clean_embed.txt')])
suite.addTests(unittest.makeSuite(CleanerTest))
return suite
|
import random
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_COLOR_TEMP,
ATTR_EFFECT,
ATTR_HS_COLOR,
ATTR_WHITE_VALUE,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_COLOR_TEMP,
SUPPORT_EFFECT,
SUPPORT_WHITE_VALUE,
LightEntity,
)
from . import DOMAIN
LIGHT_COLORS = [(56, 86), (345, 75)]
LIGHT_EFFECT_LIST = ["rainbow", "none"]
LIGHT_TEMPS = [240, 380]
SUPPORT_DEMO = (
SUPPORT_BRIGHTNESS | SUPPORT_COLOR_TEMP | SUPPORT_COLOR | SUPPORT_WHITE_VALUE
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the demo light platform."""
async_add_entities(
[
DemoLight(
unique_id="light_1",
name="Bed Light",
state=False,
available=True,
effect_list=LIGHT_EFFECT_LIST,
effect=LIGHT_EFFECT_LIST[0],
),
DemoLight(
unique_id="light_2",
name="Ceiling Lights",
state=True,
available=True,
ct=LIGHT_TEMPS[1],
),
DemoLight(
unique_id="light_3",
name="Kitchen Lights",
state=True,
available=True,
hs_color=LIGHT_COLORS[1],
ct=LIGHT_TEMPS[0],
),
]
)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Demo config entry."""
await async_setup_platform(hass, {}, async_add_entities)
class DemoLight(LightEntity):
"""Representation of a demo light."""
def __init__(
self,
unique_id,
name,
state,
available=False,
hs_color=None,
ct=None,
brightness=180,
white=200,
effect_list=None,
effect=None,
):
"""Initialize the light."""
self._unique_id = unique_id
self._name = name
self._state = state
self._hs_color = hs_color
self._ct = ct or random.choice(LIGHT_TEMPS)
self._brightness = brightness
self._white = white
self._features = SUPPORT_DEMO
self._effect_list = effect_list
self._effect = effect
self._available = True
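        # Start in color-temperature mode only when a color temperature was
        # given without an hs color; otherwise default to hs mode.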
self._color_mode = "ct" if ct is not None and hs_color is None else "hs"
if self._effect_list is not None:
self._features |= SUPPORT_EFFECT
@property
def device_info(self):
"""Return device info."""
return {
"identifiers": {
# Serial numbers are unique identifiers within a specific domain
(DOMAIN, self.unique_id)
},
"name": self.name,
}
@property
def should_poll(self) -> bool:
"""No polling needed for a demo light."""
return False
@property
def name(self) -> str:
"""Return the name of the light if any."""
return self._name
@property
def unique_id(self):
"""Return unique ID for light."""
return self._unique_id
@property
def available(self) -> bool:
"""Return availability."""
# This demo light is always available, but well-behaving components
# should implement this to inform Home Assistant accordingly.
return self._available
@property
def brightness(self) -> int:
"""Return the brightness of this light between 0..255."""
return self._brightness
@property
def hs_color(self) -> tuple:
"""Return the hs color value."""
if self._color_mode == "hs":
return self._hs_color
return None
@property
def color_temp(self) -> int:
"""Return the CT color temperature."""
if self._color_mode == "ct":
return self._ct
return None
@property
def white_value(self) -> int:
"""Return the white value of this light between 0..255."""
return self._white
@property
def effect_list(self) -> list:
"""Return the list of supported effects."""
return self._effect_list
@property
def effect(self) -> str:
"""Return the current effect."""
return self._effect
@property
def is_on(self) -> bool:
"""Return true if light is on."""
return self._state
@property
def supported_features(self) -> int:
"""Flag supported features."""
return self._features
async def async_turn_on(self, **kwargs) -> None:
"""Turn the light on."""
self._state = True
if ATTR_HS_COLOR in kwargs:
self._color_mode = "hs"
self._hs_color = kwargs[ATTR_HS_COLOR]
if ATTR_COLOR_TEMP in kwargs:
self._color_mode = "ct"
self._ct = kwargs[ATTR_COLOR_TEMP]
if ATTR_BRIGHTNESS in kwargs:
self._brightness = kwargs[ATTR_BRIGHTNESS]
if ATTR_WHITE_VALUE in kwargs:
self._white = kwargs[ATTR_WHITE_VALUE]
if ATTR_EFFECT in kwargs:
self._effect = kwargs[ATTR_EFFECT]
# As we have disabled polling, we need to inform
# Home Assistant about updates in our state ourselves.
self.async_write_ha_state()
async def async_turn_off(self, **kwargs) -> None:
"""Turn the light off."""
self._state = False
# As we have disabled polling, we need to inform
# Home Assistant about updates in our state ourselves.
self.async_write_ha_state()
|
import logging
from datetime import datetime
import numpy as np
import pandas as pd
import six
from pandas import to_datetime as dt
from .date import mktz
PD_VER = pd.__version__
logger = logging.getLogger(__name__)
# ----------------------- Grouping and Aggregating ---------------------------- #
def fancy_group_by(df, grouping_level=0, aggregate_level=1, method='last', max_=None, min_=None, within=None):
""" Dataframe group-by operation that supports aggregating by different methods on the index.
Parameters
----------
df: ``DataFrame``
Pandas dataframe with a MultiIndex
grouping_level: ``int`` or ``str`` or ``list`` of ``str``
Index level to group by. Defaults to 0.
aggregate_level: ``int`` or ``str``
Index level to aggregate by. Defaults to 1.
method: ``str``
Aggregation method. One of
last: Use the last (lexicographically) value from each group
first: Use the first value from each group
max_: <any>
If set, will limit results to those having aggregate level values <= this value
min_: <any>
If set, will limit results to those having aggregate level values >= this value
within: Any type supported by the index, or ``DateOffset``/timedelta-like for ``DatetimeIndex``.
If set, will limit results to those having aggregate level values within this range of the group value.
Note that this is currently unsupported for Multi-index of depth > 2
"""
if method not in ('first', 'last'):
raise ValueError('Invalid method')
if isinstance(aggregate_level, six.string_types):
aggregate_level = df.index.names.index(aggregate_level)
# Trim any rows outside the aggregate value bounds
if max_ is not None or min_ is not None or within is not None:
agg_idx = df.index.get_level_values(aggregate_level)
mask = np.full(len(agg_idx), True, dtype='b1')
if max_ is not None:
mask &= (agg_idx <= max_)
if min_ is not None:
mask &= (agg_idx >= min_)
if within is not None:
group_idx = df.index.get_level_values(grouping_level)
if isinstance(agg_idx, pd.DatetimeIndex):
mask &= (group_idx >= agg_idx.shift(-1, freq=within))
else:
mask &= (group_idx >= (agg_idx - within))
df = df.loc[mask]
# The sort order must be correct in order of grouping_level -> aggregate_level for the aggregation methods
# to work properly. We can check the sortdepth to see if this is in fact the case and resort if necessary.
# TODO: this might need tweaking if the levels are around the wrong way
if df.index.lexsort_depth < (aggregate_level + 1):
df = df.sort_index(level=grouping_level)
gb = df.groupby(level=grouping_level)
if method == 'last':
return gb.last()
return gb.first()
# --------- Common as-of-date use case -------------- #
def groupby_asof(df, as_of=None, dt_col='sample_dt', asof_col='observed_dt'):
''' Common use case for selecting the latest rows from a bitemporal dataframe as-of a certain date.
Parameters
----------
df: ``pd.DataFrame``
Dataframe with a MultiIndex index
as_of: ``datetime``
Return a timeseries with values observed <= this as-of date. By default, the latest observed
values will be returned.
dt_col: ``str`` or ``int``
Name or index of the column in the MultiIndex that is the sample date
asof_col: ``str`` or ``int``
Name or index of the column in the MultiIndex that is the observed date
'''
if as_of:
if as_of.tzinfo is None and df.index.get_level_values(asof_col).tz is not None:
as_of = as_of.replace(tzinfo=mktz())
return fancy_group_by(df,
grouping_level=dt_col,
aggregate_level=asof_col,
method='last',
max_=as_of)
# ----------------------- Insert/Append ---------------------------- #
def multi_index_insert_row(df, index_row, values_row):
""" Return a new dataframe with a row inserted for a multi-index dataframe.
This will sort the rows according to the ordered multi-index levels.
"""
if PD_VER < '0.24.0':
row_index = pd.MultiIndex(levels=[[i] for i in index_row],
labels=[[0] for i in index_row])
else:
row_index = pd.MultiIndex(levels=[[i] for i in index_row],
codes=[[0] for i in index_row])
row = pd.DataFrame(values_row, index=row_index, columns=df.columns)
df = pd.concat((df, row))
if df.index.lexsort_depth == len(index_row) and df.index[-2] < df.index[-1]:
# We've just appended a row to an already-sorted dataframe
return df
# The df wasn't sorted or the row has to be put in the middle somewhere
return df.sort_index()
def insert_at(df, sample_date, values):
""" Insert some values into a bi-temporal dataframe.
This is like what would happen when we get a price correction.
"""
observed_dt = dt(datetime.now())
return multi_index_insert_row(df, [sample_date, observed_dt], values)
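# Illustrative usage (not part of the original module); the index level names
# match groupby_asof's defaults and the values are made up:
#
#     idx = pd.MultiIndex.from_tuples(
#         [(pd.Timestamp('2020-01-01'), pd.Timestamp('2020-01-02'))],
#         names=['sample_dt', 'observed_dt'])
#     df = pd.DataFrame({'price': [100.0]}, index=idx)
#     df = insert_at(df, pd.Timestamp('2020-01-01'), {'price': [101.0]})
#     groupby_asof(df)  # one row per sample_dt with the latest observed value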
|
import json
from typing import Dict
from .model import Integration
def validate_json_files(integration: Integration):
"""Validate JSON files for integration."""
for json_file in integration.path.glob("**/*.json"):
if not json_file.is_file():
continue
try:
json.loads(json_file.read_text())
except json.JSONDecodeError:
relative_path = json_file.relative_to(integration.path)
integration.add_error("json", f"Invalid JSON file {relative_path}")
return
def validate(integrations: Dict[str, Integration], config):
"""Handle JSON files inside integrations."""
if not config.specific_integrations:
return
for integration in integrations.values():
if not integration.manifest:
continue
validate_json_files(integration)
|
import re
import tarfile
def _proc_pax(self, filetar):
"""Process an extended or global header as described in POSIX.1-2001."""
# Read the header information.
buf = filetar.fileobj.read(self._block(self.size))
# A pax header stores supplemental information for either
# the following file (extended) or all following files
# (global).
if self.type == tarfile.XGLTYPE:
pax_headers = filetar.pax_headers
else:
pax_headers = filetar.pax_headers.copy()
    # Parse pax header information. A record looks like this:
# "%d %s=%s\n" % (length, keyword, value). length is the size
# of the complete record including the length field itself and
# the newline. keyword and value are both UTF-8 encoded strings.
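    # For example, the 30-byte record b"30 mtime=1350244989.123456789\n"
    # parses to {"mtime": "1350244989.123456789"}.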
regex = re.compile(r"(\d+) ([^=]+)=", re.U)
pos = 0
while True:
match = regex.match(buf, pos)
if not match:
break
length, keyword = match.groups()
length = int(length)
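        # The value spans from just after the '=' to the newline terminating
        # the record; 'length' counts the whole record, including the leading
        # length digits, so match.start(1) + length - 1 is the newline offset.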
value = buf[match.end(2) + 1:match.start(1) + length - 1]
try:
keyword = keyword.decode("utf8")
except Exception:
# just leave the raw bytes
pass
try:
value = value.decode("utf8")
except Exception:
# just leave the raw bytes
pass
pax_headers[keyword] = value
pos += length
# Fetch the next header.
try:
next = self.fromtarfile(filetar)
except tarfile.HeaderError:
raise tarfile.SubsequentHeaderError("missing or bad subsequent header")
if self.type in (tarfile.XHDTYPE, tarfile.SOLARIS_XHDTYPE):
# Patch the TarInfo object with the extended header info.
next._apply_pax_info(pax_headers, filetar.encoding, filetar.errors)
next.offset = self.offset
if "size" in pax_headers:
# If the extended header replaces the size field,
# we need to recalculate the offset where the next
# header starts.
offset = next.offset_data
if next.isreg() or next.type not in tarfile.SUPPORTED_TYPES:
offset += next._block(next.size)
filetar.offset = offset
return next
tarfile.TarInfo._proc_pax = _proc_pax
|
from datetime import date, datetime
import logging
from requests.exceptions import ConnectTimeout, HTTPError
import solaredge
from stringcase import snakecase
from homeassistant.const import CONF_API_KEY, DEVICE_CLASS_BATTERY, DEVICE_CLASS_POWER
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
from .const import (
CONF_SITE_ID,
DETAILS_UPDATE_DELAY,
ENERGY_DETAILS_DELAY,
INVENTORY_UPDATE_DELAY,
OVERVIEW_UPDATE_DELAY,
POWER_FLOW_UPDATE_DELAY,
SENSOR_TYPES,
)
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, entry, async_add_entities):
"""Add an solarEdge entry."""
# Add the needed sensors to hass
api = solaredge.Solaredge(entry.data[CONF_API_KEY])
# Check if api can be reached and site is active
try:
response = await hass.async_add_executor_job(
api.get_details, entry.data[CONF_SITE_ID]
)
if response["details"]["status"].lower() != "active":
_LOGGER.error("SolarEdge site is not active")
return
_LOGGER.debug("Credentials correct and site is active")
except KeyError:
_LOGGER.error("Missing details data in SolarEdge response")
return
except (ConnectTimeout, HTTPError):
_LOGGER.error("Could not retrieve details from SolarEdge API")
return
sensor_factory = SolarEdgeSensorFactory(entry.title, entry.data[CONF_SITE_ID], api)
entities = []
for sensor_key in SENSOR_TYPES:
sensor = sensor_factory.create_sensor(sensor_key)
if sensor is not None:
entities.append(sensor)
async_add_entities(entities)
class SolarEdgeSensorFactory:
"""Factory which creates sensors based on the sensor_key."""
def __init__(self, platform_name, site_id, api):
"""Initialize the factory."""
self.platform_name = platform_name
details = SolarEdgeDetailsDataService(api, site_id)
overview = SolarEdgeOverviewDataService(api, site_id)
inventory = SolarEdgeInventoryDataService(api, site_id)
flow = SolarEdgePowerFlowDataService(api, site_id)
energy = SolarEdgeEnergyDetailsService(api, site_id)
self.services = {"site_details": (SolarEdgeDetailsSensor, details)}
for key in [
"lifetime_energy",
"energy_this_year",
"energy_this_month",
"energy_today",
"current_power",
]:
self.services[key] = (SolarEdgeOverviewSensor, overview)
for key in ["meters", "sensors", "gateways", "batteries", "inverters"]:
self.services[key] = (SolarEdgeInventorySensor, inventory)
for key in ["power_consumption", "solar_power", "grid_power", "storage_power"]:
self.services[key] = (SolarEdgePowerFlowSensor, flow)
for key in ["storage_level"]:
self.services[key] = (SolarEdgeStorageLevelSensor, flow)
for key in [
"purchased_power",
"production_power",
"feedin_power",
"consumption_power",
"selfconsumption_power",
]:
self.services[key] = (SolarEdgeEnergyDetailsSensor, energy)
def create_sensor(self, sensor_key):
"""Create and return a sensor based on the sensor_key."""
sensor_class, service = self.services[sensor_key]
return sensor_class(self.platform_name, sensor_key, service)
class SolarEdgeSensor(Entity):
"""Abstract class for a solaredge sensor."""
def __init__(self, platform_name, sensor_key, data_service):
"""Initialize the sensor."""
self.platform_name = platform_name
self.sensor_key = sensor_key
self.data_service = data_service
self._state = None
self._unit_of_measurement = SENSOR_TYPES[self.sensor_key][2]
self._icon = SENSOR_TYPES[self.sensor_key][3]
@property
def name(self):
"""Return the name."""
return "{} ({})".format(self.platform_name, SENSOR_TYPES[self.sensor_key][1])
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return self._unit_of_measurement
@property
def icon(self):
"""Return the sensor icon."""
return self._icon
@property
def state(self):
"""Return the state of the sensor."""
return self._state
class SolarEdgeOverviewSensor(SolarEdgeSensor):
"""Representation of an SolarEdge Monitoring API overview sensor."""
def __init__(self, platform_name, sensor_key, data_service):
"""Initialize the overview sensor."""
super().__init__(platform_name, sensor_key, data_service)
self._json_key = SENSOR_TYPES[self.sensor_key][0]
def update(self):
"""Get the latest data from the sensor and update the state."""
self.data_service.update()
self._state = self.data_service.data.get(self._json_key)
class SolarEdgeDetailsSensor(SolarEdgeSensor):
"""Representation of an SolarEdge Monitoring API details sensor."""
def __init__(self, platform_name, sensor_key, data_service):
"""Initialize the details sensor."""
super().__init__(platform_name, sensor_key, data_service)
self._attributes = {}
@property
def device_state_attributes(self):
"""Return the state attributes."""
return self._attributes
def update(self):
"""Get the latest details and update state and attributes."""
self.data_service.update()
self._state = self.data_service.data
self._attributes = self.data_service.attributes
class SolarEdgeInventorySensor(SolarEdgeSensor):
"""Representation of an SolarEdge Monitoring API inventory sensor."""
def __init__(self, platform_name, sensor_key, data_service):
"""Initialize the inventory sensor."""
super().__init__(platform_name, sensor_key, data_service)
self._json_key = SENSOR_TYPES[self.sensor_key][0]
self._attributes = {}
@property
def device_state_attributes(self):
"""Return the state attributes."""
return self._attributes
def update(self):
"""Get the latest inventory data and update state and attributes."""
self.data_service.update()
self._state = self.data_service.data.get(self._json_key)
self._attributes = self.data_service.attributes.get(self._json_key)
class SolarEdgeEnergyDetailsSensor(SolarEdgeSensor):
"""Representation of an SolarEdge Monitoring API power flow sensor."""
def __init__(self, platform_name, sensor_key, data_service):
"""Initialize the power flow sensor."""
super().__init__(platform_name, sensor_key, data_service)
self._json_key = SENSOR_TYPES[self.sensor_key][0]
self._attributes = {}
@property
def device_state_attributes(self):
"""Return the state attributes."""
return self._attributes
def update(self):
"""Get the latest inventory data and update state and attributes."""
self.data_service.update()
self._state = self.data_service.data.get(self._json_key)
self._attributes = self.data_service.attributes.get(self._json_key)
self._unit_of_measurement = self.data_service.unit
class SolarEdgePowerFlowSensor(SolarEdgeSensor):
"""Representation of an SolarEdge Monitoring API power flow sensor."""
def __init__(self, platform_name, sensor_key, data_service):
"""Initialize the power flow sensor."""
super().__init__(platform_name, sensor_key, data_service)
self._json_key = SENSOR_TYPES[self.sensor_key][0]
self._attributes = {}
@property
def device_state_attributes(self):
"""Return the state attributes."""
return self._attributes
@property
def device_class(self):
"""Device Class."""
return DEVICE_CLASS_POWER
def update(self):
"""Get the latest inventory data and update state and attributes."""
self.data_service.update()
self._state = self.data_service.data.get(self._json_key)
self._attributes = self.data_service.attributes.get(self._json_key)
self._unit_of_measurement = self.data_service.unit
class SolarEdgeStorageLevelSensor(SolarEdgeSensor):
"""Representation of an SolarEdge Monitoring API storage level sensor."""
def __init__(self, platform_name, sensor_key, data_service):
"""Initialize the storage level sensor."""
super().__init__(platform_name, sensor_key, data_service)
self._json_key = SENSOR_TYPES[self.sensor_key][0]
@property
def device_class(self):
"""Return the device_class of the device."""
return DEVICE_CLASS_BATTERY
def update(self):
"""Get the latest inventory data and update state and attributes."""
self.data_service.update()
attr = self.data_service.attributes.get(self._json_key)
if attr and "soc" in attr:
self._state = attr["soc"]
class SolarEdgeDataService:
"""Get and update the latest data."""
def __init__(self, api, site_id):
"""Initialize the data object."""
self.api = api
self.site_id = site_id
self.data = {}
self.attributes = {}
class SolarEdgeOverviewDataService(SolarEdgeDataService):
"""Get and update the latest overview data."""
@Throttle(OVERVIEW_UPDATE_DELAY)
def update(self):
"""Update the data from the SolarEdge Monitoring API."""
try:
data = self.api.get_overview(self.site_id)
overview = data["overview"]
except KeyError:
_LOGGER.error("Missing overview data, skipping update")
return
except (ConnectTimeout, HTTPError):
_LOGGER.error("Could not retrieve data, skipping update")
return
self.data = {}
for key, value in overview.items():
if key in ["lifeTimeData", "lastYearData", "lastMonthData", "lastDayData"]:
data = value["energy"]
elif key in ["currentPower"]:
data = value["power"]
else:
data = value
self.data[key] = data
_LOGGER.debug("Updated SolarEdge overview: %s", self.data)
class SolarEdgeDetailsDataService(SolarEdgeDataService):
"""Get and update the latest details data."""
def __init__(self, api, site_id):
"""Initialize the details data service."""
super().__init__(api, site_id)
self.data = None
@Throttle(DETAILS_UPDATE_DELAY)
def update(self):
"""Update the data from the SolarEdge Monitoring API."""
try:
data = self.api.get_details(self.site_id)
details = data["details"]
except KeyError:
_LOGGER.error("Missing details data, skipping update")
return
except (ConnectTimeout, HTTPError):
_LOGGER.error("Could not retrieve data, skipping update")
return
self.data = None
self.attributes = {}
for key, value in details.items():
key = snakecase(key)
if key in ["primary_module"]:
for module_key, module_value in value.items():
self.attributes[snakecase(module_key)] = module_value
elif key in [
"peak_power",
"type",
"name",
"last_update_time",
"installation_date",
]:
self.attributes[key] = value
elif key == "status":
self.data = value
_LOGGER.debug("Updated SolarEdge details: %s, %s", self.data, self.attributes)
class SolarEdgeInventoryDataService(SolarEdgeDataService):
"""Get and update the latest inventory data."""
@Throttle(INVENTORY_UPDATE_DELAY)
def update(self):
"""Update the data from the SolarEdge Monitoring API."""
try:
data = self.api.get_inventory(self.site_id)
inventory = data["Inventory"]
except KeyError:
_LOGGER.error("Missing inventory data, skipping update")
return
except (ConnectTimeout, HTTPError):
_LOGGER.error("Could not retrieve data, skipping update")
return
self.data = {}
self.attributes = {}
for key, value in inventory.items():
self.data[key] = len(value)
self.attributes[key] = {key: value}
_LOGGER.debug("Updated SolarEdge inventory: %s, %s", self.data, self.attributes)
class SolarEdgeEnergyDetailsService(SolarEdgeDataService):
"""Get and update the latest power flow data."""
def __init__(self, api, site_id):
"""Initialize the power flow data service."""
super().__init__(api, site_id)
self.unit = None
@Throttle(ENERGY_DETAILS_DELAY)
def update(self):
"""Update the data from the SolarEdge Monitoring API."""
try:
now = datetime.now()
today = date.today()
midnight = datetime.combine(today, datetime.min.time())
data = self.api.get_energy_details(
self.site_id,
midnight,
now.strftime("%Y-%m-%d %H:%M:%S"),
meters=None,
time_unit="DAY",
)
energy_details = data["energyDetails"]
except KeyError:
_LOGGER.error("Missing power flow data, skipping update")
return
except (ConnectTimeout, HTTPError):
_LOGGER.error("Could not retrieve data, skipping update")
return
if "meters" not in energy_details:
_LOGGER.debug(
"Missing meters in energy details data. Assuming site does not have any"
)
return
self.data = {}
self.attributes = {}
self.unit = energy_details["unit"]
meters = energy_details["meters"]
for entity in meters:
for key, data in entity.items():
if key == "type" and data in [
"Production",
"SelfConsumption",
"FeedIn",
"Purchased",
"Consumption",
]:
energy_type = data
if key == "values":
for row in data:
self.data[energy_type] = row["value"]
self.attributes[energy_type] = {"date": row["date"]}
_LOGGER.debug(
"Updated SolarEdge energy details: %s, %s", self.data, self.attributes
)
class SolarEdgePowerFlowDataService(SolarEdgeDataService):
"""Get and update the latest power flow data."""
def __init__(self, api, site_id):
"""Initialize the power flow data service."""
super().__init__(api, site_id)
self.unit = None
@Throttle(POWER_FLOW_UPDATE_DELAY)
def update(self):
"""Update the data from the SolarEdge Monitoring API."""
try:
data = self.api.get_current_power_flow(self.site_id)
power_flow = data["siteCurrentPowerFlow"]
except KeyError:
_LOGGER.error("Missing power flow data, skipping update")
return
except (ConnectTimeout, HTTPError):
_LOGGER.error("Could not retrieve data, skipping update")
return
power_from = []
power_to = []
if "connections" not in power_flow:
_LOGGER.debug(
"Missing connections in power flow data. Assuming site does not have any"
)
return
for connection in power_flow["connections"]:
power_from.append(connection["from"].lower())
power_to.append(connection["to"].lower())
self.data = {}
self.attributes = {}
self.unit = power_flow["unit"]
for key, value in power_flow.items():
if key in ["LOAD", "PV", "GRID", "STORAGE"]:
self.data[key] = value["currentPower"]
self.attributes[key] = {"status": value["status"]}
if key in ["GRID"]:
export = key.lower() in power_to
self.data[key] *= -1 if export else 1
self.attributes[key]["flow"] = "export" if export else "import"
if key in ["STORAGE"]:
charge = key.lower() in power_to
self.data[key] *= -1 if charge else 1
self.attributes[key]["flow"] = "charge" if charge else "discharge"
self.attributes[key]["soc"] = value["chargeLevel"]
_LOGGER.debug(
"Updated SolarEdge power flow: %s, %s", self.data, self.attributes
)
|
import multiprocessing
import pytest
import http.server
import socketserver
from urllib.request import urlopen
# Internal imports
import vcr
# Conditional imports
requests = pytest.importorskip("requests")
class Proxy(http.server.SimpleHTTPRequestHandler):
"""
Simple proxy server.
(Inspired by: http://effbot.org/librarybook/simplehttpserver.htm).
"""
def do_GET(self):
upstream_response = urlopen(self.path)
try:
status = upstream_response.status
headers = upstream_response.headers.items()
except AttributeError:
# In Python 2 the response is an addinfourl instance.
status = upstream_response.code
headers = upstream_response.info().items()
self.send_response(status, upstream_response.msg)
for header in headers:
self.send_header(*header)
self.end_headers()
self.copyfile(upstream_response, self.wfile)
@pytest.yield_fixture(scope="session")
def proxy_server():
httpd = socketserver.ThreadingTCPServer(("", 0), Proxy)
proxy_process = multiprocessing.Process(target=httpd.serve_forever)
proxy_process.start()
yield "http://{}:{}".format(*httpd.server_address)
proxy_process.terminate()
def test_use_proxy(tmpdir, httpbin, proxy_server):
"""Ensure that it works with a proxy."""
with vcr.use_cassette(str(tmpdir.join("proxy.yaml"))):
response = requests.get(httpbin.url, proxies={"http": proxy_server})
with vcr.use_cassette(str(tmpdir.join("proxy.yaml"))) as cassette:
cassette_response = requests.get(httpbin.url, proxies={"http": proxy_server})
assert cassette_response.headers == response.headers
assert cassette.play_count == 1
|
try:
from http import HTTPStatus
except ImportError:
import httplib as HTTPStatus
from flask import Blueprint
from flask import Flask
from flask import jsonify
from flask import request
from flasgger import Schema
from flasgger import Swagger
from flasgger import SwaggerView
from flasgger import fields
from flasgger import swag_from
from flasgger import validate
# Examples include intentionally invalid defaults to demonstrate validation.
_TEST_META_SKIP_FULL_VALIDATION = True
app = Flask(__name__)
swag = Swagger(app)
test_specs_1 = {
"tags": [
"users"
],
"parameters": [
{
"name": "body",
"in": "body",
"required": True,
"schema": {
"id": "User",
"required": [
"username",
"age"
],
"properties": {
"username": {
"type": "string",
"description": "The user name.",
"default": "Sirius Black"
},
"age": {
"type": "integer",
"description": "The user age (should be integer)",
"default": "180"
},
"tags": {
"type": "array",
"description": "optional list of tags",
"default": [
"wizard",
"hogwarts",
"dead"
],
"items": {
"type": "string"
}
}
}
}
}
],
"responses": {
"200": {
"description": "A single user item",
"schema": {
"$ref": "#/definitions/User"
}
}
}
}
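# Note: the "age" default above is deliberately the string "180" rather than
# an integer, so the example payload generated from this spec fails
# validation, matching the intentionally invalid defaults mentioned earlier.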
@app.route("/manualvalidation", methods=['POST'])
@swag_from("test_validation.yml")
def manualvalidation():
"""
In this example you need to call validate() manually
passing received data, Definition (schema: id), specs filename
"""
data = request.json
validate(data, 'User', "test_validation.yml")
return jsonify(data)
@app.route("/validateannotation", methods=['POST'])
@swag.validate('User')
@swag_from("test_validation.yml")
def validateannotation():
"""
In this example you use validate(schema_id) annotation on the
method in which you want to validate received data
"""
data = request.json
return jsonify(data)
@app.route("/autovalidation", methods=['POST'])
@swag_from("test_validation.yml", validation=True)
def autovalidation():
"""
Example using auto validation from yaml file.
In this example you don't need to call validate() because
`validation=True` on @swag_from does that for you.
In this case it will use the same provided filename
and will extract the schema from `in: body` definition
and the data will default to `request.json`
or you can specify:
@swag_from('file.yml',
validation=True,
definition='User',
data=lambda: request.json, # any callable
)
"""
data = request.json
return jsonify(data)
@app.route("/autovalidationfromspecdict", methods=['POST'])
@swag_from(test_specs_1, validation=True)
def autovalidation_from_spec_dict():
"""
Example using data from dict to validate.
In this example you don't need to call validate() because
`validation=True` on @swag_from does that for you.
In this case it will use the same provided filename
and will extract the schema from `in: body` definition
and the data will default to `request.json`
or you can specify:
@swag_from('file.yml',
validation=True,
definition='User',
data=lambda: request.json, # any callable
)
"""
data = request.json
return jsonify(data)
class User(Schema):
username = fields.Str(required=True, default="Sirius Black")
# wrong default "180" to force validation error
age = fields.Int(required=True, min=18, default="180")
tags = fields.List(fields.Str(), default=["wizard", "hogwarts", "dead"])
class UserPostView(SwaggerView):
tags = ['users']
parameters = User
responses = {
200: {
'description': 'A single user item',
'schema': User
}
}
validation = True
def post(self):
"""
Example using marshmallow Schema
validation=True forces validation of parameters in body
---
# This value overwrites the attributes above
deprecated: true
"""
return jsonify(request.json)
app.add_url_rule(
'/schemevalidation',
view_func=UserPostView.as_view('schemevalidation'),
methods=['POST']
)
# ensure the same works for blueprints
example_blueprint = Blueprint(
"example", __name__, url_prefix='/blueprint')
@example_blueprint.route("/autovalidationfromdocstring", methods=['POST'])
@swag.validate('Officer')
def autovalidation_from_docstring():
"""
Test validation using JsonSchema
The default payload is invalid, try it, then change the age to a
valid integer and try again
---
tags:
- officer
parameters:
- name: body
in: body
required: true
schema:
id: Officer
required:
- name
- age
properties:
name:
type: string
description: The officer's name.
default: "James T. Kirk"
age:
type: integer
description: The officer's age (should be integer)
default: "138"
tags:
type: array
description: optional list of tags
default: ["starfleet", "captain", "enterprise", "dead"]
items:
type: string
responses:
200:
description: A single officer item
schema:
$ref: '#/definitions/Officer'
"""
data = request.json
return jsonify(data)
@example_blueprint.route('/manualvalidation', methods=['POST'])
@swag_from("test_validation.yml")
def manualvalidation_bp():
"""
In this example you need to call validate() manually
passing received data, Definition (schema: id), specs filename
"""
data = request.json
validate(data, 'User', "test_validation.yml")
return jsonify(data)
@example_blueprint.route('/autovalidation', methods=['POST'])
@swag_from("test_validation.yml", validation=True)
def autovalidation_bp():
"""
Example using auto validation from yaml file.
In this example you don't need to call validate() because
`validation=True` on @swag_from does that for you.
In this case it will use the same provided filename
and will extract the schema from `in: body` definition
and the data will default to `request.json`
or you can specify:
@swag_from('file.yml',
validation=True,
definition='User',
data=lambda: request.json, # any callable
)
"""
data = request.json
return jsonify(data)
@example_blueprint.route("/autovalidationfromspecdict", methods=['POST'])
@swag_from(test_specs_1, validation=True)
def autovalidation_from_spec_dict_bp():
"""
Example using data from dict to validate.
In this example you don't need to call validate() because
`validation=True` on @swag_from does that for you.
In this case it will use the same provided filename
and will extract the schema from `in: body` definition
and the data will default to `request.json`
or you can specify:
@swag_from('file.yml',
validation=True,
definition='User',
data=lambda: request.json, # any callable
)
"""
data = request.json
return jsonify(data)
class BPUserPostView(SwaggerView):
tags = ['users']
parameters = User
responses = {
200: {
'description': 'A single user item',
'schema': User
}
}
validation = True
def post(self):
"""
Example using marshmallow Schema
validation=True forces validation of parameters in body
---
# This value overwrites the attributes above
deprecated: true
"""
return jsonify(request.json)
example_blueprint.add_url_rule(
'/schemevalidation',
view_func=BPUserPostView.as_view('schemevalidation'),
methods=['POST']
)
app.register_blueprint(example_blueprint)
def test_swag(client, specs_data):
"""
    This test runs automatically in Travis CI
:param client: Flask app test client
:param specs_data: {'url': {swag_specs}} for every spec in app
"""
apispec = specs_data.get('/apispec_1.json')
assert apispec is not None
paths = apispec.get('paths')
expected_user_paths = (
'/autovalidation',
'/validateannotation',
'/autovalidationfromspecdict',
'/blueprint/autovalidation',
'/blueprint/autovalidationfromspecdict',
'/blueprint/manualvalidation',
'/blueprint/schemevalidation',
'/manualvalidation',
'/schemevalidation',
)
expected_officer_paths = (
'/blueprint/autovalidationfromdocstring',
)
invalid_users = (
"""
{
"username": "Sirius Black",
"age": "180",
"tags": [
"wizard",
"hogwarts",
"dead"
]
}
""",
"""
{
"age": 180,
"tags": [
"wizard"
]
}
""",
)
valid_users = (
"""
{
"username": "Sirius Black",
"age": 180,
"tags": [
"wizard",
"hogwarts",
"dead"
]
}
""",
"""
{
"username": "Ronald Weasley",
"age": 22
}
""",
)
invalid_officers = (
"""
{
"name": "James T. Kirk",
"age": "138",
"tags": [
"captain",
"enterprise",
"dead"
]
}
""",
"""
{
"age": 138,
"tags": [
"captain"
]
}
""",
)
valid_officers = (
"""
{
"name": "James T. Kirk",
"age": 138,
"tags": [
"captain",
"enterprise",
"dead"
]
}
""",
"""
{
"name": "Jean-Luc Picard",
"age": 60
}
""",
)
assert paths is not None and len(paths) > 0
definitions = apispec.get('definitions')
assert definitions is not None
assert definitions.get('User') is not None
assert definitions.get('Officer') is not None
for expected_path in expected_user_paths:
assert paths.get(expected_path) is not None
for invalid_user in invalid_users:
response = client.post(
expected_path, data=invalid_user,
content_type='application/json')
assert response.status_code == HTTPStatus.BAD_REQUEST
for valid_user in valid_users:
response = client.post(
expected_path, data=valid_user,
content_type='application/json')
assert response.status_code == HTTPStatus.OK
for expected_path in expected_officer_paths:
assert paths.get(expected_path) is not None
for invalid_officer in invalid_officers:
response = client.post(
expected_path, data=invalid_officer,
content_type='application/json')
assert response.status_code == HTTPStatus.BAD_REQUEST
for valid_officer in valid_officers:
response = client.post(
expected_path, data=valid_officer,
content_type='application/json')
assert response.status_code == HTTPStatus.OK
if __name__ == "__main__":
app.run(debug=True)
|
import posixpath
import unittest
from absl import flags
import mock
from perfkitbenchmarker import linux_packages
from perfkitbenchmarker.linux_packages import maven
from tests import pkb_common_test_case
FLAGS = flags.FLAGS
class MavenTest(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(MavenTest, self).setUp()
self.vm = mock.Mock()
self.vm.RemoteCommand.return_value = ('/home/jre/java', '')
def assertCallArgsEqual(self, call_args_singles, mock_method):
"""Compare the list of single arguments to all mocked calls in mock_method.
Mock calls can be tested like this:
(('x',),) == call('x')
As all the mocked method calls have one single argument (ie 'x') they need
to be converted into the tuple of positional arguments tuple that mock
expects.
Args:
call_args_singles: List of single arguments sent to the mock_method,
ie ['x', 'y'] is for when mock_method was called twice: once with
x and then with y.
mock_method: Method that was mocked and called with call_args_singles.
"""
# convert from ['a', 'b'] into [(('a',),), (('b',),)]
expected = [((arg,),) for arg in call_args_singles]
self.assertEqual(expected, mock_method.call_args_list)
def assertRemoteCommandsEqual(self, expected_cmds):
# tests the calls to vm.RemoteCommand(str)
self.assertCallArgsEqual(expected_cmds, self.vm.RemoteCommand)
def assertVmInstallCommandsEqual(self, expected_cmds):
# tests the calls to vm.Install(str)
self.assertCallArgsEqual(expected_cmds, self.vm.Install)
def assertOnlyKnownMethodsCalled(self, *known_methods):
# this test will fail if vm.foo() was called and "foo" was not in the
# known methods
found_methods = set()
for mock_call in self.vm.mock_calls:
found_methods.add(mock_call[0])
self.assertEqual(set(known_methods), found_methods)
def testGetRunCommandWithProxy(self):
FLAGS['http_proxy'].parse('http://some-proxy.com:888')
FLAGS['https_proxy'].parse('https://some-proxy.com:888')
cmd = maven.GetRunCommand('install')
expected = (
'source {} && mvn install'
' -Dhttp.proxyHost=some-proxy.com -Dhttp.proxyPort=888'
' -Dhttps.proxyHost=some-proxy.com -Dhttps.proxyPort=888'.format(
maven.MVN_ENV_PATH))
self.assertEqual(expected, cmd)
def testGetRunCommandNoProxy(self):
FLAGS['http_proxy'].present = 0
FLAGS['https_proxy'].present = 0
cmd = maven.GetRunCommand('install')
expected = ('source {} && mvn install'.format(maven.MVN_ENV_PATH))
self.assertEqual(expected, cmd)
def testAptInstall(self):
maven.AptInstall(self.vm)
maven_full_ver = maven.FLAGS.maven_version
maven_major_ver = maven_full_ver[:maven_full_ver.index('.')]
maven_url = maven.MVN_URL.format(maven_major_ver, maven_full_ver)
maven_tar = maven_url.split('/')[-1]
maven_remote_path = posixpath.join(linux_packages.INSTALL_DIR, maven_tar)
self.assertRemoteCommandsEqual([
'mkdir -p {0} && '
'tar -C {0} --strip-components=1 -xzf {1}'.format(maven.MVN_DIR,
maven_remote_path),
'java -XshowSettings:properties 2>&1 > /dev/null '
'| awk \'/java.home/{print $3}\'',
'echo "{0}" | sudo tee -a {1}'.format(
maven.MVN_ENV.format(java_home='/home', maven_home=maven.MVN_DIR),
maven.MVN_ENV_PATH)
])
self.assertVmInstallCommandsEqual(['openjdk', 'curl'])
self.assertOnlyKnownMethodsCalled('RemoteCommand',
'InstallPreprovisionedPackageData',
'Install')
if __name__ == '__main__':
unittest.main()
|
from builtins import range
import pytest
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
from hypertools.plot import plot
from hypertools.tools.reduce import reduce as reducer
from hypertools.tools.load import load
from hypertools.datageometry import DataGeometry
data = [np.random.multivariate_normal(np.zeros(4), np.eye(4), size=100) for i
in range(2)]
weights = load('weights_avg').get_data()
# To prevent warning about 20+ figs being open
mpl.rcParams['figure.max_open_warning'] = 25
## STATIC ##
def test_plot_1d():
data_reduced_1d = reducer(data, ndims=1)
geo = plot.plot(data_reduced_1d, show=False)
assert all([i.shape[1]==1 for i in geo.data])
def test_plot_2d():
data_reduced_2d = reducer(data, ndims=2)
geo = plot.plot(data_reduced_2d, show=False)
assert all([i.shape[1]==2 for i in geo.data])
def test_plot_3d():
data_reduced_3d = reducer(data, ndims=3)
geo = plot.plot(data_reduced_3d, show=False)
assert all([i.shape[1]==3 for i in geo.data])
def test_plot_reduce_none():
# Should return same dimensional data if ndims is None
geo = plot.plot(data, show=False)
assert all([i.shape[1] == d.shape[1] for i, d in zip(geo.data, data)])
def test_plot_reduce3d():
# should return 3d data since ndims=3
geo = plot.plot(data, ndims=3, show=False)
assert all([i.shape[1] == 3 for i in geo.xform_data])
def test_plot_reduce2d():
# should return 2d data since ndims=2
geo = plot.plot(data, ndims=2, show=False)
assert all([i.shape[1] == 2 for i in geo.xform_data])
def test_plot_reduce1d():
# should return 1d data since ndims=1
geo = plot.plot(data, ndims=1, show=False)
assert all([i.shape[1] == 1 for i in geo.xform_data])
def test_plot_reduce_align5d():
# should return 5d data since ndims=5
geo = plot.plot(weights, ndims=5, align=True, show=False)
assert all([i.shape[1] == 5 for i in geo.xform_data])
def test_plot_reduce10d():
# should return 10d data since ndims=10
geo = plot.plot(weights, ndims=10, show=False)
assert all([i.shape[1] == 10 for i in geo.xform_data])
def test_plot_model_dict():
    # should return a DataGeometry when reduce is passed as a model dict
geo = plot.plot(weights, reduce={'model' : 'PCA', 'params' : {'whiten' : True}}, show=False)
assert isinstance(geo, DataGeometry)
def test_plot_cluster_str():
    # should return a DataGeometry when cluster is passed as a string
geo = plot.plot(weights, cluster='KMeans', show=False)
assert isinstance(geo, DataGeometry)
def test_plot_cluster_dict():
    # should return a DataGeometry when cluster is passed as a dict
geo = plot.plot(weights, cluster={'model' : 'KMeans', 'params' : {'n_clusters' : 3}}, show=False)
assert isinstance(geo, DataGeometry)
def test_plot_cluster_n_clusters():
    # should return a DataGeometry when n_clusters is specified
geo = plot.plot(weights, n_clusters=3, show=False)
assert isinstance(geo, DataGeometry)
def test_plot_nd():
geo = plot.plot(data, show=False)
assert all([i.shape[1]==d.shape[1] for i, d in zip(geo.data, data)])
def test_plot_data_is_list():
geo = plot.plot(data, show=False)
assert type(geo.data) is list
def test_plot_check_fig():
geo = plot.plot(data, show=False)
assert isinstance(geo.fig, mpl.figure.Figure)
def test_plot_check_ax():
geo = plot.plot(data, show=False)
assert isinstance(geo.ax, mpl.axes._axes.Axes)
def test_plot_text():
text_data = [['i like cats alot', 'cats r pretty cool', 'cats are better than dogs'],
['dogs rule the haus', 'dogs are my jam', 'dogs are a mans best friend']]
geo = plot.plot(text_data, show=False)
assert isinstance(geo, DataGeometry)
def test_plot_ax():
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
geo = plot.plot(data, ax=ax, show=False)
assert isinstance(geo, DataGeometry)
def test_plot_ax_2d():
fig = plt.figure()
ax = fig.add_subplot(111)
geo = plot.plot(data, ax=ax, show=False, ndims=2)
assert isinstance(geo, DataGeometry)
def test_plot_ax_error():
fig = plt.figure()
ax = fig.add_subplot(111)
with pytest.raises(ValueError) as e_info:
geo = plot.plot(data, ax=ax, show=False)
def test_plot_geo():
geo = plot.plot(data, show=False)
geo = plot.plot(geo, show=False)
assert isinstance(geo, DataGeometry)
# ## ANIMATED ##
def test_plot_1d_animate():
d = reducer(data, ndims=1)
with pytest.raises(Exception) as e_info:
plot.plot(d, animate=True, show=False)
def test_plot_2d_animate():
data_reduced_2d = reducer(data, ndims=2)
with pytest.raises(Exception) as e_info:
plot.plot(data_reduced_2d, animate=True, show=False)
def test_plot_3d_animate():
data_reduced_3d = reducer(data,ndims=3)
geo = plot.plot(data_reduced_3d, animate=True, show=False)
assert all([i.shape[1]==3 for i in geo.data])
def test_plot_nd_animate():
geo = plot.plot(data, animate=True, show=False)
assert all([i.shape[1]==d.shape[1] for i, d in zip(geo.data, data)])
def test_plot_data_animate_is_list():
geo = plot.plot(data, animate=True, show=False)
assert type(geo.data) is list
def test_plot_animate_check_fig():
geo = plot.plot(data, animate=True, show=False)
assert isinstance(geo.fig, mpl.figure.Figure)
def test_plot_animate_check_ax():
geo = plot.plot(data, animate=True, show=False)
assert isinstance(geo.ax, mpl.axes._axes.Axes)
def test_plot_animate_check_line_ani():
geo = plot.plot(data, animate=True, show=False)
assert isinstance(geo.line_ani, mpl.animation.FuncAnimation)
|
import os.path as op
import numpy as np
from numpy.testing import assert_array_equal
import pytest
from mne import pick_types
from mne.utils import run_tests_if_main
from mne.datasets import testing
from mne.io.tests.test_raw import _test_raw_reader
from mne.io.cnt import read_raw_cnt
from mne.annotations import read_annotations
data_path = testing.data_path(download=False)
fname = op.join(data_path, 'CNT', 'scan41_short.cnt')
@testing.requires_testing_data
def test_data():
"""Test reading raw cnt files."""
with pytest.warns(RuntimeWarning, match='number of bytes'):
raw = _test_raw_reader(read_raw_cnt, input_fname=fname,
eog='auto', misc=['NA1', 'LEFT_EAR'])
# make sure we use annotations event if we synthesized stim
assert len(raw.annotations) == 6
eog_chs = pick_types(raw.info, eog=True, exclude=[])
assert len(eog_chs) == 2 # test eog='auto'
assert raw.info['bads'] == ['LEFT_EAR', 'VEOGR'] # test bads
# XXX: the data has "05/10/200 17:35:31" so it is set to None
assert raw.info['meas_date'] is None
@testing.requires_testing_data
def test_compare_events_and_annotations():
"""Test comparing annotations and events."""
with pytest.warns(RuntimeWarning, match='Could not parse meas date'):
raw = read_raw_cnt(fname)
events = np.array([[333, 0, 7],
[1010, 0, 7],
[1664, 0, 109],
[2324, 0, 7],
[2984, 0, 109]])
annot = read_annotations(fname)
assert len(annot) == 6
assert_array_equal(annot.onset[:-1], events[:, 0] / raw.info['sfreq'])
assert 'STI 014' not in raw.info['ch_names']
run_tests_if_main()
|
from weblate.checks.duplicate import DuplicateCheck
from weblate.checks.models import Check
from weblate.checks.tests.test_checks import CheckTestCase, MockUnit
from weblate.lang.models import Language
from weblate.trans.models import Component, Translation, Unit
class DuplicateCheckTest(CheckTestCase):
check = DuplicateCheck()
def _run_check(self, target, source="", lang="cs"):
return self.check.check_single(source, target, MockUnit(code=lang, note=""))
def test_no_duplicated_token(self):
self.assertFalse(self._run_check("I have two lemons"))
def test_check_respects_boundaries_suffix(self):
"""'lemon lemon' is a false duplicate."""
self.assertFalse(self._run_check("I have two lemon lemons"))
def test_check_respects_boundaries_prefix(self):
"""'melon on' is a false duplicate."""
self.assertFalse(self._run_check("I have a melon on my back"))
def test_check_single_duplicated_token(self):
self.assertTrue(self._run_check("I have two two lemons"))
def test_check_multiple_duplicated_tokens(self):
self.assertTrue(self._run_check("I have two two lemons lemons"))
def test_check_duplicated_numbers(self):
self.assertFalse(
self._run_check("Mám 222 222 citrónů", source="I have 222 222 lemons")
)
def test_check_duplicated_letter(self):
self.assertFalse(self._run_check("I have A A A"))
def test_check_duplicated_source(self):
self.assertFalse(
self._run_check("begin begin end end", source="begin begin end end")
)
def test_check_duplicated_source_different(self):
self.assertFalse(
self._run_check("ХАХ ХАХ! ХЕ ХЕ ХЕ!", source="HAH HAH! HEH HEH HEH!")
)
self.assertTrue(self._run_check("ХАХ ХАХ!", source="HAH HAH! HEH HEH HEH!"))
self.assertTrue(
self._run_check("ХАХ ХАХ! ХЕ ХЕ ХЕ! ХИ ХИ!", source="HAH HAH! HEH HEH HEH!")
)
self.assertTrue(
self._run_check("ХАХ ХАХ! ХЕ ХЕ!", source="HAH HAH! HEH HEH HEH!")
)
self.assertTrue(
self._run_check("ХАХ ХАХ ХАХ! ХЕ ХЕ ХЕ!", source="HAH HAH! HEH HEH HEH!")
)
def test_duplicate_conjunction(self):
self.assertFalse(
self._run_check(
"Zalomit řádky na 77 znacích a znacích nových řádků",
source="Wrap lines at 77 chars and at newlines",
)
)
def test_check_duplicated_language_ignore(self):
self.assertFalse(self._run_check("Si vous vous interrogez", lang="fr"))
def test_description(self):
unit = Unit(
source="string",
target="I have two two lemons lemons",
translation=Translation(
language=Language("cs"),
component=Component(source_language=Language("en")),
),
)
check = Check(unit=unit)
self.assertEqual(
self.check.get_description(check),
"Text contains the same word twice in a row: lemons, two",
)
def test_check_duplicated_language_cleanup(self):
self.assertFalse(self._run_check("Cancel·la la baixada", lang="ca"))
def test_separator(self):
self.assertFalse(self._run_check("plug-in in"))
|
import logging
from typing import Any, Dict, Optional
from surepy import SureLockStateID, SureProductID
from homeassistant.const import (
ATTR_VOLTAGE,
CONF_ID,
CONF_TYPE,
DEVICE_CLASS_BATTERY,
PERCENTAGE,
)
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from . import SurePetcareAPI
from .const import (
DATA_SURE_PETCARE,
SPC,
SURE_BATT_VOLTAGE_DIFF,
SURE_BATT_VOLTAGE_LOW,
TOPIC_UPDATE,
)
_LOGGER = logging.getLogger(__name__)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up Sure PetCare Flaps sensors."""
if discovery_info is None:
return
entities = []
spc = hass.data[DATA_SURE_PETCARE][SPC]
for entity in spc.ids:
sure_type = entity[CONF_TYPE]
if sure_type in [
SureProductID.CAT_FLAP,
SureProductID.PET_FLAP,
SureProductID.FEEDER,
]:
entities.append(SureBattery(entity[CONF_ID], sure_type, spc))
if sure_type in [SureProductID.CAT_FLAP, SureProductID.PET_FLAP]:
entities.append(Flap(entity[CONF_ID], sure_type, spc))
async_add_entities(entities, True)
class SurePetcareSensor(Entity):
"""A binary sensor implementation for Sure Petcare Entities."""
def __init__(self, _id: int, sure_type: SureProductID, spc: SurePetcareAPI):
"""Initialize a Sure Petcare sensor."""
self._id = _id
self._sure_type = sure_type
self._spc = spc
self._spc_data: Dict[str, Any] = self._spc.states[self._sure_type].get(self._id)
self._state: Dict[str, Any] = {}
self._name = (
f"{self._sure_type.name.capitalize()} "
f"{self._spc_data['name'].capitalize()}"
)
self._async_unsub_dispatcher_connect = None
@property
def name(self) -> str:
"""Return the name of the device if any."""
return self._name
@property
def unique_id(self) -> str:
"""Return an unique ID."""
return f"{self._spc_data['household_id']}-{self._id}"
@property
def available(self) -> bool:
"""Return true if entity is available."""
return bool(self._state)
@property
def should_poll(self) -> bool:
"""Return true."""
return False
async def async_update(self) -> None:
"""Get the latest data and update the state."""
self._spc_data = self._spc.states[self._sure_type].get(self._id)
self._state = self._spc_data.get("status")
_LOGGER.debug("%s -> self._state: %s", self._name, self._state)
async def async_added_to_hass(self) -> None:
"""Register callbacks."""
@callback
def update() -> None:
"""Update the state."""
self.async_schedule_update_ha_state(True)
self._async_unsub_dispatcher_connect = async_dispatcher_connect(
self.hass, TOPIC_UPDATE, update
)
async def async_will_remove_from_hass(self) -> None:
"""Disconnect dispatcher listener when removed."""
if self._async_unsub_dispatcher_connect:
self._async_unsub_dispatcher_connect()
class Flap(SurePetcareSensor):
"""Sure Petcare Flap."""
@property
def state(self) -> Optional[int]:
"""Return battery level in percent."""
return SureLockStateID(self._state["locking"]["mode"]).name.capitalize()
@property
def device_state_attributes(self) -> Optional[Dict[str, Any]]:
"""Return the state attributes of the device."""
attributes = None
if self._state:
attributes = {"learn_mode": bool(self._state["learn_mode"])}
return attributes
class SureBattery(SurePetcareSensor):
"""Sure Petcare Flap."""
@property
def name(self) -> str:
"""Return the name of the device if any."""
return f"{self._name} Battery Level"
@property
def state(self) -> Optional[int]:
"""Return battery level in percent."""
battery_percent: Optional[int]
try:
per_battery_voltage = self._state["battery"] / 4
voltage_diff = per_battery_voltage - SURE_BATT_VOLTAGE_LOW
battery_percent = min(int(voltage_diff / SURE_BATT_VOLTAGE_DIFF * 100), 100)
except (KeyError, TypeError):
battery_percent = None
return battery_percent
@property
def unique_id(self) -> str:
"""Return an unique ID."""
return f"{self._spc_data['household_id']}-{self._id}-battery"
@property
def device_class(self) -> str:
"""Return the device class."""
return DEVICE_CLASS_BATTERY
@property
def device_state_attributes(self) -> Optional[Dict[str, Any]]:
"""Return state attributes."""
attributes = None
if self._state:
voltage_per_battery = float(self._state["battery"]) / 4
attributes = {
ATTR_VOLTAGE: f"{float(self._state['battery']):.2f}",
f"{ATTR_VOLTAGE}_per_battery": f"{voltage_per_battery:.2f}",
}
return attributes
@property
def unit_of_measurement(self) -> str:
"""Return the unit of measurement."""
return PERCENTAGE
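# Illustrative sketch (not part of this integration): how the battery
# percentage in SureBattery.state above is derived from the raw pack voltage.
# The low/diff defaults below are made-up placeholders for illustration only;
# the real values live in .const as SURE_BATT_VOLTAGE_LOW and SURE_BATT_VOLTAGE_DIFF.
def _example_battery_percent(raw_voltage: float,
                             low: float = 1.2, diff: float = 0.4) -> int:
    """Mirror the calculation used in SureBattery.state."""
    per_battery_voltage = raw_voltage / 4  # the pack is assumed to hold 4 cells
    voltage_diff = per_battery_voltage - low  # headroom above the "low" mark
    return min(int(voltage_diff / diff * 100), 100)
# With the placeholder constants, a reported 5.6 V pack maps to 50 percent:
# _example_battery_percent(5.6) == 50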
|
from lark import Lark, UnexpectedInput
from _json_parser import json_grammar # Using the grammar from the json_parser example
json_parser = Lark(json_grammar, parser='lalr')
class JsonSyntaxError(SyntaxError):
def __str__(self):
context, line, column = self.args
return '%s at line %s, column %s.\n\n%s' % (self.label, line, column, context)
class JsonMissingValue(JsonSyntaxError):
label = 'Missing Value'
class JsonMissingOpening(JsonSyntaxError):
label = 'Missing Opening'
class JsonMissingClosing(JsonSyntaxError):
label = 'Missing Closing'
class JsonMissingComma(JsonSyntaxError):
label = 'Missing Comma'
class JsonTrailingComma(JsonSyntaxError):
label = 'Trailing Comma'
def parse(json_text):
try:
j = json_parser.parse(json_text)
except UnexpectedInput as u:
exc_class = u.match_examples(json_parser.parse, {
JsonMissingOpening: ['{"foo": ]}',
'{"foor": }}',
'{"foo": }'],
JsonMissingClosing: ['{"foo": [}',
'{',
'{"a": 1',
'[1'],
JsonMissingComma: ['[1 2]',
'[false 1]',
'["b" 1]',
'{"a":true 1:4}',
'{"a":1 1:4}',
'{"a":"b" 1:4}'],
JsonTrailingComma: ['[,]',
'[1,]',
'[1,2,]',
'{"foo":1,}',
'{"foo":false,"bar":true,}']
}, use_accepts=True)
if not exc_class:
raise
raise exc_class(u.get_context(json_text), u.line, u.column)
def test():
try:
parse('{"example1": "value"')
except JsonMissingClosing as e:
print(e)
try:
parse('{"example2": ] ')
except JsonMissingOpening as e:
print(e)
if __name__ == '__main__':
test()
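# One more illustrative check in the same spirit as test() above: '[1 2]' is
# one of the examples listed for JsonMissingComma, so match_examples should map
# the resulting parse error back to that class. This helper is not called by
# the script itself.
def _extra_example():
    try:
        parse('[1 2]')
    except JsonMissingComma as e:
        print(e)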
|
import logging
import datetime
import json
import pytest
from end2end.fixtures import quteprocess, testprocess
from qutebrowser.utils import log
class FakeRepCall:
"""Fake for request.node.rep_call."""
def __init__(self):
self.failed = False
class FakeConfig:
"""Fake for request.config."""
ARGS = {
'--qute-delay': 0,
'--color': True,
'--verbose': False,
'--capture': None,
}
INI = {
'qt_log_ignore': [],
}
def __init__(self):
self.webengine = False
def getoption(self, name):
return self.ARGS[name]
def getini(self, name):
return self.INI[name]
class FakeNode:
"""Fake for request.node."""
def __init__(self, call):
self.rep_call = call
def get_closest_marker(self, _name):
return None
class FakeRequest:
"""Fake for request."""
def __init__(self, node, config, server):
self.node = node
self.config = config
self._server = server
def getfixturevalue(self, name):
assert name == 'server'
return self._server
@pytest.fixture
def request_mock(quteproc, monkeypatch, server):
"""Patch out a pytest request."""
fake_call = FakeRepCall()
fake_config = FakeConfig()
fake_node = FakeNode(fake_call)
fake_request = FakeRequest(fake_node, fake_config, server)
assert not hasattr(fake_request.node.rep_call, 'wasxfail')
monkeypatch.setattr(quteproc, 'request', fake_request)
return fake_request
@pytest.mark.parametrize('cmd', [
':message-error test',
':jseval console.log("[FAIL] test");'
])
def test_quteproc_error_message(qtbot, quteproc, cmd, request_mock):
"""Make sure the test fails with an unexpected error message."""
with qtbot.waitSignal(quteproc.got_error):
quteproc.send_cmd(cmd)
# Usually we wouldn't call this from inside a test, but here we force the
# error to occur during the test rather than at teardown time.
with pytest.raises(pytest.fail.Exception):
quteproc.after_test()
def test_quteproc_error_message_did_fail(qtbot, quteproc, request_mock):
"""Make sure the test does not fail on teardown if the main test failed."""
request_mock.node.rep_call.failed = True
with qtbot.waitSignal(quteproc.got_error):
quteproc.send_cmd(':message-error test')
# Usually we wouldn't call this from inside a test, but here we force the
# error to occur during the test rather than at teardown time.
quteproc.after_test()
def test_quteproc_skip_via_js(qtbot, quteproc):
with pytest.raises(pytest.skip.Exception, match='test'):
quteproc.send_cmd(':jseval console.log("[SKIP] test");')
quteproc.wait_for_js('[SKIP] test')
# Usually we wouldn't call this from inside a test, but here we force
# the error to occur during the test rather than at teardown time.
quteproc.after_test()
def test_quteproc_skip_and_wait_for(qtbot, quteproc):
"""This test will skip *again* during teardown, but we don't care."""
with pytest.raises(pytest.skip.Exception):
quteproc.send_cmd(':jseval console.log("[SKIP] foo");')
quteproc.wait_for_js("[SKIP] foo")
quteproc.wait_for(message='This will not match')
def test_qt_log_ignore(qtbot, quteproc):
"""Make sure the test passes when logging a qt_log_ignore message."""
with qtbot.waitSignal(quteproc.got_error):
quteproc.send_cmd(':message-error "SpellCheck: test"')
def test_quteprocess_quitting(qtbot, quteproc_process):
"""When qutebrowser quits, after_test should fail."""
with qtbot.waitSignal(quteproc_process.proc.finished, timeout=15000):
quteproc_process.send_cmd(':quit')
with pytest.raises(testprocess.ProcessExited):
quteproc_process.after_test()
@pytest.mark.parametrize('data, attrs', [
pytest.param(
'{"created": 86400, "msecs": 0, "levelname": "DEBUG", "name": "init", '
'"module": "earlyinit", "funcName": "init_log", "lineno": 280, '
'"levelno": 10, "message": "Log initialized."}',
{
'timestamp': datetime.datetime.fromtimestamp(86400),
'loglevel': logging.DEBUG,
'category': 'init',
'module': 'earlyinit',
'function': 'init_log',
'line': 280,
'message': 'Log initialized.',
'expected': False,
},
id='normal'),
pytest.param(
'{"created": 86400, "msecs": 0, "levelname": "VDEBUG", "name": "foo", '
'"module": "foo", "funcName": "foo", "lineno": 0, "levelno": 9, '
'"message": ""}',
{'loglevel': log.VDEBUG_LEVEL},
id='vdebug'),
pytest.param(
'{"created": 86400, "msecs": 0, "levelname": "DEBUG", "name": "qt", '
'"module": null, "funcName": null, "lineno": 0, "levelno": 10, '
'"message": "test"}',
{'module': None, 'function': None, 'line': None},
id='unknown module'),
pytest.param(
'{"created": 86400, "msecs": 0, "levelname": "VDEBUG", "name": "foo", '
'"module": "foo", "funcName": "foo", "lineno": 0, "levelno": 9, '
'"message": "SpellCheck: test"}',
{'expected': True},
id='expected message'),
pytest.param(
'{"created": 86400, "msecs": 0, "levelname": "DEBUG", "name": "qt", '
'"module": "qnetworkreplyhttpimpl", "funcName": '
'"void QNetworkReplyHttpImplPrivate::error('
'QNetworkReply::NetworkError, const QString&)", "lineno": 1929, '
'"levelno": 10, "message": "QNetworkReplyImplPrivate::error: '
'Internal problem, this method must only be called once."}',
{
'module': 'qnetworkreplyhttpimpl',
'function': 'void QNetworkReplyHttpImplPrivate::error('
'QNetworkReply::NetworkError, const QString&)',
'line': 1929
},
id='weird Qt location'),
pytest.param(
'{"created": 86400, "msecs": 0, "levelname": "DEBUG", "name": "qt", '
'"module": "qxcbxsettings", "funcName": "QXcbXSettings::QXcbXSettings('
'QXcbScreen*)", "lineno": 233, "levelno": 10, "message": '
'"QXcbXSettings::QXcbXSettings(QXcbScreen*) Failed to get selection '
'owner for XSETTINGS_S atom"}',
{
'module': 'qxcbxsettings',
'function': 'QXcbXSettings::QXcbXSettings(QXcbScreen*)',
'line': 233,
},
id='QXcbXSettings'),
pytest.param(
'{"created": 86400, "msecs": 0, "levelname": "WARNING", '
'"name": "py.warnings", "module": "app", "funcName": "qt_mainloop", '
'"lineno": 121, "levelno": 30, "message": '
'".../app.py:121: ResourceWarning: unclosed file <_io.TextIOWrapper '
'name=18 mode=\'r\' encoding=\'UTF-8\'>"}',
{'category': 'py.warnings'},
id='resourcewarning'),
])
def test_log_line_parse(pytestconfig, data, attrs):
line = quteprocess.LogLine(pytestconfig, data)
for name, expected in attrs.items():
actual = getattr(line, name)
assert actual == expected, name
@pytest.mark.parametrize('data, colorized, expect_error, expected', [
pytest.param(
{'created': 86400, 'msecs': 0, 'levelname': 'DEBUG', 'name': 'foo',
'module': 'bar', 'funcName': 'qux', 'lineno': 10, 'levelno': 10,
'message': 'quux'},
False, False,
'{timestamp} DEBUG foo bar:qux:10 quux',
id='normal'),
pytest.param(
{'created': 86400, 'msecs': 0, 'levelname': 'DEBUG', 'name': 'foo',
'module': 'bar', 'funcName': 'qux', 'lineno': 10, 'levelno': 10,
'message': 'quux', 'traceback': ('Traceback (most recent call '
'last):\n here be dragons')},
False, False,
'{timestamp} DEBUG foo bar:qux:10 quux\n'
'Traceback (most recent call last):\n'
' here be dragons',
id='traceback'),
pytest.param(
{'created': 86400, 'msecs': 0, 'levelname': 'DEBUG', 'name': 'foo',
'module': 'bar', 'funcName': 'qux', 'lineno': 10, 'levelno': 10,
'message': 'quux'},
True, False,
'\033[32m{timestamp}\033[0m \033[37mDEBUG \033[0m \033[36mfoo '
' bar:qux:10\033[0m \033[37mquux\033[0m',
id='colored'),
pytest.param(
{'created': 86400, 'msecs': 0, 'levelname': 'ERROR', 'name': 'foo',
'module': 'bar', 'funcName': 'qux', 'lineno': 10, 'levelno': 40,
'message': 'quux'},
False, True,
'{timestamp} ERROR (expected) foo bar:qux:10 quux',
id='expected error'),
pytest.param(
{'created': 86400, 'msecs': 0, 'levelname': 'DEBUG', 'name': 'foo',
'module': 'bar', 'funcName': 'qux', 'lineno': 10, 'levelno': 10,
'message': 'quux'},
False, True,
'{timestamp} DEBUG foo bar:qux:10 quux',
id='expected other'),
pytest.param(
{'created': 86400, 'msecs': 0, 'levelname': 'ERROR', 'name': 'foo',
'module': 'bar', 'funcName': 'qux', 'lineno': 10, 'levelno': 40,
'message': 'quux'},
True, True,
'\033[32m{timestamp}\033[0m \033[37mERROR (expected)\033[0m '
'\033[36mfoo bar:qux:10\033[0m \033[37mquux\033[0m',
id='expected error colorized'),
])
def test_log_line_formatted(pytestconfig,
data, colorized, expect_error, expected):
line = json.dumps(data)
record = quteprocess.LogLine(pytestconfig, line)
record.expected = expect_error
ts = datetime.datetime.fromtimestamp(data['created']).strftime('%H:%M:%S')
ts += '.{:03.0f}'.format(data['msecs'])
expected = expected.format(timestamp=ts)
assert record.formatted_str(colorized=colorized) == expected
def test_log_line_no_match(pytestconfig):
with pytest.raises(testprocess.InvalidLine):
quteprocess.LogLine(pytestconfig, "Hello World!")
class TestClickElementByText:
@pytest.fixture(autouse=True)
def open_page(self, quteproc):
quteproc.open_path('data/click_element.html')
def test_click_element(self, quteproc):
quteproc.click_element_by_text('Test Element')
quteproc.wait_for_js('click_element clicked')
def test_click_special_chars(self, quteproc):
quteproc.click_element_by_text('"Don\'t", he shouted')
quteproc.wait_for_js('click_element special chars')
def test_duplicate(self, quteproc):
with pytest.raises(ValueError, match='not unique'):
quteproc.click_element_by_text('Duplicate')
def test_nonexistent(self, quteproc):
with pytest.raises(ValueError, match='No element'):
quteproc.click_element_by_text('no element exists with this text')
@pytest.mark.parametrize('string, expected', [
('Test', "'Test'"),
("Don't", '"Don\'t"'),
# This is some serious string escaping madness
('"Don\'t", he said',
"concat('\"', 'Don', \"'\", 't', '\"', ', he said')"),
])
def test_xpath_escape(string, expected):
assert quteprocess._xpath_escape(string) == expected
@pytest.mark.parametrize('value', [
'foo',
'foo"bar', # Make sure a " is preserved
])
def test_set(quteproc, value):
quteproc.set_setting('content.default_encoding', value)
read_back = quteproc.get_setting('content.default_encoding')
assert read_back == value
@pytest.mark.parametrize('message, ignored', [
# Unparseable
('Hello World', False),
# Without process/thread ID
('[0606/135039:ERROR:cert_verify_proc_nss.cc(925)] CERT_PKIXVerifyCert '
'for localhost failed err=-8179', True),
# Random ignored message
('[26598:26598:0605/191429.639416:WARNING:audio_manager.cc(317)] Multiple '
'instances of AudioManager detected', True),
# Not ignored
('[26598:26598:0605/191429.639416:WARNING:audio_manager.cc(317)] Test',
False),
])
def test_is_ignored_chromium_message(message, ignored):
assert quteprocess.is_ignored_chromium_message(message) == ignored
|
import os
import sys
import rospkg
import rospkg.os_detect
def _platform_supported(m, os, version):
for p in m.platforms:
if os == p.os and version == p.version:
return True
return False
def platform_supported(rospack, pkg, os, version):
"""
Return whether the platform defined by os and version is marked as supported in the package
@param pkg The package to test for support
@param os The os name to test for support
@param version The os version to test for support
"""
return _platform_supported(rospack.get_manifest(pkg), os, version)
class PackageFlagTracker:
"""This will use the dependency tracker to test if packages are
blacklisted and all their dependents."""
def __init__(self, dependency_tracker, os_name=None, os_version=None):
if not os_name and not os_version:
try:
osd = rospkg.os_detect.OsDetect()
self.os_name = osd.get_codename()
self.os_version = osd.get_version()
except rospkg.os_detect.OsNotDetected:
                sys.stderr.write('Could not detect OS; platform detection will not work\n')
else:
self.os_name = os_name
self.os_version = os_version
self.rospack = rospkg.RosPack()
self.blacklisted = {}
self.blacklisted_osx = {}
self.nobuild = set()
self.nomakefile = set()
self.packages_tested = set()
self.dependency_tracker = dependency_tracker
        self._build_failed = set()  # private name so it does not shadow the build_failed() method
def register_blacklisted(self, blacklisted_package, dependent_package):
if dependent_package in self.blacklisted.keys():
self.blacklisted[dependent_package].append(blacklisted_package)
else:
self.blacklisted[dependent_package] = [blacklisted_package]
def register_blacklisted_osx(self, blacklisted_package, dependent_package):
if dependent_package in self.blacklisted_osx:
self.blacklisted_osx[dependent_package].append(blacklisted_package)
else:
self.blacklisted_osx[dependent_package] = [blacklisted_package]
def _check_package_flags(self, package):
if package in self.packages_tested:
return
rospack = self.rospack
path = rospack.get_path(package)
if os.path.exists(os.path.join(path, 'ROS_BUILD_BLACKLIST')):
self.register_blacklisted(package, package)
for p in rospack.get_depends_on(package, implicit=True):
self.register_blacklisted(package, p)
if os.path.exists(os.path.join(path, 'ROS_BUILD_BLACKLIST_OSX')):
self.register_blacklisted_osx(package, package)
for p in rospack.get_depends_on(package, implicit=True):
self.register_blacklisted_osx(package, p)
# NO_BUILD if marker file or catkin attribute in manifest
if os.path.exists(os.path.join(path, 'ROS_NOBUILD')):
self.nobuild.add(package)
if self.rospack.get_manifest(package).is_catkin:
self.nobuild.add(package)
if not os.path.exists(os.path.join(path, 'Makefile')):
self.nomakefile.add(package)
self.packages_tested.add(package)
def is_blacklisted(self, package):
# this will noop if already run
self._check_package_flags(package)
# make sure it's not dependent on a blacklisted package
for p in self.dependency_tracker.get_deps(package):
if p not in self.packages_tested:
self._check_package_flags(p)
        # test result after checking all dependencies.
if package in self.blacklisted:
return self.blacklisted[package]
return []
def is_blacklisted_osx(self, package):
# this will noop if already run
self._check_package_flags(package)
# make sure it's not dependent on a blacklisted_osx package
for p in self.dependency_tracker.get_deps(package):
if p not in self.packages_tested:
self._check_package_flags(p)
        # test result after checking all dependencies.
if package in self.blacklisted_osx:
return self.blacklisted_osx[package]
return []
def has_nobuild(self, package):
# this will noop if already run
self._check_package_flags(package)
# Short circuit if known result
if package in self.nobuild:
return True
return False
def has_makefile(self, package):
# this will noop if already run
self._check_package_flags(package)
# Short circuit if known result
if package in self.nomakefile:
return False
return True
def add_nobuild(self, package):
if self.has_nobuild(package):
return True
with open(os.path.join(self.rospack.get_path(package), 'ROS_NOBUILD'), 'w') as f:
f.write('created by rosmake to mark as installed')
self.nobuild.add(package)
return True
return False
def remove_nobuild(self, package):
if not self.has_nobuild(package):
return True
try:
os.remove(os.path.join(self.rospack.get_path(package), 'ROS_NOBUILD'))
self.nobuild.remove(package)
return True
except Exception:
return False
    def mark_build_failed(self, package):
        self._build_failed.add(package)
    def build_failed(self, package):
        return package in self._build_failed
def can_build(self, pkg, use_blacklist=False, failed_packages=[], use_makefile=True):
"""
Return (buildable, error, "reason why not")
"""
output_str = ''
output_state = True
buildable = True
previously_failed_pkgs = [pk for pk in failed_packages if pk in self.dependency_tracker.get_deps(pkg)]
if len(previously_failed_pkgs) > 0:
buildable = False
output_state = False
            output_str += ' Package %s cannot be built because the package(s) %s it depends on failed to build. \n' % (pkg, previously_failed_pkgs)
if use_blacklist:
black_listed_dependents = self.is_blacklisted(pkg)
if len(black_listed_dependents) > 0:
buildable = False
                output_str += 'Cannot build %s, ROS_BUILD_BLACKLIST found in packages %s' % (pkg, black_listed_dependents)
if self.has_nobuild(pkg):
buildable = False
output_state = True # dependents are ok, it should already be built
output_str += 'ROS_NOBUILD in package %s\n' % pkg
if use_makefile and not self.has_makefile(pkg):
output_state = True # dependents are ok no need to build
buildable = False
output_str += ' No Makefile in package %s\n' % pkg
if output_str and output_str[-1] == '\n':
output_str = output_str[:-1]
return (buildable, output_state, output_str)
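# Illustrative, self-contained sketch (not rosmake code): the net effect of the
# blacklist handling above is that a package is blocked whenever it, or any
# package it transitively depends on, carries a ROS_BUILD_BLACKLIST marker. A
# toy version over a plain dependency dict; all names here are made up.
def _toy_blacklist_reasons(pkg, deps, blacklisted):
    """Return the blacklisted packages that make ``pkg`` unbuildable."""
    seen, stack, reasons = set(), [pkg], []
    while stack:
        current = stack.pop()
        if current in seen:
            continue
        seen.add(current)
        if current in blacklisted:
            reasons.append(current)
        stack.extend(deps.get(current, []))
    return reasons
# _toy_blacklist_reasons('a', {'a': ['b'], 'b': ['c']}, {'c'}) -> ['c']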
|
import itertools
import numpy as np
import os
import six
from chainercv.evaluations.eval_detection_coco import _redirect_stdout
from chainercv.evaluations.eval_detection_coco import _summarize
try:
import pycocotools.coco
import pycocotools.cocoeval
import pycocotools.mask as mask_tools
_available = True
except ImportError:
_available = False
def eval_instance_segmentation_coco(
pred_masks, pred_labels, pred_scores,
gt_masks, gt_labels, gt_areas=None, gt_crowdeds=None):
"""Evaluate instance segmentations based on evaluation code of MS COCO.
This function evaluates predicted instance segmentations obtained from
a dataset by using average precision for each class.
The code is based on the evaluation code used in MS COCO.
Args:
pred_masks (iterable of numpy.ndarray): See the table below.
pred_labels (iterable of numpy.ndarray): See the table below.
pred_scores (iterable of numpy.ndarray): See the table below.
gt_masks (iterable of numpy.ndarray): See the table below.
gt_labels (iterable of numpy.ndarray): See the table below.
gt_areas (iterable of numpy.ndarray): See the table below. If
:obj:`None`, some scores are not returned.
gt_crowdeds (iterable of numpy.ndarray): See the table below.
.. csv-table::
:header: name, shape, dtype, format
:obj:`pred_masks`, ":math:`[(R, H, W)]`", :obj:`bool`, --
:obj:`pred_labels`, ":math:`[(R,)]`", :obj:`int32`, \
":math:`[0, \#fg\_class - 1]`"
:obj:`pred_scores`, ":math:`[(R,)]`", :obj:`float32`, \
--
:obj:`gt_masks`, ":math:`[(R, H, W)]`", :obj:`bool`, --
:obj:`gt_labels`, ":math:`[(R,)]`", :obj:`int32`, \
":math:`[0, \#fg\_class - 1]`"
:obj:`gt_areas`, ":math:`[(R,)]`", \
:obj:`float32`, --
:obj:`gt_crowdeds`, ":math:`[(R,)]`", :obj:`bool`, --
All inputs should have the same length. For more detailed explanation
of the inputs, please refer to
:class:`chainercv.datasets.COCOInstanceSegmentationDataset`.
.. seealso::
:class:`chainercv.datasets.COCOInstanceSegmentationDataset`.
Returns:
dict:
The keys, value-types and the description of the values are listed
below. The APs and ARs calculated with different iou
thresholds, sizes of objects, and numbers of detections
per image. For more details on the 12 patterns of evaluation metrics,
please refer to COCO's official `evaluation page`_.
.. csv-table::
:header: key, type, description
ap/iou=0.50:0.95/area=all/max_dets=100, *numpy.ndarray*, \
[#coco_ins_eval_1]_
ap/iou=0.50/area=all/max_dets=100, *numpy.ndarray*, \
[#coco_ins_eval_1]_
ap/iou=0.75/area=all/max_dets=100, *numpy.ndarray*, \
[#coco_ins_eval_1]_
ap/iou=0.50:0.95/area=small/max_dets=100, *numpy.ndarray*, \
[#coco_ins_eval_1]_ [#coco_ins_eval_5]_
ap/iou=0.50:0.95/area=medium/max_dets=100, *numpy.ndarray*, \
[#coco_ins_eval_1]_ [#coco_ins_eval_5]_
ap/iou=0.50:0.95/area=large/max_dets=100, *numpy.ndarray*, \
[#coco_ins_eval_1]_ [#coco_ins_eval_5]_
ar/iou=0.50:0.95/area=all/max_dets=1, *numpy.ndarray*, \
[#coco_ins_eval_2]_
ar/iou=0.50/area=all/max_dets=10, *numpy.ndarray*, \
[#coco_ins_eval_2]_
ar/iou=0.75/area=all/max_dets=100, *numpy.ndarray*, \
[#coco_ins_eval_2]_
ar/iou=0.50:0.95/area=small/max_dets=100, *numpy.ndarray*, \
[#coco_ins_eval_2]_ [#coco_ins_eval_5]_
ar/iou=0.50:0.95/area=medium/max_dets=100, *numpy.ndarray*, \
[#coco_ins_eval_2]_ [#coco_ins_eval_5]_
ar/iou=0.50:0.95/area=large/max_dets=100, *numpy.ndarray*, \
[#coco_ins_eval_2]_ [#coco_ins_eval_5]_
map/iou=0.50:0.95/area=all/max_dets=100, *float*, \
[#coco_ins_eval_3]_
map/iou=0.50/area=all/max_dets=100, *float*, \
[#coco_ins_eval_3]_
map/iou=0.75/area=all/max_dets=100, *float*, \
[#coco_ins_eval_3]_
map/iou=0.50:0.95/area=small/max_dets=100, *float*, \
[#coco_ins_eval_3]_ [#coco_ins_eval_5]_
map/iou=0.50:0.95/area=medium/max_dets=100, *float*, \
[#coco_ins_eval_3]_ [#coco_ins_eval_5]_
map/iou=0.50:0.95/area=large/max_dets=100, *float*, \
[#coco_ins_eval_3]_ [#coco_ins_eval_5]_
mar/iou=0.50:0.95/area=all/max_dets=1, *float*, \
[#coco_ins_eval_4]_
mar/iou=0.50/area=all/max_dets=10, *float*, \
[#coco_ins_eval_4]_
mar/iou=0.75/area=all/max_dets=100, *float*, \
[#coco_ins_eval_4]_
mar/iou=0.50:0.95/area=small/max_dets=100, *float*, \
[#coco_ins_eval_4]_ [#coco_ins_eval_5]_
mar/iou=0.50:0.95/area=medium/max_dets=100, *float*, \
[#coco_ins_eval_4]_ [#coco_ins_eval_5]_
mar/iou=0.50:0.95/area=large/max_dets=100, *float*, \
[#coco_ins_eval_4]_ [#coco_ins_eval_5]_
coco_eval, *pycocotools.cocoeval.COCOeval*, \
result from :obj:`pycocotools`
existent_labels, *numpy.ndarray*, \
used labels \
.. [#coco_ins_eval_1] An array of average precisions. \
The :math:`l`-th value corresponds to the average precision \
for class :math:`l`. If class :math:`l` does not exist in \
either :obj:`pred_labels` or :obj:`gt_labels`, the corresponding \
value is set to :obj:`numpy.nan`.
.. [#coco_ins_eval_2] An array of average recalls. \
        The :math:`l`-th value corresponds to the average recall \
for class :math:`l`. If class :math:`l` does not exist in \
either :obj:`pred_labels` or :obj:`gt_labels`, the corresponding \
value is set to :obj:`numpy.nan`.
.. [#coco_ins_eval_3] The average of average precisions over classes.
.. [#coco_ins_eval_4] The average of average recalls over classes.
.. [#coco_ins_eval_5] Skip if :obj:`gt_areas` is :obj:`None`.
"""
if not _available:
raise ValueError(
'Please install pycocotools \n'
'pip install -e \'git+https://github.com/cocodataset/coco.git'
'#egg=pycocotools&subdirectory=PythonAPI\'')
gt_coco = pycocotools.coco.COCO()
pred_coco = pycocotools.coco.COCO()
pred_masks = iter(pred_masks)
pred_labels = iter(pred_labels)
pred_scores = iter(pred_scores)
gt_masks = iter(gt_masks)
gt_labels = iter(gt_labels)
if gt_areas is None:
compute_area_dependent_metrics = False
gt_areas = itertools.repeat(None)
else:
compute_area_dependent_metrics = True
gt_areas = iter(gt_areas)
gt_crowdeds = (iter(gt_crowdeds) if gt_crowdeds is not None
else itertools.repeat(None))
images = []
pred_annos = []
gt_annos = []
existent_labels = {}
for i, (pred_mask, pred_label, pred_score, gt_mask, gt_label,
gt_area, gt_crowded) in enumerate(six.moves.zip(
pred_masks, pred_labels, pred_scores,
gt_masks, gt_labels, gt_areas, gt_crowdeds)):
size = pred_mask.shape[1:]
if gt_area is None:
gt_area = itertools.repeat(None)
if gt_crowded is None:
gt_crowded = itertools.repeat(None)
# Starting ids from 1 is important when using COCO.
img_id = i + 1
for pred_msk, pred_lb, pred_sc in zip(
pred_mask, pred_label, pred_score):
pred_annos.append(
_create_anno(pred_msk, pred_lb, pred_sc,
img_id=img_id, anno_id=len(pred_annos) + 1,
crw=0, ar=None))
existent_labels[pred_lb] = True
for gt_msk, gt_lb, gt_ar, gt_crw in zip(
gt_mask, gt_label, gt_area, gt_crowded):
gt_annos.append(
_create_anno(gt_msk, gt_lb, None,
img_id=img_id, anno_id=len(gt_annos) + 1,
ar=gt_ar, crw=gt_crw))
existent_labels[gt_lb] = True
images.append({'id': img_id, 'height': size[0], 'width': size[1]})
existent_labels = sorted(existent_labels.keys())
pred_coco.dataset['categories'] = [{'id': i} for i in existent_labels]
gt_coco.dataset['categories'] = [{'id': i} for i in existent_labels]
pred_coco.dataset['annotations'] = pred_annos
gt_coco.dataset['annotations'] = gt_annos
pred_coco.dataset['images'] = images
gt_coco.dataset['images'] = images
with _redirect_stdout(open(os.devnull, 'w')):
pred_coco.createIndex()
gt_coco.createIndex()
coco_eval = pycocotools.cocoeval.COCOeval(gt_coco, pred_coco, 'segm')
coco_eval.evaluate()
coco_eval.accumulate()
results = {'coco_eval': coco_eval}
p = coco_eval.params
common_kwargs = {
'prec': coco_eval.eval['precision'],
'rec': coco_eval.eval['recall'],
'iou_threshs': p.iouThrs,
'area_ranges': p.areaRngLbl,
'max_detection_list': p.maxDets}
all_kwargs = {
'ap/iou=0.50:0.95/area=all/max_dets=100': {
'ap': True, 'iou_thresh': None, 'area_range': 'all',
'max_detection': 100},
'ap/iou=0.50/area=all/max_dets=100': {
'ap': True, 'iou_thresh': 0.5, 'area_range': 'all',
'max_detection': 100},
'ap/iou=0.75/area=all/max_dets=100': {
'ap': True, 'iou_thresh': 0.75, 'area_range': 'all',
'max_detection': 100},
'ar/iou=0.50:0.95/area=all/max_dets=1': {
'ap': False, 'iou_thresh': None, 'area_range': 'all',
'max_detection': 1},
'ar/iou=0.50:0.95/area=all/max_dets=10': {
'ap': False, 'iou_thresh': None, 'area_range': 'all',
'max_detection': 10},
'ar/iou=0.50:0.95/area=all/max_dets=100': {
'ap': False, 'iou_thresh': None, 'area_range': 'all',
'max_detection': 100},
}
if compute_area_dependent_metrics:
all_kwargs.update({
'ap/iou=0.50:0.95/area=small/max_dets=100': {
'ap': True, 'iou_thresh': None, 'area_range': 'small',
'max_detection': 100},
'ap/iou=0.50:0.95/area=medium/max_dets=100': {
'ap': True, 'iou_thresh': None, 'area_range': 'medium',
'max_detection': 100},
'ap/iou=0.50:0.95/area=large/max_dets=100': {
'ap': True, 'iou_thresh': None, 'area_range': 'large',
'max_detection': 100},
'ar/iou=0.50:0.95/area=small/max_dets=100': {
'ap': False, 'iou_thresh': None, 'area_range': 'small',
'max_detection': 100},
'ar/iou=0.50:0.95/area=medium/max_dets=100': {
'ap': False, 'iou_thresh': None, 'area_range': 'medium',
'max_detection': 100},
'ar/iou=0.50:0.95/area=large/max_dets=100': {
'ap': False, 'iou_thresh': None, 'area_range': 'large',
'max_detection': 100},
})
for key, kwargs in all_kwargs.items():
kwargs.update(common_kwargs)
metrics, mean_metric = _summarize(**kwargs)
# pycocotools ignores classes that are not included in
        # either gt or prediction, but lie between 0 and
# the maximum label id.
# We set values for these classes to np.nan.
results[key] = np.nan * np.ones(np.max(existent_labels) + 1)
results[key][existent_labels] = metrics
results['m' + key] = mean_metric
results['existent_labels'] = existent_labels
return results
def _create_anno(msk, lb, sc, img_id, anno_id, ar=None, crw=None):
H, W = msk.shape
if crw is None:
crw = False
msk = np.asfortranarray(msk.astype(np.uint8))
rle = mask_tools.encode(msk)
if ar is None:
# We compute dummy area to pass to pycocotools.
# Note that area dependent scores are ignored afterwards.
ar = mask_tools.area(rle)
# Rounding is done to make the result consistent with COCO.
anno = {
'image_id': img_id, 'category_id': lb,
'segmentation': rle,
'area': ar,
'id': anno_id,
'iscrowd': crw}
if sc is not None:
anno.update({'score': sc})
return anno
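# Illustrative usage sketch (assumes chainercv and pycocotools are installed;
# the single 8x8 mask below is made-up toy data, not from any dataset). This
# helper is not called by the library code.
def _example_eval_usage():
    mask = np.zeros((1, 8, 8), dtype=bool)
    mask[0, 2:6, 2:6] = True
    result = eval_instance_segmentation_coco(
        pred_masks=[mask],
        pred_labels=[np.array([0], dtype=np.int32)],
        pred_scores=[np.array([0.9], dtype=np.float32)],
        gt_masks=[mask.copy()],
        gt_labels=[np.array([0], dtype=np.int32)])
    # A perfect prediction should give an mAP of 1.0 here.
    print(result['map/iou=0.50:0.95/area=all/max_dets=100'])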
|
from homeassistant.auth.providers.homeassistant import InvalidAuth
from tests.async_mock import Mock, patch
async def test_auth_success(hass, hassio_client_supervisor):
"""Test no auth needed for ."""
with patch(
"homeassistant.auth.providers.homeassistant."
"HassAuthProvider.async_validate_login",
) as mock_login:
resp = await hassio_client_supervisor.post(
"/api/hassio_auth",
json={"username": "test", "password": "123456", "addon": "samba"},
)
# Check we got right response
assert resp.status == 200
mock_login.assert_called_with("test", "123456")
async def test_auth_fails_no_supervisor(hass, hassio_client):
"""Test if only supervisor can access."""
with patch(
"homeassistant.auth.providers.homeassistant."
"HassAuthProvider.async_validate_login",
) as mock_login:
resp = await hassio_client.post(
"/api/hassio_auth",
json={"username": "test", "password": "123456", "addon": "samba"},
)
# Check we got right response
assert resp.status == 401
assert not mock_login.called
async def test_auth_fails_no_auth(hass, hassio_noauth_client):
"""Test if only supervisor can access."""
with patch(
"homeassistant.auth.providers.homeassistant."
"HassAuthProvider.async_validate_login",
) as mock_login:
resp = await hassio_noauth_client.post(
"/api/hassio_auth",
json={"username": "test", "password": "123456", "addon": "samba"},
)
# Check we got right response
assert resp.status == 401
assert not mock_login.called
async def test_login_error(hass, hassio_client_supervisor):
"""Test no auth needed for error."""
with patch(
"homeassistant.auth.providers.homeassistant."
"HassAuthProvider.async_validate_login",
Mock(side_effect=InvalidAuth()),
) as mock_login:
resp = await hassio_client_supervisor.post(
"/api/hassio_auth",
json={"username": "test", "password": "123456", "addon": "samba"},
)
# Check we got right response
assert resp.status == 401
mock_login.assert_called_with("test", "123456")
async def test_login_no_data(hass, hassio_client_supervisor):
"""Test auth with no data -> error."""
with patch(
"homeassistant.auth.providers.homeassistant."
"HassAuthProvider.async_validate_login",
Mock(side_effect=InvalidAuth()),
) as mock_login:
resp = await hassio_client_supervisor.post("/api/hassio_auth")
# Check we got right response
assert resp.status == 400
assert not mock_login.called
async def test_login_no_username(hass, hassio_client_supervisor):
"""Test auth with no username in data -> error."""
with patch(
"homeassistant.auth.providers.homeassistant."
"HassAuthProvider.async_validate_login",
Mock(side_effect=InvalidAuth()),
) as mock_login:
resp = await hassio_client_supervisor.post(
"/api/hassio_auth", json={"password": "123456", "addon": "samba"}
)
# Check we got right response
assert resp.status == 400
assert not mock_login.called
async def test_login_success_extra(hass, hassio_client_supervisor):
"""Test auth with extra data."""
with patch(
"homeassistant.auth.providers.homeassistant."
"HassAuthProvider.async_validate_login",
) as mock_login:
resp = await hassio_client_supervisor.post(
"/api/hassio_auth",
json={
"username": "test",
"password": "123456",
"addon": "samba",
"path": "/share",
},
)
# Check we got right response
assert resp.status == 200
mock_login.assert_called_with("test", "123456")
async def test_password_success(hass, hassio_client_supervisor):
"""Test no auth needed for ."""
with patch(
"homeassistant.auth.providers.homeassistant."
"HassAuthProvider.async_change_password",
) as mock_change:
resp = await hassio_client_supervisor.post(
"/api/hassio_auth/password_reset",
json={"username": "test", "password": "123456"},
)
# Check we got right response
assert resp.status == 200
mock_change.assert_called_with("test", "123456")
async def test_password_fails_no_supervisor(hass, hassio_client):
"""Test if only supervisor can access."""
resp = await hassio_client.post(
"/api/hassio_auth/password_reset",
json={"username": "test", "password": "123456"},
)
# Check we got right response
assert resp.status == 401
async def test_password_fails_no_auth(hass, hassio_noauth_client):
"""Test if only supervisor can access."""
resp = await hassio_noauth_client.post(
"/api/hassio_auth/password_reset",
json={"username": "test", "password": "123456"},
)
# Check we got right response
assert resp.status == 401
async def test_password_no_user(hass, hassio_client_supervisor):
"""Test changing password for invalid user."""
resp = await hassio_client_supervisor.post(
"/api/hassio_auth/password_reset",
json={"username": "test", "password": "123456"},
)
# Check we got right response
assert resp.status == 404
|
from __future__ import absolute_import
import sys
from unittest import TestCase, main
from lark import Lark
from lark.load_grammar import GrammarLoader, GrammarError
class TestGrammar(TestCase):
def setUp(self):
pass
def test_errors(self):
for msg, examples in GrammarLoader.ERRORS:
for example in examples:
try:
p = Lark(example)
except GrammarError as e:
assert msg in str(e)
else:
assert False, "example did not raise an error"
def test_override(self):
# Overrides the 'sep' template in existing grammar to add an optional terminating delimiter
# Thus extending it beyond its original capacity
p = Lark("""
%import .test_templates_import (start, sep)
%override sep{item, delim}: item (delim item)* delim?
%ignore " "
""")
a = p.parse('[1, 2, 3]')
b = p.parse('[1, 2, 3, ]')
assert a == b
if __name__ == '__main__':
main()
|
from __future__ import print_function
from pyVim.connect import SmartConnect, Disconnect
from pyVmomi import vim
import argparse
import atexit
import getpass
import ssl
def GetArgs():
"""
Supports the command-line arguments listed below.
"""
parser = argparse.ArgumentParser(
description='Process args for retrieving all the Virtual Machines')
parser.add_argument('-s', '--host', required=True, action='store',
help='Remote host to connect to')
parser.add_argument('-o', '--port', type=int, default=443, action='store',
help='Port to connect on')
parser.add_argument('-u', '--user', required=True, action='store',
help='User name to use when connecting to host')
parser.add_argument('-p', '--password', required=False, action='store',
help='Password to use when connecting to host')
args = parser.parse_args()
return args
def PrintVmInfo(vm, depth=1):
"""
Print information for a particular virtual machine or recurse into a folder
or vApp with depth protection
"""
maxdepth = 10
# if this is a group it will have children. if it does, recurse into them
# and then return
if hasattr(vm, 'childEntity'):
if depth > maxdepth:
return
vmList = vm.childEntity
for c in vmList:
PrintVmInfo(c, depth+1)
return
# if this is a vApp, it likely contains child VMs
    # (vApps can nest vApps, but it is hardly a common use case, so ignore that)
if isinstance(vm, vim.VirtualApp):
vmList = vm.vm
for c in vmList:
PrintVmInfo(c, depth + 1)
return
summary = vm.summary
print("Name : ", summary.config.name)
print("Path : ", summary.config.vmPathName)
print("Guest : ", summary.config.guestFullName)
annotation = summary.config.annotation
    if annotation is not None and annotation != "":
print("Annotation : ", annotation)
print("State : ", summary.runtime.powerState)
    if summary.guest is not None:
ip = summary.guest.ipAddress
        if ip is not None and ip != "":
print("IP : ", ip)
    if summary.runtime.question is not None:
print("Question : ", summary.runtime.question.text)
print("")
def main():
"""
Simple command-line program for listing the virtual machines on a system.
"""
args = GetArgs()
if args.password:
password = args.password
else:
password = getpass.getpass(prompt='Enter password for host %s and '
'user %s: ' % (args.host,args.user))
context = None
if hasattr(ssl, '_create_unverified_context'):
context = ssl._create_unverified_context()
si = SmartConnect(host=args.host,
user=args.user,
pwd=password,
port=int(args.port),
sslContext=context)
if not si:
print("Could not connect to the specified host using specified "
"username and password")
return -1
atexit.register(Disconnect, si)
content = si.RetrieveContent()
for child in content.rootFolder.childEntity:
if hasattr(child, 'vmFolder'):
datacenter = child
vmFolder = datacenter.vmFolder
vmList = vmFolder.childEntity
for vm in vmList:
PrintVmInfo(vm)
return 0
# Start program
if __name__ == "__main__":
main()
|
import inspect
import collections
import traceback
import typing
from typing import Any, MutableMapping, MutableSequence, Tuple, Union
import attr
from qutebrowser.api import cmdutils
from qutebrowser.commands import cmdexc, argparser
from qutebrowser.utils import log, message, docutils, objreg, usertypes, utils
from qutebrowser.utils import debug as debug_utils
from qutebrowser.misc import objects
@attr.s
class ArgInfo:
"""Information about an argument."""
value = attr.ib(None)
hide = attr.ib(False)
metavar = attr.ib(None)
flag = attr.ib(None)
completion = attr.ib(None)
choices = attr.ib(None)
class Command:
"""Base skeleton for a command.
Attributes:
name: The main name of the command.
maxsplit: The maximum amount of splits to do for the commandline, or
None.
deprecated: False, or a string to describe why a command is deprecated.
desc: The description of the command.
handler: The handler function to call.
debug: Whether this is a debugging command (only shown with --debug).
parser: The ArgumentParser to use to parse this command.
flags_with_args: A list of flags which take an argument.
no_cmd_split: If true, ';;' to split sub-commands is ignored.
backend: Which backend the command works with (or None if it works with
both)
no_replace_variables: Don't replace variables like {url}
modes: The modes the command can be executed in.
_qute_args: The saved data from @cmdutils.argument
_count: The count set for the command.
_instance: The object to bind 'self' to.
_scope: The scope to get _instance for in the object registry.
"""
# CommandValue values which need a count
COUNT_COMMAND_VALUES = [usertypes.CommandValue.count,
usertypes.CommandValue.count_tab]
def __init__(self, *, handler, name, instance=None, maxsplit=None,
modes=None, not_modes=None, debug=False, deprecated=False,
no_cmd_split=False, star_args_optional=False, scope='global',
backend=None, no_replace_variables=False):
if modes is not None and not_modes is not None:
raise ValueError("Only modes or not_modes can be given!")
if modes is not None:
for m in modes:
if not isinstance(m, usertypes.KeyMode):
raise TypeError("Mode {} is no KeyMode member!".format(m))
self.modes = set(modes)
elif not_modes is not None:
for m in not_modes:
if not isinstance(m, usertypes.KeyMode):
raise TypeError("Mode {} is no KeyMode member!".format(m))
self.modes = set(usertypes.KeyMode).difference(not_modes)
else:
self.modes = set(usertypes.KeyMode)
if scope != 'global' and instance is None:
raise ValueError("Setting scope without setting instance makes "
"no sense!")
self.name = name
self.maxsplit = maxsplit
self.deprecated = deprecated
self._instance = instance
self._scope = scope
self._star_args_optional = star_args_optional
self.debug = debug
self.handler = handler
self.no_cmd_split = no_cmd_split
self.backend = backend
self.no_replace_variables = no_replace_variables
self.docparser = docutils.DocstringParser(handler)
self.parser = argparser.ArgumentParser(
name, description=self.docparser.short_desc,
epilog=self.docparser.long_desc)
self.parser.add_argument('-h', '--help', action=argparser.HelpAction,
default=argparser.SUPPRESS, nargs=0,
help=argparser.SUPPRESS)
self.opt_args: MutableMapping[str, Tuple[str, str]] = collections.OrderedDict()
self.namespace = None
self._count = None
self.pos_args: MutableSequence[Tuple[str, str]] = []
self.flags_with_args: MutableSequence[str] = []
self._has_vararg = False
# This is checked by future @cmdutils.argument calls so they fail
# (as they'd be silently ignored otherwise)
self._qute_args = getattr(self.handler, 'qute_args', {})
self.handler.qute_args = None
self._check_func()
self._inspect_func()
def _check_prerequisites(self, win_id):
"""Check if the command is permitted to run currently.
Args:
win_id: The window ID the command is run in.
"""
from qutebrowser.keyinput import modeman
mode_manager = modeman.instance(win_id)
self.validate_mode(mode_manager.mode)
if self.backend is not None and objects.backend != self.backend:
raise cmdexc.PrerequisitesError(
"{}: Only available with {} "
"backend.".format(self.name, self.backend.name))
if self.deprecated:
message.warning('{} is deprecated - {}'.format(self.name,
self.deprecated))
def _check_func(self):
"""Make sure the function parameters don't violate any rules."""
signature = inspect.signature(self.handler)
if 'self' in signature.parameters:
if self._instance is None:
raise TypeError("{} is a class method, but instance was not "
"given!".format(self.name))
arg_info = self.get_arg_info(signature.parameters['self'])
if arg_info.value is not None:
raise TypeError("{}: Can't fill 'self' with value!"
.format(self.name))
elif 'self' not in signature.parameters and self._instance is not None:
raise TypeError("{} is not a class method, but instance was "
"given!".format(self.name))
elif any(param.kind == inspect.Parameter.VAR_KEYWORD
for param in signature.parameters.values()):
raise TypeError("{}: functions with varkw arguments are not "
"supported!".format(self.name))
def get_arg_info(self, param):
"""Get an ArgInfo tuple for the given inspect.Parameter."""
return self._qute_args.get(param.name, ArgInfo())
def get_pos_arg_info(self, pos):
"""Get an ArgInfo tuple for the given positional parameter."""
if pos >= len(self.pos_args) and self._has_vararg:
pos = len(self.pos_args) - 1
name = self.pos_args[pos][0]
return self._qute_args.get(name, ArgInfo())
def _inspect_special_param(self, param):
"""Check if the given parameter is a special one.
Args:
param: The inspect.Parameter to handle.
Return:
True if the parameter is special, False otherwise.
"""
arg_info = self.get_arg_info(param)
if arg_info.value is None:
return False
elif arg_info.value == usertypes.CommandValue.count:
if param.default is inspect.Parameter.empty:
raise TypeError("{}: handler has count parameter "
"without default!".format(self.name))
return True
elif isinstance(arg_info.value, usertypes.CommandValue):
return True
else:
raise TypeError("{}: Invalid value={!r} for argument '{}'!"
.format(self.name, arg_info.value, param.name))
raise utils.Unreachable
def _inspect_func(self):
"""Inspect the function to get useful information from it.
        Sets the instance attribute desc based on the information.
        Return:
            The parameters of the handler's signature.
"""
signature = inspect.signature(self.handler)
doc = inspect.getdoc(self.handler)
if doc is not None:
self.desc = doc.splitlines()[0].strip()
else:
self.desc = ""
for param in signature.parameters.values():
# https://docs.python.org/3/library/inspect.html#inspect.Parameter.kind
# "Python has no explicit syntax for defining positional-only
# parameters, but many built-in and extension module functions
# (especially those that accept only one or two parameters) accept
# them."
assert param.kind != inspect.Parameter.POSITIONAL_ONLY
if param.name == 'self':
continue
if self._inspect_special_param(param):
continue
if (param.kind == inspect.Parameter.KEYWORD_ONLY and
param.default is inspect.Parameter.empty):
raise TypeError("{}: handler has keyword only argument {!r} "
"without default!".format(
self.name, param.name))
typ = self._get_type(param)
is_bool = typ is bool
kwargs = self._param_to_argparse_kwargs(param, is_bool)
args = self._param_to_argparse_args(param, is_bool)
callsig = debug_utils.format_call(self.parser.add_argument, args,
kwargs, full=False)
log.commands.vdebug( # type: ignore[attr-defined]
'Adding arg {} of type {} -> {}'
.format(param.name, typ, callsig))
self.parser.add_argument(*args, **kwargs)
if param.kind == inspect.Parameter.VAR_POSITIONAL:
self._has_vararg = True
return signature.parameters.values()
def _param_to_argparse_kwargs(self, param, is_bool):
"""Get argparse keyword arguments for a parameter.
Args:
param: The inspect.Parameter object to get the args for.
is_bool: Whether the parameter is a boolean.
Return:
A kwargs dict.
"""
kwargs = {}
try:
kwargs['help'] = self.docparser.arg_descs[param.name]
except KeyError:
pass
kwargs['dest'] = param.name
arg_info = self.get_arg_info(param)
if is_bool:
kwargs['action'] = 'store_true'
else:
if arg_info.metavar is not None:
kwargs['metavar'] = arg_info.metavar
else:
kwargs['metavar'] = argparser.arg_name(param.name)
if param.kind == inspect.Parameter.VAR_POSITIONAL:
kwargs['nargs'] = '*' if self._star_args_optional else '+'
elif param.kind == inspect.Parameter.KEYWORD_ONLY:
kwargs['default'] = param.default
elif not is_bool and param.default is not inspect.Parameter.empty:
kwargs['default'] = param.default
kwargs['nargs'] = '?'
return kwargs
def _param_to_argparse_args(self, param, is_bool):
"""Get argparse positional arguments for a parameter.
Args:
param: The inspect.Parameter object to get the args for.
is_bool: Whether the parameter is a boolean.
Return:
A list of args.
"""
args = []
name = argparser.arg_name(param.name)
arg_info = self.get_arg_info(param)
assert not arg_info.value, name
if arg_info.flag is not None:
shortname = arg_info.flag
else:
shortname = name[0]
if len(shortname) != 1:
raise ValueError("Flag '{}' of parameter {} (command {}) must be "
"exactly 1 char!".format(shortname, name,
self.name))
if is_bool or param.kind == inspect.Parameter.KEYWORD_ONLY:
long_flag = '--{}'.format(name)
short_flag = '-{}'.format(shortname)
args.append(long_flag)
args.append(short_flag)
self.opt_args[param.name] = long_flag, short_flag
if not is_bool:
self.flags_with_args += [short_flag, long_flag]
else:
if not arg_info.hide:
self.pos_args.append((param.name, name))
return args
def _get_type(self, param):
"""Get the type of an argument from its default value or annotation.
Args:
param: The inspect.Parameter to look at.
"""
arg_info = self.get_arg_info(param)
if arg_info.value:
# Filled values are passed 1:1
return None
elif param.kind in [inspect.Parameter.VAR_POSITIONAL,
inspect.Parameter.VAR_KEYWORD]:
# For *args/**kwargs we only support strings
assert param.annotation in [inspect.Parameter.empty, str], param
return None
elif param.annotation is not inspect.Parameter.empty:
return param.annotation
elif param.default not in [None, inspect.Parameter.empty]:
return type(param.default)
else:
return str
def _get_objreg(self, *, win_id, name, scope):
"""Get an object from the objreg."""
if scope == 'global':
tab_id = None
win_id = None
elif scope == 'tab':
tab_id = 'current'
elif scope == 'window':
tab_id = None
else:
raise ValueError("Invalid scope {}!".format(scope))
return objreg.get(name, scope=scope, window=win_id, tab=tab_id,
from_command=True)
def _add_special_arg(self, *, value, param, args, kwargs):
"""Add a special argument value to a function call.
Arguments:
value: The value to add.
param: The parameter being filled.
args: The positional argument list. Gets modified directly.
kwargs: The keyword argument dict. Gets modified directly.
"""
if param.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD:
args.append(value)
elif param.kind == inspect.Parameter.KEYWORD_ONLY:
kwargs[param.name] = value
else:
raise TypeError("{}: invalid parameter type {} for argument "
"{!r}!".format(self.name, param.kind, param.name))
def _add_count_tab(self, *, win_id, param, args, kwargs):
"""Add the count_tab widget argument."""
tabbed_browser = self._get_objreg(
win_id=win_id, name='tabbed-browser', scope='window')
if self._count is None:
tab = tabbed_browser.widget.currentWidget()
elif 1 <= self._count <= tabbed_browser.widget.count():
cmdutils.check_overflow(self._count + 1, 'int')
tab = tabbed_browser.widget.widget(self._count - 1)
else:
tab = None
self._add_special_arg(value=tab, param=param, args=args,
kwargs=kwargs)
def _get_param_value(self, param):
"""Get the converted value for an inspect.Parameter."""
value = getattr(self.namespace, param.name)
typ = self._get_type(param)
if isinstance(typ, tuple):
raise TypeError("{}: Legacy tuple type annotation!".format(
self.name))
try:
origin = typing.get_origin(typ) # type: ignore[attr-defined]
except AttributeError:
# typing.get_origin was added in Python 3.8
origin = getattr(typ, '__origin__', None)
if origin is Union:
try:
types = list(typing.get_args(typ)) # type: ignore[attr-defined]
except AttributeError:
# typing.get_args was added in Python 3.8
types = list(typ.__args__)
if param.default is not inspect.Parameter.empty:
types.append(type(param.default))
choices = self.get_arg_info(param).choices
value = argparser.multitype_conv(param, types, value,
str_choices=choices)
elif typ is str:
choices = self.get_arg_info(param).choices
value = argparser.type_conv(param, typ, value, str_choices=choices)
elif typ is bool: # no type conversion for flags
assert isinstance(value, bool)
elif typ is None:
pass
else:
value = argparser.type_conv(param, typ, value)
return value
def _handle_special_call_arg(self, *, pos, param, win_id, args, kwargs):
"""Check whether the argument is special, and if so, fill it in.
Args:
pos: The position of the argument.
param: The argparse.Parameter.
win_id: The window ID the command is run in.
args/kwargs: The args/kwargs to fill.
Return:
True if it was a special arg, False otherwise.
"""
arg_info = self.get_arg_info(param)
if pos == 0 and self._instance is not None:
assert param.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD
self_value = self._get_objreg(win_id=win_id, name=self._instance,
scope=self._scope)
self._add_special_arg(value=self_value, param=param,
args=args, kwargs=kwargs)
return True
elif arg_info.value == usertypes.CommandValue.count:
if self._count is None:
assert param.default is not inspect.Parameter.empty
value = param.default
else:
value = self._count
self._add_special_arg(value=value, param=param,
args=args, kwargs=kwargs)
return True
elif arg_info.value == usertypes.CommandValue.win_id:
self._add_special_arg(value=win_id, param=param,
args=args, kwargs=kwargs)
return True
elif arg_info.value == usertypes.CommandValue.cur_tab:
tab = self._get_objreg(win_id=win_id, name='tab', scope='tab')
self._add_special_arg(value=tab, param=param,
args=args, kwargs=kwargs)
return True
elif arg_info.value == usertypes.CommandValue.count_tab:
self._add_count_tab(win_id=win_id, param=param, args=args,
kwargs=kwargs)
return True
elif arg_info.value is None:
pass
else:
raise utils.Unreachable(arg_info)
return False
def _get_call_args(self, win_id):
"""Get arguments for a function call.
Args:
win_id: The window id this command should be executed in.
Return:
An (args, kwargs) tuple.
"""
args: Any = []
kwargs: MutableMapping[str, Any] = {}
signature = inspect.signature(self.handler)
for i, param in enumerate(signature.parameters.values()):
if self._handle_special_call_arg(pos=i, param=param,
win_id=win_id, args=args,
kwargs=kwargs):
continue
value = self._get_param_value(param)
if param.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD:
args.append(value)
elif param.kind == inspect.Parameter.VAR_POSITIONAL:
if value is not None:
args += value
elif param.kind == inspect.Parameter.KEYWORD_ONLY:
kwargs[param.name] = value
else:
raise TypeError("{}: Invalid parameter type {} for argument "
"'{}'!".format(
self.name, param.kind, param.name))
return args, kwargs
def run(self, win_id, args=None, count=None):
"""Run the command.
Note we don't catch CommandError here as it might happen async.
Args:
win_id: The window ID the command is run in.
args: Arguments to the command.
count: Command repetition count.
"""
dbgout = ["command called:", self.name]
if args:
dbgout.append(str(args))
elif args is None:
args = []
if count is not None:
dbgout.append("(count={})".format(count))
log.commands.debug(' '.join(dbgout))
try:
self.namespace = self.parser.parse_args(args)
except argparser.ArgumentParserError as e:
message.error('{}: {}'.format(self.name, e),
stack=traceback.format_exc())
return
except argparser.ArgumentParserExit as e:
log.commands.debug("argparser exited with status {}: {}".format(
e.status, e))
return
self._count = count
self._check_prerequisites(win_id)
posargs, kwargs = self._get_call_args(win_id)
log.commands.debug('Calling {}'.format(
debug_utils.format_call(self.handler, posargs, kwargs)))
self.handler(*posargs, **kwargs)
def validate_mode(self, mode):
"""Raise cmdexc.PrerequisitesError unless allowed in the given mode.
Args:
mode: The usertypes.KeyMode to check.
"""
if mode not in self.modes:
mode_names = '/'.join(sorted(m.name for m in self.modes))
raise cmdexc.PrerequisitesError(
"{}: This command is only allowed in {} mode, not {}.".format(
self.name, mode_names, mode.name))
def takes_count(self):
"""Return true iff this command can take a count argument."""
return any(info.value in self.COUNT_COMMAND_VALUES
for info in self._qute_args.values())
def register(self):
"""Register this command in objects.commands."""
log.commands.vdebug( # type: ignore[attr-defined]
"Registering command {} (from {}:{})".format(
self.name, self.handler.__module__, self.handler.__qualname__))
if self.name in objects.commands:
raise ValueError("{} is already registered!".format(self.name))
objects.commands[self.name] = self
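# Illustrative only (not part of the original module): a handler wired into
# this dispatch is typically declared through qutebrowser's cmdutils API, so
# the special values handled above come from decorators roughly like these
# (the exact decorator names are an assumption about the public API):
#   @cmdutils.register(instance='command-dispatcher', scope='window')
#   @cmdutils.argument('count', value=cmdutils.Value.count)
#   def tab_close(self, count=None):
#       ...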
|
from math import sqrt
import numpy as np
from ..utils import check_random_state, verbose, logger
from ..parallel import parallel_func
def _max_stat(X, X2, perms, dof_scaling):
"""Aux function for permutation_t_test (for parallel comp)."""
n_samples = len(X)
mus = np.dot(perms, X) / float(n_samples)
stds = np.sqrt(X2[None, :] - mus * mus) * dof_scaling # std with splitting
max_abs = np.max(np.abs(mus) / (stds / sqrt(n_samples)), axis=1) # t-max
return max_abs
@verbose
def permutation_t_test(X, n_permutations=10000, tail=0, n_jobs=1,
seed=None, verbose=None):
"""One sample/paired sample permutation test based on a t-statistic.
This function can perform the test on one variable or
simultaneously on multiple variables. When applying the test to multiple
variables, the "tmax" method is used for adjusting the p-values of each
variable for multiple comparisons. Like Bonferroni correction, this method
adjusts p-values in a way that controls the family-wise error rate.
However, the permutation method will be more
powerful than Bonferroni correction when different variables in the test
are correlated (see [1]_).
Parameters
----------
X : array, shape (n_samples, n_tests)
Samples (observations) by number of tests (variables).
n_permutations : int | 'all'
        Number of permutations. If n_permutations is 'all', all possible
        permutations are tested. This is the exact test, which can be
        intractable when the number of samples is large (e.g. > 20).
If n_permutations >= 2**n_samples then the exact test is performed.
tail : -1 or 0 or 1 (default = 0)
If tail is 1, the alternative hypothesis is that the
mean of the data is greater than 0 (upper tailed test). If tail is 0,
the alternative hypothesis is that the mean of the data is different
than 0 (two tailed test). If tail is -1, the alternative hypothesis
is that the mean of the data is less than 0 (lower tailed test).
%(n_jobs)s
%(seed)s
%(verbose)s
Returns
-------
T_obs : array of shape [n_tests]
T-statistic observed for all variables.
p_values : array of shape [n_tests]
P-values for all the tests (a.k.a. variables).
H0 : array of shape [n_permutations]
        T-statistic obtained by permutations and the t-max trick for multiple
        comparisons.
Notes
-----
If ``n_permutations >= 2 ** (n_samples - (tail == 0))``,
``n_permutations`` and ``seed`` will be ignored since an exact test
(full permutation test) will be performed.
References
----------
.. [1] Nichols, T. E. & Holmes, A. P. (2002). Nonparametric permutation
tests for functional neuroimaging: a primer with examples.
Human Brain Mapping, 15, 1-25.
"""
from .cluster_level import _get_1samp_orders
n_samples, n_tests = X.shape
X2 = np.mean(X ** 2, axis=0) # precompute moments
mu0 = np.mean(X, axis=0)
dof_scaling = sqrt(n_samples / (n_samples - 1.0))
std0 = np.sqrt(X2 - mu0 ** 2) * dof_scaling # get std with var splitting
T_obs = np.mean(X, axis=0) / (std0 / sqrt(n_samples))
rng = check_random_state(seed)
orders, _, extra = _get_1samp_orders(n_samples, n_permutations, tail, rng)
perms = 2 * np.array(orders) - 1 # from 0, 1 -> 1, -1
logger.info('Permuting %d times%s...' % (len(orders), extra))
parallel, my_max_stat, n_jobs = parallel_func(_max_stat, n_jobs)
max_abs = np.concatenate(parallel(my_max_stat(X, X2, p, dof_scaling)
for p in np.array_split(perms, n_jobs)))
max_abs = np.concatenate((max_abs, [np.abs(T_obs).max()]))
H0 = np.sort(max_abs)
if tail == 0:
p_values = (H0 >= np.abs(T_obs[:, np.newaxis])).mean(-1)
elif tail == 1:
p_values = (H0 >= T_obs[:, np.newaxis]).mean(-1)
elif tail == -1:
p_values = (-H0 <= T_obs[:, np.newaxis]).mean(-1)
return T_obs, p_values, H0
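# Worked illustration (not part of the original module): the sorted null
# distribution H0 built above converts an observed statistic into a p-value by
# counting how often the permutation maxima are at least as extreme. With an
# assumed H0 = [0.1, 0.4, 0.9, 1.7, 2.6] and T_obs = [2.0], the two-tailed
# branch gives p = (H0 >= 2.0).mean() = 1 / 5 = 0.2.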
def bootstrap_confidence_interval(arr, ci=.95, n_bootstraps=2000,
stat_fun='mean', random_state=None):
"""Get confidence intervals from non-parametric bootstrap.
Parameters
----------
arr : ndarray, shape (n_samples, ...)
The input data on which to calculate the confidence interval.
ci : float
Level of the confidence interval between 0 and 1.
n_bootstraps : int
Number of bootstraps.
stat_fun : str | callable
Can be "mean", "median", or a callable operating along ``axis=0``.
random_state : int | float | array_like | None
The seed at which to initialize the bootstrap.
Returns
-------
cis : ndarray, shape (2, ...)
Containing the lower boundary of the CI at ``cis[0, ...]`` and the
upper boundary of the CI at ``cis[1, ...]``.
"""
if stat_fun == "mean":
def stat_fun(x):
return x.mean(axis=0)
elif stat_fun == 'median':
def stat_fun(x):
return np.median(x, axis=0)
elif not callable(stat_fun):
raise ValueError("stat_fun must be 'mean', 'median' or callable.")
n_trials = arr.shape[0]
indices = np.arange(n_trials, dtype=int) # BCA would be cool to have too
rng = check_random_state(random_state)
boot_indices = rng.choice(indices, replace=True,
size=(n_bootstraps, len(indices)))
stat = np.array([stat_fun(arr[inds]) for inds in boot_indices])
ci = (((1 - ci) / 2) * 100, ((1 - ((1 - ci) / 2))) * 100)
ci_low, ci_up = np.percentile(stat, ci, axis=0)
return np.array([ci_low, ci_up])
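# Usage sketch (illustrative, not part of the original module): for an assumed
# input of shape (n_samples, n_channels), e.g. arr = rng.randn(40, 3) + 0.5,
# bootstrap_confidence_interval(arr, ci=.95, stat_fun='mean') returns an array
# of shape (2, 3) with the lower CI bound in row 0 and the upper bound in row 1.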
def _ci(arr, ci=.95, method="bootstrap", n_bootstraps=2000, random_state=None):
"""Calculate confidence interval. Aux function for plot_compare_evokeds."""
if method == "bootstrap":
return bootstrap_confidence_interval(arr, ci=ci,
n_bootstraps=n_bootstraps,
random_state=random_state)
else:
from . import _parametric_ci
return _parametric_ci(arr, ci=ci)
|
from flask import Flask
try:
from http import HTTPStatus
except ImportError:
import httplib as HTTPStatus
from flasgger import Swagger
swagger_config = {
"headers": [
],
"specs": [
{
"endpoint": 'specifications',
"route": '/specifications.json',
"rule_filter": lambda rule: True, # all in
"model_filter": lambda tag: True, # all in
}
],
"static_url_path": "/flasgger_static",
# "static_folder": "static", # must be set by user
"specs_route": "/documentation/swagger/"
}
app = Flask(__name__)
swag = Swagger(app, config=swagger_config)
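# Illustrative only (not part of the original test app): flasgger also builds
# the spec from view docstrings, so a minimal documented endpoint could look
# like this. The route, parameter, and response description are assumptions.
from flask import jsonify


@app.route('/colors/<palette>/')
def colors(palette):
    """Example endpoint returning a list of colors filtered by palette.
    ---
    parameters:
      - name: palette
        in: path
        type: string
        required: true
    responses:
      200:
        description: A list of colors (may be empty for unknown palettes)
    """
    all_colors = {'cmyk': ['cyan', 'magenta', 'yellow', 'black']}
    return jsonify(all_colors.get(palette, []))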
def test_swag(client, specs_data):
"""
    This test runs automatically in Travis CI
:param client: Flask app test client
:param specs_data: {'url': {swag_specs}} for every spec in app
"""
assert client.get('/documentation/swagger/').status_code == HTTPStatus.OK
assert specs_data.get('/specifications.json') is not None
if __name__ == '__main__':
app.run(debug=True)
|
import os
from unittest import expectedFailure
from stash.tests.stashtest import StashTestCase
class CatTests(StashTestCase):
"""Tests for the 'cat' command."""
def setUp(self):
"""setup the tests"""
self.cwd = self.get_data_path()
StashTestCase.setUp(self)
def get_data_path(self):
"""return the data/ sibling path"""
return os.path.abspath(os.path.join(os.path.dirname(__file__), "data"))
def read_data_file(self, fn):
"""returns the content of the file 'fn' in the data sibling dir."""
fp = os.path.join(self.get_data_path(), fn)
with open(fp, "r") as fin:
content = fin.read()
return content
def test_help(self):
"""test 'cat --help'."""
output = self.run_command("cat --help", exitcode=0)
self.assertIn("cat", output)
self.assertIn("-h", output)
self.assertIn("--help", output)
self.assertIn("files", output)
def test_cat_file(self):
"""test 'cat <somefile>'."""
output = self.run_command("cat somefile.txt", exitcode=0)
expected = self.read_data_file("somefile.txt")
self.assertEqual(output, expected)
def test_cat_multi_files(self):
"""test 'cat <somefile> <otherfile>'."""
output = self.run_command("cat somefile.txt otherfile.txt", exitcode=0)
expected = self.read_data_file("somefile.txt") + self.read_data_file("otherfile.txt")
self.assertEqual(output, expected)
def test_cat_stdin(self):
"""test 'cat <somefile> | cat -'."""
        # we test 'cat <somefile>' in a separate test, so we can use it here
output = self.run_command("cat somefile.txt | cat -", exitcode=0)
expected = self.read_data_file("somefile.txt")
self.assertEqual(output, expected)
def test_cat_nonexistent(self):
"""test 'cat <some file which does not exist>'."""
output = self.run_command("cat invalid.txt", exitcode=1)
self.assertIn("cat: ", output)
self.assertIn("No such file or directory: ", output)
self.assertIn("invalid.txt", output)
def test_cat_nonascii(self):
"""test 'cat <some file containing non-ascii characters.>'."""
output = self.run_command("cat nonascii.txt", exitcode=0).replace("\n", "")
self.assertEqual(output, u"äöüß")
|
from datetime import timedelta
import logging
from homeassistant.components.cover import (
ATTR_POSITION,
DEVICE_CLASS_AWNING,
DEVICE_CLASS_BLIND,
DEVICE_CLASS_CURTAIN,
DEVICE_CLASS_GARAGE,
DEVICE_CLASS_SHUTTER,
DEVICE_CLASS_WINDOW,
CoverEntity,
)
from homeassistant.util.dt import utcnow
from . import DOMAIN as TAHOMA_DOMAIN, TahomaDevice
_LOGGER = logging.getLogger(__name__)
ATTR_MEM_POS = "memorized_position"
ATTR_RSSI_LEVEL = "rssi_level"
ATTR_LOCK_START_TS = "lock_start_ts"
ATTR_LOCK_END_TS = "lock_end_ts"
ATTR_LOCK_LEVEL = "lock_level"
ATTR_LOCK_ORIG = "lock_originator"
HORIZONTAL_AWNING = "io:HorizontalAwningIOComponent"
TAHOMA_DEVICE_CLASSES = {
HORIZONTAL_AWNING: DEVICE_CLASS_AWNING,
"io:AwningValanceIOComponent": DEVICE_CLASS_AWNING,
"io:DiscreteGarageOpenerWithPartialPositionIOComponent": DEVICE_CLASS_GARAGE,
"io:DiscreteGarageOpenerIOComponent": DEVICE_CLASS_GARAGE,
"io:ExteriorVenetianBlindIOComponent": DEVICE_CLASS_BLIND,
"io:GarageOpenerIOComponent": DEVICE_CLASS_GARAGE,
"io:RollerShutterGenericIOComponent": DEVICE_CLASS_SHUTTER,
"io:RollerShutterUnoIOComponent": DEVICE_CLASS_SHUTTER,
"io:RollerShutterVeluxIOComponent": DEVICE_CLASS_SHUTTER,
"io:RollerShutterWithLowSpeedManagementIOComponent": DEVICE_CLASS_SHUTTER,
"io:VerticalExteriorAwningIOComponent": DEVICE_CLASS_AWNING,
"io:VerticalInteriorBlindVeluxIOComponent": DEVICE_CLASS_BLIND,
"io:WindowOpenerVeluxIOComponent": DEVICE_CLASS_WINDOW,
"rts:BlindRTSComponent": DEVICE_CLASS_BLIND,
"rts:CurtainRTSComponent": DEVICE_CLASS_CURTAIN,
"rts:DualCurtainRTSComponent": DEVICE_CLASS_CURTAIN,
"rts:ExteriorVenetianBlindRTSComponent": DEVICE_CLASS_BLIND,
"rts:RollerShutterRTSComponent": DEVICE_CLASS_SHUTTER,
"rts:VenetianBlindRTSComponent": DEVICE_CLASS_BLIND,
}
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Tahoma covers."""
if discovery_info is None:
return
controller = hass.data[TAHOMA_DOMAIN]["controller"]
devices = []
for device in hass.data[TAHOMA_DOMAIN]["devices"]["cover"]:
devices.append(TahomaCover(device, controller))
add_entities(devices, True)
class TahomaCover(TahomaDevice, CoverEntity):
"""Representation a Tahoma Cover."""
def __init__(self, tahoma_device, controller):
"""Initialize the device."""
super().__init__(tahoma_device, controller)
self._closure = 0
# 100 equals open
self._position = 100
self._closed = False
self._rssi_level = None
self._icon = None
        # Can be 0 or greater
self._lock_timer = 0
self._lock_start_ts = None
self._lock_end_ts = None
# Can be 'comfortLevel1', 'comfortLevel2', 'comfortLevel3',
# 'comfortLevel4', 'environmentProtection', 'humanProtection',
# 'userLevel1', 'userLevel2'
self._lock_level = None
# Can be 'LSC', 'SAAC', 'SFC', 'UPS', 'externalGateway', 'localUser',
# 'myself', 'rain', 'security', 'temperature', 'timer', 'user', 'wind'
self._lock_originator = None
def update(self):
"""Update method."""
self.controller.get_states([self.tahoma_device])
# For vertical covers
self._closure = self.tahoma_device.active_states.get("core:ClosureState")
# For horizontal covers
if self._closure is None:
self._closure = self.tahoma_device.active_states.get("core:DeploymentState")
# For all, if available
if "core:PriorityLockTimerState" in self.tahoma_device.active_states:
old_lock_timer = self._lock_timer
self._lock_timer = self.tahoma_device.active_states[
"core:PriorityLockTimerState"
]
# Derive timestamps from _lock_timer, only if not already set or
# something has changed
if self._lock_timer > 0:
_LOGGER.debug("Update %s, lock_timer: %d", self._name, self._lock_timer)
if self._lock_start_ts is None:
self._lock_start_ts = utcnow()
if self._lock_end_ts is None or old_lock_timer != self._lock_timer:
self._lock_end_ts = utcnow() + timedelta(seconds=self._lock_timer)
else:
self._lock_start_ts = None
self._lock_end_ts = None
else:
self._lock_timer = 0
self._lock_start_ts = None
self._lock_end_ts = None
self._lock_level = self.tahoma_device.active_states.get(
"io:PriorityLockLevelState"
)
self._lock_originator = self.tahoma_device.active_states.get(
"io:PriorityLockOriginatorState"
)
self._rssi_level = self.tahoma_device.active_states.get("core:RSSILevelState")
# Define which icon to use
if self._lock_timer > 0:
if self._lock_originator == "wind":
self._icon = "mdi:weather-windy"
else:
self._icon = "mdi:lock-alert"
else:
self._icon = None
# Define current position.
# _position: 0 is closed, 100 is fully open.
# 'core:ClosureState': 100 is closed, 0 is fully open.
if self._closure is not None:
if self.tahoma_device.type == HORIZONTAL_AWNING:
self._position = self._closure
else:
self._position = 100 - self._closure
if self._position <= 5:
self._position = 0
if self._position >= 95:
self._position = 100
self._closed = self._position == 0
else:
self._position = None
if "core:OpenClosedState" in self.tahoma_device.active_states:
self._closed = (
self.tahoma_device.active_states["core:OpenClosedState"] == "closed"
)
if "core:OpenClosedPartialState" in self.tahoma_device.active_states:
self._closed = (
self.tahoma_device.active_states["core:OpenClosedPartialState"]
== "closed"
)
else:
self._closed = False
_LOGGER.debug("Update %s, position: %d", self._name, self._position)
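    # Worked illustration (not part of the original integration): a vertical
    # cover reporting core:ClosureState = 25 ends up with
    # _position = 100 - 25 = 75 (75% open); a report of 97 first gives
    # _position = 3 and is then snapped to 0, so the cover is considered closed.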
@property
def current_cover_position(self):
"""Return current position of cover."""
return self._position
def set_cover_position(self, **kwargs):
"""Move the cover to a specific position."""
if self.tahoma_device.type == "io:WindowOpenerVeluxIOComponent":
command = "setClosure"
else:
command = "setPosition"
if self.tahoma_device.type == HORIZONTAL_AWNING:
self.apply_action(command, kwargs.get(ATTR_POSITION, 0))
else:
self.apply_action(command, 100 - kwargs.get(ATTR_POSITION, 0))
@property
def is_closed(self):
"""Return if the cover is closed."""
return self._closed
@property
def device_class(self):
"""Return the class of the device."""
return TAHOMA_DEVICE_CLASSES.get(self.tahoma_device.type)
@property
def device_state_attributes(self):
"""Return the device state attributes."""
attr = {}
super_attr = super().device_state_attributes
if super_attr is not None:
attr.update(super_attr)
if "core:Memorized1PositionState" in self.tahoma_device.active_states:
attr[ATTR_MEM_POS] = self.tahoma_device.active_states[
"core:Memorized1PositionState"
]
if self._rssi_level is not None:
attr[ATTR_RSSI_LEVEL] = self._rssi_level
if self._lock_start_ts is not None:
attr[ATTR_LOCK_START_TS] = self._lock_start_ts.isoformat()
if self._lock_end_ts is not None:
attr[ATTR_LOCK_END_TS] = self._lock_end_ts.isoformat()
if self._lock_level is not None:
attr[ATTR_LOCK_LEVEL] = self._lock_level
if self._lock_originator is not None:
attr[ATTR_LOCK_ORIG] = self._lock_originator
return attr
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return self._icon
def open_cover(self, **kwargs):
"""Open the cover."""
self.apply_action("open")
def close_cover(self, **kwargs):
"""Close the cover."""
self.apply_action("close")
def stop_cover(self, **kwargs):
"""Stop the cover."""
if (
self.tahoma_device.type
== "io:RollerShutterWithLowSpeedManagementIOComponent"
):
self.apply_action("setPosition", "secured")
elif self.tahoma_device.type in {
"io:ExteriorVenetianBlindIOComponent",
"rts:BlindRTSComponent",
"rts:DualCurtainRTSComponent",
"rts:ExteriorVenetianBlindRTSComponent",
"rts:VenetianBlindRTSComponent",
}:
self.apply_action("my")
elif self.tahoma_device.type in {
HORIZONTAL_AWNING,
"io:AwningValanceIOComponent",
"io:RollerShutterGenericIOComponent",
"io:VerticalExteriorAwningIOComponent",
"io:VerticalInteriorBlindVeluxIOComponent",
"io:WindowOpenerVeluxIOComponent",
}:
self.apply_action("stop")
else:
self.apply_action("stopIdentify")
|
import logging
from typing import List
from heatmiserV3 import connection, heatmiser
import voluptuous as vol
from homeassistant.components.climate import (
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
PLATFORM_SCHEMA,
ClimateEntity,
)
from homeassistant.components.climate.const import SUPPORT_TARGET_TEMPERATURE
from homeassistant.const import (
ATTR_TEMPERATURE,
CONF_HOST,
CONF_ID,
CONF_NAME,
CONF_PORT,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
CONF_THERMOSTATS = "tstats"
TSTATS_SCHEMA = vol.Schema(
vol.All(
cv.ensure_list,
[{vol.Required(CONF_ID): cv.positive_int, vol.Required(CONF_NAME): cv.string}],
)
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PORT): cv.string,
vol.Optional(CONF_THERMOSTATS, default=[]): TSTATS_SCHEMA,
}
)
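# Illustrative only (not part of the original platform): a configuration.yaml
# entry matching the schema above could look like this; the host, port and
# thermostat names are assumptions.
#
#   climate:
#     - platform: heatmiser
#       host: 192.168.1.57
#       port: "1024"
#       tstats:
#         - id: 1
#           name: Living room
#         - id: 2
#           name: Bedroom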
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the heatmiser thermostat."""
heatmiser_v3_thermostat = heatmiser.HeatmiserThermostat
host = config[CONF_HOST]
port = config[CONF_PORT]
thermostats = config[CONF_THERMOSTATS]
uh1_hub = connection.HeatmiserUH1(host, port)
add_entities(
[
HeatmiserV3Thermostat(heatmiser_v3_thermostat, thermostat, uh1_hub)
for thermostat in thermostats
],
True,
)
class HeatmiserV3Thermostat(ClimateEntity):
"""Representation of a HeatmiserV3 thermostat."""
def __init__(self, therm, device, uh1):
"""Initialize the thermostat."""
self.therm = therm(device[CONF_ID], "prt", uh1)
self.uh1 = uh1
self._name = device[CONF_NAME]
self._current_temperature = None
self._target_temperature = None
self._id = device
self.dcb = None
self._hvac_mode = HVAC_MODE_HEAT
self._temperature_unit = None
@property
def supported_features(self):
"""Return the list of supported features."""
return SUPPORT_TARGET_TEMPERATURE
@property
def name(self):
"""Return the name of the thermostat, if any."""
return self._name
@property
def temperature_unit(self):
"""Return the unit of measurement which this thermostat uses."""
return self._temperature_unit
@property
def hvac_mode(self) -> str:
"""Return hvac operation ie. heat, cool mode.
Need to be one of HVAC_MODE_*.
"""
return self._hvac_mode
@property
def hvac_modes(self) -> List[str]:
"""Return the list of available hvac operation modes.
Need to be a subset of HVAC_MODES.
"""
return [HVAC_MODE_HEAT, HVAC_MODE_OFF]
@property
def current_temperature(self):
"""Return the current temperature."""
return self._current_temperature
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
return self._target_temperature
def set_temperature(self, **kwargs):
"""Set new target temperature."""
temperature = kwargs.get(ATTR_TEMPERATURE)
self._target_temperature = int(temperature)
self.therm.set_target_temp(self._target_temperature)
def update(self):
"""Get the latest data."""
self.uh1.reopen()
if not self.uh1.status:
_LOGGER.error("Failed to update device %s", self._name)
return
self.dcb = self.therm.read_dcb()
self._temperature_unit = (
TEMP_CELSIUS
if (self.therm.get_temperature_format() == "C")
else TEMP_FAHRENHEIT
)
self._current_temperature = int(self.therm.get_floor_temp())
self._target_temperature = int(self.therm.get_target_temp())
self._hvac_mode = (
HVAC_MODE_OFF
if (int(self.therm.get_current_state()) == 0)
else HVAC_MODE_HEAT
)
|
from datetime import timedelta
import re
import unittest
import forecastio
from requests.exceptions import HTTPError
import requests_mock
from homeassistant.components.darksky import sensor as darksky
from homeassistant.setup import setup_component
from tests.async_mock import MagicMock, patch
from tests.common import get_test_home_assistant, load_fixture
VALID_CONFIG_MINIMAL = {
"sensor": {
"platform": "darksky",
"api_key": "foo",
"forecast": [1, 2],
"hourly_forecast": [1, 2],
"monitored_conditions": ["summary", "icon", "temperature_high", "alerts"],
"scan_interval": timedelta(seconds=120),
}
}
INVALID_CONFIG_MINIMAL = {
"sensor": {
"platform": "darksky",
"api_key": "foo",
"forecast": [1, 2],
"hourly_forecast": [1, 2],
"monitored_conditions": ["summary", "iocn", "temperature_high"],
"scan_interval": timedelta(seconds=120),
}
}
VALID_CONFIG_LANG_DE = {
"sensor": {
"platform": "darksky",
"api_key": "foo",
"forecast": [1, 2],
"hourly_forecast": [1, 2],
"units": "us",
"language": "de",
"monitored_conditions": [
"summary",
"icon",
"temperature_high",
"minutely_summary",
"hourly_summary",
"daily_summary",
"humidity",
"alerts",
],
"scan_interval": timedelta(seconds=120),
}
}
INVALID_CONFIG_LANG = {
"sensor": {
"platform": "darksky",
"api_key": "foo",
"forecast": [1, 2],
"hourly_forecast": [1, 2],
"language": "yz",
"monitored_conditions": ["summary", "icon", "temperature_high"],
"scan_interval": timedelta(seconds=120),
}
}
VALID_CONFIG_ALERTS = {
"sensor": {
"platform": "darksky",
"api_key": "foo",
"forecast": [1, 2],
"hourly_forecast": [1, 2],
"monitored_conditions": ["summary", "icon", "temperature_high", "alerts"],
"scan_interval": timedelta(seconds=120),
}
}
def load_forecastMock(key, lat, lon, units, lang): # pylint: disable=invalid-name
"""Mock darksky forecast loading."""
return ""
class TestDarkSkySetup(unittest.TestCase):
"""Test the Dark Sky platform."""
def add_entities(self, new_entities, update_before_add=False):
"""Mock add entities."""
if update_before_add:
for entity in new_entities:
entity.update()
for entity in new_entities:
self.entities.append(entity)
def setUp(self):
"""Initialize values for this testcase class."""
self.hass = get_test_home_assistant()
self.key = "foo"
self.lat = self.hass.config.latitude = 37.8267
self.lon = self.hass.config.longitude = -122.423
self.entities = []
self.addCleanup(self.tear_down_cleanup)
def tear_down_cleanup(self):
"""Stop everything that was started."""
self.hass.stop()
@patch(
"homeassistant.components.darksky.sensor.forecastio.load_forecast",
new=load_forecastMock,
)
def test_setup_with_config(self):
"""Test the platform setup with configuration."""
setup_component(self.hass, "sensor", VALID_CONFIG_MINIMAL)
self.hass.block_till_done()
state = self.hass.states.get("sensor.dark_sky_summary")
assert state is not None
def test_setup_with_invalid_config(self):
"""Test the platform setup with invalid configuration."""
setup_component(self.hass, "sensor", INVALID_CONFIG_MINIMAL)
self.hass.block_till_done()
state = self.hass.states.get("sensor.dark_sky_summary")
assert state is None
@patch(
"homeassistant.components.darksky.sensor.forecastio.load_forecast",
new=load_forecastMock,
)
def test_setup_with_language_config(self):
"""Test the platform setup with language configuration."""
setup_component(self.hass, "sensor", VALID_CONFIG_LANG_DE)
self.hass.block_till_done()
state = self.hass.states.get("sensor.dark_sky_summary")
assert state is not None
def test_setup_with_invalid_language_config(self):
"""Test the platform setup with language configuration."""
setup_component(self.hass, "sensor", INVALID_CONFIG_LANG)
self.hass.block_till_done()
state = self.hass.states.get("sensor.dark_sky_summary")
assert state is None
@patch("forecastio.api.get_forecast")
def test_setup_bad_api_key(self, mock_get_forecast):
"""Test for handling a bad API key."""
# The Dark Sky API wrapper that we use raises an HTTP error
# when you try to use a bad (or no) API key.
url = "https://api.darksky.net/forecast/{}/{},{}?units=auto".format(
self.key, str(self.lat), str(self.lon)
)
msg = f"400 Client Error: Bad Request for url: {url}"
mock_get_forecast.side_effect = HTTPError(msg)
response = darksky.setup_platform(
self.hass, VALID_CONFIG_MINIMAL["sensor"], MagicMock()
)
assert not response
@patch(
"homeassistant.components.darksky.sensor.forecastio.load_forecast",
new=load_forecastMock,
)
def test_setup_with_alerts_config(self):
"""Test the platform setup with alert configuration."""
setup_component(self.hass, "sensor", VALID_CONFIG_ALERTS)
self.hass.block_till_done()
state = self.hass.states.get("sensor.dark_sky_alerts")
assert state.state == "0"
@requests_mock.Mocker()
@patch("forecastio.api.get_forecast", wraps=forecastio.api.get_forecast)
def test_setup(self, mock_req, mock_get_forecast):
"""Test for successfully setting up the forecast.io platform."""
uri = (
r"https://api.(darksky.net|forecast.io)\/forecast\/(\w+)\/"
r"(-?\d+\.?\d*),(-?\d+\.?\d*)"
)
mock_req.get(re.compile(uri), text=load_fixture("darksky.json"))
assert setup_component(self.hass, "sensor", VALID_CONFIG_MINIMAL)
self.hass.block_till_done()
assert mock_get_forecast.called
assert mock_get_forecast.call_count == 1
assert len(self.hass.states.entity_ids()) == 13
state = self.hass.states.get("sensor.dark_sky_summary")
assert state is not None
assert state.state == "Clear"
assert state.attributes.get("friendly_name") == "Dark Sky Summary"
state = self.hass.states.get("sensor.dark_sky_alerts")
assert state.state == "2"
state = self.hass.states.get("sensor.dark_sky_daytime_high_temperature_1d")
assert state is not None
assert state.attributes.get("device_class") == "temperature"
|
import posixpath
from http import client
from urllib.parse import urlparse
from radicale import app, httputils, pathutils, storage
from radicale.log import logger
class ApplicationMoveMixin:
def do_MOVE(self, environ, base_prefix, path, user):
"""Manage MOVE request."""
raw_dest = environ.get("HTTP_DESTINATION", "")
to_url = urlparse(raw_dest)
if to_url.netloc != environ["HTTP_HOST"]:
logger.info("Unsupported destination address: %r", raw_dest)
# Remote destination server, not supported
return httputils.REMOTE_DESTINATION
access = app.Access(self._rights, user, path)
if not access.check("w"):
return httputils.NOT_ALLOWED
to_path = pathutils.sanitize_path(to_url.path)
if not (to_path + "/").startswith(base_prefix + "/"):
logger.warning("Destination %r from MOVE request on %r doesn't "
"start with base prefix", to_path, path)
return httputils.NOT_ALLOWED
to_path = to_path[len(base_prefix):]
to_access = app.Access(self._rights, user, to_path)
if not to_access.check("w"):
return httputils.NOT_ALLOWED
with self._storage.acquire_lock("w", user):
item = next(self._storage.discover(path), None)
if not item:
return httputils.NOT_FOUND
if (not access.check("w", item) or
not to_access.check("w", item)):
return httputils.NOT_ALLOWED
if isinstance(item, storage.BaseCollection):
# TODO: support moving collections
return httputils.METHOD_NOT_ALLOWED
to_item = next(self._storage.discover(to_path), None)
if isinstance(to_item, storage.BaseCollection):
return httputils.FORBIDDEN
to_parent_path = pathutils.unstrip_path(
posixpath.dirname(pathutils.strip_path(to_path)), True)
to_collection = next(
self._storage.discover(to_parent_path), None)
if not to_collection:
return httputils.CONFLICT
tag = item.collection.get_meta("tag")
if not tag or tag != to_collection.get_meta("tag"):
return httputils.FORBIDDEN
if to_item and environ.get("HTTP_OVERWRITE", "F") != "T":
return httputils.PRECONDITION_FAILED
if (to_item and item.uid != to_item.uid or
not to_item and
to_collection.path != item.collection.path and
to_collection.has_uid(item.uid)):
return self._webdav_error_response(
client.CONFLICT, "%s:no-uid-conflict" % (
"C" if tag == "VCALENDAR" else "CR"))
to_href = posixpath.basename(pathutils.strip_path(to_path))
try:
self._storage.move(item, to_collection, to_href)
except ValueError as e:
logger.warning(
"Bad MOVE request on %r: %s", path, e, exc_info=True)
return httputils.BAD_REQUEST
return client.NO_CONTENT if to_item else client.CREATED, {}, None
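    # Illustrative only (not part of the original module): the handler above
    # expects a WebDAV MOVE request roughly like the following (host and item
    # paths are assumptions); Overwrite defaults to "F" when the header is
    # absent.
    #
    #   MOVE /user/calendar/old.ics HTTP/1.1
    #   Host: example.com
    #   Destination: http://example.com/user/calendar/new.ics
    #   Overwrite: F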
|
from bizkaibus.bizkaibus import BizkaibusData
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_NAME, TIME_MINUTES
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
ATTR_DUE_IN = "Due in"
CONF_STOP_ID = "stopid"
CONF_ROUTE = "route"
DEFAULT_NAME = "Next bus"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_STOP_ID): cv.string,
vol.Required(CONF_ROUTE): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Bizkaibus public transport sensor."""
name = config[CONF_NAME]
stop = config[CONF_STOP_ID]
route = config[CONF_ROUTE]
data = Bizkaibus(stop, route)
add_entities([BizkaibusSensor(data, stop, route, name)], True)
class BizkaibusSensor(Entity):
"""The class for handling the data."""
def __init__(self, data, stop, route, name):
"""Initialize the sensor."""
self.data = data
self.stop = stop
self.route = route
self._name = name
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement of the sensor."""
return TIME_MINUTES
def update(self):
"""Get the latest data from the webservice."""
self.data.update()
try:
self._state = self.data.info[0][ATTR_DUE_IN]
except TypeError:
pass
class Bizkaibus:
"""The class for handling the data retrieval."""
def __init__(self, stop, route):
"""Initialize the data object."""
self.stop = stop
self.route = route
self.info = None
def update(self):
"""Retrieve the information from API."""
bridge = BizkaibusData(self.stop, self.route)
bridge.getNextBus()
self.info = bridge.info
|
from pygal.graph.bar import Bar
from pygal.graph.dual import Dual
from pygal.util import alter, cached_property, decorate
class Histogram(Dual, Bar):
"""Histogram chart class"""
_series_margin = 0
@cached_property
def _values(self):
"""Getter for secondary series values (flattened)"""
return self.yvals
@cached_property
def _secondary_values(self):
"""Getter for secondary series values (flattened)"""
return [
val[0] for serie in self.secondary_series for val in serie.values
if val[0] is not None
]
@cached_property
def xvals(self):
"""All x values"""
return [
val for serie in self.all_series for dval in serie.values
for val in dval[1:3] if val is not None
]
@cached_property
def yvals(self):
"""All y values"""
return [
val[0] for serie in self.series for val in serie.values
if val[0] is not None
]
def _bar(self, serie, parent, x0, x1, y, i, zero, secondary=False):
"""Internal bar drawing function"""
x, y = self.view((x0, y))
x1, _ = self.view((x1, y))
width = x1 - x
height = self.view.y(zero) - y
series_margin = width * self._series_margin
x += series_margin
width -= 2 * series_margin
r = serie.rounded_bars * 1 if serie.rounded_bars else 0
alter(
self.svg.transposable_node(
parent,
'rect',
x=x,
y=y,
rx=r,
ry=r,
width=width,
height=height,
class_='rect reactive tooltip-trigger'
), serie.metadata.get(i)
)
return x, y, width, height
def bar(self, serie, rescale=False):
"""Draw a bar graph for a serie"""
serie_node = self.svg.serie(serie)
bars = self.svg.node(serie_node['plot'], class_="histbars")
points = serie.points
for i, (y, x0, x1) in enumerate(points):
if None in (x0, x1, y) or (self.logarithmic and y <= 0):
continue
metadata = serie.metadata.get(i)
bar = decorate(
self.svg, self.svg.node(bars, class_='histbar'), metadata
)
val = self._format(serie, i)
bounds = self._bar(
serie, bar, x0, x1, y, i, self.zero, secondary=rescale
)
self._tooltip_and_print_values(
serie_node, serie, bar, i, val, metadata, *bounds
)
def _compute(self):
"""Compute x/y min and max and x/y scale and set labels"""
if self.xvals:
xmin = min(self.xvals)
xmax = max(self.xvals)
xrng = (xmax - xmin)
else:
xrng = None
if self.yvals:
ymin = min(min(self.yvals), self.zero)
ymax = max(max(self.yvals), self.zero)
yrng = (ymax - ymin)
else:
yrng = None
for serie in self.all_series:
serie.points = serie.values
if xrng:
self._box.xmin, self._box.xmax = xmin, xmax
if yrng:
self._box.ymin, self._box.ymax = ymin, ymax
if self.range and self.range[0] is not None:
self._box.ymin = self.range[0]
if self.range and self.range[1] is not None:
self._box.ymax = self.range[1]
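if __name__ == '__main__':
    # Minimal usage sketch (illustrative, not part of the original module):
    # each histogram value is a (height, start, end) tuple, which is how
    # bar() and _compute() above consume serie.points. The numbers are
    # arbitrary demo data.
    hist = Histogram()
    hist.add('Wide bars', [(5, 0, 10), (4, 5, 13), (2, 0, 15)])
    hist.add('Narrow bars', [(10, 1, 2), (12, 4, 4.5), (8, 11, 13)])
    print(hist.render()[:80])  # prints the start of the generated SVG document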
|
from unittest import TestCase
import numpy as np
from scattertext import OncePerDocFrequencyRanker
from scattertext.termscoring.CohensD import CohensD, HedgesR
from scattertext.test.test_termDocMatrixFactory import build_hamlet_jz_corpus_with_meta, build_hamlet_jz_corpus
class TestCohensD(TestCase):
def test_get_cohens_d_scores(self):
corpus = build_hamlet_jz_corpus()
np.testing.assert_almost_equal(CohensD(corpus)
.set_term_ranker(OncePerDocFrequencyRanker)
.set_categories('hamlet')
.get_scores()[:5], [-0.2303607, 0.8838835, 0.8838835, 1.4028612, 0.8838835])
def test_get_cohens_d_scores_zero_robust(self):
corpus = build_hamlet_jz_corpus()
corpus._X[1, :] = 0
np.testing.assert_almost_equal(CohensD(corpus)
.set_term_ranker(OncePerDocFrequencyRanker)
.set_categories('hamlet')
.get_scores()[:5], [-0.2303607, 0.8838835, 0.8838835, 0.8838835, 0.8838835])
def test_get_cohens_d_score_df(self):
corpus = build_hamlet_jz_corpus()
columns = (CohensD(corpus)
.set_term_ranker(OncePerDocFrequencyRanker)
.set_categories('hamlet')
.get_score_df().columns)
self.assertEqual(set(columns), set(['cohens_d', 'cohens_d_se', 'cohens_d_z', 'cohens_d_p', 'hedges_r',
'hedges_r_se', 'hedges_r_z', 'hedges_r_p', 'm1', 'm2',
'count1', 'count2', 'docs1', 'docs2']))
def test_get_cohens_d_score_df_p_vals(self):
corpus = build_hamlet_jz_corpus()
columns = (CohensD(corpus)
.set_term_ranker(OncePerDocFrequencyRanker)
.set_categories('hamlet')
.get_score_df().columns)
self.assertEqual(set(columns), set(['cohens_d', 'cohens_d_se', 'cohens_d_z', 'cohens_d_p', 'hedges_r',
'hedges_r_se', 'hedges_r_z', 'hedges_r_p', 'm1', 'm2',
'count1', 'count2', 'docs1', 'docs2']))
def test_get_name(self):
corpus = build_hamlet_jz_corpus()
self.assertEqual(CohensD(corpus)
.set_categories('hamlet')
.get_name(),
"Cohen's d")
def test_get_name_hedges(self):
corpus = build_hamlet_jz_corpus()
self.assertEqual(HedgesR(corpus).set_categories('hamlet').get_name(), "Hedge's r")
self.assertEqual(len(HedgesR(corpus).set_categories('hamlet').get_scores()), corpus.get_num_terms())
|
from functools import partial
import logging
from i2csense.bh1750 import BH1750 # pylint: disable=import-error
import smbus # pylint: disable=import-error
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_NAME, DEVICE_CLASS_ILLUMINANCE, LIGHT_LUX
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
CONF_I2C_ADDRESS = "i2c_address"
CONF_I2C_BUS = "i2c_bus"
CONF_OPERATION_MODE = "operation_mode"
CONF_SENSITIVITY = "sensitivity"
CONF_DELAY = "measurement_delay_ms"
CONF_MULTIPLIER = "multiplier"
# Operation modes for the BH1750 sensor (from the datasheet). A measurement
# typically takes 120 ms. In one-time modes the device is set to Power Down
# after each sample.
CONTINUOUS_LOW_RES_MODE = "continuous_low_res_mode"
CONTINUOUS_HIGH_RES_MODE_1 = "continuous_high_res_mode_1"
CONTINUOUS_HIGH_RES_MODE_2 = "continuous_high_res_mode_2"
ONE_TIME_LOW_RES_MODE = "one_time_low_res_mode"
ONE_TIME_HIGH_RES_MODE_1 = "one_time_high_res_mode_1"
ONE_TIME_HIGH_RES_MODE_2 = "one_time_high_res_mode_2"
OPERATION_MODES = {
CONTINUOUS_LOW_RES_MODE: (0x13, True), # 4lx resolution
CONTINUOUS_HIGH_RES_MODE_1: (0x10, True), # 1lx resolution.
CONTINUOUS_HIGH_RES_MODE_2: (0x11, True), # 0.5lx resolution.
ONE_TIME_LOW_RES_MODE: (0x23, False), # 4lx resolution.
ONE_TIME_HIGH_RES_MODE_1: (0x20, False), # 1lx resolution.
ONE_TIME_HIGH_RES_MODE_2: (0x21, False), # 0.5lx resolution.
}
DEFAULT_NAME = "BH1750 Light Sensor"
DEFAULT_I2C_ADDRESS = "0x23"
DEFAULT_I2C_BUS = 1
DEFAULT_MODE = CONTINUOUS_HIGH_RES_MODE_1
DEFAULT_DELAY_MS = 120
DEFAULT_SENSITIVITY = 69 # from 31 to 254
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_I2C_ADDRESS, default=DEFAULT_I2C_ADDRESS): cv.string,
vol.Optional(CONF_I2C_BUS, default=DEFAULT_I2C_BUS): vol.Coerce(int),
vol.Optional(CONF_OPERATION_MODE, default=DEFAULT_MODE): vol.In(
OPERATION_MODES
),
vol.Optional(CONF_SENSITIVITY, default=DEFAULT_SENSITIVITY): cv.positive_int,
vol.Optional(CONF_DELAY, default=DEFAULT_DELAY_MS): cv.positive_int,
vol.Optional(CONF_MULTIPLIER, default=1.0): vol.Range(min=0.1, max=10),
}
)
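# Illustrative only (not part of the original platform): a configuration.yaml
# entry matching the schema above could look like this; every value shown just
# repeats a default from the constants above.
#
#   sensor:
#     - platform: bh1750
#       name: BH1750 Light Sensor
#       i2c_address: "0x23"
#       i2c_bus: 1
#       operation_mode: continuous_high_res_mode_1
#       measurement_delay_ms: 120
#       sensitivity: 69
#       multiplier: 1.0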
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the BH1750 sensor."""
name = config[CONF_NAME]
bus_number = config[CONF_I2C_BUS]
i2c_address = config[CONF_I2C_ADDRESS]
operation_mode = config[CONF_OPERATION_MODE]
bus = smbus.SMBus(bus_number)
sensor = await hass.async_add_executor_job(
partial(
BH1750,
bus,
i2c_address,
operation_mode=operation_mode,
measurement_delay=config[CONF_DELAY],
sensitivity=config[CONF_SENSITIVITY],
logger=_LOGGER,
)
)
if not sensor.sample_ok:
_LOGGER.error("BH1750 sensor not detected at %s", i2c_address)
return False
dev = [BH1750Sensor(sensor, name, LIGHT_LUX, config[CONF_MULTIPLIER])]
_LOGGER.info(
"Setup of BH1750 light sensor at %s in mode %s is complete",
i2c_address,
operation_mode,
)
async_add_entities(dev, True)
class BH1750Sensor(Entity):
"""Implementation of the BH1750 sensor."""
def __init__(self, bh1750_sensor, name, unit, multiplier=1.0):
"""Initialize the sensor."""
self._name = name
self._unit_of_measurement = unit
self._multiplier = multiplier
self.bh1750_sensor = bh1750_sensor
if self.bh1750_sensor.light_level >= 0:
self._state = int(round(self.bh1750_sensor.light_level))
else:
self._state = None
@property
def name(self) -> str:
"""Return the name of the sensor."""
return self._name
@property
def state(self) -> int:
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self) -> str:
"""Return the unit of measurement of the sensor."""
return self._unit_of_measurement
@property
def device_class(self) -> str:
"""Return the class of this device, from component DEVICE_CLASSES."""
return DEVICE_CLASS_ILLUMINANCE
async def async_update(self):
"""Get the latest data from the BH1750 and update the states."""
await self.hass.async_add_executor_job(self.bh1750_sensor.update)
if self.bh1750_sensor.sample_ok and self.bh1750_sensor.light_level >= 0:
self._state = int(round(self.bh1750_sensor.light_level * self._multiplier))
else:
_LOGGER.warning(
"Bad Update of sensor.%s: %s", self.name, self.bh1750_sensor.light_level
)
|
import pickle
import numpy as np
import pytest
import xarray as xr
import xarray.ufuncs as xu
from . import assert_array_equal
from . import assert_identical as assert_identical_
from . import mock, raises_regex
def assert_identical(a, b):
assert type(a) is type(b) or float(a) == float(b)
if isinstance(a, (xr.DataArray, xr.Dataset, xr.Variable)):
assert_identical_(a, b)
else:
assert_array_equal(a, b)
def test_unary():
args = [
0,
np.zeros(2),
xr.Variable(["x"], [0, 0]),
xr.DataArray([0, 0], dims="x"),
xr.Dataset({"y": ("x", [0, 0])}),
]
for a in args:
assert_identical(a + 1, np.cos(a))
def test_binary():
args = [
0,
np.zeros(2),
xr.Variable(["x"], [0, 0]),
xr.DataArray([0, 0], dims="x"),
xr.Dataset({"y": ("x", [0, 0])}),
]
for n, t1 in enumerate(args):
for t2 in args[n:]:
assert_identical(t2 + 1, np.maximum(t1, t2 + 1))
assert_identical(t2 + 1, np.maximum(t2, t1 + 1))
assert_identical(t2 + 1, np.maximum(t1 + 1, t2))
assert_identical(t2 + 1, np.maximum(t2 + 1, t1))
def test_binary_out():
args = [
1,
np.ones(2),
xr.Variable(["x"], [1, 1]),
xr.DataArray([1, 1], dims="x"),
xr.Dataset({"y": ("x", [1, 1])}),
]
for arg in args:
actual_mantissa, actual_exponent = np.frexp(arg)
assert_identical(actual_mantissa, 0.5 * arg)
assert_identical(actual_exponent, arg)
def test_groupby():
ds = xr.Dataset({"a": ("x", [0, 0, 0])}, {"c": ("x", [0, 0, 1])})
ds_grouped = ds.groupby("c")
group_mean = ds_grouped.mean("x")
arr_grouped = ds["a"].groupby("c")
assert_identical(ds, np.maximum(ds_grouped, group_mean))
assert_identical(ds, np.maximum(group_mean, ds_grouped))
assert_identical(ds, np.maximum(arr_grouped, group_mean))
assert_identical(ds, np.maximum(group_mean, arr_grouped))
assert_identical(ds, np.maximum(ds_grouped, group_mean["a"]))
assert_identical(ds, np.maximum(group_mean["a"], ds_grouped))
assert_identical(ds.a, np.maximum(arr_grouped, group_mean.a))
assert_identical(ds.a, np.maximum(group_mean.a, arr_grouped))
with raises_regex(ValueError, "mismatched lengths for dimension"):
np.maximum(ds.a.variable, ds_grouped)
def test_alignment():
ds1 = xr.Dataset({"a": ("x", [1, 2])}, {"x": [0, 1]})
ds2 = xr.Dataset({"a": ("x", [2, 3]), "b": 4}, {"x": [1, 2]})
actual = np.add(ds1, ds2)
expected = xr.Dataset({"a": ("x", [4])}, {"x": [1]})
assert_identical_(actual, expected)
with xr.set_options(arithmetic_join="outer"):
actual = np.add(ds1, ds2)
expected = xr.Dataset(
{"a": ("x", [np.nan, 4, np.nan]), "b": np.nan}, coords={"x": [0, 1, 2]}
)
assert_identical_(actual, expected)
def test_kwargs():
x = xr.DataArray(0)
result = np.add(x, 1, dtype=np.float64)
assert result.dtype == np.float64
def test_xarray_defers_to_unrecognized_type():
class Other:
def __array_ufunc__(self, *args, **kwargs):
return "other"
xarray_obj = xr.DataArray([1, 2, 3])
other = Other()
assert np.maximum(xarray_obj, other) == "other"
assert np.sin(xarray_obj, out=other) == "other"
def test_xarray_handles_dask():
da = pytest.importorskip("dask.array")
x = xr.DataArray(np.ones((2, 2)), dims=["x", "y"])
y = da.ones((2, 2), chunks=(2, 2))
result = np.add(x, y)
assert result.chunks == ((2,), (2,))
assert isinstance(result, xr.DataArray)
def test_dask_defers_to_xarray():
da = pytest.importorskip("dask.array")
x = xr.DataArray(np.ones((2, 2)), dims=["x", "y"])
y = da.ones((2, 2), chunks=(2, 2))
result = np.add(y, x)
assert result.chunks == ((2,), (2,))
assert isinstance(result, xr.DataArray)
def test_gufunc_methods():
xarray_obj = xr.DataArray([1, 2, 3])
with raises_regex(NotImplementedError, "reduce method"):
np.add.reduce(xarray_obj, 1)
def test_out():
xarray_obj = xr.DataArray([1, 2, 3])
# xarray out arguments should raise
with raises_regex(NotImplementedError, "`out` argument"):
np.add(xarray_obj, 1, out=xarray_obj)
# but non-xarray should be OK
other = np.zeros((3,))
np.add(other, xarray_obj, out=other)
assert_identical(other, np.array([1, 2, 3]))
def test_gufuncs():
xarray_obj = xr.DataArray([1, 2, 3])
fake_gufunc = mock.Mock(signature="(n)->()", autospec=np.sin)
with raises_regex(NotImplementedError, "generalized ufuncs"):
xarray_obj.__array_ufunc__(fake_gufunc, "__call__", xarray_obj)
def test_xarray_ufuncs_deprecation():
with pytest.warns(PendingDeprecationWarning, match="xarray.ufuncs"):
xu.cos(xr.DataArray([0, 1]))
with pytest.warns(None) as record:
xu.angle(xr.DataArray([0, 1]))
record = [el.message for el in record if el.category == PendingDeprecationWarning]
assert len(record) == 0
@pytest.mark.filterwarnings("ignore::RuntimeWarning")
@pytest.mark.parametrize(
"name",
[
name
for name in dir(xu)
if (
not name.startswith("_")
and hasattr(np, name)
and name not in ["print_function", "absolute_import", "division"]
)
],
)
def test_numpy_ufuncs(name, request):
x = xr.DataArray([1, 1])
np_func = getattr(np, name)
if hasattr(np_func, "nin") and np_func.nin == 2:
args = (x, x)
else:
args = (x,)
y = np_func(*args)
if name in ["angle", "iscomplex"]:
# these functions need to be handled with __array_function__ protocol
assert isinstance(y, np.ndarray)
elif name in ["frexp"]:
# np.frexp returns a tuple
assert not isinstance(y, xr.DataArray)
else:
assert isinstance(y, xr.DataArray)
@pytest.mark.filterwarnings("ignore:xarray.ufuncs")
def test_xarray_ufuncs_pickle():
a = 1.0
cos_pickled = pickle.loads(pickle.dumps(xu.cos))
assert_identical(cos_pickled(a), xu.cos(a))
|
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from django.shortcuts import get_object_or_404, redirect
from django.utils.html import escape
from django.utils.http import urlencode
from django.utils.safestring import mark_safe
from django.utils.translation import gettext as _
from django.views.decorators.cache import never_cache
from weblate.formats.models import EXPORTERS
from weblate.lang.models import Language
from weblate.trans.forms import (
AnnouncementForm,
AutoForm,
BulkEditForm,
ComponentDeleteForm,
ComponentMoveForm,
ComponentRenameForm,
DownloadForm,
NewUnitForm,
ProjectDeleteForm,
ProjectRenameForm,
ReplaceForm,
ReportsForm,
SearchForm,
TranslationDeleteForm,
get_new_language_form,
get_upload_form,
)
from weblate.trans.models import Change, ComponentList, Translation, Unit
from weblate.trans.models.component import prefetch_tasks
from weblate.trans.models.project import prefetch_project_flags
from weblate.trans.models.translation import GhostTranslation
from weblate.trans.util import render, sort_unicode
from weblate.utils import messages
from weblate.utils.stats import GhostProjectLanguageStats, prefetch_stats
from weblate.utils.views import (
get_component,
get_project,
get_translation,
optional_form,
try_set_language,
)
@never_cache
def list_projects(request):
"""List all projects."""
return render(
request,
"projects.html",
{
"allow_index": True,
"projects": prefetch_project_flags(
prefetch_stats(request.user.allowed_projects)
),
"title": _("Projects"),
},
)
def add_ghost_translations(component, user, translations, generator):
"""Adds ghost translations for user languages to the list."""
if component.can_add_new_language(user):
existing = {translation.language.code for translation in translations}
for language in user.profile.languages.all():
if language.code in existing:
continue
translations.append(generator(component, language))
def show_engage(request, project, lang=None):
# Get project object, skipping ACL
obj = get_project(request, project, skip_acl=True)
# Handle language parameter
if lang is not None:
language = get_object_or_404(Language, code=lang)
else:
language = None
full_stats = obj.stats
if language:
try_set_language(lang)
stats_obj = full_stats.get_single_language_stats(language)
else:
stats_obj = full_stats
return render(
request,
"engage.html",
{
"allow_index": True,
"object": obj,
"project": obj,
"full_stats": full_stats,
"languages": stats_obj.languages,
"total": obj.stats.source_strings,
"percent": stats_obj.translated_percent,
"language": language,
"project_link": mark_safe(
'<a href="{}">{}</a>'.format(
escape(obj.get_absolute_url()), escape(obj.name)
)
),
"title": _("Get involved in {0}!").format(obj),
},
)
@never_cache
def show_project(request, project):
obj = get_project(request, project)
obj.stats.ensure_basic()
user = request.user
last_changes = Change.objects.prefetch().order().filter(project=obj)[:10]
last_announcements = (
Change.objects.prefetch()
.order()
.filter(project=obj, action=Change.ACTION_ANNOUNCEMENT)[:10]
)
language_stats = obj.stats.get_language_stats()
# Show ghost translations for user languages
component = None
for component in obj.component_set.filter_access(user).all():
if component.can_add_new_language(user):
break
if component:
add_ghost_translations(
component, user, language_stats, GhostProjectLanguageStats
)
language_stats = sort_unicode(
language_stats,
lambda x: "{}-{}".format(
user.profile.get_language_order(x.language), x.language
),
)
all_components = obj.component_set.filter_access(user).prefetch().order()
components = prefetch_tasks(prefetch_stats(all_components))
return render(
request,
"project.html",
{
"allow_index": True,
"object": obj,
"project": obj,
"last_changes": last_changes,
"last_announcements": last_announcements,
"reports_form": ReportsForm(),
"last_changes_url": urlencode({"project": obj.slug}),
"language_stats": [stat.obj or stat for stat in language_stats],
"search_form": SearchForm(request.user),
"announcement_form": optional_form(
AnnouncementForm, user, "project.edit", obj
),
"delete_form": optional_form(
ProjectDeleteForm, user, "project.edit", obj, obj=obj
),
"rename_form": optional_form(
ProjectRenameForm,
user,
"project.edit",
obj,
request=request,
instance=obj,
),
"replace_form": optional_form(ReplaceForm, user, "unit.edit", obj),
"bulk_state_form": optional_form(
BulkEditForm,
user,
"translation.auto",
obj,
user=user,
obj=obj,
project=obj,
auto_id="id_bulk_%s",
),
"components": components,
"licenses": obj.component_set.exclude(license="").order_by("license"),
},
)
@never_cache
def show_component(request, project, component):
obj = get_component(request, project, component)
obj.stats.ensure_basic()
user = request.user
last_changes = Change.objects.prefetch().order().filter(component=obj)[:10]
translations = prefetch_stats(list(obj.translation_set.prefetch()))
# Show ghost translations for user languages
add_ghost_translations(obj, user, translations, GhostTranslation)
translations = sort_unicode(
translations,
lambda x: "{}-{}".format(
user.profile.get_language_order(x.language), x.language
),
)
return render(
request,
"component.html",
{
"allow_index": True,
"object": obj,
"project": obj.project,
"translations": translations,
"reports_form": ReportsForm(),
"last_changes": last_changes,
"last_changes_url": urlencode(
{"component": obj.slug, "project": obj.project.slug}
),
"replace_form": optional_form(ReplaceForm, user, "unit.edit", obj),
"bulk_state_form": optional_form(
BulkEditForm,
user,
"translation.auto",
obj,
user=user,
obj=obj,
project=obj.project,
auto_id="id_bulk_%s",
),
"announcement_form": optional_form(
AnnouncementForm, user, "component.edit", obj
),
"delete_form": optional_form(
ComponentDeleteForm, user, "component.edit", obj, obj=obj
),
"rename_form": optional_form(
ComponentRenameForm,
user,
"component.edit",
obj,
request=request,
instance=obj,
),
"move_form": optional_form(
ComponentMoveForm,
user,
"component.edit",
obj,
request=request,
instance=obj,
),
"search_form": SearchForm(request.user),
"alerts": obj.all_alerts,
},
)
@never_cache
def show_translation(request, project, component, lang):
obj = get_translation(request, project, component, lang)
obj.stats.ensure_all()
last_changes = Change.objects.prefetch().order().filter(translation=obj)[:10]
user = request.user
# Get form
form = get_upload_form(user, obj)
search_form = SearchForm(request.user, language=obj.language)
# Translations to same language from other components in this project
other_translations = prefetch_stats(
list(
Translation.objects.prefetch()
.filter(component__project=obj.component.project, language=obj.language)
.exclude(pk=obj.pk)
)
)
# Include ghost translations for other components, this
# adds quick way to create translations in other components
existing = {translation.component.slug for translation in other_translations}
existing.add(obj.component.slug)
for test_component in obj.component.project.component_set.filter_access(
user
).exclude(slug__in=existing):
if test_component.can_add_new_language(user):
other_translations.append(GhostTranslation(test_component, obj.language))
# Limit the number of other components displayed to 10, preferring untranslated ones
other_translations = sorted(
other_translations, key=lambda t: t.stats.translated_percent
)[:10]
return render(
request,
"translation.html",
{
"allow_index": True,
"object": obj,
"project": obj.component.project,
"form": form,
"download_form": DownloadForm(auto_id="id_dl_%s"),
"autoform": optional_form(
AutoForm, user, "translation.auto", obj, obj=obj.component
),
"search_form": search_form,
"replace_form": optional_form(ReplaceForm, user, "unit.edit", obj),
"bulk_state_form": optional_form(
BulkEditForm,
user,
"translation.auto",
obj,
user=user,
obj=obj,
project=obj.component.project,
auto_id="id_bulk_%s",
),
"new_unit_form": NewUnitForm(
user, initial={"value": Unit(translation=obj, id_hash=-1)}
),
"announcement_form": optional_form(
AnnouncementForm, user, "component.edit", obj
),
"delete_form": optional_form(
TranslationDeleteForm, user, "translation.delete", obj, obj=obj
),
"last_changes": last_changes,
"last_changes_url": urlencode(obj.get_reverse_url_kwargs()),
"other_translations": other_translations,
"exporters": EXPORTERS.list_exporters(obj),
},
)
@never_cache
def data_project(request, project):
obj = get_project(request, project)
return render(
request,
"data.html",
{
"object": obj,
"components": obj.component_set.filter_access(request.user).order(),
"project": obj,
},
)
@never_cache
@login_required
def new_language(request, project, component):
obj = get_component(request, project, component)
form_class = get_new_language_form(request, obj)
can_add = obj.can_add_new_language(request.user)
if request.method == "POST":
form = form_class(obj, request.POST)
if form.is_valid():
langs = form.cleaned_data["lang"]
kwargs = {
"user": request.user,
"author": request.user,
"component": obj,
"details": {},
}
for language in Language.objects.filter(code__in=langs):
kwargs["details"]["language"] = language.code
if can_add:
translation = obj.add_new_language(language, request)
if translation:
kwargs["translation"] = translation
if len(langs) == 1:
obj = translation
Change.objects.create(
action=Change.ACTION_ADDED_LANGUAGE, **kwargs
)
elif obj.new_lang == "contact":
Change.objects.create(
action=Change.ACTION_REQUESTED_LANGUAGE, **kwargs
)
messages.success(
request,
_(
"A request for a new translation has been "
"sent to the project's maintainers."
),
)
return redirect(obj)
messages.error(request, _("Please fix errors in the form."))
else:
form = form_class(obj)
return render(
request,
"new-language.html",
{"object": obj, "project": obj.project, "form": form, "can_add": can_add},
)
@never_cache
def healthz(request):
"""Simple health check endpoint."""
return HttpResponse("ok")
@never_cache
def show_component_list(request, name):
obj = get_object_or_404(ComponentList, slug__iexact=name)
return render(
request,
"component-list.html",
{"object": obj, "components": obj.components.filter_access(request.user)},
)
@never_cache
def guide(request, project, component):
obj = get_component(request, project, component)
return render(
request,
"guide.html",
{
"object": obj,
"project": obj.project,
"guidelines": obj.guidelines,
},
)
|
import logging
from nexia.home import NexiaHome
from requests.exceptions import ConnectTimeout, HTTPError
import voluptuous as vol
from homeassistant import config_entries, core, exceptions
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from .const import DOMAIN # pylint:disable=unused-import
from .util import is_invalid_auth_code
_LOGGER = logging.getLogger(__name__)
DATA_SCHEMA = vol.Schema({CONF_USERNAME: str, CONF_PASSWORD: str})
async def validate_input(hass: core.HomeAssistant, data):
"""Validate the user input allows us to connect.
Data has the keys from DATA_SCHEMA with values provided by the user.
"""
state_file = hass.config.path(f"nexia_config_{data[CONF_USERNAME]}.conf")
try:
nexia_home = NexiaHome(
username=data[CONF_USERNAME],
password=data[CONF_PASSWORD],
auto_login=False,
auto_update=False,
device_name=hass.config.location_name,
state_file=state_file,
)
await hass.async_add_executor_job(nexia_home.login)
except ConnectTimeout as ex:
_LOGGER.error("Unable to connect to Nexia service: %s", ex)
raise CannotConnect from ex
except HTTPError as http_ex:
_LOGGER.error("HTTP error from Nexia service: %s", http_ex)
if is_invalid_auth_code(http_ex.response.status_code):
raise InvalidAuth from http_ex
raise CannotConnect from http_ex
if not nexia_home.get_name():
raise InvalidAuth
info = {"title": nexia_home.get_name(), "house_id": nexia_home.house_id}
_LOGGER.debug("Setup ok with info: %s", info)
return info
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Nexia."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
async def async_step_user(self, user_input=None):
"""Handle the initial step."""
errors = {}
if user_input is not None:
try:
info = await validate_input(self.hass, user_input)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
if "base" not in errors:
await self.async_set_unique_id(info["house_id"])
self._abort_if_unique_id_configured()
return self.async_create_entry(title=info["title"], data=user_input)
return self.async_show_form(
step_id="user", data_schema=DATA_SCHEMA, errors=errors
)
async def async_step_import(self, user_input):
"""Handle import."""
for entry in self._async_current_entries():
if entry.data[CONF_USERNAME] == user_input[CONF_USERNAME]:
return self.async_abort(reason="already_configured")
return await self.async_step_user(user_input)
class CannotConnect(exceptions.HomeAssistantError):
"""Error to indicate we cannot connect."""
class InvalidAuth(exceptions.HomeAssistantError):
"""Error to indicate there is invalid auth."""
|
from __future__ import unicode_literals
from lib.data.data import pyoptions
from rules.BaseTrick import simplejoin, middlejoin, tailjoins, headjoins, wordshaper
def SSrule(strs1, strs2):
    """Yield pairwise joins of two word lists, plus variants built with the
    configured middle, suffix and prefix tricks."""
for str1 in strs1:
for str2 in strs2:
for _ in simplejoin(str1, str2):
yield _
for _ in simplejoin(str2, str1):
yield _
for mid in pyoptions.sedb_trick_mid:
for _ in middlejoin(wordshaper(str1), wordshaper(str2), mid):
yield _
for _ in middlejoin(wordshaper(str2), wordshaper(str1), mid):
yield _
for suf in pyoptions.sedb_trick_suf:
for _ in tailjoins(wordshaper(str1), wordshaper(str2), suf):
yield _
for _ in tailjoins(wordshaper(str2), wordshaper(str1), suf):
yield _
for pre in pyoptions.sedb_trick_pre:
for _ in headjoins(wordshaper(str1), wordshaper(str2), pre):
yield _
for _ in headjoins(wordshaper(str2), wordshaper(str1), pre):
yield _
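# --- Illustrative usage sketch (assumes the pydictor environment and its
# sedb trick tables are configured; the word lists below are made up) ---
if __name__ == '__main__':
    for candidate in SSrule(['alice'], ['2024']):
        print(candidate)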
|
from app import app
from app.utils import RequestUtil, HookDataParse, JsonUtil
from app.tasks import tasks
from flask.globals import request
import json
from app.database.model import WebHook, History
@app.route('/api/git-webhook/<key>', methods=['POST', 'GET'])
def api_for_webhook(key):
    '''Receive git webhook data and trigger the configured shell task.'''
    data = RequestUtil.get_parameter('hook', None)
    if data is None:
        data = request.data
    # sample payloads for local testing (WebhookData is not imported here):
    # data = WebhookData.github
    # data = WebhookData.gitlab
    # data = WebhookData.gitosc
try:
data = json.loads(data)
webhook = WebHook.query.filter_by(key=key).first()
if webhook:
repo = webhook.repo
branch = webhook.branch
            # if the repo and branch match the config, run the shell task
            if (HookDataParse.get_repo_name(data) == repo and
                    HookDataParse.get_repo_branch(data) == branch):
                # start processing: record a history entry in the database
                history = History(webhook_id=webhook.id,
                                  data=JsonUtil.object_2_json(data))
                # status '1' means waiting to be processed
                history.updateStatus('1')
                webhook.updateStatus('1')
# do the async task
tasks.do_webhook_shell.delay(webhook.id, history.id, data)
return "Work put into Queue."
return "Not match the Repo and Branch."
else:
return "The webhook is not exist."
except Exception as e:
return "Request is not valid Git webhook: " + str(e)
|
import configobj
import os
def str_to_bool(value):
"""
Converts string truthy/falsey strings to a bool
Empty strings are false
"""
if isinstance(value, basestring):
value = value.strip().lower()
if value in ['true', 't', 'yes', 'y']:
return True
elif value in ['false', 'f', 'no', 'n', '']:
return False
else:
raise NotImplementedError("Unknown bool %s" % value)
return value
def load_config(configfile):
"""
    Load the full config / merge split configs if configured
"""
configfile = os.path.abspath(configfile)
config = configobj.ConfigObj(configfile)
config_extension = '.conf'
#########################################################################
# Load up other config files
#########################################################################
if 'configs' in config:
config_extension = config['configs'].get('extension', config_extension)
# Load other configs
if 'path' in config['configs']:
for cfgfile in os.listdir(config['configs']['path']):
cfgfile = os.path.join(config['configs']['path'],
cfgfile)
cfgfile = os.path.abspath(cfgfile)
if not cfgfile.endswith(config_extension):
continue
newconfig = configobj.ConfigObj(cfgfile)
config.merge(newconfig)
#########################################################################
if 'server' not in config:
raise Exception('Failed to load config file %s!' % configfile)
#########################################################################
# Load up handler specific configs
#########################################################################
if 'handlers' not in config:
config['handlers'] = configobj.ConfigObj()
if 'handlers_config_path' in config['server']:
handlers_config_path = config['server']['handlers_config_path']
if os.path.exists(handlers_config_path):
for cfgfile in os.listdir(handlers_config_path):
cfgfile = os.path.join(handlers_config_path, cfgfile)
cfgfile = os.path.abspath(cfgfile)
if not cfgfile.endswith(config_extension):
continue
filename = os.path.basename(cfgfile)
handler = os.path.splitext(filename)[0]
if handler not in config['handlers']:
config['handlers'][handler] = configobj.ConfigObj()
newconfig = configobj.ConfigObj(cfgfile)
config['handlers'][handler].merge(newconfig)
#########################################################################
# Load up Collector specific configs
#########################################################################
if 'collectors' not in config:
config['collectors'] = configobj.ConfigObj()
if 'collectors_config_path' in config['server']:
collectors_config_path = config['server']['collectors_config_path']
if os.path.exists(collectors_config_path):
for cfgfile in os.listdir(collectors_config_path):
cfgfile = os.path.join(collectors_config_path, cfgfile)
cfgfile = os.path.abspath(cfgfile)
if not cfgfile.endswith(config_extension):
continue
filename = os.path.basename(cfgfile)
collector = os.path.splitext(filename)[0]
if collector not in config['collectors']:
config['collectors'][collector] = configobj.ConfigObj()
try:
newconfig = configobj.ConfigObj(cfgfile)
except Exception as e:
raise Exception("Failed to load config file %s due to %s" %
(cfgfile, e))
config['collectors'][collector].merge(newconfig)
# Convert enabled to a bool
for collector in config['collectors']:
if 'enabled' in config['collectors'][collector]:
config['collectors'][collector]['enabled'] = str_to_bool(
config['collectors'][collector]['enabled']
)
#########################################################################
return config
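# --- Illustrative usage sketch (the path and collector name below are
# hypothetical; load_config only requires that the main file defines a
# [server] section) ---
if __name__ == '__main__':
    config = load_config('/etc/diamond/diamond.conf')
    cpu = config['collectors'].get('CPUCollector', {})
    print('CPUCollector enabled:', cpu.get('enabled', False))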
|
from homeassistant.components.cover import (
ATTR_POSITION,
DEVICE_CLASS_WINDOW,
DOMAIN,
SUPPORT_CLOSE,
SUPPORT_OPEN,
SUPPORT_SET_POSITION,
SUPPORT_STOP,
CoverEntity,
)
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import COVER_TYPES, DAMPERS, NEW_LIGHT, WINDOW_COVERS
from .deconz_device import DeconzDevice
from .gateway import get_gateway_from_config_entry
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up covers for deCONZ component.
Covers are based on the same device class as lights in deCONZ.
"""
gateway = get_gateway_from_config_entry(hass, config_entry)
gateway.entities[DOMAIN] = set()
@callback
def async_add_cover(lights):
"""Add cover from deCONZ."""
entities = []
for light in lights:
if (
light.type in COVER_TYPES
and light.uniqueid not in gateway.entities[DOMAIN]
):
entities.append(DeconzCover(light, gateway))
if entities:
async_add_entities(entities)
gateway.listeners.append(
async_dispatcher_connect(
hass, gateway.async_signal_new_device(NEW_LIGHT), async_add_cover
)
)
async_add_cover(gateway.api.lights.values())
class DeconzCover(DeconzDevice, CoverEntity):
"""Representation of a deCONZ cover."""
TYPE = DOMAIN
def __init__(self, device, gateway):
"""Set up cover device."""
super().__init__(device, gateway)
self._features = SUPPORT_OPEN
self._features |= SUPPORT_CLOSE
self._features |= SUPPORT_STOP
self._features |= SUPPORT_SET_POSITION
@property
def current_cover_position(self):
"""Return the current position of the cover."""
return 100 - int(self._device.brightness / 254 * 100)
@property
def is_closed(self):
"""Return if the cover is closed."""
return self._device.state
@property
def device_class(self):
"""Return the class of the cover."""
if self._device.type in DAMPERS:
return "damper"
if self._device.type in WINDOW_COVERS:
return DEVICE_CLASS_WINDOW
@property
def supported_features(self):
"""Flag supported features."""
return self._features
async def async_set_cover_position(self, **kwargs):
"""Move the cover to a specific position."""
position = kwargs[ATTR_POSITION]
data = {"on": False}
if position < 100:
data["on"] = True
data["bri"] = 254 - int(position / 100 * 254)
await self._device.async_set_state(data)
async def async_open_cover(self, **kwargs):
"""Open cover."""
data = {ATTR_POSITION: 100}
await self.async_set_cover_position(**data)
async def async_close_cover(self, **kwargs):
"""Close cover."""
data = {ATTR_POSITION: 0}
await self.async_set_cover_position(**data)
async def async_stop_cover(self, **kwargs):
"""Stop cover."""
data = {"bri_inc": 0}
await self._device.async_set_state(data)
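# --- Illustrative round-trip of the position/brightness maths used above
# (deCONZ brightness runs 0-254, cover position 0-100; these standalone
# helpers are only a sketch, not part of the entity class) ---
def _position_to_brightness(position):
    return 254 - int(position / 100 * 254)

def _brightness_to_position(brightness):
    return 100 - int(brightness / 254 * 100)

assert _brightness_to_position(_position_to_brightness(0)) == 0
assert _brightness_to_position(_position_to_brightness(100)) == 100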
|
import dedupe
import unittest
import random
import pytest
SAMPLE = [({"name": "Bob", "age": "50"}, {"name": "Charlie", "age": "75"}),
({"name": "Meredith", "age": "40"}, {"name": "Sue", "age": "10"}),
({"name": "Willy", "age": "35"}, {"name": "William", "age": "35"}),
({"name": "Jimmy", "age": "20"}, {"name": "Jimbo", "age": "21"})]
class ActiveLearningTest(unittest.TestCase):
def setUp(self):
self.data_model = dedupe.datamodel.DataModel([{'field': 'name',
'type': 'String'},
{'field': 'age',
'type': 'String'}])
def test_AL(self):
random.seed(1111111111110)
original_N = len(SAMPLE)
active_learner = dedupe.labeler.RLRLearner(self.data_model)
active_learner.candidates = SAMPLE
assert len(active_learner) == original_N
pair = active_learner.pop()
print(pair)
assert pair == ({"name": "Willy", "age": "35"},
{"name": "William", "age": "35"})
assert len(active_learner) == original_N - 1
pair = active_learner.pop()
print(pair)
assert pair == ({"name": "Jimmy", "age": "20"},
{"name": "Jimbo", "age": "21"})
assert len(active_learner) == original_N - 2
pair = active_learner.pop()
assert pair == ({"name": "Meredith", "age": "40"},
{"name": "Sue", "age": "10"})
assert len(active_learner) == original_N - 3
active_learner.pop()
with pytest.raises(IndexError):
active_learner.pop()
if __name__ == "__main__":
unittest.main()
|
from . import DATA_KEY, VolvoEntity
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Volvo sensors."""
if discovery_info is None:
return
async_add_entities([VolvoSensor(hass.data[DATA_KEY], *discovery_info)])
class VolvoSensor(VolvoEntity):
"""Representation of a Volvo sensor."""
@property
def state(self):
"""Return the state."""
return self.instrument.state
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return self.instrument.unit
|
from datetime import datetime
import parsedatetime as pdt
from mongoengine.fields import EmbeddedDocumentField
from mongoengine.fields import EmbeddedDocumentListField
from mongoengine.fields import GenericEmbeddedDocumentField
from mongoengine.fields import GenericReferenceField
from mongoengine.fields import ListField
from mongoengine.fields import ReferenceField
from mongoengine.fields import SortedListField
def date_from_string(timeString):
    """Parse a natural-language time string into a datetime string."""
cal = pdt.Calendar()
now = datetime.now()
result = str(cal.parseDT(timeString.strip(), now)[0])
return result
def update_document(document, data_dict):
"""
Recreate Document object from python dictionary
:param document:
:param data_dict:
:return:
"""
def field_value(field, value):
if field.__class__ in (
ListField,
SortedListField,
EmbeddedDocumentListField):
return [
field_value(field.field, item)
for item in value
]
if field.__class__ in (
EmbeddedDocumentField,
GenericEmbeddedDocumentField,
ReferenceField,
GenericReferenceField
):
return field.document_type(**value)
else:
return value
    for key, value in data_dict.items():
        field_name = key.replace("_id", "id")
        setattr(document, field_name,
                field_value(document._fields[field_name], value))
return document
def is_list_empty(inList):
if isinstance(inList, list): # Is a list
return all(map(is_list_empty, inList))
return False # Not a list
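# --- Illustrative usage sketch of update_document with a made-up mongoengine
# document; no database connection is needed as long as nothing is saved ---
if __name__ == '__main__':
    from mongoengine import Document, StringField

    class Task(Document):
        title = StringField()
        tags = ListField(StringField())

    task = Task(title='old', tags=[])
    update_document(task, {'title': 'new', 'tags': ['urgent', 'home']})
    assert task.title == 'new' and task.tags == ['urgent', 'home']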
|