import logging
from homeassistant.const import (
ATTR_ATTRIBUTION,
ATTR_LATITUDE,
ATTR_LONGITUDE,
CONF_SHOW_ON_MAP,
)
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from . import (
DATA_LUFTDATEN,
DATA_LUFTDATEN_CLIENT,
DEFAULT_ATTRIBUTION,
DOMAIN,
SENSORS,
TOPIC_UPDATE,
)
from .const import ATTR_SENSOR_ID
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up a Luftdaten sensor based on a config entry."""
luftdaten = hass.data[DOMAIN][DATA_LUFTDATEN_CLIENT][entry.entry_id]
sensors = []
for sensor_type in luftdaten.sensor_conditions:
try:
name, icon, unit = SENSORS[sensor_type]
except KeyError:
_LOGGER.debug("Unknown sensor value type: %s", sensor_type)
continue
sensors.append(
LuftdatenSensor(
luftdaten, sensor_type, name, icon, unit, entry.data[CONF_SHOW_ON_MAP]
)
)
async_add_entities(sensors, True)
class LuftdatenSensor(Entity):
"""Implementation of a Luftdaten sensor."""
def __init__(self, luftdaten, sensor_type, name, icon, unit, show):
"""Initialize the Luftdaten sensor."""
self._async_unsub_dispatcher_connect = None
self.luftdaten = luftdaten
self._icon = icon
self._name = name
self._data = None
self.sensor_type = sensor_type
self._unit_of_measurement = unit
self._show_on_map = show
self._attrs = {}
@property
def icon(self):
"""Return the icon."""
return self._icon
@property
def state(self):
"""Return the state of the device."""
if self._data is not None:
return self._data[self.sensor_type]
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement
@property
def should_poll(self):
"""Disable polling."""
return False
@property
def unique_id(self) -> str:
"""Return a unique, friendly identifier for this entity."""
if self._data is not None:
return f"{self._data['sensor_id']}_{self.sensor_type}"
@property
def device_state_attributes(self):
"""Return the state attributes."""
self._attrs[ATTR_ATTRIBUTION] = DEFAULT_ATTRIBUTION
if self._data is not None:
self._attrs[ATTR_SENSOR_ID] = self._data["sensor_id"]
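            # Home Assistant only shows entities on the map when they expose the
            # standard latitude/longitude attributes, so alternative key names are
            # used when this sensor should stay off the map.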
on_map = ATTR_LATITUDE, ATTR_LONGITUDE
no_map = "lat", "long"
lat_format, lon_format = on_map if self._show_on_map else no_map
try:
self._attrs[lon_format] = self._data["longitude"]
self._attrs[lat_format] = self._data["latitude"]
return self._attrs
except KeyError:
return
async def async_added_to_hass(self):
"""Register callbacks."""
@callback
def update():
"""Update the state."""
self.async_schedule_update_ha_state(True)
self._async_unsub_dispatcher_connect = async_dispatcher_connect(
self.hass, TOPIC_UPDATE, update
)
async def async_will_remove_from_hass(self):
"""Disconnect dispatcher listener when removed."""
if self._async_unsub_dispatcher_connect:
self._async_unsub_dispatcher_connect()
async def async_update(self):
"""Get the latest data and update the state."""
try:
self._data = self.luftdaten.data[DATA_LUFTDATEN]
except KeyError:
return
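# Illustrative sketch only (not part of the integration): the SENSORS mapping
# imported above is expected to map each sensor type to a (name, icon, unit)
# triple, which is what the unpacking in async_setup_entry relies on. The example
# entries below are assumptions, not the integration's actual constants:
#     SENSORS = {
#         "temperature": ["Temperature", "mdi:thermometer", "°C"],
#         "humidity": ["Humidity", "mdi:water-percent", "%"],
#     }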
|
import gzip
import lzma
import bz2
import io
import builtins
WRITE_MODE = "wt"
class ReusableFile(object):
"""
Class which emulates the builtin file except that calling iter() on it will return separate
iterators on different file handlers (which are automatically closed when iteration stops). This
is useful for allowing a file object to be iterated over multiple times while keep evaluation
lazy.
"""
# pylint: disable=too-many-instance-attributes
def __init__(
self,
path,
delimiter=None,
mode="r",
buffering=-1,
encoding=None,
errors=None,
newline=None,
):
"""
Constructor arguments are passed directly to builtins.open
:param path: passed to open
        :param delimiter: stored on the instance; not forwarded to open
:param mode: passed to open
:param buffering: passed to open
:param encoding: passed to open
:param errors: passed to open
:param newline: passed to open
:return: ReusableFile from the arguments
"""
self.path = path
self.delimiter = delimiter
self.mode = mode
self.buffering = buffering
self.encoding = encoding
self.errors = errors
self.newline = newline
def __iter__(self):
"""
Returns a new iterator over the file using the arguments from the constructor. Each call
to __iter__ returns a new iterator independent of all others
:return: iterator over file
"""
# pylint: disable=no-member
with builtins.open(
self.path,
mode=self.mode,
buffering=self.buffering,
encoding=self.encoding,
errors=self.errors,
newline=self.newline,
) as file_content:
for line in file_content:
yield line
def read(self):
# pylint: disable=no-member
with builtins.open(
self.path,
mode=self.mode,
buffering=self.buffering,
encoding=self.encoding,
errors=self.errors,
newline=self.newline,
) as file_content:
return file_content.read()
class CompressedFile(ReusableFile):
magic_bytes = None
# pylint: disable=too-many-instance-attributes
def __init__(
self,
path,
delimiter=None,
mode="rt",
buffering=-1,
compresslevel=9,
encoding=None,
errors=None,
newline=None,
):
super(CompressedFile, self).__init__(
path,
delimiter=delimiter,
mode=mode,
buffering=buffering,
encoding=encoding,
errors=errors,
newline=newline,
)
self.compresslevel = compresslevel
@classmethod
def is_compressed(cls, data):
return data.startswith(cls.magic_bytes)
class GZFile(CompressedFile):
magic_bytes = b"\x1f\x8b\x08"
# pylint: disable=too-many-instance-attributes
def __init__(
self,
path,
delimiter=None,
mode="rt",
buffering=-1,
compresslevel=9,
encoding=None,
errors=None,
newline=None,
):
super(GZFile, self).__init__(
path,
delimiter=delimiter,
mode=mode,
buffering=buffering,
compresslevel=compresslevel,
encoding=encoding,
errors=errors,
newline=newline,
)
def __iter__(self):
if "t" in self.mode:
with gzip.GzipFile(self.path, compresslevel=self.compresslevel) as gz_file:
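                # io.TextIOWrapper expects the wrapped buffer to provide read1();
                # aliasing read() keeps GzipFile objects without read1() usable here
                # (the same trick is used in read() below).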
gz_file.read1 = gz_file.read
with io.TextIOWrapper(
gz_file,
encoding=self.encoding,
errors=self.errors,
newline=self.newline,
) as file_content:
for line in file_content:
yield line
else:
with gzip.open(
self.path, mode=self.mode, compresslevel=self.compresslevel
) as file_content:
for line in file_content:
yield line
def read(self):
with gzip.GzipFile(self.path, compresslevel=self.compresslevel) as gz_file:
gz_file.read1 = gz_file.read
with io.TextIOWrapper(
gz_file,
encoding=self.encoding,
errors=self.errors,
newline=self.newline,
) as file_content:
return file_content.read()
class BZ2File(CompressedFile):
magic_bytes = b"\x42\x5a\x68"
# pylint: disable=too-many-instance-attributes
def __init__(
self,
path,
delimiter=None,
mode="rt",
buffering=-1,
compresslevel=9,
encoding=None,
errors=None,
newline=None,
):
super(BZ2File, self).__init__(
path,
delimiter=delimiter,
mode=mode,
buffering=buffering,
compresslevel=compresslevel,
encoding=encoding,
errors=errors,
newline=newline,
)
def __iter__(self):
with bz2.open(
self.path,
mode=self.mode,
compresslevel=self.compresslevel,
encoding=self.encoding,
errors=self.errors,
newline=self.newline,
) as file_content:
for line in file_content:
yield line
def read(self):
with bz2.open(
self.path,
mode=self.mode,
compresslevel=self.compresslevel,
encoding=self.encoding,
errors=self.errors,
newline=self.newline,
) as file_content:
return file_content.read()
class XZFile(CompressedFile):
magic_bytes = b"\xfd\x37\x7a\x58\x5a\x00"
# pylint: disable=too-many-instance-attributes
def __init__(
self,
path,
delimiter=None,
mode="rt",
buffering=-1,
compresslevel=9,
encoding=None,
errors=None,
newline=None,
check=-1,
preset=None,
filters=None,
format=None,
):
super(XZFile, self).__init__(
path,
delimiter=delimiter,
mode=mode,
buffering=buffering,
compresslevel=compresslevel,
encoding=encoding,
errors=errors,
newline=newline,
)
self.check = check
self.preset = preset
self.format = format
self.filters = filters
def __iter__(self):
with lzma.open(
self.path,
mode=self.mode,
format=self.format,
check=self.check,
preset=self.preset,
filters=self.filters,
encoding=self.encoding,
errors=self.errors,
newline=self.newline,
) as file_content:
for line in file_content:
yield line
def read(self):
with lzma.open(
self.path,
mode=self.mode,
format=self.format,
check=self.check,
preset=self.preset,
filters=self.filters,
encoding=self.encoding,
errors=self.errors,
newline=self.newline,
) as file_content:
return file_content.read()
COMPRESSION_CLASSES = [GZFile, BZ2File, XZFile]
N_COMPRESSION_CHECK_BYTES = max(len(cls.magic_bytes) for cls in COMPRESSION_CLASSES)
def get_read_function(filename, disable_compression):
if disable_compression:
return ReusableFile
else:
with open(filename, "rb") as f:
start_bytes = f.read(N_COMPRESSION_CHECK_BYTES)
for cls in COMPRESSION_CLASSES:
if cls.is_compressed(start_bytes):
return cls
return ReusableFile
def universal_write_open(
path,
mode,
buffering=-1,
encoding=None,
errors=None,
newline=None,
compresslevel=9,
format=None,
check=-1,
preset=None,
filters=None,
compression=None,
):
# pylint: disable=unexpected-keyword-arg,no-member
if compression is None:
return builtins.open(
path,
mode=mode,
buffering=buffering,
encoding=encoding,
errors=errors,
newline=newline,
)
elif compression in ("gz", "gzip"):
return gzip.open(
path,
mode=mode,
compresslevel=compresslevel,
errors=errors,
newline=newline,
encoding=encoding,
)
elif compression in ("lzma", "xz"):
return lzma.open(
path,
mode=mode,
format=format,
check=check,
preset=preset,
filters=filters,
encoding=encoding,
errors=errors,
newline=newline,
)
elif compression == "bz2":
return bz2.open(
path,
mode=mode,
compresslevel=compresslevel,
encoding=encoding,
errors=errors,
newline=newline,
)
else:
raise ValueError(
"compression must be None, gz, gzip, lzma, or xz and was {0}".format(
compression
)
)
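if __name__ == "__main__":
    # Usage sketch (not part of the library API): write a small gzip file with
    # universal_write_open, then let get_read_function pick the matching reader
    # class from the file's magic bytes and iterate it twice. File names here
    # are arbitrary examples.
    import os
    import tempfile

    demo_dir = tempfile.mkdtemp()
    gz_path = os.path.join(demo_dir, "demo.txt.gz")

    # Write two lines through the compression-aware writer.
    with universal_write_open(gz_path, mode=WRITE_MODE, compression="gz") as handle:
        handle.write("first line\n")
        handle.write("second line\n")

    # get_read_function inspects the leading bytes and should return GZFile here.
    reader_cls = get_read_function(gz_path, disable_compression=False)
    reader = reader_cls(gz_path)

    # ReusableFile subclasses can be iterated multiple times; each pass opens a
    # fresh handle.
    print([line.rstrip("\n") for line in reader])
    print([line.rstrip("\n") for line in reader])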
|
import aiounifi
from homeassistant import data_entry_flow
from homeassistant.components.unifi.const import (
CONF_ALLOW_BANDWIDTH_SENSORS,
CONF_ALLOW_UPTIME_SENSORS,
CONF_BLOCK_CLIENT,
CONF_CONTROLLER,
CONF_DETECTION_TIME,
CONF_IGNORE_WIRED_BUG,
CONF_POE_CLIENTS,
CONF_SITE_ID,
CONF_SSID_FILTER,
CONF_TRACK_CLIENTS,
CONF_TRACK_DEVICES,
CONF_TRACK_WIRED_CLIENTS,
DOMAIN as UNIFI_DOMAIN,
)
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_PORT,
CONF_USERNAME,
CONF_VERIFY_SSL,
CONTENT_TYPE_JSON,
)
from .test_controller import setup_unifi_integration
from tests.async_mock import patch
from tests.common import MockConfigEntry
CLIENTS = [{"mac": "00:00:00:00:00:01"}]
DEVICES = [
{
"board_rev": 21,
"device_id": "mock-id",
"ip": "10.0.1.1",
"last_seen": 0,
"mac": "00:00:00:00:01:01",
"model": "U7PG2",
"name": "access_point",
"state": 1,
"type": "uap",
"version": "4.0.80.10875",
"wlan_overrides": [
{
"name": "SSID 3",
"radio": "na",
"radio_name": "wifi1",
"wlan_id": "012345678910111213141516",
},
{
"name": "",
"radio": "na",
"radio_name": "wifi1",
"wlan_id": "012345678910111213141516",
},
{
"radio": "na",
"radio_name": "wifi1",
"wlan_id": "012345678910111213141516",
},
],
}
]
WLANS = [
{"name": "SSID 1"},
{"name": "SSID 2", "name_combine_enabled": False, "name_combine_suffix": "_IOT"},
]
async def test_flow_works(hass, aioclient_mock, mock_discovery):
"""Test config flow."""
mock_discovery.return_value = "1"
result = await hass.config_entries.flow.async_init(
UNIFI_DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
assert result["data_schema"]({CONF_USERNAME: "", CONF_PASSWORD: ""}) == {
CONF_HOST: "unifi",
CONF_USERNAME: "",
CONF_PASSWORD: "",
CONF_PORT: 8443,
CONF_VERIFY_SSL: False,
}
aioclient_mock.get("https://1.2.3.4:1234", status=302)
aioclient_mock.post(
"https://1.2.3.4:1234/api/login",
json={"data": "login successful", "meta": {"rc": "ok"}},
headers={"content-type": CONTENT_TYPE_JSON},
)
aioclient_mock.get(
"https://1.2.3.4:1234/api/self/sites",
json={
"data": [{"desc": "Site name", "name": "site_id", "role": "admin"}],
"meta": {"rc": "ok"},
},
headers={"content-type": CONTENT_TYPE_JSON},
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
CONF_HOST: "1.2.3.4",
CONF_USERNAME: "username",
CONF_PASSWORD: "password",
CONF_PORT: 1234,
CONF_VERIFY_SSL: True,
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "Site name"
assert result["data"] == {
CONF_CONTROLLER: {
CONF_HOST: "1.2.3.4",
CONF_USERNAME: "username",
CONF_PASSWORD: "password",
CONF_PORT: 1234,
CONF_SITE_ID: "site_id",
CONF_VERIFY_SSL: True,
}
}
async def test_flow_works_multiple_sites(hass, aioclient_mock):
"""Test config flow works when finding multiple sites."""
result = await hass.config_entries.flow.async_init(
UNIFI_DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
aioclient_mock.get("https://1.2.3.4:1234", status=302)
aioclient_mock.post(
"https://1.2.3.4:1234/api/login",
json={"data": "login successful", "meta": {"rc": "ok"}},
headers={"content-type": CONTENT_TYPE_JSON},
)
aioclient_mock.get(
"https://1.2.3.4:1234/api/self/sites",
json={
"data": [
{"name": "default", "role": "admin", "desc": "site name"},
{"name": "site2", "role": "admin", "desc": "site2 name"},
],
"meta": {"rc": "ok"},
},
headers={"content-type": CONTENT_TYPE_JSON},
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
CONF_HOST: "1.2.3.4",
CONF_USERNAME: "username",
CONF_PASSWORD: "password",
CONF_PORT: 1234,
CONF_VERIFY_SSL: True,
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "site"
assert result["data_schema"]({"site": "site name"})
assert result["data_schema"]({"site": "site2 name"})
async def test_flow_fails_site_already_configured(hass, aioclient_mock):
"""Test config flow."""
entry = MockConfigEntry(
domain=UNIFI_DOMAIN, data={"controller": {"host": "1.2.3.4", "site": "site_id"}}
)
entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
UNIFI_DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
aioclient_mock.get("https://1.2.3.4:1234", status=302)
aioclient_mock.post(
"https://1.2.3.4:1234/api/login",
json={"data": "login successful", "meta": {"rc": "ok"}},
headers={"content-type": CONTENT_TYPE_JSON},
)
aioclient_mock.get(
"https://1.2.3.4:1234/api/self/sites",
json={
"data": [{"desc": "Site name", "name": "site_id", "role": "admin"}],
"meta": {"rc": "ok"},
},
headers={"content-type": CONTENT_TYPE_JSON},
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
CONF_HOST: "1.2.3.4",
CONF_USERNAME: "username",
CONF_PASSWORD: "password",
CONF_PORT: 1234,
CONF_VERIFY_SSL: True,
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_flow_fails_user_credentials_faulty(hass, aioclient_mock):
"""Test config flow."""
result = await hass.config_entries.flow.async_init(
UNIFI_DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
aioclient_mock.get("https://1.2.3.4:1234", status=302)
with patch("aiounifi.Controller.login", side_effect=aiounifi.errors.Unauthorized):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
CONF_HOST: "1.2.3.4",
CONF_USERNAME: "username",
CONF_PASSWORD: "password",
CONF_PORT: 1234,
CONF_VERIFY_SSL: True,
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {"base": "faulty_credentials"}
async def test_flow_fails_controller_unavailable(hass, aioclient_mock):
"""Test config flow."""
result = await hass.config_entries.flow.async_init(
UNIFI_DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
aioclient_mock.get("https://1.2.3.4:1234", status=302)
with patch("aiounifi.Controller.login", side_effect=aiounifi.errors.RequestError):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
CONF_HOST: "1.2.3.4",
CONF_USERNAME: "username",
CONF_PASSWORD: "password",
CONF_PORT: 1234,
CONF_VERIFY_SSL: True,
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {"base": "service_unavailable"}
async def test_flow_fails_unknown_problem(hass, aioclient_mock):
"""Test config flow."""
result = await hass.config_entries.flow.async_init(
UNIFI_DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
aioclient_mock.get("https://1.2.3.4:1234", status=302)
with patch("aiounifi.Controller.login", side_effect=Exception):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
CONF_HOST: "1.2.3.4",
CONF_USERNAME: "username",
CONF_PASSWORD: "password",
CONF_PORT: 1234,
CONF_VERIFY_SSL: True,
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
async def test_advanced_option_flow(hass):
"""Test advanced config flow options."""
controller = await setup_unifi_integration(
hass, clients_response=CLIENTS, devices_response=DEVICES, wlans_response=WLANS
)
result = await hass.config_entries.options.async_init(
controller.config_entry.entry_id, context={"show_advanced_options": True}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "device_tracker"
assert set(
result["data_schema"].schema[CONF_SSID_FILTER].options.keys()
).intersection(("SSID 1", "SSID 2", "SSID 2_IOT", "SSID 3"))
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={
CONF_TRACK_CLIENTS: False,
CONF_TRACK_WIRED_CLIENTS: False,
CONF_TRACK_DEVICES: False,
CONF_SSID_FILTER: ["SSID 1", "SSID 2_IOT", "SSID 3"],
CONF_DETECTION_TIME: 100,
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "client_control"
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={CONF_BLOCK_CLIENT: [CLIENTS[0]["mac"]], CONF_POE_CLIENTS: False},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "statistics_sensors"
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={
CONF_ALLOW_BANDWIDTH_SENSORS: True,
CONF_ALLOW_UPTIME_SENSORS: True,
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["data"] == {
CONF_TRACK_CLIENTS: False,
CONF_TRACK_WIRED_CLIENTS: False,
CONF_TRACK_DEVICES: False,
CONF_SSID_FILTER: ["SSID 1", "SSID 2_IOT", "SSID 3"],
CONF_DETECTION_TIME: 100,
CONF_IGNORE_WIRED_BUG: False,
CONF_POE_CLIENTS: False,
CONF_BLOCK_CLIENT: [CLIENTS[0]["mac"]],
CONF_ALLOW_BANDWIDTH_SENSORS: True,
CONF_ALLOW_UPTIME_SENSORS: True,
}
async def test_simple_option_flow(hass):
"""Test simple config flow options."""
controller = await setup_unifi_integration(
hass, clients_response=CLIENTS, wlans_response=WLANS
)
result = await hass.config_entries.options.async_init(
controller.config_entry.entry_id, context={"show_advanced_options": False}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "simple_options"
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={
CONF_TRACK_CLIENTS: False,
CONF_TRACK_DEVICES: False,
CONF_BLOCK_CLIENT: [CLIENTS[0]["mac"]],
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["data"] == {
CONF_TRACK_CLIENTS: False,
CONF_TRACK_DEVICES: False,
CONF_BLOCK_CLIENT: [CLIENTS[0]["mac"]],
}
|
import pytest
from PyQt5.QtCore import QUrl
from qutebrowser.misc import httpclient, pastebin
@pytest.fixture
def pbclient(stubs):
http_stub = stubs.HTTPPostStub()
client = pastebin.PastebinClient(http_stub)
return client
def test_constructor(qapp):
http_client = httpclient.HTTPClient()
pastebin.PastebinClient(http_client)
@pytest.mark.parametrize('data', [
{
"name": "XYZ",
"title": "hello world",
"text": "xyz. 123 \n 172ANB",
"reply": "abc",
"apikey": "ihatespam",
},
{
"name": "the name",
"title": "the title",
"text": "some Text",
"reply": "some parent",
"apikey": "ihatespam",
}
])
def test_paste_with_parent(data, pbclient):
http_stub = pbclient._client
pbclient.paste(data["name"], data["title"], data["text"], data["reply"])
assert http_stub.data == data
assert http_stub.url == QUrl('https://crashes.qutebrowser.org/api/create')
@pytest.mark.parametrize('data', [
{
"name": "XYZ",
"title": "hello world",
"text": "xyz. 123 \n 172ANB",
"apikey": "ihatespam",
},
{
"name": "the name",
"title": "the title",
"text": "some Text",
"apikey": "ihatespam",
}
])
def test_paste_without_parent(data, pbclient):
http_stub = pbclient._client
pbclient.paste(data["name"], data["title"], data["text"])
assert pbclient._client.data == data
assert http_stub.url == QUrl('https://crashes.qutebrowser.org/api/create')
def test_paste_private(pbclient):
data = {
"name": "the name",
"title": "the title",
"text": "some Text",
"apikey": "ihatespam",
"private": "1",
}
http_stub = pbclient._client
pbclient.paste(data["name"], data["title"], data["text"], private=True)
assert pbclient._client.data == data
assert http_stub.url == QUrl('https://crashes.qutebrowser.org/api/create')
@pytest.mark.parametrize('http', [
"http://paste.the-compiler.org/view/ges83nt3",
"http://paste.the-compiler.org/view/3gjnwg4"
])
def test_on_client_success(http, pbclient, qtbot):
with qtbot.assertNotEmitted(pbclient.error):
with qtbot.waitSignal(pbclient.success):
pbclient._client.success.emit(http)
@pytest.mark.parametrize('http', [
"http invalid",
"http:/invalid.org"
"http//invalid.com"
])
def test_client_success_invalid_http(http, pbclient, qtbot):
with qtbot.assertNotEmitted(pbclient.success):
with qtbot.waitSignal(pbclient.error):
pbclient._client.success.emit(http)
def test_client_error(pbclient, qtbot):
with qtbot.assertNotEmitted(pbclient.success):
with qtbot.waitSignal(pbclient.error):
pbclient._client.error.emit("msg")
|
import unittest
import pandas as pd
class TestPandas(unittest.TestCase):
def test_read_csv(self):
data = pd.read_csv("/input/tests/data/train.csv")
self.assertEqual(100, len(data.index))
def test_read_feather(self):
data = pd.read_feather("/input/tests/data/feather-0_3_1.feather")
self.assertEqual(10, data.size)
|
import logging
import voluptuous as vol
from homeassistant.components.media_player import MediaPlayerEntity
from homeassistant.components.media_player.const import (
SUPPORT_BROWSE_MEDIA,
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PLAY_MEDIA,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SEEK,
SUPPORT_SHUFFLE_SET,
SUPPORT_STOP,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
SUPPORT_VOLUME_STEP,
)
from homeassistant.const import (
DEVICE_DEFAULT_NAME,
STATE_IDLE,
STATE_OFF,
STATE_PAUSED,
STATE_PLAYING,
)
from homeassistant.core import callback
from homeassistant.helpers import config_validation as cv, entity_platform
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.util import convert
from homeassistant.util.dt import utcnow
from .const import DOMAIN
from .media_browser import browse_media
SUPPORT_ROON = (
SUPPORT_BROWSE_MEDIA
| SUPPORT_PAUSE
| SUPPORT_VOLUME_SET
| SUPPORT_STOP
| SUPPORT_PREVIOUS_TRACK
| SUPPORT_NEXT_TRACK
| SUPPORT_SHUFFLE_SET
| SUPPORT_SEEK
| SUPPORT_TURN_ON
| SUPPORT_TURN_OFF
| SUPPORT_VOLUME_MUTE
| SUPPORT_PLAY
| SUPPORT_PLAY_MEDIA
| SUPPORT_VOLUME_STEP
)
_LOGGER = logging.getLogger(__name__)
SERVICE_JOIN = "join"
SERVICE_UNJOIN = "unjoin"
SERVICE_TRANSFER = "transfer"
ATTR_JOIN = "join_ids"
ATTR_UNJOIN = "unjoin_ids"
ATTR_TRANSFER = "transfer_id"
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Roon MediaPlayer from Config Entry."""
roon_server = hass.data[DOMAIN][config_entry.entry_id]
media_players = set()
# Register entity services
platform = entity_platform.current_platform.get()
platform.async_register_entity_service(
SERVICE_JOIN,
{vol.Required(ATTR_JOIN): vol.All(cv.ensure_list, [cv.entity_id])},
"join",
)
platform.async_register_entity_service(
SERVICE_UNJOIN,
{vol.Optional(ATTR_UNJOIN): vol.All(cv.ensure_list, [cv.entity_id])},
"unjoin",
)
platform.async_register_entity_service(
SERVICE_TRANSFER,
{vol.Required(ATTR_TRANSFER): cv.entity_id},
"async_transfer",
)
@callback
def async_update_media_player(player_data):
"""Add or update Roon MediaPlayer."""
dev_id = player_data["dev_id"]
if dev_id not in media_players:
# new player!
media_player = RoonDevice(roon_server, player_data)
media_players.add(dev_id)
async_add_entities([media_player])
else:
# update existing player
async_dispatcher_send(
hass, f"room_media_player_update_{dev_id}", player_data
)
# start listening for players to be added or changed by the server component
async_dispatcher_connect(hass, "roon_media_player", async_update_media_player)
class RoonDevice(MediaPlayerEntity):
"""Representation of an Roon device."""
def __init__(self, server, player_data):
"""Initialize Roon device object."""
self._remove_signal_status = None
self._server = server
self._available = True
self._last_position_update = None
self._supports_standby = False
self._state = STATE_IDLE
self._last_playlist = None
self._last_media = None
self._unique_id = None
self._zone_id = None
self._output_id = None
self._name = DEVICE_DEFAULT_NAME
self._media_title = None
self._media_album_name = None
self._media_artist = None
self._media_position = 0
self._media_duration = 0
self._is_volume_muted = False
self._volume_step = 0
self._shuffle = False
self._media_image_url = None
self._volume_level = 0
self.update_data(player_data)
async def async_added_to_hass(self):
"""Register callback."""
self.async_on_remove(
async_dispatcher_connect(
self.hass,
f"room_media_player_update_{self.unique_id}",
self.async_update_callback,
)
)
self._server.add_player_id(self.entity_id, self.name)
@callback
def async_update_callback(self, player_data):
"""Handle device updates."""
self.update_data(player_data)
self.async_write_ha_state()
@property
def available(self):
"""Return True if entity is available."""
return self._available
@property
def supported_features(self):
"""Flag media player features that are supported."""
return SUPPORT_ROON
@property
def device_info(self):
"""Return the device info."""
dev_model = "player"
if self.player_data.get("source_controls"):
dev_model = self.player_data["source_controls"][0].get("display_name")
return {
"identifiers": {(DOMAIN, self.unique_id)},
"name": self.name,
"manufacturer": "RoonLabs",
"model": dev_model,
"via_hub": (DOMAIN, self._server.host),
}
def update_data(self, player_data=None):
"""Update session object."""
if player_data:
self.player_data = player_data
if not self.player_data["is_available"]:
# this player was removed
self._available = False
self._state = STATE_OFF
else:
self._available = True
# determine player state
self.update_state()
if self.state == STATE_PLAYING:
self._last_position_update = utcnow()
@classmethod
def _parse_volume(cls, player_data):
"""Parse volume data to determine volume levels and mute state."""
volume = {
"level": 0,
"step": 0,
"muted": False,
}
try:
volume_data = player_data["volume"]
volume_muted = volume_data["is_muted"]
volume_step = convert(volume_data["step"], int, 0)
if volume_data["type"] == "db":
level = convert(volume_data["value"], float, 0.0) / 80 * 100 + 100
else:
level = convert(volume_data["value"], float, 0.0)
volume_level = convert(level, int, 0) / 100
except KeyError:
            # this player does not expose volume information
pass
else:
volume["muted"] = volume_muted
volume["step"] = volume_step
volume["level"] = volume_level
return volume
def _parse_now_playing(self, player_data):
"""Parse now playing data to determine title, artist, position, duration and artwork."""
now_playing = {
"title": None,
"artist": None,
"album": None,
"position": 0,
"duration": 0,
"image": None,
}
now_playing_data = None
try:
now_playing_data = player_data["now_playing"]
media_title = now_playing_data["three_line"]["line1"]
media_artist = now_playing_data["three_line"]["line2"]
media_album_name = now_playing_data["three_line"]["line3"]
media_position = convert(now_playing_data["seek_position"], int, 0)
media_duration = convert(now_playing_data.get("length"), int, 0)
image_id = now_playing_data.get("image_key")
except KeyError:
            # nothing is playing or the now playing metadata is incomplete
pass
else:
now_playing["title"] = media_title
now_playing["artist"] = media_artist
now_playing["album"] = media_album_name
now_playing["position"] = media_position
now_playing["duration"] = media_duration
if image_id:
now_playing["image"] = self._server.roonapi.get_image(image_id)
return now_playing
def update_state(self):
"""Update the power state and player state."""
new_state = ""
# power state from source control (if supported)
if "source_controls" in self.player_data:
for source in self.player_data["source_controls"]:
if source["supports_standby"] and source["status"] != "indeterminate":
self._supports_standby = True
if source["status"] in ["standby", "deselected"]:
new_state = STATE_OFF
break
# determine player state
if not new_state:
if self.player_data["state"] == "playing":
new_state = STATE_PLAYING
elif self.player_data["state"] == "loading":
new_state = STATE_PLAYING
elif self.player_data["state"] == "stopped":
new_state = STATE_IDLE
elif self.player_data["state"] == "paused":
new_state = STATE_PAUSED
else:
new_state = STATE_IDLE
self._state = new_state
self._unique_id = self.player_data["dev_id"]
self._zone_id = self.player_data["zone_id"]
self._output_id = self.player_data["output_id"]
self._shuffle = self.player_data["settings"]["shuffle"]
self._name = self.player_data["display_name"]
volume = RoonDevice._parse_volume(self.player_data)
        self._is_volume_muted = volume["muted"]
        self._volume_step = volume["step"]
self._volume_level = volume["level"]
now_playing = self._parse_now_playing(self.player_data)
self._media_title = now_playing["title"]
self._media_artist = now_playing["artist"]
self._media_album_name = now_playing["album"]
self._media_position = now_playing["position"]
self._media_duration = now_playing["duration"]
self._media_image_url = now_playing["image"]
@property
def media_position_updated_at(self):
"""When was the position of the current playing media valid."""
# Returns value from homeassistant.util.dt.utcnow().
return self._last_position_update
@property
def unique_id(self):
"""Return the id of this roon client."""
return self._unique_id
@property
def should_poll(self):
"""Return True if entity has to be polled for state."""
return False
@property
def zone_id(self):
"""Return current session Id."""
return self._zone_id
@property
def output_id(self):
"""Return current session Id."""
return self._output_id
@property
def name(self):
"""Return device name."""
return self._name
@property
def media_title(self):
"""Return title currently playing."""
return self._media_title
@property
def media_album_name(self):
"""Album name of current playing media (Music track only)."""
return self._media_album_name
@property
def media_artist(self):
"""Artist of current playing media (Music track only)."""
return self._media_artist
@property
def media_album_artist(self):
"""Album artist of current playing media (Music track only)."""
return self._media_artist
@property
def media_playlist(self):
"""Title of Playlist currently playing."""
return self._last_playlist
@property
def media_image_url(self):
"""Image url of current playing media."""
return self._media_image_url
@property
def media_position(self):
"""Return position currently playing."""
return self._media_position
@property
def media_duration(self):
"""Return total runtime length."""
return self._media_duration
@property
def volume_level(self):
"""Return current volume level."""
return self._volume_level
@property
def is_volume_muted(self):
"""Return mute state."""
return self._is_volume_muted
@property
def volume_step(self):
""".Return volume step size."""
return self._volume_step
@property
def supports_standby(self):
"""Return power state of source controls."""
return self._supports_standby
@property
def state(self):
"""Return current playstate of the device."""
return self._state
@property
def shuffle(self):
"""Boolean if shuffle is enabled."""
return self._shuffle
def media_play(self):
"""Send play command to device."""
self._server.roonapi.playback_control(self.output_id, "play")
def media_pause(self):
"""Send pause command to device."""
self._server.roonapi.playback_control(self.output_id, "pause")
def media_play_pause(self):
"""Toggle play command to device."""
self._server.roonapi.playback_control(self.output_id, "playpause")
def media_stop(self):
"""Send stop command to device."""
self._server.roonapi.playback_control(self.output_id, "stop")
def media_next_track(self):
"""Send next track command to device."""
self._server.roonapi.playback_control(self.output_id, "next")
def media_previous_track(self):
"""Send previous track command to device."""
self._server.roonapi.playback_control(self.output_id, "previous")
def media_seek(self, position):
"""Send seek command to device."""
self._server.roonapi.seek(self.output_id, position)
# Seek doesn't cause an async update - so force one
self._media_position = position
self.schedule_update_ha_state()
def set_volume_level(self, volume):
"""Send new volume_level to device."""
volume = int(volume * 100)
self._server.roonapi.change_volume(self.output_id, volume)
def mute_volume(self, mute=True):
"""Send mute/unmute to device."""
self._server.roonapi.mute(self.output_id, mute)
def volume_up(self):
"""Send new volume_level to device."""
self._server.roonapi.change_volume(self.output_id, 3, "relative")
def volume_down(self):
"""Send new volume_level to device."""
self._server.roonapi.change_volume(self.output_id, -3, "relative")
def turn_on(self):
"""Turn on device (if supported)."""
if not (self.supports_standby and "source_controls" in self.player_data):
self.media_play()
return
for source in self.player_data["source_controls"]:
if source["supports_standby"] and source["status"] != "indeterminate":
self._server.roonapi.convenience_switch(
self.output_id, source["control_key"]
)
return
def turn_off(self):
"""Turn off device (if supported)."""
if not (self.supports_standby and "source_controls" in self.player_data):
self.media_stop()
return
for source in self.player_data["source_controls"]:
if source["supports_standby"] and not source["status"] == "indeterminate":
self._server.roonapi.standby(self.output_id, source["control_key"])
return
def set_shuffle(self, shuffle):
"""Set shuffle state."""
self._server.roonapi.shuffle(self.output_id, shuffle)
def play_media(self, media_type, media_id, **kwargs):
"""Send the play_media command to the media player."""
        # Roon itself doesn't support playback of media by filename/url so this is a bit of a workaround.
media_type = media_type.lower()
if media_type == "radio":
if self._server.roonapi.play_radio(self.zone_id, media_id):
self._last_playlist = media_id
self._last_media = media_id
elif media_type == "playlist":
if self._server.roonapi.play_playlist(
self.zone_id, media_id, shuffle=False
):
self._last_playlist = media_id
elif media_type == "shuffleplaylist":
if self._server.roonapi.play_playlist(self.zone_id, media_id, shuffle=True):
self._last_playlist = media_id
elif media_type == "queueplaylist":
self._server.roonapi.queue_playlist(self.zone_id, media_id)
elif media_type == "genre":
self._server.roonapi.play_genre(self.zone_id, media_id)
elif media_type in ("library", "track"):
self._server.roonapi.play_id(self.zone_id, media_id)
else:
_LOGGER.error(
"Playback requested of unsupported type: %s --> %s",
media_type,
media_id,
)
def join(self, join_ids):
"""Add another Roon player to this player's join group."""
zone_data = self._server.roonapi.zone_by_output_id(self._output_id)
if zone_data is None:
_LOGGER.error("No zone data for %s", self.name)
return
sync_available = {}
for zone in self._server.zones.values():
for output in zone["outputs"]:
if (
zone["display_name"] != self.name
and output["output_id"]
in self.player_data["can_group_with_output_ids"]
and zone["display_name"] not in sync_available
):
sync_available[zone["display_name"]] = output["output_id"]
names = []
for entity_id in join_ids:
name = self._server.roon_name(entity_id)
if name is None:
_LOGGER.error("No roon player found for %s", entity_id)
return
if name not in sync_available:
_LOGGER.error(
"Can't join player %s with %s because it's not in the join available list %s",
name,
self.name,
list(sync_available),
)
return
names.append(name)
_LOGGER.debug("Joining %s to %s", names, self.name)
self._server.roonapi.group_outputs(
[self._output_id] + [sync_available[name] for name in names]
)
def unjoin(self, unjoin_ids=None):
"""Remove a Roon player to this player's join group."""
zone_data = self._server.roonapi.zone_by_output_id(self._output_id)
if zone_data is None:
_LOGGER.error("No zone data for %s", self.name)
return
join_group = {
output["display_name"]: output["output_id"]
for output in zone_data["outputs"]
if output["display_name"] != self.name
}
if unjoin_ids is None:
# unjoin everything
names = list(join_group)
else:
names = []
for entity_id in unjoin_ids:
name = self._server.roon_name(entity_id)
if name is None:
_LOGGER.error("No roon player found for %s", entity_id)
return
if name not in join_group:
_LOGGER.error(
"Can't unjoin player %s from %s because it's not in the joined group %s",
name,
self.name,
list(join_group),
)
return
names.append(name)
_LOGGER.debug("Unjoining %s from %s", names, self.name)
self._server.roonapi.ungroup_outputs([join_group[name] for name in names])
async def async_transfer(self, transfer_id):
"""Transfer playback from this roon player to another."""
name = self._server.roon_name(transfer_id)
if name is None:
_LOGGER.error("No roon player found for %s", transfer_id)
return
zone_ids = {
output["display_name"]: output["zone_id"]
for output in self._server.zones.values()
if output["display_name"] != self.name
}
transfer_id = zone_ids.get(name)
if transfer_id is None:
_LOGGER.error(
"Can't transfer from %s to %s because destination is not known %s",
self.name,
                name,
                list(zone_ids),
            )
            return
_LOGGER.debug("Transferring from %s to %s", self.name, name)
await self.hass.async_add_executor_job(
self._server.roonapi.transfer_zone, self._zone_id, transfer_id
)
async def async_browse_media(self, media_content_type=None, media_content_id=None):
"""Implement the websocket media browsing helper."""
return await self.hass.async_add_executor_job(
browse_media,
self.zone_id,
self._server,
media_content_type,
media_content_id,
)
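# Illustrative sketch only: the player_data dict handed to RoonDevice is produced
# by the Roon server component. The shape below is inferred from the fields this
# module reads; the concrete values are made up for illustration.
#     player_data = {
#         "dev_id": "zone_1_output_1",
#         "zone_id": "1601a...",
#         "output_id": "1701b...",
#         "display_name": "Living Room",
#         "is_available": True,
#         "state": "playing",
#         "settings": {"shuffle": False},
#         "volume": {"type": "number", "value": 35, "step": 1, "is_muted": False},
#         "now_playing": {
#             "seek_position": 12,
#             "length": 240,
#             "image_key": "abcd",
#             "three_line": {"line1": "Title", "line2": "Artist", "line3": "Album"},
#         },
#         "source_controls": [
#             {"display_name": "Amp", "supports_standby": True,
#              "status": "selected", "control_key": "1"},
#         ],
#         "can_group_with_output_ids": [],
#     }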
|
import logging
from uuid import uuid4
from .const import (
API_CONTEXT,
API_DIRECTIVE,
API_ENDPOINT,
API_EVENT,
API_HEADER,
API_PAYLOAD,
API_SCOPE,
)
from .entities import ENTITY_ADAPTERS
from .errors import AlexaInvalidEndpointError
_LOGGER = logging.getLogger(__name__)
class AlexaDirective:
"""An incoming Alexa directive."""
def __init__(self, request):
"""Initialize a directive."""
self._directive = request[API_DIRECTIVE]
self.namespace = self._directive[API_HEADER]["namespace"]
self.name = self._directive[API_HEADER]["name"]
self.payload = self._directive[API_PAYLOAD]
self.has_endpoint = API_ENDPOINT in self._directive
self.entity = self.entity_id = self.endpoint = self.instance = None
def load_entity(self, hass, config):
"""Set attributes related to the entity for this request.
Sets these attributes when self.has_endpoint is True:
- entity
- entity_id
- endpoint
- instance (when header includes instance property)
Behavior when self.has_endpoint is False is undefined.
Will raise AlexaInvalidEndpointError if the endpoint in the request is
malformed or nonexistent.
"""
_endpoint_id = self._directive[API_ENDPOINT]["endpointId"]
self.entity_id = _endpoint_id.replace("#", ".")
self.entity = hass.states.get(self.entity_id)
if not self.entity or not config.should_expose(self.entity_id):
raise AlexaInvalidEndpointError(_endpoint_id)
self.endpoint = ENTITY_ADAPTERS[self.entity.domain](hass, config, self.entity)
if "instance" in self._directive[API_HEADER]:
self.instance = self._directive[API_HEADER]["instance"]
def response(self, name="Response", namespace="Alexa", payload=None):
"""Create an API formatted response.
Async friendly.
"""
response = AlexaResponse(name, namespace, payload)
token = self._directive[API_HEADER].get("correlationToken")
if token:
response.set_correlation_token(token)
if self.has_endpoint:
response.set_endpoint(self._directive[API_ENDPOINT].copy())
return response
def error(
self,
namespace="Alexa",
error_type="INTERNAL_ERROR",
error_message="",
payload=None,
):
"""Create a API formatted error response.
Async friendly.
"""
payload = payload or {}
payload["type"] = error_type
payload["message"] = error_message
_LOGGER.info(
"Request %s/%s error %s: %s",
self._directive[API_HEADER]["namespace"],
self._directive[API_HEADER]["name"],
error_type,
error_message,
)
return self.response(name="ErrorResponse", namespace=namespace, payload=payload)
class AlexaResponse:
"""Class to hold a response."""
def __init__(self, name, namespace, payload=None):
"""Initialize the response."""
payload = payload or {}
self._response = {
API_EVENT: {
API_HEADER: {
"namespace": namespace,
"name": name,
"messageId": str(uuid4()),
"payloadVersion": "3",
},
API_PAYLOAD: payload,
}
}
@property
def name(self):
"""Return the name of this response."""
return self._response[API_EVENT][API_HEADER]["name"]
@property
def namespace(self):
"""Return the namespace of this response."""
return self._response[API_EVENT][API_HEADER]["namespace"]
def set_correlation_token(self, token):
"""Set the correlationToken.
        This should normally mirror the value from a request, and is usually set
        by AlexaDirective.response().
"""
self._response[API_EVENT][API_HEADER]["correlationToken"] = token
def set_endpoint_full(self, bearer_token, endpoint_id, cookie=None):
"""Set the endpoint dictionary.
This is used to send proactive messages to Alexa.
"""
self._response[API_EVENT][API_ENDPOINT] = {
API_SCOPE: {"type": "BearerToken", "token": bearer_token}
}
if endpoint_id is not None:
self._response[API_EVENT][API_ENDPOINT]["endpointId"] = endpoint_id
if cookie is not None:
self._response[API_EVENT][API_ENDPOINT]["cookie"] = cookie
def set_endpoint(self, endpoint):
"""Set the endpoint.
        This should normally mirror the value from a request, and is usually set
        by AlexaDirective.response().
"""
self._response[API_EVENT][API_ENDPOINT] = endpoint
def _properties(self):
context = self._response.setdefault(API_CONTEXT, {})
return context.setdefault("properties", [])
def add_context_property(self, prop):
"""Add a property to the response context.
The Alexa response includes a list of properties which provides
feedback on how states have changed. For example if a user asks,
"Alexa, set thermostat to 20 degrees", the API expects a response with
the new value of the property, and Alexa will respond to the user
"Thermostat set to 20 degrees".
        async_handle_message() will call .merge_context_properties() for every
        request automatically. However, handlers often call services to change
        state, and the effects of those changes are applied asynchronously.
        Thus, handlers should call this method to confirm changes before
        returning.
"""
self._properties().append(prop)
def merge_context_properties(self, endpoint):
"""Add all properties from given endpoint if not already set.
Handlers should be using .add_context_property().
"""
properties = self._properties()
already_set = {(p["namespace"], p["name"]) for p in properties}
for prop in endpoint.serialize_properties():
if (prop["namespace"], prop["name"]) not in already_set:
self.add_context_property(prop)
def serialize(self):
"""Return response as a JSON-able data structure."""
return self._response
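# Usage sketch (assumption: a hypothetical "Alexa.PowerController"/"TurnOn"
# Smart Home request dict; not part of this module). It only illustrates how an
# incoming directive flows into AlexaDirective and back out as a response:
#     request = {
#         API_DIRECTIVE: {
#             API_HEADER: {
#                 "namespace": "Alexa.PowerController",
#                 "name": "TurnOn",
#                 "correlationToken": "token-123",
#             },
#             API_ENDPOINT: {"endpointId": "light#kitchen"},
#             API_PAYLOAD: {},
#         }
#     }
#     directive = AlexaDirective(request)
#     response = directive.response()  # copies the correlation token and endpoint
#     response.serialize()             # JSON-able event/header/payload structure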
|
import numpy as np
from scipy import linalg
from .. import pick_types
from ..utils import _validate_type, _ensure_int, _check_preload
from ..io import BaseRaw
from ..io.constants import FIFF
from ..epochs import BaseEpochs
from ..evoked import Evoked
from ..bem import fit_sphere_to_headshape
from ..channels.interpolation import _calc_g, _calc_h
def _prepare_G(G, lambda2):
    """Regularize G and precompute terms reused by the CSD transform."""
    G.flat[::len(G) + 1] += lambda2
    # invert the regularized G matrix
    Gi = linalg.inv(G)
    TC = Gi.sum(0)
    sgi = np.sum(TC)  # compute sum total
    return Gi, TC, sgi
def _compute_csd(G_precomputed, H, radius):
"""Compute the CSD."""
n_channels = H.shape[0]
data = np.eye(n_channels)
mu = data.mean(0)
Z = data - mu
Gi, TC, sgi = G_precomputed
Cp2 = np.dot(Gi, Z)
c02 = np.sum(Cp2, axis=0) / sgi
C2 = Cp2 - np.dot(TC[:, np.newaxis], c02[np.newaxis, :])
X = np.dot(C2.T, H).T / radius ** 2
return X
def compute_current_source_density(inst, sphere='auto', lambda2=1e-5,
stiffness=4, n_legendre_terms=50,
copy=True):
"""Get the current source density (CSD) transformation.
Transformation based on spherical spline surface Laplacian
:footcite:`PerrinEtAl1987,PerrinEtAl1989,Cohen2014,KayserTenke2015`.
Parameters
----------
inst : instance of Raw, Epochs or Evoked
The data to be transformed.
sphere : array-like, shape (4,) | str
The sphere, head-model of the form (x, y, z, r) where x, y, z
is the center of the sphere and r is the radius in meters.
Can also be "auto" to use a digitization-based fit.
lambda2 : float
Regularization parameter, produces smoothness. Defaults to 1e-5.
stiffness : float
Stiffness of the spline.
n_legendre_terms : int
Number of Legendre terms to evaluate.
copy : bool
Whether to overwrite instance data or create a copy.
Returns
-------
inst_csd : instance of Raw, Epochs or Evoked
The transformed data. Output type will match input type.
Notes
-----
This function applies an average reference to the data if copy is False.
Do not transform CSD data to source space.
.. versionadded:: 0.20
References
----------
.. footbibliography::
"""
_validate_type(inst, (BaseEpochs, BaseRaw, Evoked), 'inst')
_check_preload(inst, 'Computing CSD')
if inst.info['custom_ref_applied'] == FIFF.FIFFV_MNE_CUSTOM_REF_CSD:
raise ValueError('CSD already applied, should not be reapplied')
inst = inst.copy() if copy else inst
picks = pick_types(inst.info, meg=False, eeg=True, exclude=[])
if any([ch in np.array(inst.ch_names)[picks] for ch in inst.info['bads']]):
raise ValueError('CSD cannot be computed with bad EEG channels. Either'
                         ' drop (inst.drop_channels(inst.info[\'bads\'])) '
'or interpolate (`inst.interpolate_bads()`) '
'bad EEG channels.')
if len(picks) == 0:
raise ValueError('No EEG channels found.')
_validate_type(lambda2, 'numeric', 'lambda2')
if not 0 <= lambda2 < 1:
raise ValueError('lambda2 must be between 0 and 1, got %s' % lambda2)
_validate_type(stiffness, 'numeric', 'stiffness')
if stiffness < 0:
        raise ValueError('stiffness must be non-negative, got %s' % stiffness)
n_legendre_terms = _ensure_int(n_legendre_terms, 'n_legendre_terms')
if n_legendre_terms < 1:
raise ValueError('n_legendre_terms must be greater than 0, '
'got %s' % n_legendre_terms)
if isinstance(sphere, str) and sphere == 'auto':
radius, origin_head, origin_device = fit_sphere_to_headshape(inst.info)
x, y, z = origin_head - origin_device
sphere = (x, y, z, radius)
try:
sphere = np.array(sphere, float)
x, y, z, radius = sphere
except Exception:
raise ValueError(
f'sphere must be "auto" or array-like with shape (4,), '
f'got {sphere}')
_validate_type(x, 'numeric', 'x')
_validate_type(y, 'numeric', 'y')
_validate_type(z, 'numeric', 'z')
_validate_type(radius, 'numeric', 'radius')
if radius <= 0:
raise ValueError('sphere radius must be greater than 0, '
'got %s' % radius)
_validate_type(copy, (bool), 'copy')
pos = np.array([inst.info['chs'][pick]['loc'][:3] for pick in picks])
if not np.isfinite(pos).all() or np.isclose(pos, 0.).all(1).any():
raise ValueError('Zero or infinite position found in chs')
pos -= (x, y, z)
# Project onto a unit sphere to compute the cosine similarity:
pos /= np.linalg.norm(pos, axis=1, keepdims=True)
cos_dist = np.clip(np.dot(pos, pos.T), -1, 1)
# This is equivalent to doing one minus half the squared Euclidean:
# from scipy.spatial.distance import squareform, pdist
# cos_dist = 1 - squareform(pdist(pos, 'sqeuclidean')) / 2.
del pos
G = _calc_g(cos_dist, stiffness=stiffness,
n_legendre_terms=n_legendre_terms)
H = _calc_h(cos_dist, stiffness=stiffness,
n_legendre_terms=n_legendre_terms)
G_precomputed = _prepare_G(G, lambda2)
trans_csd = _compute_csd(G_precomputed=G_precomputed,
H=H, radius=radius)
epochs = [inst._data] if not isinstance(inst, BaseEpochs) else inst._data
for epo in epochs:
epo[picks] = np.dot(trans_csd, epo[picks])
inst.info['custom_ref_applied'] = FIFF.FIFFV_MNE_CUSTOM_REF_CSD
for pick in picks:
inst.info['chs'][pick].update(coil_type=FIFF.FIFFV_COIL_EEG_CSD,
unit=FIFF.FIFF_UNIT_V_M2)
return inst
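# Usage sketch (assumes `raw` and `evoked` are preloaded MNE objects with EEG
# channels and digitized electrode positions; shown for illustration, not
# executed here):
#     raw_csd = compute_current_source_density(raw, sphere='auto')
#     evoked_csd = compute_current_source_density(evoked, lambda2=1e-5, stiffness=4)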
|
import datetime
import json
import logging
from absl import flags
from perfkitbenchmarker import errors
from perfkitbenchmarker import linux_packages
from perfkitbenchmarker import object_storage_service
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.providers import azure
from perfkitbenchmarker.providers.azure import azure_network
FLAGS = flags.FLAGS
DEFAULT_AZURE_REGION = 'eastus2'
class AzureBlobStorageService(object_storage_service.ObjectStorageService):
"""Interface to Azure Blob Storage.
Relevant documentation:
http://azure.microsoft.com/en-us/documentation/articles/xplat-cli/
"""
def __init__(self):
self.storage_account = None
self.resource_group = None
STORAGE_NAME = azure.CLOUD
def PrepareService(self, location,
existing_storage_account_and_resource_group=None,
try_to_create_storage_account_and_resource_group=False):
"""See base class (without additional args).
TODO(deitz): We should use the same interface across the clouds without
additional arguments.
Args:
location: where to place our data.
existing_storage_account_and_resource_group: An existing storage account
and resource group for reading objects that may have already been
created.
try_to_create_storage_account_and_resource_group: Whether to try to create
the storage account and resource group in case it does not exist yet.
This supports invoking the object_storage_service_benchmark multiple
times on the same bucket name and creating the resource group the
first time. While this defaults to False, if there is no existing
storage account and resource group passed to this function via
existing_storage_account_and_resource_group, then one will be created.
"""
# abs is "Azure Blob Storage"
prefix = 'pkb%sabs' % FLAGS.run_uri
# Maybe extract existing storage account and resource group names
existing_storage_account, existing_resource_group = None, None
if existing_storage_account_and_resource_group:
existing_storage_account, existing_resource_group = \
existing_storage_account_and_resource_group
assert existing_storage_account is not None
assert existing_resource_group is not None
else:
# We don't have an existing storage account or resource group so we better
# create one.
try_to_create_storage_account_and_resource_group = True
storage_account_name = existing_storage_account or prefix + 'storage'
resource_group_name = existing_resource_group or prefix + '-resource-group'
# If we have an existing storage account and resource, we typically would
# not try to create it. If try_to_create_storage_account_and_resource_group
# is True, however, then we do try to create it. In this case, we shouldn't
# raise on a failure since it may already exist.
raise_on_create_failure = not (
existing_storage_account_and_resource_group and
try_to_create_storage_account_and_resource_group)
# We use a separate resource group so that our buckets can optionally stick
# around after PKB runs. This is useful for things like cold reads tests
self.resource_group = \
azure_network.AzureResourceGroup(
resource_group_name,
use_existing=not try_to_create_storage_account_and_resource_group,
timeout_minutes=max(FLAGS.timeout_minutes,
FLAGS.persistent_timeout_minutes),
raise_on_create_failure=raise_on_create_failure)
self.resource_group.Create()
# We use a different Azure storage account than the VM account
# because a) we need to be able to set the storage class
# separately, including using a blob-specific storage account and
# b) this account might be in a different location than any
# VM-related account.
self.storage_account = azure_network.AzureStorageAccount(
FLAGS.azure_storage_type,
location or DEFAULT_AZURE_REGION,
storage_account_name,
kind=FLAGS.azure_blob_account_kind,
resource_group=self.resource_group,
use_existing=not try_to_create_storage_account_and_resource_group,
raise_on_create_failure=raise_on_create_failure)
self.storage_account.Create()
def CleanupService(self):
if hasattr(self, 'storage_account') and self.storage_account:
self.storage_account.Delete()
if hasattr(self, 'resource_group') and self.resource_group:
self.resource_group.Delete()
def MakeBucket(self, bucket, raise_on_failure=True):
_, stderr, ret_code = vm_util.IssueCommand(
[azure.AZURE_PATH, 'storage', 'container', 'create', '--name', bucket] +
self.storage_account.connection_args,
raise_on_failure=False)
if ret_code and raise_on_failure:
raise errors.Benchmarks.BucketCreationError(stderr)
def DeleteBucket(self, bucket):
if not hasattr(self, 'storage_account') or not self.storage_account:
logging.warning(
'storage_account not configured. Skipping DeleteBucket %s', bucket)
return
vm_util.IssueCommand(
[azure.AZURE_PATH, 'storage', 'container', 'delete', '--name', bucket] +
self.storage_account.connection_args,
raise_on_failure=False)
def Copy(self, src_url, dst_url, recursive=False):
"""See base class."""
raise NotImplementedError()
def CopyToBucket(self, src_path, bucket, object_path):
vm_util.IssueCommand(['az', 'storage', 'blob', 'upload',
'--account-name', self.storage_account.name,
'--file', src_path,
'--container', bucket,
'--name', object_path])
def _GenerateDownloadToken(self, bucket, object_path):
blob_store_expiry = datetime.datetime.utcnow() + datetime.timedelta(
days=365)
stdout, _, _ = vm_util.IssueCommand([
'az', 'storage', 'blob', 'generate-sas',
'--account-name', self.storage_account.name,
'--container-name', bucket,
'--name', object_path,
'--expiry', blob_store_expiry.strftime('%Y-%m-%dT%H:%M:%SZ'),
'--permissions', 'r'
])
token = stdout.strip('\n').strip('"')
return token
def MakeRemoteCliDownloadUrl(self, bucket, object_path):
"""See base class."""
token = self._GenerateDownloadToken(bucket, object_path)
url = 'https://{acc}.blob.core.windows.net/{con}/{src}?{tkn}'.format(
acc=self.storage_account.name,
con=bucket,
src=object_path,
tkn=token)
return url
def GenerateCliDownloadFileCommand(self, src_url, dst_url):
"""See base class."""
return 'wget -O {dst_url} "{src_url}"'.format(src_url=src_url,
dst_url=dst_url)
def List(self, bucket):
"""See base class."""
stdout, _, _ = vm_util.IssueCommand([
'az', 'storage', 'blob', 'list', '--container-name', bucket,
'--account-name', self.storage_account.name
])
return [metadata['name'] for metadata in json.loads(str(stdout))]
def ListTopLevelSubfolders(self, bucket):
"""Lists the top level folders (not files) in a bucket.
    Each listed item is a full file name, e.g. "supplier/supplier.csv", so just
    the top level folder name is extracted, and repetitions are eliminated when
    a folder contains multiple files.
Args:
bucket: Name of the bucket to list the top level subfolders of.
Returns:
A list of top level subfolder names. Can be empty if there are no folders.
"""
unique_folders = set([
obj.split('/')[0].strip()
for obj in self.List(bucket)
        if obj and '/' in obj
])
return list(unique_folders)
def EmptyBucket(self, bucket):
# Emptying buckets on Azure is hard. We pass for now - this will
# increase our use of storage space, but should not affect the
# benchmark results.
pass
def PrepareVM(self, vm):
vm.Install('azure_cli')
vm.Install('azure_sdk')
vm.Install('azure_credentials')
def CLIUploadDirectory(self, vm, directory, file_names, bucket):
return vm.RemoteCommand(
('time for file in {files}; '
'do azure storage blob upload -q {directory}/$file {bucket} '
'--connection-string {connection_string}; '
'done').format(
files=' '.join(file_names),
directory=directory,
bucket=bucket,
connection_string=self.storage_account.connection_string))
def CLIDownloadBucket(self, vm, bucket, objects, dest):
return vm.RemoteCommand(
('time for object in {objects}; '
'do azure storage blob download {bucket} $object {dest} '
'--connection-string {connection_string}; '
'done').format(
objects=' '.join(objects),
bucket=bucket,
dest=dest,
connection_string=self.storage_account.connection_string))
def Metadata(self, vm):
return {'azure_lib_version':
linux_packages.GetPipPackageVersion(vm, 'azure')}
def APIScriptArgs(self):
return ['--azure_account=%s' % self.storage_account.name,
'--azure_key=%s' % self.storage_account.key]
@classmethod
def APIScriptFiles(cls):
return ['azure_service.py']
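# Usage sketch (assumption: called from a PKB benchmark with FLAGS already
# parsed; the bucket name is an arbitrary example). It shows the typical
# lifecycle built from the methods above:
#     service = AzureBlobStorageService()
#     service.PrepareService('eastus2')
#     service.MakeBucket('pkb-demo-bucket')
#     service.CopyToBucket('/tmp/data.csv', 'pkb-demo-bucket', 'data/data.csv')
#     print(service.List('pkb-demo-bucket'))
#     service.DeleteBucket('pkb-demo-bucket')
#     service.CleanupService()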
|
from django.utils.translation import gettext as _
from django.views.generic.list import ListView
from zinnia.models.entry import Entry
from zinnia.settings import PAGINATION
from zinnia.views.mixins.prefetch_related import PrefetchCategoriesAuthorsMixin
class BaseEntrySearch(object):
"""
Mixin providing the behavior of the entry search view,
by returning in the context the pattern searched, the
    error if something went wrong, and finally the
    queryset of published entries matching the pattern.
"""
pattern = ''
error = None
def get_queryset(self):
"""
        Override the get_queryset method to
do some validations and build the search queryset.
"""
entries = Entry.published.none()
if self.request.GET:
self.pattern = self.request.GET.get('pattern', '')
if len(self.pattern) < 3:
self.error = _('The pattern is too short')
else:
entries = Entry.published.search(self.pattern)
else:
self.error = _('No pattern to search found')
return entries
def get_context_data(self, **kwargs):
"""
Add error and pattern in context.
"""
context = super(BaseEntrySearch, self).get_context_data(**kwargs)
context.update({'error': self.error, 'pattern': self.pattern})
return context
class EntrySearch(PrefetchCategoriesAuthorsMixin,
BaseEntrySearch,
ListView):
"""
    Search view for entries combining these mixins:
    - PrefetchCategoriesAuthorsMixin to prefetch the related Categories
      and Authors belonging to the entry list.
- BaseEntrySearch to provide the behavior of the view.
- ListView to implement the ListView and template name resolution.
"""
paginate_by = PAGINATION
template_name_suffix = '_search'
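# Illustrative wiring sketch (assumption: not part of Zinnia; URL pattern and name are
# placeholders). The search view reads the query from the `pattern` GET parameter, so a
# minimal URLconf entry could look like:
#
#     from django.urls import path
#     urlpatterns = [
#         path('search/', EntrySearch.as_view(), name='entry_search'),
#     ]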
|
import click
from flask.cli import with_appcontext
from flask_migrate import init as _init
from flask_migrate import revision as _revision
from flask_migrate import migrate as _migrate
from flask_migrate import edit as _edit
from flask_migrate import merge as _merge
from flask_migrate import upgrade as _upgrade
from flask_migrate import downgrade as _downgrade
from flask_migrate import show as _show
from flask_migrate import history as _history
from flask_migrate import heads as _heads
from flask_migrate import branches as _branches
from flask_migrate import current as _current
from flask_migrate import stamp as _stamp
@click.group()
def db():
"""Perform database migrations."""
pass
@db.command()
@click.option('-d', '--directory', default=None,
help=('Migration script directory (default is "migrations")'))
@click.option('--multidb', is_flag=True,
help=('Support multiple databases'))
@with_appcontext
def init(directory, multidb):
"""Creates a new migration repository."""
_init(directory, multidb)
@db.command()
@click.option('-d', '--directory', default=None,
help=('Migration script directory (default is "migrations")'))
@click.option('-m', '--message', default=None, help='Revision message')
@click.option('--autogenerate', is_flag=True,
help=('Populate revision script with candidate migration '
'operations, based on comparison of database to model'))
@click.option('--sql', is_flag=True,
help=('Don\'t emit SQL to database - dump to standard output '
'instead'))
@click.option('--head', default='head',
help=('Specify head revision or <branchname>@head to base new '
'revision on'))
@click.option('--splice', is_flag=True,
help=('Allow a non-head revision as the "head" to splice onto'))
@click.option('--branch-label', default=None,
help=('Specify a branch label to apply to the new revision'))
@click.option('--version-path', default=None,
help=('Specify specific path from config for version file'))
@click.option('--rev-id', default=None,
help=('Specify a hardcoded revision id instead of generating '
'one'))
@with_appcontext
def revision(directory, message, autogenerate, sql, head, splice, branch_label,
version_path, rev_id):
"""Create a new revision file."""
_revision(directory, message, autogenerate, sql, head, splice,
branch_label, version_path, rev_id)
@db.command()
@click.option('-d', '--directory', default=None,
help=('Migration script directory (default is "migrations")'))
@click.option('-m', '--message', default=None, help='Revision message')
@click.option('--sql', is_flag=True,
help=('Don\'t emit SQL to database - dump to standard output '
'instead'))
@click.option('--head', default='head',
help=('Specify head revision or <branchname>@head to base new '
'revision on'))
@click.option('--splice', is_flag=True,
help=('Allow a non-head revision as the "head" to splice onto'))
@click.option('--branch-label', default=None,
help=('Specify a branch label to apply to the new revision'))
@click.option('--version-path', default=None,
help=('Specify specific path from config for version file'))
@click.option('--rev-id', default=None,
help=('Specify a hardcoded revision id instead of generating '
'one'))
@click.option('-x', '--x-arg', multiple=True,
help='Additional arguments consumed by custom env.py scripts')
@with_appcontext
def migrate(directory, message, sql, head, splice, branch_label, version_path,
rev_id, x_arg):
"""Autogenerate a new revision file (Alias for
'revision --autogenerate')"""
_migrate(directory, message, sql, head, splice, branch_label, version_path,
rev_id, x_arg)
@db.command()
@click.option('-d', '--directory', default=None,
help=('Migration script directory (default is "migrations")'))
@click.argument('revision', default='head')
@with_appcontext
def edit(directory, revision):
"""Edit a revision file"""
_edit(directory, revision)
@db.command()
@click.option('-d', '--directory', default=None,
help=('Migration script directory (default is "migrations")'))
@click.option('-m', '--message', default=None, help='Merge revision message')
@click.option('--branch-label', default=None,
help=('Specify a branch label to apply to the new revision'))
@click.option('--rev-id', default=None,
help=('Specify a hardcoded revision id instead of generating '
'one'))
@click.argument('revisions', nargs=-1)
@with_appcontext
def merge(directory, message, branch_label, rev_id, revisions):
"""Merge two revisions together, creating a new revision file"""
_merge(directory, revisions, message, branch_label, rev_id)
@db.command()
@click.option('-d', '--directory', default=None,
help=('Migration script directory (default is "migrations")'))
@click.option('--sql', is_flag=True,
help=('Don\'t emit SQL to database - dump to standard output '
'instead'))
@click.option('--tag', default=None,
help=('Arbitrary "tag" name - can be used by custom env.py '
'scripts'))
@click.option('-x', '--x-arg', multiple=True,
help='Additional arguments consumed by custom env.py scripts')
@click.argument('revision', default='head')
@with_appcontext
def upgrade(directory, sql, tag, x_arg, revision):
"""Upgrade to a later version"""
_upgrade(directory, revision, sql, tag, x_arg)
@db.command()
@click.option('-d', '--directory', default=None,
help=('Migration script directory (default is "migrations")'))
@click.option('--sql', is_flag=True,
help=('Don\'t emit SQL to database - dump to standard output '
'instead'))
@click.option('--tag', default=None,
help=('Arbitrary "tag" name - can be used by custom env.py '
'scripts'))
@click.option('-x', '--x-arg', multiple=True,
help='Additional arguments consumed by custom env.py scripts')
@click.argument('revision', default='-1')
@with_appcontext
def downgrade(directory, sql, tag, x_arg, revision):
"""Revert to a previous version"""
_downgrade(directory, revision, sql, tag, x_arg)
@db.command()
@click.option('-d', '--directory', default=None,
help=('Migration script directory (default is "migrations")'))
@click.argument('revision', default='head')
@with_appcontext
def show(directory, revision):
"""Show the revision denoted by the given symbol."""
_show(directory, revision)
@db.command()
@click.option('-d', '--directory', default=None,
help=('Migration script directory (default is "migrations")'))
@click.option('-r', '--rev-range', default=None,
help='Specify a revision range; format is [start]:[end]')
@click.option('-v', '--verbose', is_flag=True, help='Use more verbose output')
@click.option('-i', '--indicate-current', is_flag=True,
help=('Indicate current version (Alembic 0.9.9 or greater is '
'required)'))
@with_appcontext
def history(directory, rev_range, verbose, indicate_current):
"""List changeset scripts in chronological order."""
_history(directory, rev_range, verbose, indicate_current)
@db.command()
@click.option('-d', '--directory', default=None,
help=('Migration script directory (default is "migrations")'))
@click.option('-v', '--verbose', is_flag=True, help='Use more verbose output')
@click.option('--resolve-dependencies', is_flag=True,
help='Treat dependency versions as down revisions')
@with_appcontext
def heads(directory, verbose, resolve_dependencies):
"""Show current available heads in the script directory"""
_heads(directory, verbose, resolve_dependencies)
@db.command()
@click.option('-d', '--directory', default=None,
help=('Migration script directory (default is "migrations")'))
@click.option('-v', '--verbose', is_flag=True, help='Use more verbose output')
@with_appcontext
def branches(directory, verbose):
"""Show current branch points"""
_branches(directory, verbose)
@db.command()
@click.option('-d', '--directory', default=None,
help=('Migration script directory (default is "migrations")'))
@click.option('-v', '--verbose', is_flag=True, help='Use more verbose output')
@click.option('--head-only', is_flag=True,
help='Deprecated. Use --verbose for additional output')
@with_appcontext
def current(directory, verbose, head_only):
"""Display the current revision for each database."""
_current(directory, verbose, head_only)
@db.command()
@click.option('-d', '--directory', default=None,
help=('Migration script directory (default is "migrations")'))
@click.option('--sql', is_flag=True,
help=('Don\'t emit SQL to database - dump to standard output '
'instead'))
@click.option('--tag', default=None,
help=('Arbitrary "tag" name - can be used by custom env.py '
'scripts'))
@click.argument('revision', default='head')
@with_appcontext
def stamp(directory, sql, tag, revision):
"""'stamp' the revision table with the given revision; don't run any
migrations"""
_stamp(directory, revision, sql, tag)
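# Illustrative wiring sketch (assumption: example only, not part of this module, and it
# presumes Flask-Migrate has been initialised with Migrate(app, sqlalchemy_db)). The
# click group above can be attached to an app's CLI, after which commands such as
# `flask db migrate -m "msg"` and `flask db upgrade` become available:
#
#     from flask import Flask
#     app = Flask(__name__)
#     app.cli.add_command(db)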
|
import sys
import unittest
from unittest.mock import patch
from ReText.editor import ReTextEdit
from ReText.editor import documentIndentMore, documentIndentLess
from PyQt5.QtGui import QImage, QTextCursor, QTextDocument, QKeyEvent
from PyQt5.QtCore import Qt, QMimeData, QEvent
from PyQt5.QtWidgets import QApplication
from PyQt5.QtTest import QTest
from markups import MarkdownMarkup, ReStructuredTextMarkup
QApplication.setAttribute(Qt.AA_ShareOpenGLContexts)
# Keep a reference so it is not garbage collected
app = QApplication.instance() or QApplication(sys.argv)
class SettingsMock:
tabWidth = 4
tabInsertsSpaces = True
class TestIndentation(unittest.TestCase):
def setUp(self):
self.document = QTextDocument()
self.document.setPlainText('foo\nbar\nbaz')
self.settings = SettingsMock()
def test_indentMore(self):
cursor = QTextCursor(self.document)
cursor.setPosition(4)
documentIndentMore(self.document, cursor, self.settings)
self.assertEqual('foo\n bar\nbaz',
self.document.toPlainText())
cursor.setPosition(3)
documentIndentMore(self.document, cursor, self.settings)
self.assertEqual('foo \n bar\nbaz',
self.document.toPlainText())
def test_indentMoreWithTabs(self):
cursor = QTextCursor(self.document)
self.settings.tabInsertsSpaces = False
documentIndentMore(self.document, cursor, self.settings)
self.assertEqual('\tfoo\nbar\nbaz', self.document.toPlainText())
def test_indentMoreWithSelection(self):
cursor = QTextCursor(self.document)
cursor.setPosition(1)
cursor.setPosition(6, QTextCursor.KeepAnchor)
self.assertEqual('oo\u2029ba', # \u2029 is paragraph separator
cursor.selectedText())
documentIndentMore(self.document, cursor, self.settings)
self.assertEqual(' foo\n bar\nbaz',
self.document.toPlainText())
def test_indentLess(self):
self.document.setPlainText(' foo')
cursor = QTextCursor(self.document)
cursor.setPosition(10)
documentIndentLess(self.document, cursor, self.settings)
self.assertEqual(' foo', self.document.toPlainText())
documentIndentLess(self.document, cursor, self.settings)
self.assertEqual('foo', self.document.toPlainText())
def test_indentLessWithSelection(self):
self.document.setPlainText(' foo\n bar\nbaz')
cursor = QTextCursor(self.document)
cursor.setPosition(5)
cursor.setPosition(11, QTextCursor.KeepAnchor)
documentIndentLess(self.document, cursor, self.settings)
self.assertEqual('foo\nbar\nbaz', self.document.toPlainText())
class TestClipboardHandling(unittest.TestCase):
class DummyReTextTab():
def __init__(self):
self.markupClass = None
def getActiveMarkupClass(self):
return self.markupClass
def setUp(self):
self.p = self
self.editor = ReTextEdit(self)
self.dummytab = self.DummyReTextTab()
self.editor.tab = self.dummytab
def _create_image(self):
image = QImage(80, 60, QImage.Format_RGB32)
image.fill(Qt.green)
return image
def test_pasteText(self):
mimeData = QMimeData()
mimeData.setText('pasted text')
self.editor.insertFromMimeData(mimeData)
self.assertTrue('pasted text' in self.editor.toPlainText())
@patch.object(ReTextEdit, 'getImageFilename', return_value='/tmp/myimage.jpg')
@patch.object(QImage, 'save')
def test_pasteImage_Markdown(self, _mock_image, _mock_editor):
mimeData = QMimeData()
mimeData.setImageData(self._create_image())
app.clipboard().setMimeData(mimeData)
self.dummytab.markupClass = MarkdownMarkup
self.dummytab.fileName = '/tmp/foo.md'
self.editor.pasteImage()
self.assertTrue('' in self.editor.toPlainText())
@patch.object(ReTextEdit, 'getImageFilename', return_value='/tmp/myimage.jpg')
@patch.object(QImage, 'save')
def test_pasteImage_RestructuredText(self, _mock_image, _mock_editor):
mimeData = QMimeData()
mimeData.setImageData(self._create_image())
app.clipboard().setMimeData(mimeData)
self.dummytab.markupClass = ReStructuredTextMarkup
self.dummytab.fileName = '/tmp/foo.rst'
self.editor.pasteImage()
self.assertTrue('.. image:: myimage.jpg' in self.editor.toPlainText())
class TestSurround(unittest.TestCase):
def setUp(self):
self.p = self
self.editor = ReTextEdit(self)
self.document = QTextDocument()
self.document.setPlainText('foo bar baz qux corge grault')
self.cursor = QTextCursor(self.document)
def getText(self, key):
if key == Qt.Key_ParenLeft: return '('
if key == Qt.Key_BracketLeft: return '['
if key == Qt.Key_Underscore: return '_'
if key == Qt.Key_Asterisk: return '*'
if key == Qt.Key_QuoteDbl: return '"'
if key == Qt.Key_Apostrophe: return '\''
def getEvent(self, key):
return QKeyEvent(QEvent.KeyPress, key, Qt.NoModifier, text=self.getText(key))
def test_isSurroundKey(self):
# close keys should not start a surrounding
self.assertFalse(self.editor.isSurroundKey(Qt.Key_ParenRight))
self.assertFalse(self.editor.isSurroundKey(Qt.Key_BracketRight))
self.assertTrue(self.editor.isSurroundKey(Qt.Key_ParenLeft))
self.assertTrue(self.editor.isSurroundKey(Qt.Key_BracketLeft))
self.assertTrue(self.editor.isSurroundKey(Qt.Key_Underscore))
self.assertTrue(self.editor.isSurroundKey(Qt.Key_Asterisk))
self.assertTrue(self.editor.isSurroundKey(Qt.Key_QuoteDbl))
self.assertTrue(self.editor.isSurroundKey(Qt.Key_Apostrophe))
def test_getCloseKey(self):
self.assertEqual(self.editor.getCloseKey(self.getEvent(Qt.Key_Underscore), Qt.Key_Underscore), '_')
self.assertEqual(self.editor.getCloseKey(self.getEvent(Qt.Key_Asterisk), Qt.Key_Asterisk), '*')
self.assertEqual(self.editor.getCloseKey(self.getEvent(Qt.Key_QuoteDbl), Qt.Key_QuoteDbl), '"')
self.assertEqual(self.editor.getCloseKey(self.getEvent(Qt.Key_Apostrophe), Qt.Key_Apostrophe), '\'')
self.assertEqual(self.editor.getCloseKey(self.getEvent(Qt.Key_ParenLeft), Qt.Key_ParenLeft), ')')
self.assertEqual(self.editor.getCloseKey(self.getEvent(Qt.Key_BracketLeft), Qt.Key_BracketLeft), ']')
def changeCursor(self, posI, posF):
self.cursor.setPosition(posI)
self.cursor.setPosition(posF, QTextCursor.KeepAnchor)
def test_surroundText(self):
self.changeCursor(0, 3)
self.editor.surroundText(self.cursor, self.getEvent(Qt.Key_Underscore), Qt.Key_Underscore)
self.assertEqual(self.document.toPlainText(), '_foo_ bar baz qux corge grault')
self.changeCursor(6, 9)
self.editor.surroundText(self.cursor, self.getEvent(Qt.Key_Asterisk), Qt.Key_Asterisk)
self.assertEqual(self.document.toPlainText(), '_foo_ *bar* baz qux corge grault')
self.changeCursor(12, 15)
self.editor.surroundText(self.cursor, self.getEvent(Qt.Key_QuoteDbl), Qt.Key_QuoteDbl)
self.assertEqual(self.document.toPlainText(), '_foo_ *bar* "baz" qux corge grault')
self.changeCursor(18, 21)
self.editor.surroundText(self.cursor, self.getEvent(Qt.Key_Apostrophe), Qt.Key_Apostrophe)
self.assertEqual(self.document.toPlainText(), '_foo_ *bar* "baz" \'qux\' corge grault')
self.changeCursor(24, 29)
self.editor.surroundText(self.cursor, self.getEvent(Qt.Key_ParenLeft), Qt.Key_ParenLeft)
self.assertEqual(self.document.toPlainText(), '_foo_ *bar* "baz" \'qux\' (corge) grault')
self.changeCursor(32, 38)
self.editor.surroundText(self.cursor, self.getEvent(Qt.Key_BracketLeft), Qt.Key_BracketLeft)
self.assertEqual(self.document.toPlainText(), '_foo_ *bar* "baz" \'qux\' (corge) [grault]')
class TestOrderedListMode(unittest.TestCase):
class DummyReTextTab():
def __init__(self):
self.markupClass = None
def getActiveMarkupClass(self):
return self.markupClass
def setUp(self):
self.p = self
def test_increment(self):
editor = ReTextEdit(self)
editor.tab = self.DummyReTextTab()
QTest.keyClicks(editor, '1. Hello')
QTest.keyClick(editor, Qt.Key_Return)
QTest.keyClicks(editor, 'World')
self.assertEqual(editor.document().toPlainText(), '1. Hello\n2. World')
def test_repeat(self):
class TestSettings:
orderedListMode = 'repeat'
useFakeVim = False
editor = ReTextEdit(self, settings=TestSettings())
editor.tab = self.DummyReTextTab()
QTest.keyClicks(editor, '1. Hello')
QTest.keyClick(editor, Qt.Key_Return)
QTest.keyClicks(editor, 'World')
self.assertEqual(editor.document().toPlainText(), '1. Hello\n1. World')
if __name__ == '__main__':
unittest.main()
|
from django.core import exceptions
from django.core.cache import cache
from django.template import TemplateDoesNotExist
from django.template.loader import select_template
from django.utils.html import strip_spaces_between_tags
from django.utils.safestring import mark_safe, SafeText
from django.utils.translation import get_language_from_request
from rest_framework import serializers
from shop.conf import app_settings
from shop.models.customer import CustomerModel
from shop.models.product import ProductModel
from shop.models.order import OrderItemModel
from shop.rest.money import MoneyField
class BaseCustomerSerializer(serializers.ModelSerializer):
number = serializers.CharField(source='get_number')
class Meta:
model = CustomerModel
fields = ['number', 'first_name', 'last_name', 'email']
class AvailabilitySerializer(serializers.Serializer):
earliest = serializers.DateTimeField()
latest = serializers.DateTimeField()
quantity = serializers.ReadOnlyField()
sell_short = serializers.BooleanField()
limited_offer = serializers.BooleanField()
class ProductSerializer(serializers.ModelSerializer):
"""
Common serializer for our product model.
"""
price = serializers.SerializerMethodField()
product_type = serializers.CharField(read_only=True)
product_model = serializers.CharField(read_only=True)
product_url = serializers.URLField(source='get_absolute_url', read_only=True)
class Meta:
model = ProductModel
fields = '__all__'
def __init__(self, *args, **kwargs):
kwargs.setdefault('label', 'catalog')
super().__init__(*args, **kwargs)
def get_price(self, product):
price = product.get_price(self.context['request'])
return '{:f}'.format(price)
def render_html(self, product, postfix):
"""
        Return an HTML snippet containing a rendered summary for the given product.
This HTML snippet typically contains a ``<figure>`` element with a sample image
``<img src="..." >`` and a ``<figcaption>`` containing a short description of the product.
Build a template search path with `postfix` distinction.
"""
if not self.label:
msg = "The Product Serializer must be configured using a `label` field."
raise exceptions.ImproperlyConfigured(msg)
app_label = product._meta.app_label.lower()
request = self.context['request']
cache_key = 'product:{0}|{1}-{2}-{3}-{4}-{5}'.format(product.id, app_label, self.label,
product.product_model, postfix, get_language_from_request(request))
content = cache.get(cache_key)
if content:
return mark_safe(content)
params = [
(app_label, self.label, product.product_model, postfix),
(app_label, self.label, 'product', postfix),
('shop', self.label, product.product_model, postfix),
('shop', self.label, 'product', postfix),
]
try:
template = select_template(['{0}/products/{1}-{2}-{3}.html'.format(*p) for p in params])
except TemplateDoesNotExist:
return SafeText("<!-- no such template: '{0}/products/{1}-{2}-{3}.html' -->".format(*params[0]))
# when rendering emails, we require an absolute URI, so that media can be accessed from
# the mail client
absolute_base_uri = request.build_absolute_uri('/').rstrip('/')
context = {'product': product, 'ABSOLUTE_BASE_URI': absolute_base_uri}
content = strip_spaces_between_tags(template.render(context, request).strip())
cache.set(cache_key, content, app_settings.CACHE_DURATIONS['product_html_snippet'])
return mark_safe(content)
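# Illustrative note on render_html() above (assumption: example names only). With an app
# label "myshop", serializer label "catalog", product model "smartcard" and postfix
# "media", select_template() searches, in order:
#
#     myshop/products/catalog-smartcard-media.html
#     myshop/products/catalog-product-media.html
#     shop/products/catalog-smartcard-media.html
#     shop/products/catalog-product-media.html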
class BaseOrderItemSerializer(serializers.ModelSerializer):
line_total = MoneyField()
unit_price = MoneyField()
product_code = serializers.CharField()
class Meta:
model = OrderItemModel
|
import platform
import os
import diamond.collector
# Detect the architecture of the system
# and set the counters for MAX_VALUES
# appropriately. Otherwise, rolling over
# counters will cause incorrect or
# negative values.
if platform.architecture()[0] == '64bit':
counter = (2 ** 64) - 1
else:
counter = (2 ** 32) - 1
class ProcessStatCollector(diamond.collector.Collector):
PROC = '/proc/stat'
def get_default_config_help(self):
config_help = super(ProcessStatCollector,
self).get_default_config_help()
config_help.update({
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(ProcessStatCollector, self).get_default_config()
config.update({
'path': 'proc'
})
return config
def collect(self):
"""
Collect process stat data
"""
if not os.access(self.PROC, os.R_OK):
return False
# Open PROC file
file = open(self.PROC, 'r')
# Get data
for line in file:
if line.startswith('ctxt') or line.startswith('processes'):
data = line.split()
metric_name = data[0]
metric_value = int(data[1])
                metric_value = int(self.derivative(metric_name,
                                                   metric_value,
                                                   counter))
self.publish(metric_name, metric_value)
if line.startswith('procs_') or line.startswith('btime'):
data = line.split()
metric_name = data[0]
metric_value = int(data[1])
self.publish(metric_name, metric_value)
# Close file
file.close()
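# For reference (illustrative values, not part of the original module), the /proc/stat
# lines consumed by collect() look like the following; `ctxt` and `processes` are
# published as rates via derivative(), while `procs_running`, `procs_blocked` and
# `btime` are published as raw values.
#
#     ctxt 2421916505
#     btime 1446724098
#     processes 1693250
#     procs_running 2
#     procs_blocked 0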
|
import os
import re
import roslib.packages
import roslib.stack_manifest
import rospkg
ROS_ROOT = rospkg.environment.ROS_ROOT
ROS_PACKAGE_PATH = rospkg.environment.ROS_PACKAGE_PATH
STACK_FILE = 'stack.xml'
ROS_STACK = 'ros'
class ROSStackException(Exception):
pass
class InvalidROSStackException(ROSStackException):
pass
def stack_of(pkg, env=None):
"""
@param env: override environment variables
@type env: {str: str}
@return: name of stack that pkg is in, or None if pkg is not part of a stack
@rtype: str
@raise roslib.packages.InvalidROSPkgException: if pkg cannot be located
"""
if env is None:
env = os.environ
pkg_dir = roslib.packages.get_pkg_dir(pkg, ros_root=env[ROS_ROOT], ros_package_path=env.get(ROS_PACKAGE_PATH, None))
d = pkg_dir
while d and os.path.dirname(d) != d:
stack_file = os.path.join(d, STACK_FILE)
if os.path.exists(stack_file):
# TODO: need to resolve issues regarding whether the
# stack.xml or the directory defines the stack name
return os.path.basename(d)
d = os.path.dirname(d)
def get_stack_dir(stack, env=None):
"""
Get the directory of a ROS stack. This will initialize an internal
cache and return cached results if possible.
This routine is not thread-safe to os.environ changes.
@param env: override environment variables
@type env: {str: str}
@param stack: name of ROS stack to locate on disk
@type stack: str
@return: directory of stack.
@rtype: str
@raise InvalidROSStackException: if stack cannot be located.
"""
_init_rosstack(env=env)
try:
return _rosstack.get_path(stack)
except rospkg.ResourceNotFound:
# preserve old signature
raise InvalidROSStackException(stack)
_rosstack = None
_ros_paths = None
def _init_rosstack(env=None):
global _rosstack, _ros_paths
if env is None:
env = os.environ
ros_paths = rospkg.get_ros_paths(env)
if ros_paths != _ros_paths:
_ros_paths = ros_paths
_rosstack = rospkg.RosStack(ros_paths)
def list_stacks(env=None):
"""
Get list of all ROS stacks. This uses an internal cache.
This routine is not thread-safe to os.environ changes.
@param env: override environment variables
@type env: {str: str}
@return: complete list of stacks names in ROS environment
@rtype: [str]
"""
_init_rosstack(env=env)
return _rosstack.list()
def list_stacks_by_path(path, stacks=None, cache=None):
"""
List ROS stacks within the specified path.
Optionally, a cache dictionary can be provided, which will be
updated with the stack->path mappings. list_stacks_by_path() does
    NOT return cached results -- it only updates the cache.
@param path: path to list stacks in
@type path: str
@param stacks: list of stacks to append to. If stack is
already present in stacks, it will be ignored.
@type stacks: [str]
@param cache: (optional) stack path cache to update. Maps stack name to directory path.
@type cache: {str: str}
@return: complete list of stack names in ROS environment. Same as stacks parameter.
@rtype: [str]
"""
if stacks is None:
stacks = []
MANIFEST_FILE = rospkg.MANIFEST_FILE
basename = os.path.basename
for d, dirs, files in os.walk(path, topdown=True):
if STACK_FILE in files:
stack = basename(d)
if stack not in stacks:
stacks.append(stack)
if cache is not None:
cache[stack] = d
del dirs[:]
continue # leaf
elif MANIFEST_FILE in files:
del dirs[:]
continue # leaf
elif 'rospack_nosubdirs' in files:
del dirs[:]
continue # leaf
# remove hidden dirs (esp. .svn/.git)
        dirs[:] = [di for di in dirs if di[0] != '.']
for sub_d in dirs:
# followlinks=True only available in Python 2.6, so we
# have to implement manually
sub_p = os.path.join(d, sub_d)
if os.path.islink(sub_p):
stacks.extend(list_stacks_by_path(sub_p, cache=cache))
return stacks
# #2022
def expand_to_packages(names, env=None):
"""
Expand names into a list of packages. Names can either be of packages or stacks.
@param names: names of stacks or packages
@type names: [str]
    @return: ([packages], [not_found]). expand_to_packages() returns two
    lists. The first is a list of package names. The second is a list of
    names for which no matching stack or package was found. Lists may have duplicates.
@rtype: ([str], [str])
"""
if env is None:
env = os.environ
ros_paths = rospkg.get_ros_paths(env)
rospack = rospkg.RosPack(ros_paths)
rosstack = rospkg.RosStack(ros_paths)
return rospkg.expand_to_packages(names, rospack, rosstack)
def get_stack_version(stack, env=None):
"""
@param env: override environment variables
@type env: {str: str}
@return: version number of stack, or None if stack is unversioned.
@rtype: str
"""
_init_rosstack(env=env)
return _rosstack.get_stack_version(stack)
def get_stack_version_by_dir(stack_dir):
"""
Get stack version where stack_dir points to root directory of stack.
    @param stack_dir: path to the root directory of the stack
    @type stack_dir: str
@return: version number of stack, or None if stack is unversioned.
@rtype: str
"""
# REP 109: check for <version> tag first, then CMakeLists.txt
manifest_filename = os.path.join(stack_dir, STACK_FILE)
if os.path.isfile(manifest_filename):
m = roslib.stack_manifest.parse_file(manifest_filename)
if m.version:
return m.version
cmake_filename = os.path.join(stack_dir, 'CMakeLists.txt')
if os.path.isfile(cmake_filename):
with open(cmake_filename) as f:
return _get_cmake_version(f.read())
else:
return None
def _get_cmake_version(text):
for l in text.split('\n'):
if l.strip().startswith('rosbuild_make_distribution'):
x_re = re.compile(r'[()]')
lsplit = x_re.split(l.strip())
if len(lsplit) < 2:
                raise ROSStackException("couldn't find version number in CMakeLists.txt:\n\n%s" % l)
return lsplit[1]
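# Minimal illustration (not part of the original module): rosbuild stacks declare their
# version in CMakeLists.txt as, e.g.,
#
#     rosbuild_make_distribution(0.4.5)
#
# and _get_cmake_version() splits on the parentheses and returns the text between them:
#
#     _get_cmake_version('rosbuild_make_distribution(0.4.5)\n')  # -> '0.4.5'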
|
import logging
from aiohue.sensors import TYPE_ZGP_SWITCH, TYPE_ZLL_ROTARY, TYPE_ZLL_SWITCH
from homeassistant.const import CONF_EVENT, CONF_ID, CONF_UNIQUE_ID
from homeassistant.core import callback
from homeassistant.util import slugify
from .sensor_device import GenericHueDevice
_LOGGER = logging.getLogger(__name__)
CONF_HUE_EVENT = "hue_event"
CONF_LAST_UPDATED = "last_updated"
EVENT_NAME_FORMAT = "{}"
class HueEvent(GenericHueDevice):
"""When you want signals instead of entities.
Stateless sensors such as remotes are expected to generate an event
instead of a sensor entity in hass.
"""
def __init__(self, sensor, name, bridge, primary_sensor=None):
"""Register callback that will be used for signals."""
super().__init__(sensor, name, bridge, primary_sensor)
self.device_registry_id = None
self.event_id = slugify(self.sensor.name)
# Use the aiohue sensor 'state' dict to detect new remote presses
self._last_state = dict(self.sensor.state)
# Register callback in coordinator and add job to remove it on bridge reset.
self.bridge.reset_jobs.append(
self.bridge.sensor_manager.coordinator.async_add_listener(
self.async_update_callback
)
)
_LOGGER.debug("Hue event created: %s", self.event_id)
@callback
def async_update_callback(self):
"""Fire the event if reason is that state is updated."""
if self.sensor.state == self._last_state:
return
# Extract the press code as state
if hasattr(self.sensor, "rotaryevent"):
state = self.sensor.rotaryevent
else:
state = self.sensor.buttonevent
self._last_state = dict(self.sensor.state)
# Fire event
data = {
CONF_ID: self.event_id,
CONF_UNIQUE_ID: self.unique_id,
CONF_EVENT: state,
CONF_LAST_UPDATED: self.sensor.lastupdated,
}
self.bridge.hass.bus.async_fire(CONF_HUE_EVENT, data)
async def async_update_device_registry(self):
"""Update device registry."""
device_registry = (
await self.bridge.hass.helpers.device_registry.async_get_registry()
)
entry = device_registry.async_get_or_create(
config_entry_id=self.bridge.config_entry.entry_id, **self.device_info
)
self.device_registry_id = entry.id
_LOGGER.debug(
"Event registry with entry_id: %s and device_id: %s",
self.device_registry_id,
self.device_id,
)
EVENT_CONFIG_MAP = {
TYPE_ZGP_SWITCH: {"name_format": EVENT_NAME_FORMAT, "class": HueEvent},
TYPE_ZLL_SWITCH: {"name_format": EVENT_NAME_FORMAT, "class": HueEvent},
TYPE_ZLL_ROTARY: {"name_format": EVENT_NAME_FORMAT, "class": HueEvent},
}
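# Illustrative consumer sketch (assumption: example automation YAML only, not part of
# this module; the id and button code are placeholders). Events fired above can be used
# as automation triggers by listening for the `hue_event` event type:
#
#     trigger:
#       platform: event
#       event_type: hue_event
#       event_data:
#         id: living_room_remote
#         event: 1002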
|
import argparse
import io
import itertools
import logging
import sys
from contextlib import redirect_stdout
from typing import Mapping
from typing import MutableSequence
from typing import Optional
from typing import Sequence
from typing import Tuple
import a_sync
from marathon.exceptions import MarathonError
from mypy_extensions import TypedDict
from paasta_tools import __version__
from paasta_tools.cli.utils import get_instance_config
from paasta_tools.kubernetes_tools import is_kubernetes_available
from paasta_tools.kubernetes_tools import KubeClient
from paasta_tools.marathon_tools import get_marathon_clients
from paasta_tools.marathon_tools import get_marathon_servers
from paasta_tools.marathon_tools import MarathonClient
from paasta_tools.marathon_tools import MarathonClients
from paasta_tools.mesos.exceptions import MasterNotAvailableException
from paasta_tools.mesos.master import MesosMaster
from paasta_tools.mesos.master import MesosState
from paasta_tools.mesos_tools import get_mesos_config_path
from paasta_tools.mesos_tools import get_mesos_leader
from paasta_tools.mesos_tools import get_mesos_master
from paasta_tools.mesos_tools import is_mesos_available
from paasta_tools.metrics import metastatus_lib
from paasta_tools.metrics.metastatus_lib import _GenericNodeGroupingFunctionT
from paasta_tools.metrics.metastatus_lib import _KeyFuncRetT
from paasta_tools.metrics.metastatus_lib import HealthCheckResult
from paasta_tools.metrics.metastatus_lib import ResourceUtilization
from paasta_tools.metrics.metastatus_lib import ResourceUtilizationDict
from paasta_tools.utils import format_table
from paasta_tools.utils import load_system_paasta_config
from paasta_tools.utils import PaastaColors
from paasta_tools.utils import print_with_indent
log = logging.getLogger("paasta_metastatus")
logging.basicConfig()
# kazoo can be really noisy - turn it down
logging.getLogger("kazoo").setLevel(logging.CRITICAL)
ServiceInstanceStats = TypedDict(
"ServiceInstanceStats", {"mem": float, "cpus": float, "disk": float, "gpus": int}
)
class FatalError(Exception):
def __init__(self, exit_code: int) -> None:
self.exit_code = exit_code
def parse_args(argv):
parser = argparse.ArgumentParser(description="")
parser.add_argument(
"-g",
"--groupings",
nargs="+",
default=["pool", "region"],
help=(
"Group resource information of slaves grouped by attribute."
"Note: This is only effective with -vv"
),
)
parser.add_argument("-t", "--threshold", type=int, default=90)
parser.add_argument("--use-mesos-cache", action="store_true", default=False)
parser.add_argument(
"-v",
"--verbose",
action="count",
dest="verbose",
default=0,
help="Print out more output regarding the state of the cluster",
)
parser.add_argument(
"-s",
"--service",
help=(
"Show how many of a given service instance can be run on a cluster slave."
"Note: This is only effective with -vvv and --instance must also be specified"
),
)
parser.add_argument(
"-i",
"--instance",
help=(
"Show how many of a given service instance can be run on a cluster slave."
"Note: This is only effective with -vvv and --service must also be specified"
),
)
return parser.parse_args(argv)
def get_marathon_framework_ids(
marathon_clients: Sequence[MarathonClient],
) -> Sequence[str]:
return [client.get_info().framework_id for client in marathon_clients]
def _run_mesos_checks(
mesos_master: MesosMaster, mesos_state: MesosState
) -> Sequence[HealthCheckResult]:
mesos_state_status = metastatus_lib.get_mesos_state_status(mesos_state)
metrics = a_sync.block(mesos_master.metrics_snapshot)
mesos_metrics_status = metastatus_lib.get_mesos_resource_utilization_health(
mesos_metrics=metrics, mesos_state=mesos_state
)
return mesos_state_status + mesos_metrics_status # type: ignore
def _run_marathon_checks(
marathon_clients: Sequence[MarathonClient],
) -> Sequence[HealthCheckResult]:
try:
marathon_results = metastatus_lib.get_marathon_status(marathon_clients)
return marathon_results
except (MarathonError, ValueError) as e:
print(PaastaColors.red(f"CRITICAL: Unable to contact Marathon cluster: {e}"))
raise FatalError(2)
def all_marathon_clients(
marathon_clients: MarathonClients,
) -> Sequence[MarathonClient]:
return [
c for c in itertools.chain(marathon_clients.current, marathon_clients.previous)
]
def utilization_table_by_grouping(
groupings: Sequence[str],
grouping_function: _GenericNodeGroupingFunctionT,
resource_info_dict_grouped: Mapping[_KeyFuncRetT, ResourceUtilizationDict],
threshold: float,
service_instance_stats: Optional[ServiceInstanceStats] = None,
) -> Tuple[Sequence[MutableSequence[str]], bool]:
static_headers = [
"CPU (used/total)",
"RAM (used/total)",
"Disk (used/total)",
"GPU (used/total)",
"Agent count",
]
# service_instance_stats could be None so check and insert a header if needed.
if service_instance_stats:
# Insert so agent count is still last
static_headers.insert(-1, "Slots + Limiting Resource")
all_rows = [[grouping.capitalize() for grouping in groupings] + static_headers]
table_rows = []
for grouping_values, resource_info_dict in resource_info_dict_grouped.items():
resource_utilizations = metastatus_lib.resource_utillizations_from_resource_info(
total=resource_info_dict["total"], free=resource_info_dict["free"]
)
healthcheck_utilization_pairs = [
metastatus_lib.healthcheck_result_resource_utilization_pair_for_resource_utilization(
utilization, threshold
)
for utilization in resource_utilizations
]
healthy_exit = all(pair[0].healthy for pair in healthcheck_utilization_pairs)
table_rows.append(
metastatus_lib.get_table_rows_for_resource_info_dict(
[v for g, v in grouping_values], healthcheck_utilization_pairs
)
)
# Fill table rows with service-instance data if possible.
if service_instance_stats:
fill_table_rows_with_service_instance_stats(
service_instance_stats, resource_utilizations, table_rows
)
# Always append the agent count last
table_rows[-1].append(str(resource_info_dict["slave_count"]))
table_rows = sorted(table_rows, key=lambda x: x[0 : len(groupings)])
all_rows.extend(table_rows)
return all_rows, healthy_exit
def utilization_table_by_grouping_from_mesos_state(
groupings: Sequence[str],
threshold: float,
mesos_state: MesosState,
service_instance_stats: Optional[ServiceInstanceStats] = None,
) -> Tuple[Sequence[MutableSequence[str]], bool]:
grouping_function = metastatus_lib.key_func_for_attribute_multi(groupings)
resource_info_dict_grouped = metastatus_lib.get_resource_utilization_by_grouping(
grouping_function, mesos_state
)
return utilization_table_by_grouping(
groupings,
grouping_function,
resource_info_dict_grouped,
threshold,
service_instance_stats,
)
def utilization_table_by_grouping_from_kube(
groupings: Sequence[str],
threshold: float,
kube_client: KubeClient,
service_instance_stats: Optional[ServiceInstanceStats] = None,
) -> Tuple[Sequence[MutableSequence[str]], bool]:
grouping_function = metastatus_lib.key_func_for_attribute_multi_kube(groupings)
resource_info_dict_grouped = metastatus_lib.get_resource_utilization_by_grouping_kube(
grouping_function, kube_client
)
return utilization_table_by_grouping(
groupings,
grouping_function,
resource_info_dict_grouped,
threshold,
service_instance_stats,
)
def fill_table_rows_with_service_instance_stats(
service_instance_stats: ServiceInstanceStats,
resource_utilizations: Sequence[ResourceUtilization],
table_rows: MutableSequence[MutableSequence[str]],
) -> None:
# Calculate the max number of runnable service instances given the current resources (e.g. cpus, mem, disk)
resource_free_dict = {rsrc.metric: rsrc.free for rsrc in resource_utilizations}
num_service_instances_allowed = float("inf")
limiting_factor = "Unknown"
# service_instance_stats.keys() should be a subset of resource_free_dict
for rsrc_name, rsrc_amt_wanted in service_instance_stats.items():
if rsrc_amt_wanted > 0: # type: ignore
# default=0 to indicate there is none of that resource
rsrc_free = resource_free_dict.get(rsrc_name, 0)
if (
rsrc_free // rsrc_amt_wanted # type: ignore
< num_service_instances_allowed # type: ignore
):
limiting_factor = rsrc_name
num_service_instances_allowed = (
rsrc_free // rsrc_amt_wanted # type: ignore
)
table_rows[-1].append(
"{:6} ; {}".format(int(num_service_instances_allowed), limiting_factor)
)
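# Worked example for the loop above (illustrative numbers, not taken from the source):
# with 40 CPUs and 10000 MB of memory free, and a service instance wanting cpus=2.5 and
# mem=1000, the slot counts are 40 // 2.5 = 16 (cpus) and 10000 // 1000 = 10 (mem), so
# "    10 ; mem" is appended to the row. Resources requested at 0 (e.g. gpus) are
# skipped by the `rsrc_amt_wanted > 0` check.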
def get_service_instance_stats(
service: str, instance: str, cluster: str
) -> Optional[ServiceInstanceStats]:
"""Returns a Dict with stats about a given service instance.
Args:
service: the service name
instance: the instance name
cluster: the cluster name where the service instance will be searched for
Returns:
A Dict mapping resource name to the amount of that resource the particular service instance consumes.
"""
if service is None or instance is None or cluster is None:
return None
try:
instance_config = get_instance_config(service, instance, cluster)
# Get all fields that are showed in the 'paasta metastatus -vvv' command
if instance_config.get_gpus():
gpus = int(instance_config.get_gpus())
else:
gpus = 0
service_instance_stats = ServiceInstanceStats(
mem=instance_config.get_mem(),
cpus=instance_config.get_cpus(),
disk=instance_config.get_disk(),
gpus=gpus,
)
return service_instance_stats
except Exception as e:
log.error(
f"Failed to get stats for service {service} instance {instance}: {str(e)}"
)
return None
def _run_kube_checks(kube_client: KubeClient,) -> Sequence[HealthCheckResult]:
kube_status = metastatus_lib.get_kube_status(kube_client)
kube_metrics_status = metastatus_lib.get_kube_resource_utilization_health(
kube_client=kube_client
)
return kube_status + kube_metrics_status # type: ignore
def print_output(argv: Optional[Sequence[str]] = None) -> None:
mesos_available = is_mesos_available()
kube_available = is_kubernetes_available()
args = parse_args(argv)
system_paasta_config = load_system_paasta_config()
if mesos_available:
master_kwargs = {}
# we don't want to be passing False to not override a possible True
# value from system config
if args.use_mesos_cache:
master_kwargs["use_mesos_cache"] = True
master = get_mesos_master(
mesos_config_path=get_mesos_config_path(system_paasta_config),
**master_kwargs,
)
marathon_servers = get_marathon_servers(system_paasta_config)
marathon_clients = all_marathon_clients(get_marathon_clients(marathon_servers))
try:
mesos_state = a_sync.block(master.state)
all_mesos_results = _run_mesos_checks(
mesos_master=master, mesos_state=mesos_state
)
except MasterNotAvailableException as e:
# if we can't connect to master at all,
# then bomb out early
print(PaastaColors.red("CRITICAL: %s" % "\n".join(e.args)))
raise FatalError(2)
marathon_results = _run_marathon_checks(marathon_clients)
else:
marathon_results = [
metastatus_lib.HealthCheckResult(
message="Marathon is not configured to run here", healthy=True
)
]
all_mesos_results = [
metastatus_lib.HealthCheckResult(
message="Mesos is not configured to run here", healthy=True
)
]
if kube_available:
kube_client = KubeClient()
kube_results = _run_kube_checks(kube_client)
else:
kube_results = [
metastatus_lib.HealthCheckResult(
message="Kubernetes is not configured to run here", healthy=True
)
]
mesos_ok = all(metastatus_lib.status_for_results(all_mesos_results))
marathon_ok = all(metastatus_lib.status_for_results(marathon_results))
kube_ok = all(metastatus_lib.status_for_results(kube_results))
mesos_summary = metastatus_lib.generate_summary_for_check("Mesos", mesos_ok)
marathon_summary = metastatus_lib.generate_summary_for_check(
"Marathon", marathon_ok
)
kube_summary = metastatus_lib.generate_summary_for_check("Kubernetes", kube_ok)
    healthy_exit = all([mesos_ok, marathon_ok])
print(f"Master paasta_tools version: {__version__}")
print("Mesos leader: %s" % get_mesos_leader())
metastatus_lib.print_results_for_healthchecks(
mesos_summary, mesos_ok, all_mesos_results, args.verbose
)
if args.verbose > 1 and mesos_available:
print_with_indent("Resources Grouped by %s" % ", ".join(args.groupings), 2)
all_rows, healthy_exit = utilization_table_by_grouping_from_mesos_state(
groupings=args.groupings, threshold=args.threshold, mesos_state=mesos_state
)
for line in format_table(all_rows):
print_with_indent(line, 4)
if args.verbose >= 3:
print_with_indent("Per Slave Utilization", 2)
cluster = system_paasta_config.get_cluster()
service_instance_stats = get_service_instance_stats(
args.service, args.instance, cluster
)
if service_instance_stats:
print_with_indent(
"Service-Instance stats:" + str(service_instance_stats), 2
)
# print info about slaves here. Note that we don't make modifications to
# the healthy_exit variable here, because we don't care about a single slave
# having high usage.
all_rows, _ = utilization_table_by_grouping_from_mesos_state(
groupings=args.groupings + ["hostname"],
threshold=args.threshold,
mesos_state=mesos_state,
service_instance_stats=service_instance_stats,
)
# The last column from utilization_table_by_grouping_from_mesos_state is "Agent count", which will always be
# 1 for per-slave resources, so delete it.
for row in all_rows:
row.pop()
for line in format_table(all_rows):
print_with_indent(line, 4)
metastatus_lib.print_results_for_healthchecks(
marathon_summary, marathon_ok, marathon_results, args.verbose
)
metastatus_lib.print_results_for_healthchecks(
kube_summary, kube_ok, kube_results, args.verbose
)
if args.verbose > 1 and kube_available:
print_with_indent("Resources Grouped by %s" % ", ".join(args.groupings), 2)
all_rows, healthy_exit = utilization_table_by_grouping_from_kube(
groupings=args.groupings, threshold=args.threshold, kube_client=kube_client
)
for line in format_table(all_rows):
print_with_indent(line, 4)
if args.verbose >= 3:
print_with_indent("Per Node Utilization", 2)
cluster = system_paasta_config.get_cluster()
service_instance_stats = get_service_instance_stats(
args.service, args.instance, cluster
)
if service_instance_stats:
print_with_indent(
"Service-Instance stats:" + str(service_instance_stats), 2
)
# print info about nodes here. Note that we don't make
# modifications to the healthy_exit variable here, because we don't
# care about a single node having high usage.
all_rows, _ = utilization_table_by_grouping_from_kube(
groupings=args.groupings + ["hostname"],
threshold=args.threshold,
kube_client=kube_client,
service_instance_stats=service_instance_stats,
)
# The last column from utilization_table_by_grouping_from_kube is "Agent count", which will always be
# 1 for per-node resources, so delete it.
for row in all_rows:
row.pop()
for line in format_table(all_rows):
print_with_indent(line, 4)
if not healthy_exit:
raise FatalError(2)
def get_output(argv: Optional[Sequence[str]] = None) -> Tuple[str, int]:
output = io.StringIO()
exit_code = 1
with redirect_stdout(output):
exit_code = 0
try:
print_output(argv)
except FatalError as e:
exit_code = e.exit_code
ret = output.getvalue()
return ret, exit_code
def main(argv: Optional[Sequence[str]] = None) -> None:
exit_code = 0
try:
print_output(argv)
except FatalError as e:
exit_code = e.exit_code
sys.exit(exit_code)
if __name__ == "__main__":
main()
|
import logging
from pyxeoma.xeoma import Xeoma, XeomaError
import voluptuous as vol
from homeassistant.components.camera import PLATFORM_SCHEMA, Camera
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers import config_validation as cv
_LOGGER = logging.getLogger(__name__)
CONF_CAMERAS = "cameras"
CONF_HIDE = "hide"
CONF_IMAGE_NAME = "image_name"
CONF_NEW_VERSION = "new_version"
CONF_VIEWER_PASSWORD = "viewer_password"
CONF_VIEWER_USERNAME = "viewer_username"
CAMERAS_SCHEMA = vol.Schema(
{
vol.Required(CONF_IMAGE_NAME): cv.string,
vol.Optional(CONF_HIDE, default=False): cv.boolean,
vol.Optional(CONF_NAME): cv.string,
},
required=False,
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_CAMERAS): vol.Schema(
vol.All(cv.ensure_list, [CAMERAS_SCHEMA])
),
vol.Optional(CONF_NEW_VERSION, default=True): cv.boolean,
vol.Optional(CONF_PASSWORD): cv.string,
vol.Optional(CONF_USERNAME): cv.string,
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Discover and setup Xeoma Cameras."""
host = config[CONF_HOST]
login = config.get(CONF_USERNAME)
password = config.get(CONF_PASSWORD)
xeoma = Xeoma(host, login, password)
try:
await xeoma.async_test_connection()
discovered_image_names = await xeoma.async_get_image_names()
discovered_cameras = [
{
CONF_IMAGE_NAME: image_name,
CONF_HIDE: False,
CONF_NAME: image_name,
CONF_VIEWER_USERNAME: username,
CONF_VIEWER_PASSWORD: pw,
}
for image_name, username, pw in discovered_image_names
]
for cam in config.get(CONF_CAMERAS, []):
camera = next(
(
dc
for dc in discovered_cameras
if dc[CONF_IMAGE_NAME] == cam[CONF_IMAGE_NAME]
),
None,
)
if camera is not None:
if CONF_NAME in cam:
camera[CONF_NAME] = cam[CONF_NAME]
if CONF_HIDE in cam:
camera[CONF_HIDE] = cam[CONF_HIDE]
cameras = list(filter(lambda c: not c[CONF_HIDE], discovered_cameras))
async_add_entities(
[
XeomaCamera(
xeoma,
camera[CONF_IMAGE_NAME],
camera[CONF_NAME],
camera[CONF_VIEWER_USERNAME],
camera[CONF_VIEWER_PASSWORD],
)
for camera in cameras
]
)
except XeomaError as err:
_LOGGER.error("Error: %s", err.message)
return
class XeomaCamera(Camera):
"""Implementation of a Xeoma camera."""
def __init__(self, xeoma, image, name, username, password):
"""Initialize a Xeoma camera."""
super().__init__()
self._xeoma = xeoma
self._name = name
self._image = image
self._username = username
self._password = password
self._last_image = None
async def async_camera_image(self):
"""Return a still image response from the camera."""
try:
image = await self._xeoma.async_get_camera_image(
self._image, self._username, self._password
)
self._last_image = image
except XeomaError as err:
_LOGGER.error("Error fetching image: %s", err.message)
return self._last_image
@property
def name(self):
"""Return the name of this device."""
return self._name
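# Illustrative configuration sketch (assumption: example values only, not part of this
# module). The PLATFORM_SCHEMA above corresponds to configuration.yaml entries such as:
#
#     camera:
#       - platform: xeoma
#         host: 192.168.1.20
#         username: admin
#         password: secret
#         cameras:
#           - image_name: cam1
#             name: Front Door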
|
from scattertext.indexstore import IndexStore
class IndexStoreFromDict(object):
@staticmethod
def build(term_to_index_dict):
'''
Parameters
----------
term_to_index_dict: term -> idx dictionary
Returns
-------
IndexStore
'''
idxstore = IndexStore()
idxstore._val2i = term_to_index_dict
idxstore._next_i = len(term_to_index_dict)
idxstore._i2val = [None for _ in range(idxstore._next_i)]
for term, idx in idxstore._val2i.items():
idxstore._i2val[idx] = term
return idxstore
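# Minimal usage sketch (illustrative only, not part of the original module). After
# build(), the internal mappings populated above are consistent in both directions:
#
#     store = IndexStoreFromDict.build({'cat': 0, 'dog': 1, 'fish': 2})
#     store._val2i['dog']   # -> 1
#     store._i2val[1]       # -> 'dog'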
|
from unittest import TestCase
import numpy as np
from scattertext.termsignificance import LogOddsRatioUninformativeDirichletPrior
from scattertext.termsignificance.LogOddsRatioUninformativeDirichletPrior import z_to_p_val
from scattertext.test.test_termDocMatrixFactory import build_hamlet_jz_term_doc_mat
class TestLogOddsRatioUninformativeDirichletPrior(TestCase):
def test_get_p_vals(self):
tdm = build_hamlet_jz_term_doc_mat()
df = tdm.get_term_freq_df()
X = df[['hamlet freq', 'jay-z/r. kelly freq']].values
pvals = LogOddsRatioUninformativeDirichletPrior().get_p_vals(X)
self.assertGreaterEqual(min(pvals), 0)
self.assertLessEqual(min(pvals), 1)
def test_z_to_p_val(self):
np.testing.assert_almost_equal(z_to_p_val(0), 0.5)
np.testing.assert_almost_equal(z_to_p_val(1.96), 0.97500210485177952)
np.testing.assert_almost_equal(z_to_p_val(-1.96), 0.024997895148220428)
self.assertLessEqual(z_to_p_val(-0.1), z_to_p_val(0))
self.assertLessEqual(z_to_p_val(0), z_to_p_val(0.1))
self.assertLessEqual(z_to_p_val(0.1), z_to_p_val(0.2))
|
__docformat__ = "restructuredtext en"
import os
import errno
import signal
import sys
import time
import warnings
from six.moves import range
def setugid(user):
"""Change process user and group ID
Argument is a numeric user id or a user name"""
try:
from pwd import getpwuid
passwd = getpwuid(int(user))
except ValueError:
from pwd import getpwnam
passwd = getpwnam(user)
if hasattr(os, 'initgroups'): # python >= 2.7
os.initgroups(passwd.pw_name, passwd.pw_gid)
else:
import ctypes
if ctypes.CDLL(None).initgroups(passwd.pw_name, passwd.pw_gid) < 0:
err = ctypes.c_int.in_dll(ctypes.pythonapi,"errno").value
raise OSError(err, os.strerror(err), 'initgroups')
os.setgid(passwd.pw_gid)
os.setuid(passwd.pw_uid)
os.environ['HOME'] = passwd.pw_dir
def daemonize(pidfile=None, uid=None, umask=0o77):
"""daemonize a Unix process. Set paranoid umask by default.
Return 1 in the original process, 2 in the first fork, and None for the
    second fork (e.g. the daemon process).
"""
# http://www.faqs.org/faqs/unix-faq/programmer/faq/
#
# fork so the parent can exit
if os.fork(): # launch child and...
return 1
# disconnect from tty and create a new session
os.setsid()
# fork again so the parent, (the session group leader), can exit.
# as a non-session group leader, we can never regain a controlling
# terminal.
if os.fork(): # launch child again.
return 2
    # move to the root to avoid mount point problems
os.chdir('/')
# redirect standard descriptors
null = os.open('/dev/null', os.O_RDWR)
for i in range(3):
try:
os.dup2(null, i)
except OSError as e:
if e.errno != errno.EBADF:
raise
os.close(null)
# filter warnings
warnings.filterwarnings('ignore')
# write pid in a file
if pidfile:
# ensure the directory where the pid-file should be set exists (for
# instance /var/run/cubicweb may be deleted on computer restart)
piddir = os.path.dirname(pidfile)
if not os.path.exists(piddir):
os.makedirs(piddir)
        f = open(pidfile, 'w')
f.write(str(os.getpid()))
f.close()
# set umask if specified
if umask is not None:
os.umask(umask)
# change process uid
if uid:
setugid(uid)
return None
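# Illustrative caller sketch (assumption: not part of the original module; run_service
# is a placeholder for the daemon's real work). Both parent processes get a non-None
# return value and should exit, while the grandchild (None) carries on as the daemon:
#
#     ret = daemonize(pidfile='/var/run/myapp.pid', uid='myuser')
#     if ret is not None:
#         sys.exit(0)
#     run_service()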
|
import csv
from datetime import datetime, timedelta
import gzip
import json
import logging
import os
from aiohttp.hdrs import USER_AGENT
import pytz
import requests
import voluptuous as vol
from homeassistant.const import (
AREA_SQUARE_METERS,
ATTR_ATTRIBUTION,
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_MONITORED_CONDITIONS,
CONF_NAME,
DEGREE,
LENGTH_METERS,
PERCENTAGE,
PRESSURE_HPA,
SPEED_KILOMETERS_PER_HOUR,
TEMP_CELSIUS,
__version__,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
ATTR_STATION = "station"
ATTR_UPDATED = "updated"
ATTRIBUTION = "Data provided by ZAMG"
CONF_STATION_ID = "station_id"
DEFAULT_NAME = "zamg"
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=10)
SENSOR_TYPES = {
"pressure": ("Pressure", PRESSURE_HPA, "LDstat hPa", float),
"pressure_sealevel": ("Pressure at Sea Level", PRESSURE_HPA, "LDred hPa", float),
"humidity": ("Humidity", PERCENTAGE, "RF %", int),
"wind_speed": (
"Wind Speed",
SPEED_KILOMETERS_PER_HOUR,
f"WG {SPEED_KILOMETERS_PER_HOUR}",
float,
),
"wind_bearing": ("Wind Bearing", DEGREE, f"WR {DEGREE}", int),
"wind_max_speed": (
"Top Wind Speed",
SPEED_KILOMETERS_PER_HOUR,
f"WSG {SPEED_KILOMETERS_PER_HOUR}",
float,
),
"wind_max_bearing": ("Top Wind Bearing", DEGREE, f"WSR {DEGREE}", int),
"sun_last_hour": ("Sun Last Hour", PERCENTAGE, f"SO {PERCENTAGE}", int),
"temperature": ("Temperature", TEMP_CELSIUS, f"T {TEMP_CELSIUS}", float),
"precipitation": (
"Precipitation",
f"l/{AREA_SQUARE_METERS}",
f"N l/{AREA_SQUARE_METERS}",
float,
),
"dewpoint": ("Dew Point", TEMP_CELSIUS, f"TP {TEMP_CELSIUS}", float),
    # The following are probably not useful for general consumption,
# but we need them to fill in internal attributes
"station_name": ("Station Name", None, "Name", str),
"station_elevation": (
"Station Elevation",
LENGTH_METERS,
f"Höhe {LENGTH_METERS}",
int,
),
"update_date": ("Update Date", None, "Datum", str),
"update_time": ("Update Time", None, "Zeit", str),
}
PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_MONITORED_CONDITIONS, default=["temperature"]): vol.All(
cv.ensure_list, [vol.In(SENSOR_TYPES)]
),
vol.Optional(CONF_STATION_ID): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Inclusive(
CONF_LATITUDE, "coordinates", "Latitude and longitude must exist together"
): cv.latitude,
vol.Inclusive(
CONF_LONGITUDE, "coordinates", "Latitude and longitude must exist together"
): cv.longitude,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the ZAMG sensor platform."""
name = config.get(CONF_NAME)
latitude = config.get(CONF_LATITUDE, hass.config.latitude)
longitude = config.get(CONF_LONGITUDE, hass.config.longitude)
station_id = config.get(CONF_STATION_ID) or closest_station(
latitude, longitude, hass.config.config_dir
)
if station_id not in zamg_stations(hass.config.config_dir):
_LOGGER.error(
"Configured ZAMG %s (%s) is not a known station",
CONF_STATION_ID,
station_id,
)
return False
probe = ZamgData(station_id=station_id)
try:
probe.update()
except (ValueError, TypeError) as err:
_LOGGER.error("Received error from ZAMG: %s", err)
return False
add_entities(
[
ZamgSensor(probe, variable, name)
for variable in config[CONF_MONITORED_CONDITIONS]
],
True,
)
class ZamgSensor(Entity):
"""Implementation of a ZAMG sensor."""
def __init__(self, probe, variable, name):
"""Initialize the sensor."""
self.probe = probe
self.client_name = name
self.variable = variable
@property
def name(self):
"""Return the name of the sensor."""
return f"{self.client_name} {self.variable}"
@property
def state(self):
"""Return the state of the sensor."""
return self.probe.get_data(self.variable)
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return SENSOR_TYPES[self.variable][1]
@property
def device_state_attributes(self):
"""Return the state attributes."""
return {
ATTR_ATTRIBUTION: ATTRIBUTION,
ATTR_STATION: self.probe.get_data("station_name"),
ATTR_UPDATED: self.probe.last_update.isoformat(),
}
def update(self):
"""Delegate update to probe."""
self.probe.update()
class ZamgData:
"""The class for handling the data retrieval."""
API_URL = "http://www.zamg.ac.at/ogd/"
API_HEADERS = {USER_AGENT: f"home-assistant.zamg/ {__version__}"}
def __init__(self, station_id):
"""Initialize the probe."""
self._station_id = station_id
self.data = {}
@property
def last_update(self):
"""Return the timestamp of the most recent data."""
date, time = self.data.get("update_date"), self.data.get("update_time")
if date is not None and time is not None:
return datetime.strptime(date + time, "%d-%m-%Y%H:%M").replace(
tzinfo=pytz.timezone("Europe/Vienna")
)
@classmethod
def current_observations(cls):
"""Fetch the latest CSV data."""
try:
response = requests.get(cls.API_URL, headers=cls.API_HEADERS, timeout=15)
response.raise_for_status()
response.encoding = "UTF8"
return csv.DictReader(
response.text.splitlines(), delimiter=";", quotechar='"'
)
        except requests.exceptions.HTTPError as err:
            _LOGGER.error("Error while fetching ZAMG data: %s", err)
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Get the latest data from ZAMG."""
if self.last_update and (
self.last_update + timedelta(hours=1)
> datetime.utcnow().replace(tzinfo=pytz.utc)
):
return # Not time to update yet; data is only hourly
for row in self.current_observations():
if row.get("Station") == self._station_id:
api_fields = {
col_heading: (standard_name, dtype)
for standard_name, (
_,
_,
col_heading,
dtype,
) in SENSOR_TYPES.items()
}
self.data = {
api_fields.get(col_heading)[0]: api_fields.get(col_heading)[1](
v.replace(",", ".")
)
for col_heading, v in row.items()
if col_heading in api_fields and v
}
break
else:
raise ValueError(f"No weather data for station {self._station_id}")
def get_data(self, variable):
"""Get the data."""
return self.data.get(variable)
def _get_zamg_stations():
"""Return {CONF_STATION: (lat, lon)} for all stations, for auto-config."""
capital_stations = {r["Station"] for r in ZamgData.current_observations()}
req = requests.get(
"https://www.zamg.ac.at/cms/en/documents/climate/"
"doc_metnetwork/zamg-observation-points",
timeout=15,
)
stations = {}
for row in csv.DictReader(req.text.splitlines(), delimiter=";", quotechar='"'):
if row.get("synnr") in capital_stations:
try:
stations[row["synnr"]] = tuple(
float(row[coord].replace(",", "."))
for coord in ["breite_dezi", "länge_dezi"]
)
except KeyError:
_LOGGER.error("ZAMG schema changed again, cannot autodetect station")
return stations
def zamg_stations(cache_dir):
"""Return {CONF_STATION: (lat, lon)} for all stations, for auto-config.
Results from internet requests are cached as compressed json, making
subsequent calls very much faster.
"""
cache_file = os.path.join(cache_dir, ".zamg-stations.json.gz")
if not os.path.isfile(cache_file):
stations = _get_zamg_stations()
with gzip.open(cache_file, "wt") as cache:
json.dump(stations, cache, sort_keys=True)
return stations
with gzip.open(cache_file, "rt") as cache:
return {k: tuple(v) for k, v in json.load(cache).items()}
def closest_station(lat, lon, cache_dir):
"""Return the ZONE_ID.WMO_ID of the closest station to our lat/lon."""
if lat is None or lon is None or not os.path.isdir(cache_dir):
return
stations = zamg_stations(cache_dir)
def comparable_dist(zamg_id):
"""Calculate the pseudo-distance from lat/lon."""
station_lat, station_lon = stations[zamg_id]
return (lat - station_lat) ** 2 + (lon - station_lon) ** 2
return min(stations, key=comparable_dist)
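# Hedged usage sketch (an assumption, not part of the original platform): ties
# the helpers above together outside of Home Assistant. The default
# coordinates are invented, and calling this helper performs real network
# requests.
def _example_station_lookup(cache_dir, lat=48.21, lon=16.37):
    """Return the current temperature at the station closest to lat/lon."""
    station_id = closest_station(lat, lon, cache_dir)
    probe = ZamgData(station_id=station_id)
    probe.update()
    return probe.get_data("temperature")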
|
import os.path
import re
import textwrap
from coverage import env
from coverage.phystokens import source_token_lines, source_encoding
from coverage.phystokens import neuter_encoding_declaration, compile_unicode
from coverage.python import get_python_source
from tests.coveragetest import CoverageTest, TESTS_DIR
# A simple program and its token stream.
SIMPLE = u"""\
# yay!
def foo():
say('two = %d' % 2)
"""
SIMPLE_TOKENS = [
[('com', "# yay!")],
[('key', 'def'), ('ws', ' '), ('nam', 'foo'), ('op', '('), ('op', ')'), ('op', ':')],
[('ws', ' '), ('nam', 'say'), ('op', '('),
('str', "'two = %d'"), ('ws', ' '), ('op', '%'),
('ws', ' '), ('num', '2'), ('op', ')')],
]
# Mixed-whitespace program, and its token stream.
MIXED_WS = u"""\
def hello():
a="Hello world!"
\tb="indented"
"""
MIXED_WS_TOKENS = [
[('key', 'def'), ('ws', ' '), ('nam', 'hello'), ('op', '('), ('op', ')'), ('op', ':')],
[('ws', ' '), ('nam', 'a'), ('op', '='), ('str', '"Hello world!"')],
[('ws', ' '), ('nam', 'b'), ('op', '='), ('str', '"indented"')],
]
# https://github.com/nedbat/coveragepy/issues/822
BUG_822 = u"""\
print( "Message 1" )
array = [ 1,2,3,4, # 4 numbers \\
5,6,7 ] # 3 numbers
print( "Message 2" )
"""
class PhysTokensTest(CoverageTest):
"""Tests for coverage.py's improved tokenizer."""
run_in_temp_dir = False
def check_tokenization(self, source):
"""Tokenize `source`, then put it back together, should be the same."""
tokenized = ""
for line in source_token_lines(source):
text = "".join(t for _, t in line)
tokenized += text + "\n"
# source_token_lines doesn't preserve trailing spaces, so trim all that
# before comparing.
source = source.replace('\r\n', '\n')
source = re.sub(r"(?m)[ \t]+$", "", source)
tokenized = re.sub(r"(?m)[ \t]+$", "", tokenized)
self.assertMultiLineEqual(source, tokenized)
def check_file_tokenization(self, fname):
"""Use the contents of `fname` for `check_tokenization`."""
self.check_tokenization(get_python_source(fname))
def test_simple(self):
self.assertEqual(list(source_token_lines(SIMPLE)), SIMPLE_TOKENS)
self.check_tokenization(SIMPLE)
def test_missing_final_newline(self):
# We can tokenize source that is missing the final newline.
self.assertEqual(list(source_token_lines(SIMPLE.rstrip())), SIMPLE_TOKENS)
def test_tab_indentation(self):
# Mixed tabs and spaces...
self.assertEqual(list(source_token_lines(MIXED_WS)), MIXED_WS_TOKENS)
def test_bug_822(self):
self.check_tokenization(BUG_822)
def test_tokenize_real_file(self):
# Check the tokenization of a real file (large, btw).
real_file = os.path.join(TESTS_DIR, "test_coverage.py")
self.check_file_tokenization(real_file)
def test_stress(self):
# Check the tokenization of a stress-test file.
stress = os.path.join(TESTS_DIR, "stress_phystoken.tok")
self.check_file_tokenization(stress)
stress = os.path.join(TESTS_DIR, "stress_phystoken_dos.tok")
self.check_file_tokenization(stress)
# The default encoding is different in Python 2 and Python 3.
if env.PY3:
DEF_ENCODING = "utf-8"
else:
DEF_ENCODING = "ascii"
ENCODING_DECLARATION_SOURCES = [
# Various forms from http://www.python.org/dev/peps/pep-0263/
(1, b"# coding=cp850\n\n", "cp850"),
(1, b"# coding=latin-1\n", "iso-8859-1"),
(1, b"# coding=iso-latin-1\n", "iso-8859-1"),
(1, b"#!/usr/bin/python\n# -*- coding: cp850 -*-\n", "cp850"),
(1, b"#!/usr/bin/python\n# vim: set fileencoding=cp850:\n", "cp850"),
(1, b"# This Python file uses this encoding: cp850\n", "cp850"),
(1, b"# This file uses a different encoding:\n# coding: cp850\n", "cp850"),
(1, b"\n# coding=cp850\n\n", "cp850"),
(2, b"# -*- coding:cp850 -*-\n# vim: fileencoding=cp850\n", "cp850"),
]
class SourceEncodingTest(CoverageTest):
"""Tests of source_encoding() for detecting encodings."""
run_in_temp_dir = False
def test_detect_source_encoding(self):
for _, source, expected in ENCODING_DECLARATION_SOURCES:
self.assertEqual(
source_encoding(source),
expected,
"Wrong encoding in %r" % source
)
def test_detect_source_encoding_not_in_comment(self):
if env.PYPY3: # pragma: no metacov
# PyPy3 gets this case wrong. Not sure what I can do about it,
# so skip the test.
self.skipTest("PyPy3 is wrong about non-comment encoding. Skip it.")
# Should not detect anything here
source = b'def parse(src, encoding=None):\n pass'
self.assertEqual(source_encoding(source), DEF_ENCODING)
def test_dont_detect_source_encoding_on_third_line(self):
# A coding declaration doesn't count on the third line.
source = b"\n\n# coding=cp850\n\n"
self.assertEqual(source_encoding(source), DEF_ENCODING)
def test_detect_source_encoding_of_empty_file(self):
# An important edge case.
self.assertEqual(source_encoding(b""), DEF_ENCODING)
def test_bom(self):
# A BOM means utf-8.
source = b"\xEF\xBB\xBFtext = 'hello'\n"
self.assertEqual(source_encoding(source), 'utf-8-sig')
def test_bom_with_encoding(self):
source = b"\xEF\xBB\xBF# coding: utf-8\ntext = 'hello'\n"
self.assertEqual(source_encoding(source), 'utf-8-sig')
def test_bom_is_wrong(self):
# A BOM with an explicit non-utf8 encoding is an error.
source = b"\xEF\xBB\xBF# coding: cp850\n"
with self.assertRaisesRegex(SyntaxError, "encoding problem: utf-8"):
source_encoding(source)
def test_unknown_encoding(self):
source = b"# coding: klingon\n"
with self.assertRaisesRegex(SyntaxError, "unknown encoding: klingon"):
source_encoding(source)
class NeuterEncodingDeclarationTest(CoverageTest):
"""Tests of phystokens.neuter_encoding_declaration()."""
run_in_temp_dir = False
def test_neuter_encoding_declaration(self):
for lines_diff_expected, source, _ in ENCODING_DECLARATION_SOURCES:
neutered = neuter_encoding_declaration(source.decode("ascii"))
neutered = neutered.encode("ascii")
# The neutered source should have the same number of lines.
source_lines = source.splitlines()
neutered_lines = neutered.splitlines()
self.assertEqual(len(source_lines), len(neutered_lines))
# Only one of the lines should be different.
lines_different = sum(
int(nline != sline) for nline, sline in zip(neutered_lines, source_lines)
)
self.assertEqual(lines_diff_expected, lines_different)
# The neutered source will be detected as having no encoding
# declaration.
self.assertEqual(
source_encoding(neutered),
DEF_ENCODING,
"Wrong encoding in %r" % neutered
)
def test_two_encoding_declarations(self):
input_src = textwrap.dedent(u"""\
# -*- coding: ascii -*-
# -*- coding: utf-8 -*-
# -*- coding: utf-16 -*-
""")
expected_src = textwrap.dedent(u"""\
# (deleted declaration) -*-
# (deleted declaration) -*-
# -*- coding: utf-16 -*-
""")
output_src = neuter_encoding_declaration(input_src)
self.assertEqual(expected_src, output_src)
def test_one_encoding_declaration(self):
input_src = textwrap.dedent(u"""\
# -*- coding: utf-16 -*-
# Just a comment.
# -*- coding: ascii -*-
""")
expected_src = textwrap.dedent(u"""\
# (deleted declaration) -*-
# Just a comment.
# -*- coding: ascii -*-
""")
output_src = neuter_encoding_declaration(input_src)
self.assertEqual(expected_src, output_src)
class Bug529Test(CoverageTest):
"""Test of bug 529"""
def test_bug_529(self):
# Don't over-neuter coding declarations. This happened with a test
# file which contained code in multi-line strings, all with coding
# declarations. The neutering of the file also changed the multi-line
# strings, which it shouldn't have.
self.make_file("the_test.py", '''\
# -*- coding: utf-8 -*-
import unittest
class Bug529Test(unittest.TestCase):
def test_two_strings_are_equal(self):
src1 = u"""\\
# -*- coding: utf-8 -*-
# Just a comment.
"""
src2 = u"""\\
# -*- coding: utf-8 -*-
# Just a comment.
"""
self.assertEqual(src1, src2)
if __name__ == "__main__":
unittest.main()
''')
status, out = self.run_command_status("coverage run the_test.py")
self.assertEqual(status, 0)
self.assertIn("OK", out)
# If this test fails, the output will be super-confusing, because it
# has a failing unit test contained within the failing unit test.
class CompileUnicodeTest(CoverageTest):
"""Tests of compiling Unicode strings."""
run_in_temp_dir = False
def assert_compile_unicode(self, source):
"""Assert that `source` will compile properly with `compile_unicode`."""
source += u"a = 42\n"
# This doesn't raise an exception:
code = compile_unicode(source, "<string>", "exec")
globs = {}
exec(code, globs)
self.assertEqual(globs['a'], 42)
def test_cp1252(self):
uni = u"""# coding: cp1252\n# \u201C curly \u201D\n"""
self.assert_compile_unicode(uni)
def test_double_coding_declaration(self):
        # Build this string in a weird way so that actual vims won't try to
# interpret it...
uni = u"# -*- coding:utf-8 -*-\n# v" + "im: fileencoding=utf-8\n"
self.assert_compile_unicode(uni)
|
import os
from functools import reduce
from uuid import uuid4
from pygal._compat import is_list_like
from pygal.adapters import decimal_to_float, not_zero, positive
from pygal.config import Config, SerieConfig
from pygal.serie import Serie
from pygal.state import State
from pygal.svg import Svg
from pygal.util import compose, ident
from pygal.view import Box, Margin
class BaseGraph(object):
"""Chart internal behaviour related functions"""
_adapters = []
def __init__(self, config=None, **kwargs):
"""Config preparation and various initialization"""
if config:
if isinstance(config, type):
config = config()
else:
config = config.copy()
else:
config = Config()
config(**kwargs)
self.config = config
self.state = None
self.uuid = str(uuid4())
self.raw_series = []
self.xml_filters = []
def __setattr__(self, name, value):
"""Set an attribute on the class or in the state if there is one"""
if name.startswith('__') or getattr(self, 'state', None) is None:
super(BaseGraph, self).__setattr__(name, value)
else:
setattr(self.state, name, value)
def __getattribute__(self, name):
"""Get an attribute from the class or from the state if there is one"""
if name.startswith('__') or name == 'state' or getattr(
self, 'state',
None) is None or name not in self.state.__dict__:
return super(BaseGraph, self).__getattribute__(name)
return getattr(self.state, name)
def prepare_values(self, raw, offset=0):
"""Prepare the values to start with sane values"""
from pygal import Histogram
from pygal.graph.map import BaseMap
if self.zero == 0 and isinstance(self, BaseMap):
self.zero = 1
if self.x_label_rotation:
self.x_label_rotation %= 360
if self.y_label_rotation:
self.y_label_rotation %= 360
for key in ('x_labels', 'y_labels'):
if getattr(self, key):
setattr(self, key, list(getattr(self, key)))
if not raw:
return
adapters = list(self._adapters) or [lambda x: x]
if self.logarithmic:
for fun in not_zero, positive:
if fun in adapters:
adapters.remove(fun)
adapters = adapters + [positive, not_zero]
adapters = adapters + [decimal_to_float]
self._adapt = reduce(compose, adapters) if not self.strict else ident
self._x_adapt = reduce(
compose, self._x_adapters
) if not self.strict and getattr(self, '_x_adapters', None) else ident
series = []
raw = [(
list(raw_values) if not isinstance(raw_values, dict) else
raw_values, serie_config_kwargs
) for raw_values, serie_config_kwargs in raw]
width = max([len(values)
for values, _ in raw] + [len(self.x_labels or [])])
for raw_values, serie_config_kwargs in raw:
metadata = {}
values = []
if isinstance(raw_values, dict):
if isinstance(self, BaseMap):
raw_values = list(raw_values.items())
else:
value_list = [None] * width
for k, v in raw_values.items():
if k in (self.x_labels or []):
value_list[self.x_labels.index(k)] = v
raw_values = value_list
for index, raw_value in enumerate(raw_values + (
(width - len(raw_values)) * [None] # aligning values
if len(raw_values) < width else [])):
if isinstance(raw_value, dict):
raw_value = dict(raw_value)
value = raw_value.pop('value', None)
metadata[index] = raw_value
else:
value = raw_value
                # TODO: handle this special-casing in the chart class methods instead
if isinstance(self, Histogram):
if value is None:
value = (None, None, None)
elif not is_list_like(value):
value = (value, self.zero, self.zero)
elif len(value) == 2:
value = (1, value[0], value[1])
value = list(map(self._adapt, value))
elif self._dual:
if value is None:
value = (None, None)
elif not is_list_like(value):
value = (value, self.zero)
if self._x_adapt:
value = (
self._x_adapt(value[0]), self._adapt(value[1])
)
if isinstance(self, BaseMap):
value = (self._adapt(value[0]), value[1])
else:
value = list(map(self._adapt, value))
else:
value = self._adapt(value)
values.append(value)
serie_config = SerieConfig()
serie_config(
**dict((k, v) for k, v in self.state.__dict__.items()
if k in dir(serie_config))
)
serie_config(**serie_config_kwargs)
series.append(
Serie(offset + len(series), values, serie_config, metadata)
)
return series
def setup(self, **kwargs):
"""Set up the transient state prior rendering"""
# Keep labels in case of map
if getattr(self, 'x_labels', None) is not None:
self.x_labels = list(self.x_labels)
if getattr(self, 'y_labels', None) is not None:
self.y_labels = list(self.y_labels)
self.state = State(self, **kwargs)
if isinstance(self.style, type):
self.style = self.style()
self.series = self.prepare_values([
rs for rs in self.raw_series if not rs[1].get('secondary')
]) or []
self.secondary_series = self.prepare_values([
rs for rs in self.raw_series if rs[1].get('secondary')
], len(self.series)) or []
self.horizontal = getattr(self, 'horizontal', False)
self.svg = Svg(self)
self._x_labels = None
self._y_labels = None
self._x_2nd_labels = None
self._y_2nd_labels = None
self.nodes = {}
self.margin_box = Margin(
self.margin_top or self.margin, self.margin_right or self.margin,
self.margin_bottom or self.margin, self.margin_left or self.margin
)
self._box = Box()
self.view = None
if self.logarithmic and self.zero == 0:
# Explicit min to avoid interpolation dependency
positive_values = list(
filter(
lambda x: x > 0, [
val[1] or 1 if self._dual else val
for serie in self.series for val in serie.safe_values
]
)
)
self.zero = min(positive_values or (1, )) or 1
if self._len < 3:
self.interpolate = None
self._draw()
self.svg.pre_render()
def teardown(self):
"""Remove the transient state after rendering"""
if os.getenv('PYGAL_KEEP_STATE'):
return
del self.state
self.state = None
def _repr_svg_(self):
"""Display svg in IPython notebook"""
return self.render(disable_xml_declaration=True)
def _repr_png_(self):
"""Display png in IPython notebook"""
return self.render_to_png()
|
import os
import unittest
import mock
from docker_registry.lib import config
fakeenv = {}
def mockget(key, opt=None):
if key in fakeenv:
print('%s key is %s' % (key, fakeenv[key]))
return fakeenv[key]
return opt
@mock.patch('os.environ.get', mockget)
class TestConfig(unittest.TestCase):
def setUp(self):
p = os.path.join(
os.path.dirname(__file__), 'fixtures', 'test_config.yaml')
self.c = config.Config(open(p, 'rb').read())
def test__init__parse_error(self):
self.assertRaises(config.exceptions.ConfigError, config.Config, '\1')
def test__init__no_arg(self):
self.c = config.Config()
assert self.c['whatevertheflush'] is None
assert self.c.whatevertheflush is None
@mock.patch('__builtin__.repr')
def test__repr(self, r):
self.c.__repr__()
r.assert_called_once_with(self.c._config)
def test__methods__(self):
self.assertEqual(self.c.__methods__, [])
def test__members__(self):
self.assertEqual(type(self.c.__members__), list)
self.assertEqual(self.c.__members__, self.c.keys())
self.assertEqual(self.c.__members__, self.c.__dir__())
def test_accessors(self):
assert self.c.booltrue == self.c['booltrue']
assert self.c.dict.one == self.c.dict['one']
assert self.c.dict.one == self.c['dict']['one']
def test_key_existence(self):
assert 'boolfalse' in self.c
assert 'whatevertheflush' not in self.c
def test_non_existent_access(self):
assert self.c['whatevertheflush'] is None
assert self.c.whatevertheflush is None
def test_simple_types(self):
conf = self.c
assert conf.booltrue is True
assert not conf.booltrue == 'True'
assert conf.boolfalse is False
assert not conf.booltrue == 'False'
assert conf.uint == 10
assert not conf.uint == '10'
assert conf.int == -10
assert not conf.int == '-10'
assert conf.float == 0.01
assert not conf.float == '0.01'
assert conf.emptystring == ''
assert conf.emptystring is not None
assert conf.isnone is None
assert conf.nonemptystring == 'nonemptystring'
assert conf.anothernonemptystring == 'nonemptystring'
assert conf.yetanothernonemptystring == 'nonemptystring'
assert conf.array[2] == 'three'
assert len(conf.array) == 3
assert conf.dict.two == 'valuetwo'
assert isinstance(conf.dict, config.Config)
def test_env_defaults(self):
global fakeenv
fakeenv = {}
conf = self.c.ENV
assert conf.booltrue is True
assert conf.boolfalse is False
assert conf.uint == 10
assert conf.int == -10
assert conf.float == 0.01
assert conf.emptystring == ''
assert conf.emptystring is not None
assert conf.isnone is None
assert conf.nonemptystring == 'nonemptystring'
assert conf.anothernonemptystring == 'nonemptystring'
assert conf.yetanothernonemptystring == 'nonemptystring'
assert conf.bugger == 'bug:me:endlessly'
assert conf.array[2] == 'three'
assert len(conf.array) == 3
assert conf.dict is None
def test_env_overrides(self):
global fakeenv
fakeenv['BOOLTRUE'] = 'False'
fakeenv['BOOLFALSE'] = 'True'
fakeenv['UINT'] = '0'
fakeenv['INT'] = '0'
fakeenv['FLOAT'] = '0'
fakeenv['EMPTYSTRING'] = 'NOTREALLY'
fakeenv['ISNONE'] = 'False'
fakeenv['NONEMPTYSTRING'] = '""'
fakeenv['BUGGER'] = '"whatever:the:flush:"'
fakeenv['ARRAY'] = '[one, again]'
fakeenv['DICT'] = '{"one": "oneagain", "two": "twoagain"}'
conf = self.c.ENV
assert conf.booltrue is False
assert conf.boolfalse is True
assert conf.uint == 0
assert conf.int == 0
assert conf.float == 0
assert conf.emptystring == 'NOTREALLY'
assert conf.isnone is False
assert conf.isnone is not None
assert conf.nonemptystring == ''
assert conf.anothernonemptystring == 'nonemptystring'
assert conf.yetanothernonemptystring == 'nonemptystring'
assert conf.bugger == 'whatever:the:flush:'
assert conf.array[1] == 'again'
assert len(conf.array) == 2
fakeenv['ISNONE'] = ''
assert conf.isnone is None
assert isinstance(conf.dict, config.Config)
assert conf.dict.one == 'oneagain'
def test_write(self):
conf = self.c
assert conf.something == 'else'
conf.something = 'or'
assert conf.something == 'or'
conf.something = None
assert conf.something is None
def test_unicode(self):
assert self.c.uni == u'ß∞'
class TestLoad(unittest.TestCase):
def setUp(self):
self._config = config._config
def tearDown(self):
config._config = self._config
@mock.patch.object(config.os.environ, 'get')
def test_config_path_exception(self, get):
config._config = None
self.assertRaises(config.exceptions.FileNotFoundError, config.load)
|
from datetime import datetime
import json
import logging
from sqlalchemy import (
Boolean,
Column,
DateTime,
ForeignKey,
Index,
Integer,
String,
Text,
distinct,
)
from sqlalchemy.ext.declarative import declarative_base
from homeassistant.core import Event, EventOrigin, State, split_entity_id
from homeassistant.helpers.json import JSONEncoder
import homeassistant.util.dt as dt_util
# SQLAlchemy Schema
# pylint: disable=invalid-name
Base = declarative_base()
_LOGGER = logging.getLogger(__name__)
class Events(Base): # type: ignore
"""Event history data."""
__tablename__ = "events"
event_id = Column(Integer, primary_key=True)
event_type = Column(String(32), index=True)
event_data = Column(Text)
origin = Column(String(32))
time_fired = Column(DateTime(timezone=True))
created = Column(DateTime(timezone=True), default=datetime.utcnow)
@staticmethod
def from_event(event):
"""Create an event database object from a native event."""
return Events(
event_type=event.event_type,
event_data=json.dumps(event.data, cls=JSONEncoder),
origin=str(event.origin),
time_fired=event.time_fired,
)
def to_native(self):
"""Convert to a natve HA Event."""
try:
return Event(
self.event_type,
json.loads(self.event_data),
EventOrigin(self.origin),
_process_timestamp(self.time_fired),
)
except ValueError:
# When json.loads fails
_LOGGER.exception("Error converting to event: %s", self)
return None
class States(Base): # type: ignore
"""State change history."""
__tablename__ = "states"
state_id = Column(Integer, primary_key=True)
domain = Column(String(64))
entity_id = Column(String(255))
state = Column(String(255))
attributes = Column(Text)
event_id = Column(Integer, ForeignKey("events.event_id"))
last_changed = Column(DateTime(timezone=True), default=datetime.utcnow)
last_updated = Column(DateTime(timezone=True), default=datetime.utcnow)
created = Column(DateTime(timezone=True), default=datetime.utcnow)
__table_args__ = (
Index("states__state_changes", "last_changed", "last_updated", "entity_id"),
Index("states__significant_changes", "domain", "last_updated", "entity_id"),
)
@staticmethod
def from_event(event):
"""Create object from a state_changed event."""
entity_id = event.data["entity_id"]
state = event.data.get("new_state")
dbstate = States(entity_id=entity_id)
# State got deleted
if state is None:
dbstate.state = ""
dbstate.domain = split_entity_id(entity_id)[0]
dbstate.attributes = "{}"
dbstate.last_changed = event.time_fired
dbstate.last_updated = event.time_fired
else:
dbstate.domain = state.domain
dbstate.state = state.state
dbstate.attributes = json.dumps(dict(state.attributes), cls=JSONEncoder)
dbstate.last_changed = state.last_changed
dbstate.last_updated = state.last_updated
return dbstate
def to_native(self):
"""Convert to an HA state object."""
try:
return State(
self.entity_id,
self.state,
json.loads(self.attributes),
_process_timestamp(self.last_changed),
_process_timestamp(self.last_updated),
)
except ValueError:
# When json.loads fails
_LOGGER.exception("Error converting row to state: %s", self)
return None
class RecorderRuns(Base): # type: ignore
"""Representation of recorder run."""
__tablename__ = "recorder_runs"
run_id = Column(Integer, primary_key=True)
start = Column(DateTime(timezone=True), default=datetime.utcnow)
end = Column(DateTime(timezone=True))
closed_incorrect = Column(Boolean, default=False)
created = Column(DateTime(timezone=True), default=datetime.utcnow)
def entity_ids(self, point_in_time=None):
"""Return the entity ids that existed in this run.
Specify point_in_time if you want to know which existed at that point
in time inside the run.
"""
from sqlalchemy.orm.session import Session
session = Session.object_session(self)
assert session is not None, "RecorderRuns need to be persisted"
query = session.query(distinct(States.entity_id)).filter(
States.last_updated >= self.start
)
if point_in_time is not None:
query = query.filter(States.last_updated < point_in_time)
elif self.end is not None:
query = query.filter(States.last_updated < self.end)
return [row[0] for row in query]
def to_native(self):
"""Return self, native format is this model."""
return self
def _process_timestamp(ts):
"""Process a timestamp into datetime object."""
if ts is None:
return None
if ts.tzinfo is None:
return dt_util.UTC.localize(ts)
return dt_util.as_utc(ts)
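# Hedged usage sketch (an assumption, not part of the original module):
# round-trips a native Home Assistant event through the ORM row defined above.
def _example_event_round_trip():
    native = Event("state_changed", {"entity_id": "light.kitchen"})
    row = Events.from_event(native)
    restored = row.to_native()
    # The event type, payload and origin survive the serialization round trip.
    return restored.event_type == native.event_type and restored.data == native.data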
|
import urwid
class PopUpDialog(urwid.WidgetWrap):
"""A dialog that appears with nothing but a close button """
signals = ['close']
def __init__(self):
close_button = urwid.Button("that's pretty cool")
urwid.connect_signal(close_button, 'click',
lambda button:self._emit("close"))
pile = urwid.Pile([urwid.Text(
"^^ I'm attached to the widget that opened me. "
"Try resizing the window!\n"), close_button])
fill = urwid.Filler(pile)
self.__super.__init__(urwid.AttrWrap(fill, 'popbg'))
class ThingWithAPopUp(urwid.PopUpLauncher):
def __init__(self):
self.__super.__init__(urwid.Button("click-me"))
urwid.connect_signal(self.original_widget, 'click',
lambda button: self.open_pop_up())
def create_pop_up(self):
pop_up = PopUpDialog()
urwid.connect_signal(pop_up, 'close',
lambda button: self.close_pop_up())
return pop_up
def get_pop_up_parameters(self):
return {'left':0, 'top':1, 'overlay_width':32, 'overlay_height':7}
fill = urwid.Filler(urwid.Padding(ThingWithAPopUp(), 'center', 15))
loop = urwid.MainLoop(
fill,
[('popbg', 'white', 'dark blue')],
pop_ups=True)
loop.run()
|
import os
import unittest
import mock
from Tests.utils.utils import get_test_path
from kalliope.core.Cortex import Cortex
from kalliope.core.Models import Singleton
from kalliope.core.Models.settings.Tts import Tts
from kalliope import SettingLoader
from kalliope.core.NeuronModule import NeuronModule, TemplateFileNotFoundException, TTSModuleNotFound
class TestNeuronModule(unittest.TestCase):
def setUp(self):
# kill singleton
Singleton._instances = dict()
self.expected_result = "hello, this is a replaced word"
        # this allows us to run the test from an IDE and from the root with python -m unittest tests.TestNeuronModule
self.file_template = get_test_path("templates/template_test.j2")
self.file_template_contains_kalliope_memory = get_test_path("templates/template_test_with_kalliope_memory.j2")
self.say_template = "hello, this is a {{ test }}"
self.message = {
"test": "replaced word"
}
self.neuron_module_test = NeuronModule()
self.file_settings = get_test_path("settings/settings_test.yml")
self.settings = SettingLoader(file_path=self.file_settings).settings
def tearDown(self):
del self.neuron_module_test
def test_get_audio_from_stt(self):
"""
Test the OrderListener thread is started
"""
with mock.patch("kalliope.core.OrderListener.start") as mock_orderListener_start:
with mock.patch("kalliope.core.OrderListener.join"):
def callback():
pass
self.neuron_module_test.get_audio_from_stt(callback=callback())
mock_orderListener_start.assert_called_once_with()
mock_orderListener_start.reset_mock()
def test_get_tts_object(self):
# no TTS name provided. should return the default tts
expected_tts = Tts(name="pico2wave", parameters={"language": "fr-FR", "cache": True})
self.assertEqual(NeuronModule._get_tts_object(settings=self.settings), expected_tts)
# TTS provided, only cache parameter updated
expected_tts = Tts(name="pico2wave", parameters={"language": "fr-FR", "cache": False})
self.assertEqual(NeuronModule._get_tts_object(tts_name="pico2wave",
override_parameter={"cache": False},
settings=self.settings), expected_tts)
# TTS provided, all parameters updated
expected_tts = Tts(name="pico2wave", parameters={"language": "es-ES", "cache": False})
self.assertEqual(NeuronModule._get_tts_object(tts_name="pico2wave",
override_parameter={"language": "es-ES", "cache": False},
settings=self.settings), expected_tts)
# TTS not existing in settings
with self.assertRaises(TTSModuleNotFound):
NeuronModule._get_tts_object(tts_name="no_existing_tts",
override_parameter={"cache": False},
settings=self.settings)
def get_message_from_dict(self):
# TODO not working in pycharm
with mock.patch.object(NeuronModule, 'say', return_value=None) as mock_method:
self.neuron_module_test.say_template = self.say_template
self.assertEqual(self.neuron_module_test._get_message_from_dict(self.message), self.expected_result)
del self.neuron_module_test
self.neuron_module_test = NeuronModule()
# test with file_template
self.neuron_module_test.file_template = self.file_template
self.assertEqual(self.neuron_module_test._get_message_from_dict(self.message), self.expected_result)
del self.neuron_module_test
# test with no say_template and no file_template
self.neuron_module_test = NeuronModule()
self.assertEqual(self.neuron_module_test._get_message_from_dict(self.message), None)
def test_get_say_template(self):
# test with a string
self.assertEqual(NeuronModule._get_say_template(self.say_template, self.message), self.expected_result)
# test with a list
say_template = list()
say_template.append("hello, this is a {{ test }} one")
say_template.append("hello, this is a {{ test }} two")
expected_result = list()
expected_result.append("hello, this is a replaced word one")
expected_result.append("hello, this is a replaced word two")
self.assertTrue(NeuronModule._get_say_template(say_template, self.message) in expected_result)
def test_get_file_template(self):
# test with a valid template
self.assertEqual(NeuronModule._get_file_template(self.file_template, self.message), self.expected_result)
# test with kalliope memory
Cortex.memory = {
"kalliope_last_tts_message": "memory"
}
expected = "hello, this is a replaced word with memory"
self.assertEqual(NeuronModule._get_file_template(self.file_template_contains_kalliope_memory, self.message),
expected)
# test raise with a non existing template
file_template = "does_not_exist.j2"
with self.assertRaises(TemplateFileNotFoundException):
NeuronModule._get_file_template(file_template, self.message)
def test_get_content_of_file(self):
expected_result = "hello, this is a {{ test }}"
self.assertEqual(NeuronModule._get_content_of_file(self.file_template), expected_result)
def test_serialize(self):
"""
Test the serialisation of the neuron module
"""
neuron_module = NeuronModule()
neuron_module.neuron_name = "kalliope"
neuron_module.tts_message = "I am french"
expected_result = {
'neuron_name': "kalliope",
'generated_message': "I am french"
}
self.assertEqual(expected_result, neuron_module.serialize())
if __name__ == '__main__':
unittest.main()
|
import six
from chainer import testing
def parameterize(*params):
""":func:`chainer.testing.parameterize` for `pytest-xdist`.
:func:`chainer.testing.parameterize` cannot work with `pytest-xdist`
when the params contain functions (lambdas), classes, and random values.
This wrapper replaces the params with their indices
    and restores the original params in :meth:`setUp`.
"""
def deco(cls):
setUp_orig = cls.setUp
def setUp(self):
param = params[self._chainercv_parameterize_index]
print('params: {}'.format(param))
for k, v in six.iteritems(param):
setattr(self, k, v)
setUp_orig(self)
cls.setUp = setUp
params_indices = [
{'_chainercv_parameterize_index': i} for i in range(len(params))]
return testing.parameterize(*params_indices)(cls)
return deco
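# Hedged usage sketch (an assumption, not shipped with the original helper).
# In real test modules the decorator is applied to module-level TestCase
# classes; wrapping the example in a factory simply keeps it inert here.
def _example_parameterized_case():
    import unittest

    @parameterize(
        {'in_size': 3, 'activ': lambda x: x * 2},
        {'in_size': 5, 'activ': lambda x: x + 1},
    )
    class _ExampleTest(unittest.TestCase):
        def test_attrs(self):
            # setUp (patched by parameterize) copied the chosen dict onto self.
            assert callable(self.activ)
            assert self.in_size in (3, 5)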
|
import json
import unittest
import mock
from Tests.test_api.base import RestAPITestBase
from kalliope.core.Cortex import Cortex
class TestSynapseView(RestAPITestBase):
def test_get_all_synapses(self):
url = self.get_server_url() + "/synapses"
response = self.client.get(url)
expected_content = {
"synapses": [
{"signals": [{"name": "order", "parameters": "test_order"}],
"neurons": [{"name": "say", "parameters": {"message": ["test message"]}}],
"name": "test", "enabled": True},
{"signals": [{"name": "order", "parameters": "test_order_miss_configured_neuron"}],
"neurons": [{"name": "say", "parameters": {"not_valid_parameter": ["test message"]}}],
"name": "test2", "enabled": True},
{"signals": [{"name": "order", "parameters": "test_order_with_parameter"}],
"neurons": [{"name": "say", "parameters": {"message": ["test message {{parameter1}}"]}}],
"name": "test4", "enabled": True},
{"signals": [],
"neurons": [{"name": "say", "parameters": {"message": "order not found"}}],
"name": "order-not-found-synapse", "enabled": True},
{"signals": [{"name": "order", "parameters": "test_order_3"}],
"neurons": [{"name": "say", "parameters": {"message": ["test message"]}}],
"name": "test3", "enabled": True}]}
# a lot of char to process
self.maxDiff = None
self.assertEqual(response.status_code, 200)
self.assertEqual(json.dumps(expected_content, sort_keys=True),
json.dumps(json.loads(response.get_data().decode('utf-8')), sort_keys=True))
def test_get_one_synapse(self):
url = self.get_server_url() + "/synapses/test"
response = self.client.get(url)
expected_content = {
"synapses": {
"name": "test",
'enabled': True,
"neurons": [
{
"name": "say",
"parameters": {
"message": [
"test message"
]
}
}
],
"signals": [
{
"name": "order",
"parameters": "test_order"
}
]
}
}
self.assertEqual(json.dumps(expected_content, sort_keys=True),
json.dumps(json.loads(response.get_data().decode('utf-8')), sort_keys=True))
def test_delete_synapse(self):
# test with existing synapse
url = self.get_server_url() + "/synapses/test"
response = self.client.delete(url)
self.assertEqual(response.status_code, 204)
# test with non existing synapse
url = self.get_server_url() + "/synapses/test-none"
response = self.client.delete(url)
self.assertEqual(response.status_code, 404)
expected_content = {
"error": {
"synapse name not found": "test-none"
}
}
self.assertEqual(expected_content, json.loads(response.get_data().decode('utf-8')))
def test_get_synapse_not_found(self):
url = self.get_server_url() + "/synapses/test-none"
result = self.client.get(url)
expected_content = {
"error": {
"synapse name not found": "test-none"
}
}
self.assertEqual(expected_content, json.loads(result.get_data().decode('utf-8')))
self.assertEqual(result.status_code, 404)
def test_run_synapse_by_name(self):
url = self.get_server_url() + "/synapses/start/id/test"
result = self.client.post(url)
expected_content = {'status': 'complete',
'matched_synapses':
[{'matched_order': None,
'neuron_module_list':
[{'generated_message': 'test message', 'neuron_name': 'Say'}],
'synapse_name': 'test'}],
'user_order': None
}
self.assertEqual(json.dumps(expected_content, sort_keys=True),
json.dumps(json.loads(result.get_data().decode('utf-8')), sort_keys=True))
self.assertEqual(result.status_code, 201)
# run a synapse by its name with parameter
url = self.get_server_url() + "/synapses/start/id/test4"
headers = {"Content-Type": "application/json"}
data = {"parameters": {"parameter1": "replaced_value"}}
result = self.client.post(url, headers=headers, data=json.dumps(data))
expected_content = {
"matched_synapses": [
{
"matched_order": None,
"neuron_module_list": [
{
"generated_message": "test message replaced_value",
"neuron_name": "Say"
}
],
"synapse_name": "test4"
}
],
"status": "complete",
"user_order": None
}
self.assertEqual(json.dumps(expected_content, sort_keys=True),
json.dumps(json.loads(result.get_data().decode('utf-8')), sort_keys=True))
self.assertEqual(result.status_code, 201)
def test_post_synapse_not_found(self):
url = self.get_server_url() + "/synapses/start/id/test-none"
result = self.client.post(url)
expected_content = {
"error": {
"synapse name not found": "test-none"
}
}
self.assertEqual(json.dumps(expected_content, sort_keys=True),
json.dumps(json.loads(result.get_data().decode('utf-8')), sort_keys=True))
self.assertEqual(result.status_code, 404)
def test_run_synapse_with_order(self):
url = self.get_server_url() + "/synapses/start/order"
headers = {"Content-Type": "application/json"}
data = {"order": "test_order"}
result = self.client.post(url, headers=headers, data=json.dumps(data))
expected_content = {'status': 'complete',
'matched_synapses':
[
{
'matched_order': "test_order",
'neuron_module_list':
[
{
'generated_message': 'test message', 'neuron_name': 'Say'
}
],
'synapse_name': 'test'
}
],
'user_order': "test_order"
}
self.assertEqual(json.dumps(expected_content, sort_keys=True),
json.dumps(json.loads(result.get_data().decode('utf-8')), sort_keys=True))
self.assertEqual(result.status_code, 201)
# check that the cortex contains the last order
self.assertEqual("test_order", Cortex.get_from_key("kalliope_last_order"))
# test with a wrong parameter in a neuron
data = {"order": "test_order_miss_configured_neuron"}
result = self.client.post(url, headers=headers, data=json.dumps(data))
self.assertEqual(result.status_code, 201)
def test_post_synapse_by_order_not_found(self):
url = self.get_server_url() + "/synapses/start/order"
data = {"order": "non existing order"}
headers = {"Content-Type": "application/json"}
result = self.client.post(url,
headers=headers,
data=json.dumps(data))
expected_content = {"matched_synapses": [{"matched_order": None,
"neuron_module_list": [
{"generated_message": "order not found",
"neuron_name": "Say"}
],
"synapse_name": "order-not-found-synapse"}],
"status": "complete",
"user_order": None}
self.assertEqual(json.dumps(expected_content, sort_keys=True),
json.dumps(json.loads(result.get_data().decode('utf-8')), sort_keys=True))
self.assertEqual(result.status_code, 201)
# TODO this doesn't work on travis but works locally with python -m unittest discover
# def test_post_synapse_by_audio(self):
# url = self.get_server_url() + "/synapses/start/audio"
# with open(os.path.join(self.audio_file), 'rb') as fp:
# file = FileStorage(fp)
# data = {
# 'file': file
# }
# result = self.client.post(url, data=data, content_type='multipart/form-data')
#
# expected_content = {
# "synapses": [
# {
# "name": "test2",
# "neurons": [
# {
# "name": "say",
# "parameters": {
# "message": [
# "test message"
# ]
# }
# }
# ],
# "signals": [
# {
# "order": "bonjour"
# }
# ]
# }
# ]
# }
#
# self.assertEqual(json.dumps(expected_content), json.dumps(json.loads(result.get_data())))
# self.assertEqual(result.status_code, 201)
def test_convert_to_wav(self):
"""
Test the api function to convert incoming sound file to wave.
"""
with mock.patch("os.system") as mock_os_system:
# Scenario 1 : input wav file
temp_file = "/tmp/kalliope/tempfile.wav" # tempfile.NamedTemporaryFile(suffix=".wav")
result_file = self.flask_api.synapses_blueprint._convert_to_wav(temp_file)
self.assertEqual(temp_file, result_file)
mock_os_system.assert_not_called()
# Scenario 2 : input not a wav file
temp_file = "/tmp/kalliope/tempfile.amr" # tempfile.NamedTemporaryFile(suffix=".wav")
expected_result = "/tmp/kalliope/tempfile.wav"
result_file = self.flask_api.synapses_blueprint._convert_to_wav(temp_file)
self.assertEqual(expected_result, result_file)
mock_os_system.assert_called_once_with("ffmpeg -loglevel panic -y -i " + temp_file + " " + expected_result)
def test_create_synapse(self):
url = self.get_server_url() + "/synapses"
headers = {"Content-Type": "application/json"}
# test with valid synapse
data = {
"name": "create-synapse",
"signals": [
{
"order": "I'm Batman"
}
],
"neurons": [
{
"say": {
"message": "I know"
}
}
]
}
result = self.client.post(url,
headers=headers,
data=json.dumps(data))
expected_content = {
"enabled": True,
"name": "create-synapse",
"neurons": [
{
"name": "say",
"parameters": {
"message": "I know"
}
}
],
"signals": [
{
"name": "order",
"parameters": "I'm Batman"
}
]
}
self.assertEqual(result.status_code, 201)
self.assertEqual(json.dumps(expected_content, sort_keys=True),
json.dumps(json.loads(result.get_data().decode('utf-8')), sort_keys=True))
# test with non existing neuron
data = {
"name": "create-synapse",
"signals": [
{
"order": "I'm Batman"
}
],
"neurons": [
{
"notexist": {
"key": "value"
}
}
]
}
result = self.client.post(url,
headers=headers,
data=json.dumps(data))
self.assertEqual(result.status_code, 400)
# test with non valid synapse
data = {
"name": "create-synapse",
"signals": [
{
"order": "I'm Batman"
}
]
}
result = self.client.post(url,
headers=headers,
data=json.dumps(data))
self.assertEqual(result.status_code, 400)
if __name__ == '__main__':
unittest.main()
|
from time import sleep
from django.contrib.auth.models import AnonymousUser
from django.contrib.messages.storage import default_storage
from django.contrib.sessions.backends.signed_cookies import SessionStore
from django.http.request import HttpRequest
from django.test import SimpleTestCase
from django.test.utils import override_settings
from weblate.auth.models import User
from weblate.utils.ratelimit import (
check_rate_limit,
reset_rate_limit,
revert_rate_limit,
session_ratelimit_post,
)
class RateLimitTest(SimpleTestCase):
def get_request(self):
request = HttpRequest()
request.META["REMOTE_ADDR"] = "1.2.3.4"
request.method = "POST"
request.session = SessionStore()
request._messages = default_storage(request)
request.user = AnonymousUser()
return request
def setUp(self):
# Ensure no rate limits are there
reset_rate_limit("test", self.get_request())
def test_basic(self):
self.assertTrue(check_rate_limit("test", self.get_request()))
@override_settings(RATELIMIT_ATTEMPTS=5, RATELIMIT_WINDOW=60)
def test_limit(self):
request = self.get_request()
for _unused in range(5):
self.assertTrue(check_rate_limit("test", request))
self.assertFalse(check_rate_limit("test", request))
@override_settings(RATELIMIT_ATTEMPTS=1, RATELIMIT_WINDOW=2, RATELIMIT_LOCKOUT=1)
def test_window(self):
request = self.get_request()
self.assertTrue(check_rate_limit("test", request))
sleep(1)
self.assertFalse(check_rate_limit("test", request))
sleep(2)
self.assertTrue(check_rate_limit("test", request))
@override_settings(RATELIMIT_ATTEMPTS=1, RATELIMIT_WINDOW=2, RATELIMIT_LOCKOUT=100)
def test_lockout(self):
request = self.get_request()
self.assertTrue(check_rate_limit("test", request))
sleep(1)
self.assertFalse(check_rate_limit("test", request))
sleep(1)
self.assertFalse(check_rate_limit("test", request))
@override_settings(RATELIMIT_ATTEMPTS=2, RATELIMIT_WINDOW=2, RATELIMIT_LOCKOUT=100)
def test_interval(self):
request = self.get_request()
self.assertTrue(check_rate_limit("test", request))
sleep(1.5)
self.assertTrue(check_rate_limit("test", request))
sleep(1.5)
self.assertTrue(check_rate_limit("test", request))
sleep(1.5)
self.assertTrue(check_rate_limit("test", request))
@override_settings(RATELIMIT_ATTEMPTS=2, RATELIMIT_WINDOW=2)
def test_revert(self):
request = self.get_request()
self.assertTrue(check_rate_limit("test", request))
self.assertTrue(check_rate_limit("test", request))
revert_rate_limit("test", request)
self.assertTrue(check_rate_limit("test", request))
self.assertFalse(check_rate_limit("test", request))
@override_settings(RATELIMIT_ATTEMPTS=1, RATELIMIT_WINDOW=1, RATELIMIT_LOCKOUT=1)
def test_post(self):
request = self.get_request()
limiter = session_ratelimit_post("test")(lambda request: "RESPONSE")
# First attempt should work
self.assertEqual(limiter(request), "RESPONSE")
# Second attempt should be blocked
self.assertEqual(limiter(request).url, "/accounts/login/")
# During lockout period request should be blocked
request = self.get_request()
self.assertEqual(limiter(request).url, "/accounts/login/")
# Wait until lockout expires and it should work again
sleep(1)
request = self.get_request()
self.assertEqual(limiter(request), "RESPONSE")
class RateLimitUserTest(RateLimitTest):
def get_request(self):
request = super().get_request()
request.user = User()
return request
|
import sys
from subprocess import Popen, PIPE
from collections import defaultdict
from boto3 import resource
import credstash
def main():
UPDATED_DIGEST = 'SHA256'
DIGESTS_TO_UPDATE = ['WHIRLPOOL', 'RIPEMD']
keys = defaultdict(lambda:0)
keys_to_update = []
dynamodb_resource = resource('dynamodb')
table = dynamodb_resource.Table('credential-store')
response = table.scan()
items = response['Items']
# appending all dynamodb entries to items dict
while True:
if response.get('LastEvaluatedKey'):
response = table.scan(ExclusiveStartKey=response['LastEvaluatedKey'])
items += response['Items']
else:
break
# storing latest version of keys with their digests
for i in range(len(items)):
try:
digest = items[i]['digest']
version = int(items[i]['version'])
key = items[i]['name']
        except (KeyError, ValueError):
continue
if key in keys:
if version > keys[key][0]:
keys[key][0] = version
keys[key][1] = digest
else:
keys[key] = [version, digest]
# store keys to be updated
for k, v in keys.items():
if v[1] in DIGESTS_TO_UPDATE:
keys_to_update.append(k)
# confirms update of digests
if len(keys_to_update):
print('\nThe following keys will be updated to {0}:\n'.format(UPDATED_DIGEST))
for key in keys_to_update:
print('{0}\n'.format(key))
confirmed = None
while not confirmed:
val = input('Continue? y/n ')
if val.lower() == 'y' or val.lower() == 'yes':
confirmed = True
elif val.lower() == 'n' or val.lower() == 'no':
print('\nexiting...\n')
sys.exit()
else:
print('\nInvalid input\n')
else:
print('\nNo digests to update!\n')
sys.exit()
# updating deprecated digests
for key in keys_to_update:
p = Popen(['credstash', 'get', key], stdout=PIPE, stderr=PIPE)
secret, err = p.communicate()
secret = secret[:-1] # removes credstash-added newline for stdout
if not err:
p = Popen(['credstash', 'put', key, secret, '-a', '-d', UPDATED_DIGEST], stdout=PIPE)
update, err = p.communicate()
print('{0} has been updated!\n'.format(key))
else:
print('Error found, skipping update of {0}. Error: {1}'.format(key, err))
if __name__ == '__main__':
main()
|
from django.conf import settings
from django.db import models
from django.db.models import Q
from django.utils.translation import gettext_lazy as _
from post_office.models import EmailTemplate
from filer.fields.file import FilerFileField
from shop.conf import app_settings
from shop.models.fields import ChoiceEnum, ChoiceEnumField
class Notify(ChoiceEnum):
RECIPIENT = 0, _("Recipient")
VENDOR = 1, _("Vendor")
CUSTOMER = 2, _("Customer")
NOBODY = 9, _("Nobody")
class Notification(models.Model):
"""
A task executed on receiving a signal.
"""
name = models.CharField(
max_length=50,
verbose_name=_("Name"),
)
transition_target = models.CharField(
max_length=50,
verbose_name=_("Event"),
)
notify = ChoiceEnumField(
_("Whom to notify"),
enum_type=Notify,
)
recipient = models.ForeignKey(
settings.AUTH_USER_MODEL,
on_delete=models.CASCADE,
verbose_name=_("Recipient"),
null=True,
limit_choices_to={'is_staff': True},
)
mail_template = models.ForeignKey(
EmailTemplate,
on_delete=models.CASCADE,
verbose_name=_("Template"),
limit_choices_to=Q(language__isnull=True) | Q(language=''),
)
class Meta:
app_label = 'shop'
verbose_name = _("Notification")
verbose_name_plural = _("Notifications")
ordering = ['transition_target', 'recipient_id']
def __str__(self):
return self.name
def get_recipient(self, order):
if self.notify is Notify.RECIPIENT:
return self.recipient.email
if self.notify is Notify.CUSTOMER:
return order.customer.email
if self.notify is Notify.VENDOR:
if hasattr(order, 'vendor'):
return order.vendor.email
return app_settings.SHOP_VENDOR_EMAIL
class NotificationAttachment(models.Model):
notification = models.ForeignKey(
Notification,
on_delete=models.CASCADE,
)
attachment = FilerFileField(
on_delete=models.SET_NULL,
related_name='email_attachment',
null=True,
blank=True,
)
class Meta:
app_label = 'shop'
|
from distutils.version import LooseVersion
import numpy as np
import pytest
from numpy.testing import assert_array_almost_equal
from mne.time_frequency import psd_multitaper
from mne.time_frequency.multitaper import dpss_windows
from mne.utils import requires_nitime
from mne.io import RawArray
from mne import create_info
@requires_nitime
def test_dpss_windows():
"""Test computation of DPSS windows."""
import nitime as ni
N = 1000
half_nbw = 4
Kmax = int(2 * half_nbw)
dpss, eigs = dpss_windows(N, half_nbw, Kmax, low_bias=False)
with pytest.warns(None): # conversions
dpss_ni, eigs_ni = ni.algorithms.dpss_windows(N, half_nbw, Kmax)
assert_array_almost_equal(dpss, dpss_ni)
assert_array_almost_equal(eigs, eigs_ni)
dpss, eigs = dpss_windows(N, half_nbw, Kmax, interp_from=200,
low_bias=False)
with pytest.warns(None): # conversions
dpss_ni, eigs_ni = ni.algorithms.dpss_windows(N, half_nbw, Kmax,
interp_from=200)
assert_array_almost_equal(dpss, dpss_ni)
assert_array_almost_equal(eigs, eigs_ni)
@requires_nitime
def test_multitaper_psd():
"""Test multi-taper PSD computation."""
import nitime as ni
for n_times in (100, 101):
n_channels = 5
data = np.random.RandomState(0).randn(n_channels, n_times)
sfreq = 500
info = create_info(n_channels, sfreq, 'eeg')
raw = RawArray(data, info)
pytest.raises(ValueError, psd_multitaper, raw, sfreq,
normalization='foo')
ni_5 = (LooseVersion(ni.__version__) >= LooseVersion('0.5'))
norm = 'full' if ni_5 else 'length'
for adaptive, n_jobs in zip((False, True, True), (1, 1, 2)):
psd, freqs = psd_multitaper(raw, adaptive=adaptive,
n_jobs=n_jobs,
normalization=norm)
with pytest.warns(None): # nitime integers
freqs_ni, psd_ni, _ = ni.algorithms.spectral.multi_taper_psd(
data, sfreq, adaptive=adaptive, jackknife=False)
assert_array_almost_equal(psd, psd_ni, decimal=4)
if n_times % 2 == 0:
# nitime's frequency definitions must be incorrect,
# they give the same values for 100 and 101 samples
assert_array_almost_equal(freqs, freqs_ni)
with pytest.raises(ValueError, match='use a value of at least'):
psd_multitaper(raw, bandwidth=4.9)
|
import itertools
import json
import sys
from collections import defaultdict
from datetime import datetime
def get_datetime_from_ts(ts):
tformat = "%Y-%m-%dT%H:%M:%S.%f"
return datetime.strptime(ts, tformat)
def get_deploy_durations_from_file(filename):
"""
filename: path to a file to be parsed for datetime data
The expected input is a paasta service log for the deploy events
The way I've been fetching them is by running 'internal logreader command' | grep deploy | grep event > filename
"""
file_object = open(filename, "r")
data = sorted(
[json.loads(line.rstrip("\n")) for line in file_object],
key=lambda x: get_datetime_from_ts(x["timestamp"]),
)
timedeltas = defaultdict(list)
last_time = dict()
instance_bitvector = defaultdict(bool) # defaults to False
for datum in data:
time = get_datetime_from_ts(datum["timestamp"])
instance = datum["instance"]
if "in progress" in datum["message"] and not instance_bitvector[instance]:
instance_bitvector[instance] = True
last_time[instance] = time
elif "finishing" in datum["message"]:
instance_bitvector[instance] = False
timedeltas[instance].append(time - last_time[instance])
return timedeltas
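# Hedged illustration (an assumption, not taken from real service logs): the
# parser above expects one JSON object per line with at least these fields;
# the values are invented.
EXAMPLE_DEPLOY_LOG_LINE = json.dumps(
    {
        "timestamp": "2016-01-01T12:00:00.000000",
        "instance": "main",
        "message": "deploy event: bounce in progress",
    }
)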
def display_bounce_info(timedeltas):
"""
timedeltas: iterable of timedelta objects
"""
std = list(sorted(timedeltas))
print("Median time to bounce: {} seconds".format(std[len(std) / 2]))
print("10% time to bounce: {}".format(std[len(std) / 10]))
print("90% time to bounce: {}".format(std[len(std) * 9 / 10]))
def main(filenames):
for filename in filenames:
print(filename)
print("=========================")
timedeltas = get_deploy_durations_from_file(filename)
for instance, tdlist in timedeltas.items():
if timedeltas:
print("Instance: %s" % instance)
display_bounce_info(tdlist)
print("Overall:")
display_bounce_info(itertools.chain.from_iterable(timedeltas.values()))
print("=========================")
if __name__ == "__main__":
main(filenames=sys.argv[1:])
|
from contextlib import contextmanager
import logging
import sys
import threading
try:
import colorlog
except ImportError:
colorlog = None
DEBUG = 'debug'
INFO = 'info'
WARNING = 'warning'
ERROR = 'error'
LOG_LEVELS = {
DEBUG: logging.DEBUG,
INFO: logging.INFO,
WARNING: logging.WARNING,
ERROR: logging.ERROR
}
class ThreadLogContext(object):
"""Per-thread context for log message prefix labels."""
def __init__(self, thread_log_context=None):
"""Constructs a ThreadLogContext by copying a previous ThreadLogContext.
Args:
thread_log_context: A ThreadLogContext for an existing thread whose state
will be copied to initialize a ThreadLogContext for a new thread.
"""
if thread_log_context:
self._label_list = thread_log_context._label_list[:]
else:
self._label_list = []
self._RecalculateLabel()
@property
def label(self):
return self._label
def _RecalculateLabel(self):
"""Recalculate the string label used to to prepend log messages.
The label is the concatenation of all non-empty strings in the _label_list.
"""
non_empty_string_list = [s for s in self._label_list if s]
if len(non_empty_string_list):
self._label = ' '.join(non_empty_string_list) + ' '
else:
self._label = ''
@contextmanager
def ExtendLabel(self, label_extension):
"""Extends the string label used to prepend log messages.
Args:
label_extension: A string appended to the end of the current label.
"""
self._label_list.append(label_extension)
self._RecalculateLabel()
yield
self._label_list.pop()
self._RecalculateLabel()
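# Hedged usage sketch (an assumption, not part of the original module): shows
# how nested ExtendLabel calls build up and then unwind the per-thread label.
def _example_extend_label():
  context = ThreadLogContext()
  with context.ExtendLabel('run-1'):
    with context.ExtendLabel('stage-a'):
      assert context.label == 'run-1 stage-a '
    assert context.label == 'run-1 '
  assert context.label == ''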
class _ThreadData(threading.local):
def __init__(self):
self.pkb_thread_log_context = ThreadLogContext()
thread_local = _ThreadData()
def SetThreadLogContext(thread_log_context):
"""Set the current thread's ThreadLogContext object.
Args:
thread_log_context: A ThreadLogContext to be written to thread local
storage.
"""
thread_local.pkb_thread_log_context = thread_log_context
def GetThreadLogContext():
"""Get the current thread's ThreadLogContext object.
Returns:
The ThreadLogContext previously written via SetThreadLogContext.
"""
return thread_local.pkb_thread_log_context
class PkbLogFilter(logging.Filter):
"""Filter that injects a thread's ThreadLogContext label into log messages.
Sets the LogRecord's pkb_label attribute with the ThreadLogContext label.
"""
def filter(self, record):
record.pkb_label = GetThreadLogContext().label
return True
def ConfigureBasicLogging():
"""Initializes basic python logging before a log file is available."""
logging.basicConfig(format='%(levelname)-8s %(message)s', level=logging.INFO)
def ConfigureLogging(stderr_log_level, log_path, run_uri,
file_log_level=logging.DEBUG):
"""Configure logging.
Note that this will destroy existing logging configuration!
This configures python logging to emit messages to stderr and a log file.
Args:
stderr_log_level: Messages at this level and above are emitted to stderr.
log_path: Path to the log file.
run_uri: A string containing the run_uri to be appended to the log prefix
labels.
file_log_level: Messages at this level and above are written to the log
file.
"""
# Build the format strings for the stderr and log file message formatters.
stderr_format = ('%(asctime)s {} %(threadName)s %(pkb_label)s'
'%(levelname)-8s %(message)s').format(run_uri)
stderr_color_format = ('%(log_color)s%(asctime)s {} %(threadName)s '
'%(pkb_label)s%(levelname)-8s%(reset)s '
'%(message)s').format(run_uri)
file_format = ('%(asctime)s {} %(threadName)s %(pkb_label)s'
'%(filename)s:%(lineno)d %(levelname)-8s %(message)s')
file_format = file_format.format(run_uri)
# Reset root logger settings.
logger = logging.getLogger()
logger.handlers = []
logger.setLevel(logging.DEBUG)
# Initialize the main thread's ThreadLogContext. This object must be
# initialized to use the PkbLogFilter, and it is used to derive the
# ThreadLogContext of other threads started through vm_util.RunThreaded.
SetThreadLogContext(ThreadLogContext())
# Add handler to output to stderr.
handler = logging.StreamHandler()
handler.addFilter(PkbLogFilter())
handler.setLevel(stderr_log_level)
if colorlog is not None and sys.stderr.isatty():
formatter = colorlog.ColoredFormatter(stderr_color_format, reset=True)
handler.setFormatter(formatter)
else:
handler.setFormatter(logging.Formatter(stderr_format))
logger.addHandler(handler)
# Add handler for output to log file.
logging.info('Verbose logging to: %s', log_path)
handler = logging.FileHandler(filename=log_path)
handler.addFilter(PkbLogFilter())
handler.setLevel(file_log_level)
handler.setFormatter(logging.Formatter(file_format))
logger.addHandler(handler)
logging.getLogger('requests').setLevel(logging.ERROR)
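# Hypothetical usage sketch (not part of the original module): ExtendLabel
# pushes an extra token onto the calling thread's log label for the duration
# of the `with` block; PkbLogFilter injects that label into log records as
# %(pkb_label)s when handlers are configured via ConfigureLogging.
if __name__ == '__main__':
  ConfigureBasicLogging()
  context = GetThreadLogContext()
  with context.ExtendLabel('stage-one'):
    logging.info('label inside the block: %r', context.label)
  logging.info('label after the block: %r', context.label)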
|
import urwid
def main():
urwid.set_encoding('utf8')
term = urwid.Terminal(None, encoding='utf-8')
mainframe = urwid.LineBox(
urwid.Pile([
('weight', 70, term),
('fixed', 1, urwid.Filler(urwid.Edit('focus test edit: '))),
]),
)
def set_title(widget, title):
mainframe.set_title(title)
def quit(*args, **kwargs):
raise urwid.ExitMainLoop()
def handle_key(key):
if key in ('q', 'Q'):
quit()
urwid.connect_signal(term, 'title', set_title)
urwid.connect_signal(term, 'closed', quit)
loop = urwid.MainLoop(
mainframe,
handle_mouse=False,
unhandled_input=handle_key)
term.main_loop = loop
loop.run()
if __name__ == '__main__':
main()
|
from collections import OrderedDict
import pytest
from homeassistant.components.config import entity_registry
from homeassistant.const import ATTR_ICON
from homeassistant.helpers.entity_registry import RegistryEntry
from tests.common import MockEntity, MockEntityPlatform, mock_registry
@pytest.fixture
def client(hass, hass_ws_client):
"""Fixture that can interact with the config manager API."""
hass.loop.run_until_complete(entity_registry.async_setup(hass))
yield hass.loop.run_until_complete(hass_ws_client(hass))
async def test_list_entities(hass, client):
"""Test list entries."""
entities = OrderedDict()
entities["test_domain.name"] = RegistryEntry(
entity_id="test_domain.name",
unique_id="1234",
platform="test_platform",
name="Hello World",
)
entities["test_domain.no_name"] = RegistryEntry(
entity_id="test_domain.no_name", unique_id="6789", platform="test_platform"
)
mock_registry(hass, entities)
await client.send_json({"id": 5, "type": "config/entity_registry/list"})
msg = await client.receive_json()
assert msg["result"] == [
{
"config_entry_id": None,
"device_id": None,
"area_id": None,
"disabled_by": None,
"entity_id": "test_domain.name",
"name": "Hello World",
"icon": None,
"platform": "test_platform",
},
{
"config_entry_id": None,
"device_id": None,
"area_id": None,
"disabled_by": None,
"entity_id": "test_domain.no_name",
"name": None,
"icon": None,
"platform": "test_platform",
},
]
async def test_get_entity(hass, client):
"""Test get entry."""
mock_registry(
hass,
{
"test_domain.name": RegistryEntry(
entity_id="test_domain.name",
unique_id="1234",
platform="test_platform",
name="Hello World",
),
"test_domain.no_name": RegistryEntry(
entity_id="test_domain.no_name",
unique_id="6789",
platform="test_platform",
),
},
)
await client.send_json(
{"id": 5, "type": "config/entity_registry/get", "entity_id": "test_domain.name"}
)
msg = await client.receive_json()
assert msg["result"] == {
"config_entry_id": None,
"device_id": None,
"area_id": None,
"disabled_by": None,
"platform": "test_platform",
"entity_id": "test_domain.name",
"name": "Hello World",
"icon": None,
"original_name": None,
"original_icon": None,
"capabilities": None,
"unique_id": "1234",
}
await client.send_json(
{
"id": 6,
"type": "config/entity_registry/get",
"entity_id": "test_domain.no_name",
}
)
msg = await client.receive_json()
assert msg["result"] == {
"config_entry_id": None,
"device_id": None,
"area_id": None,
"disabled_by": None,
"platform": "test_platform",
"entity_id": "test_domain.no_name",
"name": None,
"icon": None,
"original_name": None,
"original_icon": None,
"capabilities": None,
"unique_id": "6789",
}
async def test_update_entity(hass, client):
"""Test updating entity."""
registry = mock_registry(
hass,
{
"test_domain.world": RegistryEntry(
entity_id="test_domain.world",
unique_id="1234",
# Using component.async_add_entities is equal to platform "domain"
platform="test_platform",
name="before update",
icon="icon:before update",
)
},
)
platform = MockEntityPlatform(hass)
entity = MockEntity(unique_id="1234")
await platform.async_add_entities([entity])
state = hass.states.get("test_domain.world")
assert state is not None
assert state.name == "before update"
assert state.attributes[ATTR_ICON] == "icon:before update"
# UPDATE NAME & ICON & AREA
await client.send_json(
{
"id": 6,
"type": "config/entity_registry/update",
"entity_id": "test_domain.world",
"name": "after update",
"icon": "icon:after update",
"area_id": "mock-area-id",
}
)
msg = await client.receive_json()
assert msg["result"] == {
"config_entry_id": None,
"device_id": None,
"area_id": "mock-area-id",
"disabled_by": None,
"platform": "test_platform",
"entity_id": "test_domain.world",
"name": "after update",
"icon": "icon:after update",
"original_name": None,
"original_icon": None,
"capabilities": None,
"unique_id": "1234",
}
state = hass.states.get("test_domain.world")
assert state.name == "after update"
assert state.attributes[ATTR_ICON] == "icon:after update"
# UPDATE DISABLED_BY TO USER
await client.send_json(
{
"id": 7,
"type": "config/entity_registry/update",
"entity_id": "test_domain.world",
"disabled_by": "user",
}
)
msg = await client.receive_json()
assert hass.states.get("test_domain.world") is None
assert registry.entities["test_domain.world"].disabled_by == "user"
# UPDATE DISABLED_BY TO NONE
await client.send_json(
{
"id": 8,
"type": "config/entity_registry/update",
"entity_id": "test_domain.world",
"disabled_by": None,
}
)
msg = await client.receive_json()
assert msg["result"] == {
"config_entry_id": None,
"device_id": None,
"area_id": "mock-area-id",
"disabled_by": None,
"platform": "test_platform",
"entity_id": "test_domain.world",
"name": "after update",
"icon": "icon:after update",
"original_name": None,
"original_icon": None,
"capabilities": None,
"unique_id": "1234",
}
async def test_update_entity_no_changes(hass, client):
"""Test update entity with no changes."""
mock_registry(
hass,
{
"test_domain.world": RegistryEntry(
entity_id="test_domain.world",
unique_id="1234",
# Using component.async_add_entities is equal to platform "domain"
platform="test_platform",
name="name of entity",
)
},
)
platform = MockEntityPlatform(hass)
entity = MockEntity(unique_id="1234")
await platform.async_add_entities([entity])
state = hass.states.get("test_domain.world")
assert state is not None
assert state.name == "name of entity"
await client.send_json(
{
"id": 6,
"type": "config/entity_registry/update",
"entity_id": "test_domain.world",
"name": "name of entity",
}
)
msg = await client.receive_json()
assert msg["result"] == {
"config_entry_id": None,
"device_id": None,
"area_id": None,
"disabled_by": None,
"platform": "test_platform",
"entity_id": "test_domain.world",
"name": "name of entity",
"icon": None,
"original_name": None,
"original_icon": None,
"capabilities": None,
"unique_id": "1234",
}
state = hass.states.get("test_domain.world")
assert state.name == "name of entity"
async def test_get_nonexisting_entity(client):
"""Test get entry with nonexisting entity."""
await client.send_json(
{
"id": 6,
"type": "config/entity_registry/get",
"entity_id": "test_domain.no_name",
}
)
msg = await client.receive_json()
assert not msg["success"]
async def test_update_nonexisting_entity(client):
"""Test update a nonexisting entity."""
await client.send_json(
{
"id": 6,
"type": "config/entity_registry/update",
"entity_id": "test_domain.no_name",
"name": "new-name",
}
)
msg = await client.receive_json()
assert not msg["success"]
async def test_update_entity_id(hass, client):
"""Test update entity id."""
mock_registry(
hass,
{
"test_domain.world": RegistryEntry(
entity_id="test_domain.world",
unique_id="1234",
# Using component.async_add_entities is equal to platform "domain"
platform="test_platform",
)
},
)
platform = MockEntityPlatform(hass)
entity = MockEntity(unique_id="1234")
await platform.async_add_entities([entity])
assert hass.states.get("test_domain.world") is not None
await client.send_json(
{
"id": 6,
"type": "config/entity_registry/update",
"entity_id": "test_domain.world",
"new_entity_id": "test_domain.planet",
}
)
msg = await client.receive_json()
assert msg["result"] == {
"config_entry_id": None,
"device_id": None,
"area_id": None,
"disabled_by": None,
"platform": "test_platform",
"entity_id": "test_domain.planet",
"name": None,
"icon": None,
"original_name": None,
"original_icon": None,
"capabilities": None,
"unique_id": "1234",
}
assert hass.states.get("test_domain.world") is None
assert hass.states.get("test_domain.planet") is not None
async def test_remove_entity(hass, client):
"""Test removing entity."""
registry = mock_registry(
hass,
{
"test_domain.world": RegistryEntry(
entity_id="test_domain.world",
unique_id="1234",
# Using component.async_add_entities is equal to platform "domain"
platform="test_platform",
name="before update",
)
},
)
await client.send_json(
{
"id": 6,
"type": "config/entity_registry/remove",
"entity_id": "test_domain.world",
}
)
msg = await client.receive_json()
assert msg["success"]
assert len(registry.entities) == 0
|
import mock
from paasta_tools import generate_deployments_for_service
from paasta_tools.marathon_tools import MarathonServiceConfig
def test_get_deploy_group_mappings():
fake_service = "fake_service"
fake_soa_dir = "/no/yes/maybe"
fake_service_configs = [
MarathonServiceConfig(
service=fake_service,
cluster="clusterA",
instance="main",
branch_dict=None,
config_dict={"deploy_group": "no_thanks"},
),
MarathonServiceConfig(
service=fake_service,
cluster="clusterB",
instance="main",
branch_dict=None,
config_dict={"deploy_group": "try_me"},
),
]
fake_remote_refs = {
"refs/tags/paasta-try_me-20160308T053933-deploy": "123456",
"refs/tags/paasta-clusterB.main-123-stop": "123456",
"refs/tags/paasta-okay-20160308T053933-deploy": "ijowarg",
"refs/tags/paasta-no_thanks-20160308T053933-deploy": "789009",
"refs/tags/paasta-nah-20160308T053933-deploy": "j8yiomwer",
}
expected = {
"fake_service:paasta-clusterA.main": {
"docker_image": "services-fake_service:paasta-789009",
"desired_state": "start",
"force_bounce": None,
},
"fake_service:paasta-clusterB.main": {
"docker_image": "services-fake_service:paasta-123456",
"desired_state": "stop",
"force_bounce": "123",
},
}
expected_v2 = {
"deployments": {
"try_me": {
"docker_image": "services-fake_service:paasta-123456",
"git_sha": "123456",
},
"no_thanks": {
"docker_image": "services-fake_service:paasta-789009",
"git_sha": "789009",
},
},
"controls": {
"fake_service:clusterA.main": {
"desired_state": "start",
"force_bounce": None,
},
"fake_service:clusterB.main": {
"desired_state": "stop",
"force_bounce": "123",
},
},
}
with mock.patch(
"paasta_tools.generate_deployments_for_service.get_instance_configs_for_service",
return_value=fake_service_configs,
autospec=True,
) as get_instance_configs_for_service_patch, mock.patch(
"paasta_tools.remote_git.list_remote_refs",
return_value=fake_remote_refs,
autospec=True,
) as list_remote_refs_patch:
actual, actual_v2 = generate_deployments_for_service.get_deploy_group_mappings(
fake_soa_dir, fake_service
)
get_instance_configs_for_service_patch.assert_called_once_with(
soa_dir=fake_soa_dir, service=fake_service
)
assert list_remote_refs_patch.call_count == 1
assert expected == actual
assert expected_v2 == actual_v2
def test_get_service_from_docker_image():
mock_image = (
"docker-paasta.yelpcorp.com:443/"
"services-example_service:paasta-591ae8a7b3224e3b3322370b858377dd6ef335b6"
)
actual = generate_deployments_for_service.get_service_from_docker_image(mock_image)
assert "example_service" == actual
def test_main():
fake_soa_dir = "/etc/true/null"
file_mock = mock.mock_open()
with mock.patch(
"paasta_tools.generate_deployments_for_service.parse_args",
return_value=mock.Mock(
verbose=False, soa_dir=fake_soa_dir, service="fake_service"
),
autospec=True,
) as parse_patch, mock.patch(
"os.path.abspath", return_value="ABSOLUTE", autospec=True
) as abspath_patch, mock.patch(
"paasta_tools.generate_deployments_for_service.get_deploy_group_mappings",
return_value=(
{"MAP": {"docker_image": "PINGS", "desired_state": "start"}},
mock.sentinel.v2_mappings,
),
autospec=True,
) as mappings_patch, mock.patch(
"os.path.join", return_value="JOIN", autospec=True
) as join_patch, mock.patch(
"builtins.open", file_mock, autospec=None
) as open_patch, mock.patch(
"json.dump", autospec=True
) as json_dump_patch, mock.patch(
"json.load", return_value={"OLD_MAP": "PINGS"}, autospec=True
) as json_load_patch, mock.patch(
"paasta_tools.generate_deployments_for_service.atomic_file_write", autospec=True
) as atomic_file_write_patch:
generate_deployments_for_service.main()
parse_patch.assert_called_once_with()
abspath_patch.assert_called_once_with(fake_soa_dir)
mappings_patch.assert_called_once_with(
soa_dir="ABSOLUTE", service="fake_service"
),
join_patch.assert_any_call(
"ABSOLUTE", "fake_service", generate_deployments_for_service.TARGET_FILE
),
assert join_patch.call_count == 2
atomic_file_write_patch.assert_called_once_with("JOIN")
open_patch.assert_called_once_with("JOIN", "r")
json_dump_patch.assert_called_once_with(
{
"v1": {"MAP": {"docker_image": "PINGS", "desired_state": "start"}},
"v2": mock.sentinel.v2_mappings,
},
atomic_file_write_patch.return_value.__enter__.return_value,
)
json_load_patch.assert_called_once_with(
file_mock.return_value.__enter__.return_value
)
# test no update to file if content unchanged
json_load_patch.return_value = {
"v1": {"MAP": {"docker_image": "PINGS", "desired_state": "start"}},
"v2": mock.sentinel.v2_mappings,
}
json_dump_patch.reset_mock()
generate_deployments_for_service.main()
assert not json_dump_patch.called
# test IOError path
open_patch.side_effect = IOError
generate_deployments_for_service.main()
assert json_dump_patch.called
def test_get_deployments_dict():
branch_mappings = {
"app1": {
"docker_image": "image1",
"desired_state": "start",
"force_bounce": "1418951213",
},
"app2": {
"docker_image": "image2",
"desired_state": "stop",
"force_bounce": "1412345678",
},
}
v2_mappings = mock.sentinel.v2_mappings
assert generate_deployments_for_service.get_deployments_dict_from_deploy_group_mappings(
branch_mappings, v2_mappings
) == {
"v1": branch_mappings,
"v2": mock.sentinel.v2_mappings,
}
def test_get_desired_state_understands_tags():
remote_refs = {
"refs/heads/master": "7894E99E6805E9DC8C1D8EB26229E3E2243878C9",
"refs/remotes/origin/HEAD": "EE8796C4E4295B7D4087E3EB73662B99218DAD94",
"refs/remotes/origin/master": "5F7C10B320A4EDBC4773C5FEFB1CD7B7A84FCB69",
"refs/tags/paasta-paasta-cluster.instance-20150721T183905-start": "4EF01B5A574B519AB546309E89F72972A33B6B75",
"refs/tags/paasta-paasta-cluster.instance-20151106T233211-stop": "A5AB2A012DC238D4F6DD269C40A4BD3A99D52B1F",
"refs/tags/paasta-cluster.instance-20160202T233805-start": "BE68473F98F619F26FD7824B8F56F9A7ABAEB860",
"refs/tags/paasta-cluster2.someinstance-20160202T233805-start": "D6B9A0F86DC54A132FBB7747460F53F48C9AEEAD",
"refs/tags/paasta-cluster2.someinstance-20160205T182601-stop": "9085FD67ED1BB5FADAFA7F2AFAF8DEDEE7342711",
"refs/tags/paasta-cluster.instance-20160308T053933-deploy": "4EF01B5A574B519AB546309E89F72972A33B6B75",
"refs/tags/paasta-cluster2.someinstance-20160308T053933-deploy": "9085FD67ED1BB5FADAFA7F2AFAF8DEDEE7342711",
}
branch = "cluster2.someinstance"
sha = "9085FD67ED1BB5FADAFA7F2AFAF8DEDEE7342711"
expected_desired_state = ("stop", "20160205T182601")
actual = generate_deployments_for_service.get_desired_state_by_branch_and_sha(
remote_refs
)[(branch, sha)]
assert actual == expected_desired_state
|
import json
from weblate.memory.models import Memory
from weblate.utils.management.base import BaseCommand
class Command(BaseCommand):
"""Command for exporting translation memory."""
help = "exports translation memory in JSON format"
def add_arguments(self, parser):
super().add_arguments(parser)
parser.add_argument(
"--indent",
default=2,
dest="indent",
type=int,
help=("Specifies the indent level to use when " "pretty-printing output."),
)
parser.add_argument(
"--backup",
action="store_true",
help="Store backup to the backups directory in the DATA_DIR",
)
def handle(self, *args, **options):
memory = Memory.objects.all().prefetch_lang()
self.stdout.ending = None
json.dump(
[item.as_dict() for item in memory], self.stdout, indent=options["indent"]
)
self.stdout.write("\n")
|
import os
YUM_PACKAGES = ('bc gengetopt libevent-devel '
'google-perftools-devel scons')
APT_PACKAGES = ('bc gengetopt libevent-dev '
'libgoogle-perftools-dev scons')
OLDISIM_GIT = 'https://github.com/GoogleCloudPlatform/oldisim.git'
OLDISIM_DIR = 'oldisim'
OLDISIM_VERSION = 'v0.1'
BINARY_BASE = 'release/workloads/search'
def _Install(vm, packages):
vm.Install('build_tools')
vm.InstallPackages(packages)
vm.RemoteCommand('git clone --recursive %s' % OLDISIM_GIT)
vm.RemoteCommand('cd %s && git checkout %s && '
'scons -j$(cat /proc/cpuinfo | grep processor | wc -l)' %
(OLDISIM_DIR, OLDISIM_VERSION))
def YumInstall(vm):
"""Installs oldisim dependencies on the VM."""
vm.InstallEpelRepo()
_Install(vm, YUM_PACKAGES)
def AptInstall(vm):
"""Installs oldisim dependencies on the VM."""
_Install(vm, APT_PACKAGES)
def Path(name):
"""Returns the path of a file within the package."""
return os.path.join(OLDISIM_DIR, name)
def BinaryPath(name):
"""Returns the path of a binary within the package."""
return os.path.join(OLDISIM_DIR, BINARY_BASE, name)
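# Hypothetical usage sketch (not part of the original package definition):
# Path() and BinaryPath() simply join names onto the cloned checkout, so the
# results are paths relative to the VM's working directory. The file names
# below are invented for illustration.
if __name__ == '__main__':
  print(Path('README.md'))            # oldisim/README.md
  print(BinaryPath('search_server'))  # oldisim/release/workloads/search/search_server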
|
from .common import setup_platform
WIFI_ENABLED = False
async def test_sensor(hass, requests_mock):
"""Test the Ring sensors."""
await setup_platform(hass, "sensor")
front_battery_state = hass.states.get("sensor.front_battery")
assert front_battery_state is not None
assert front_battery_state.state == "80"
front_door_battery_state = hass.states.get("sensor.front_door_battery")
assert front_door_battery_state is not None
assert front_door_battery_state.state == "100"
downstairs_volume_state = hass.states.get("sensor.downstairs_volume")
assert downstairs_volume_state is not None
assert downstairs_volume_state.state == "2"
front_door_last_activity_state = hass.states.get("sensor.front_door_last_activity")
assert front_door_last_activity_state is not None
downstairs_wifi_signal_strength_state = hass.states.get(
"sensor.downstairs_wifi_signal_strength"
)
if not WIFI_ENABLED:
return
assert downstairs_wifi_signal_strength_state is not None
assert downstairs_wifi_signal_strength_state.state == "-39"
front_door_wifi_signal_category_state = hass.states.get(
"sensor.front_door_wifi_signal_category"
)
assert front_door_wifi_signal_category_state is not None
assert front_door_wifi_signal_category_state.state == "good"
front_door_wifi_signal_strength_state = hass.states.get(
"sensor.front_door_wifi_signal_strength"
)
assert front_door_wifi_signal_strength_state is not None
assert front_door_wifi_signal_strength_state.state == "-58"
|
import logging
import requests
import voluptuous as vol
from homeassistant.components.cover import PLATFORM_SCHEMA, CoverEntity
from homeassistant.const import (
CONF_ACCESS_TOKEN,
CONF_COVERS,
CONF_DEVICE,
CONF_NAME,
CONF_PASSWORD,
CONF_USERNAME,
STATE_CLOSED,
STATE_OPEN,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import track_utc_time_change
_LOGGER = logging.getLogger(__name__)
ATTR_AVAILABLE = "available"
ATTR_SENSOR_STRENGTH = "sensor_reflection_rate"
ATTR_SIGNAL_STRENGTH = "wifi_signal_strength"
ATTR_TIME_IN_STATE = "time_in_state"
DEFAULT_NAME = "Garadget"
STATE_CLOSING = "closing"
STATE_OFFLINE = "offline"
STATE_OPENING = "opening"
STATE_STOPPED = "stopped"
STATES_MAP = {
"open": STATE_OPEN,
"opening": STATE_OPENING,
"closed": STATE_CLOSED,
"closing": STATE_CLOSING,
"stopped": STATE_STOPPED,
}
COVER_SCHEMA = vol.Schema(
{
vol.Optional(CONF_ACCESS_TOKEN): cv.string,
vol.Optional(CONF_DEVICE): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PASSWORD): cv.string,
vol.Optional(CONF_USERNAME): cv.string,
}
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_COVERS): cv.schema_with_slug_keys(COVER_SCHEMA)}
)
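# Hypothetical sketch (not part of the original platform): COVER_SCHEMA
# validates one cover's options, and voluptuous fills in the default name
# when it is omitted. The device id below is invented for illustration.
def _example_cover_options():
    """Return an example options dict validated by COVER_SCHEMA."""
    return COVER_SCHEMA({"device": "1234567890abcdef"})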
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Garadget covers."""
covers = []
devices = config.get(CONF_COVERS)
for device_id, device_config in devices.items():
args = {
"name": device_config.get(CONF_NAME),
"device_id": device_config.get(CONF_DEVICE, device_id),
"username": device_config.get(CONF_USERNAME),
"password": device_config.get(CONF_PASSWORD),
"access_token": device_config.get(CONF_ACCESS_TOKEN),
}
covers.append(GaradgetCover(hass, args))
add_entities(covers)
class GaradgetCover(CoverEntity):
"""Representation of a Garadget cover."""
def __init__(self, hass, args):
"""Initialize the cover."""
self.particle_url = "https://api.particle.io"
self.hass = hass
self._name = args["name"]
self.device_id = args["device_id"]
self.access_token = args["access_token"]
        self._obtained_token = False
self._username = args["username"]
self._password = args["password"]
self._state = None
self.time_in_state = None
self.signal = None
self.sensor = None
self._unsub_listener_cover = None
self._available = True
if self.access_token is None:
self.access_token = self.get_token()
self._obtained_token = True
try:
if self._name is None:
doorconfig = self._get_variable("doorConfig")
if doorconfig["nme"] is not None:
self._name = doorconfig["nme"]
self.update()
except requests.exceptions.ConnectionError as ex:
_LOGGER.error("Unable to connect to server: %(reason)s", {"reason": ex})
self._state = STATE_OFFLINE
self._available = False
self._name = DEFAULT_NAME
except KeyError:
_LOGGER.warning(
"Garadget device %(device)s seems to be offline",
{"device": self.device_id},
)
self._name = DEFAULT_NAME
self._state = STATE_OFFLINE
self._available = False
def __del__(self):
"""Try to remove token."""
if self._obtained_token is True:
if self.access_token is not None:
self.remove_token()
@property
def name(self):
"""Return the name of the cover."""
return self._name
@property
def available(self):
"""Return True if entity is available."""
return self._available
@property
def device_state_attributes(self):
"""Return the device state attributes."""
data = {}
if self.signal is not None:
data[ATTR_SIGNAL_STRENGTH] = self.signal
if self.time_in_state is not None:
data[ATTR_TIME_IN_STATE] = self.time_in_state
if self.sensor is not None:
data[ATTR_SENSOR_STRENGTH] = self.sensor
if self.access_token is not None:
data[CONF_ACCESS_TOKEN] = self.access_token
return data
@property
def is_closed(self):
"""Return if the cover is closed."""
if self._state is None:
return None
return self._state == STATE_CLOSED
@property
def device_class(self):
"""Return the class of this device, from component DEVICE_CLASSES."""
return "garage"
def get_token(self):
"""Get new token for usage during this session."""
args = {
"grant_type": "password",
"username": self._username,
"password": self._password,
}
url = f"{self.particle_url}/oauth/token"
ret = requests.post(url, auth=("particle", "particle"), data=args, timeout=10)
try:
return ret.json()["access_token"]
except KeyError:
_LOGGER.error("Unable to retrieve access token")
def remove_token(self):
"""Remove authorization token from API."""
url = f"{self.particle_url}/v1/access_tokens/{self.access_token}"
ret = requests.delete(url, auth=(self._username, self._password), timeout=10)
return ret.text
def _start_watcher(self, command):
"""Start watcher."""
_LOGGER.debug("Starting Watcher for command: %s ", command)
if self._unsub_listener_cover is None:
self._unsub_listener_cover = track_utc_time_change(
self.hass, self._check_state
)
def _check_state(self, now):
"""Check the state of the service during an operation."""
self.schedule_update_ha_state(True)
def close_cover(self, **kwargs):
"""Close the cover."""
if self._state not in ["close", "closing"]:
ret = self._put_command("setState", "close")
self._start_watcher("close")
return ret.get("return_value") == 1
def open_cover(self, **kwargs):
"""Open the cover."""
if self._state not in ["open", "opening"]:
ret = self._put_command("setState", "open")
self._start_watcher("open")
return ret.get("return_value") == 1
def stop_cover(self, **kwargs):
"""Stop the door where it is."""
if self._state not in ["stopped"]:
ret = self._put_command("setState", "stop")
self._start_watcher("stop")
return ret["return_value"] == 1
def update(self):
"""Get updated status from API."""
try:
status = self._get_variable("doorStatus")
_LOGGER.debug("Current Status: %s", status["status"])
self._state = STATES_MAP.get(status["status"])
self.time_in_state = status["time"]
self.signal = status["signal"]
self.sensor = status["sensor"]
self._available = True
except requests.exceptions.ConnectionError as ex:
_LOGGER.error("Unable to connect to server: %(reason)s", {"reason": ex})
self._state = STATE_OFFLINE
except KeyError:
_LOGGER.warning(
"Garadget device %(device)s seems to be offline",
{"device": self.device_id},
)
self._state = STATE_OFFLINE
if self._state not in [STATE_CLOSING, STATE_OPENING]:
if self._unsub_listener_cover is not None:
self._unsub_listener_cover()
self._unsub_listener_cover = None
def _get_variable(self, var):
"""Get latest status."""
url = f"{self.particle_url}/v1/devices/{self.device_id}/{var}?access_token={self.access_token}"
ret = requests.get(url, timeout=10)
result = {}
for pairs in ret.json()["result"].split("|"):
key = pairs.split("=")
result[key[0]] = key[1]
return result
def _put_command(self, func, arg=None):
"""Send commands to API."""
params = {"access_token": self.access_token}
if arg:
params["command"] = arg
url = f"{self.particle_url}/v1/devices/{self.device_id}/{func}"
ret = requests.post(url, data=params, timeout=10)
return ret.json()
|
import os
import re
try:
from rfc822 import Message
except ImportError:
# Python 3
from email import message_from_file as Message
import unittest
from lxml.tests.common_imports import doctest
from lxml.doctestcompare import LHTMLOutputChecker
from lxml.html.clean import clean, Cleaner
feed_dirs = [
os.path.join(os.path.dirname(__file__), 'feedparser-data'),
os.path.join(os.path.dirname(__file__), 'hackers-org-data'),
]
bar_re = re.compile(r"-----+")
class DummyInput:
def __init__(self, **kw):
for name, value in kw.items():
setattr(self, name, value)
class FeedTestCase(unittest.TestCase):
def __init__(self, filename):
self.filename = filename
unittest.TestCase.__init__(self)
def parse(self):
f = open(self.filename, 'r')
headers = Message(f)
c = f.read()
f.close()
if not c.strip():
c = headers.get_payload()
if not headers.keys():
raise Exception(
"File %s has no headers" % self.filename)
self.description = headers['Description']
self.expect = headers.get('Expect', '')
self.ignore = headers.get('Ignore')
self.options = [
o.strip() for o in headers.get('Options', '').split(',')
if o.strip()]
parts = bar_re.split(c)
self.input = parts[0].rstrip() + '\n'
if parts[1:]:
self.expect = parts[1].rstrip() + '\n'
else:
self.expect = None
def runTest(self):
self.parse()
if self.ignore:
# We've marked this test to be ignored.
return
kw = {}
for name in self.options:
if name.startswith('-'):
kw[name[1:]] = False
else:
kw[name] = True
if kw.get('clean', True):
transformed = Cleaner(**kw).clean_html(self.input)
else:
transformed = self.input
assert self.expect is not None, (
"No expected output in %s" % self.filename)
checker = LHTMLOutputChecker()
if not checker.check_output(self.expect, transformed, 0):
result = checker.output_difference(
DummyInput(want=self.expect), transformed, 0)
#result += '\noptions: %s %r' % (', '.join(self.options), kw)
#result += repr(transformed)
raise Exception("\n"+result)
def shortDescription(self):
return self.filename
def test_suite():
suite = unittest.TestSuite()
for dir in feed_dirs:
for fn in os.listdir(dir):
fn = os.path.join(dir, fn)
if fn.endswith('.data'):
case = FeedTestCase(fn)
suite.addTests([case])
# This is my lazy way of stopping on first error:
try:
case.runTest()
except:
break
return suite
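# Hypothetical sketch (not part of the original test module): the Cleaner
# class exercised by FeedTestCase can also be run directly on a snippet to
# see what it strips, e.g. the <script> element below.
if __name__ == '__main__':
    print(Cleaner(javascript=True).clean_html(
        '<div><script>window.x = 1</script><p>kept</p></div>'))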
|
from typing import Any, Awaitable, Callable, Dict, Optional, Union
from homeassistant import config_entries
from .typing import HomeAssistantType
DiscoveryFunctionType = Callable[[], Union[Awaitable[bool], bool]]
class DiscoveryFlowHandler(config_entries.ConfigFlow):
"""Handle a discovery config flow."""
VERSION = 1
def __init__(
self,
domain: str,
title: str,
discovery_function: DiscoveryFunctionType,
connection_class: str,
) -> None:
"""Initialize the discovery config flow."""
self._domain = domain
self._title = title
self._discovery_function = discovery_function
self.CONNECTION_CLASS = connection_class # pylint: disable=invalid-name
async def async_step_user(
self, user_input: Optional[Dict[str, Any]] = None
) -> Dict[str, Any]:
"""Handle a flow initialized by the user."""
if self._async_current_entries():
return self.async_abort(reason="single_instance_allowed")
await self.async_set_unique_id(self._domain, raise_on_progress=False)
return await self.async_step_confirm()
async def async_step_confirm(
self, user_input: Optional[Dict[str, Any]] = None
) -> Dict[str, Any]:
"""Confirm setup."""
if user_input is None:
return self.async_show_form(step_id="confirm")
if self.source == config_entries.SOURCE_USER:
# Get current discovered entries.
in_progress = self._async_in_progress()
has_devices = in_progress
if not has_devices:
has_devices = await self.hass.async_add_job( # type: ignore
self._discovery_function, self.hass
)
if not has_devices:
return self.async_abort(reason="no_devices_found")
# Cancel the discovered one.
assert self.hass is not None
for flow in in_progress:
self.hass.config_entries.flow.async_abort(flow["flow_id"])
if self._async_current_entries():
return self.async_abort(reason="single_instance_allowed")
return self.async_create_entry(title=self._title, data={})
async def async_step_discovery(
self, discovery_info: Dict[str, Any]
) -> Dict[str, Any]:
"""Handle a flow initialized by discovery."""
if self._async_in_progress() or self._async_current_entries():
return self.async_abort(reason="single_instance_allowed")
await self.async_set_unique_id(self._domain)
return await self.async_step_confirm()
async_step_zeroconf = async_step_discovery
async_step_ssdp = async_step_discovery
async_step_mqtt = async_step_discovery
async_step_homekit = async_step_discovery
async def async_step_import(self, _: Optional[Dict[str, Any]]) -> Dict[str, Any]:
"""Handle a flow initialized by import."""
if self._async_current_entries():
return self.async_abort(reason="single_instance_allowed")
# Cancel other flows.
assert self.hass is not None
in_progress = self._async_in_progress()
for flow in in_progress:
self.hass.config_entries.flow.async_abort(flow["flow_id"])
return self.async_create_entry(title=self._title, data={})
def register_discovery_flow(
domain: str,
title: str,
discovery_function: DiscoveryFunctionType,
connection_class: str,
) -> None:
"""Register flow for discovered integrations that not require auth."""
class DiscoveryFlow(DiscoveryFlowHandler):
"""Discovery flow handler."""
def __init__(self) -> None:
super().__init__(domain, title, discovery_function, connection_class)
config_entries.HANDLERS.register(domain)(DiscoveryFlow)
class WebhookFlowHandler(config_entries.ConfigFlow):
"""Handle a webhook config flow."""
VERSION = 1
def __init__(
self,
domain: str,
title: str,
description_placeholder: dict,
allow_multiple: bool,
) -> None:
"""Initialize the discovery config flow."""
self._domain = domain
self._title = title
self._description_placeholder = description_placeholder
self._allow_multiple = allow_multiple
async def async_step_user(
self, user_input: Optional[Dict[str, Any]] = None
) -> Dict[str, Any]:
"""Handle a user initiated set up flow to create a webhook."""
if not self._allow_multiple and self._async_current_entries():
return self.async_abort(reason="single_instance_allowed")
if user_input is None:
return self.async_show_form(step_id="user")
assert self.hass is not None
webhook_id = self.hass.components.webhook.async_generate_id()
if (
"cloud" in self.hass.config.components
and self.hass.components.cloud.async_active_subscription()
):
webhook_url = await self.hass.components.cloud.async_create_cloudhook(
webhook_id
)
cloudhook = True
else:
webhook_url = self.hass.components.webhook.async_generate_url(webhook_id)
cloudhook = False
self._description_placeholder["webhook_url"] = webhook_url
return self.async_create_entry(
title=self._title,
data={"webhook_id": webhook_id, "cloudhook": cloudhook},
description_placeholders=self._description_placeholder,
)
def register_webhook_flow(
domain: str, title: str, description_placeholder: dict, allow_multiple: bool = False
) -> None:
"""Register flow for webhook integrations."""
class WebhookFlow(WebhookFlowHandler):
"""Webhook flow handler."""
def __init__(self) -> None:
super().__init__(domain, title, description_placeholder, allow_multiple)
config_entries.HANDLERS.register(domain)(WebhookFlow)
async def webhook_async_remove_entry(
hass: HomeAssistantType, entry: config_entries.ConfigEntry
) -> None:
"""Remove a webhook config entry."""
if not entry.data.get("cloudhook") or "cloud" not in hass.config.components:
return
await hass.components.cloud.async_delete_cloudhook(entry.data["webhook_id"])
|
import base64
import binascii
import enum
from typing import cast, Optional
from PyQt5.QtWidgets import QWidget
from PyQt5.QtCore import pyqtSignal, pyqtSlot, QObject, QEvent
from PyQt5.QtGui import QCloseEvent
from qutebrowser.browser import eventfilter
from qutebrowser.config import configfiles
from qutebrowser.utils import log, usertypes, utils
from qutebrowser.keyinput import modeman
from qutebrowser.misc import miscwidgets, objects
def create(*, splitter: 'miscwidgets.InspectorSplitter',
win_id: int,
parent: QWidget = None) -> 'AbstractWebInspector':
"""Get a WebKitInspector/WebEngineInspector.
Args:
splitter: InspectorSplitter where the inspector can be placed.
win_id: The window ID this inspector is associated with.
parent: The Qt parent to set.
"""
# Importing modules here so we don't depend on QtWebEngine without the
# argument and to avoid circular imports.
if objects.backend == usertypes.Backend.QtWebEngine:
from qutebrowser.browser.webengine import webengineinspector
return webengineinspector.WebEngineInspector(splitter, win_id, parent)
elif objects.backend == usertypes.Backend.QtWebKit:
from qutebrowser.browser.webkit import webkitinspector
return webkitinspector.WebKitInspector(splitter, win_id, parent)
raise utils.Unreachable(objects.backend)
class Position(enum.Enum):
"""Where the inspector is shown."""
right = enum.auto()
left = enum.auto()
top = enum.auto()
bottom = enum.auto()
window = enum.auto()
class Error(Exception):
"""Raised when the inspector could not be initialized."""
class _EventFilter(QObject):
"""Event filter to enter insert mode when inspector was clicked.
We need to use this with a ChildEventFilter (rather than just overriding
mousePressEvent) for two reasons:
- For QtWebEngine, we need to listen for mouse events on its focusProxy(),
which can change when another page loads (which might be possible with an
inspector as well?)
- For QtWebKit, we need to listen for mouse events on the QWebView used by
the QWebInspector.
"""
clicked = pyqtSignal()
def eventFilter(self, _obj: QObject, event: QEvent) -> bool:
"""Translate mouse presses to a clicked signal."""
if event.type() == QEvent.MouseButtonPress:
self.clicked.emit()
return False
class AbstractWebInspector(QWidget):
"""Base class for QtWebKit/QtWebEngine inspectors.
Attributes:
_position: position of the inspector (right/left/top/bottom/window)
_splitter: InspectorSplitter where the inspector can be placed.
Signals:
recreate: Emitted when the inspector should be recreated.
"""
recreate = pyqtSignal()
def __init__(self, splitter: 'miscwidgets.InspectorSplitter',
win_id: int,
parent: QWidget = None) -> None:
super().__init__(parent)
self._widget = cast(QWidget, None)
self._layout = miscwidgets.WrapperLayout(self)
self._splitter = splitter
self._position: Optional[Position] = None
self._win_id = win_id
self._event_filter = _EventFilter(parent=self)
self._event_filter.clicked.connect(self._on_clicked)
self._child_event_filter = eventfilter.ChildEventFilter(
eventfilter=self._event_filter,
parent=self)
def _set_widget(self, widget: QWidget) -> None:
self._widget = widget
self._widget.setWindowTitle("Web Inspector")
self._widget.installEventFilter(self._child_event_filter)
self._layout.wrap(self, self._widget)
def _load_position(self) -> Position:
"""Get the last position the inspector was in."""
pos = configfiles.state['inspector'].get('position', 'right')
return Position[pos]
def _save_position(self, position: Position) -> None:
"""Save the last position the inspector was in."""
configfiles.state['inspector']['position'] = position.name
def _needs_recreate(self) -> bool:
"""Whether the inspector needs recreation when detaching to a window.
This is done due to an unknown QtWebEngine bug which sometimes prevents
inspector windows from showing up.
Needs to be overridden by subclasses.
"""
return False
@pyqtSlot()
def _on_clicked(self) -> None:
"""Enter insert mode if a docked inspector was clicked."""
if self._position != Position.window:
modeman.enter(self._win_id, usertypes.KeyMode.insert,
reason='Inspector clicked', only_if_normal=True)
def set_position(self, position: Optional[Position]) -> None:
"""Set the position of the inspector.
If the position is None, the last known position is used.
"""
if position is None:
position = self._load_position()
else:
self._save_position(position)
if position == self._position:
self.toggle()
return
if (position == Position.window and
self._position is not None and
self._needs_recreate()):
# Detaching to window
self.recreate.emit()
self.shutdown()
return
elif position == Position.window:
self.setParent(None) # type: ignore[call-overload]
self._load_state_geometry()
else:
self._splitter.set_inspector(self, position)
self._position = position
self._widget.show()
self.show()
def toggle(self) -> None:
"""Toggle visibility of the inspector."""
if self.isVisible():
self.hide()
else:
self.show()
def _load_state_geometry(self) -> None:
"""Load the geometry from the state file."""
try:
data = configfiles.state['inspector']['window']
geom = base64.b64decode(data, validate=True)
except KeyError:
# First start
pass
except binascii.Error:
log.misc.exception("Error while reading geometry")
else:
log.init.debug("Loading geometry from {!r}".format(geom))
ok = self._widget.restoreGeometry(geom)
if not ok:
log.init.warning("Error while loading geometry.")
def closeEvent(self, _e: QCloseEvent) -> None:
"""Save the geometry when closed."""
data = self._widget.saveGeometry().data()
geom = base64.b64encode(data).decode('ASCII')
configfiles.state['inspector']['window'] = geom
def inspect(self, page: QWidget) -> None:
"""Inspect the given QWeb(Engine)Page."""
raise NotImplementedError
@pyqtSlot()
def shutdown(self) -> None:
"""Clean up the inspector."""
self.close()
self.deleteLater()
|
from flexx import flx
# Prepare data array, preferably using Numpy
try:
import numpy as np
data_array = np.random.normal(0, 1, 1000)
except ImportError:
# Fallback to ctypes when numpy is not available
import random
import ctypes
from flexx.app import bsdf_lite
# Create data array
data_array = (ctypes.c_double * 1000)()
for i in range(len(data_array)):
data_array[i] = random.random()
# Add extension that encodes a ctypes array to ndarray extension data
@flx.serializer.add_extension
class CtypesArrayExtension(bsdf_lite.Extension):
name = 'ndarray'
cls = ctypes.Array
typemap = {
ctypes.c_bool: 'uint8', ctypes.c_int8: 'int8', ctypes.c_uint8: 'uint8',
ctypes.c_int16: 'int16', ctypes.c_uint16: 'uint16',
ctypes.c_int32: 'int32', ctypes.c_uint32: 'uint32',
ctypes.c_int64: 'int64', ctypes.c_uint64: 'uint64',
ctypes.c_float: 'float32', ctypes.c_double: 'float64',
}
def encode(self, s, v):
return dict(shape=(len(v), ),
dtype=self.typemap[v._type_],
data=bytes(v))
class SendData(flx.PyComponent):
""" A simple example demonstrating sending binary data from Python to JS.
"""
def init(self):
self.view = SendDataView()
self.view.set_data(data_array)
class SendDataView(flx.Widget):
""" A widget that displays array data.
"""
def init(self):
self.label = flx.Label()
self.apply_style('overflow-y: scroll;') # enable scrolling
@flx.action
def set_data(self, data):
# We receive the data as a typed array.
        # If we were to send raw bytes, we would receive a DataView, which
        # we can map to e.g. an Int16Array like so:
# data = Int16Array(blob.buffer, blob.byteOffset, blob.byteLength/2)
# Show the data as text. We could also e.g. plot it.
text = ['%i: %f<br />' % (i, data[i]) for i in range(len(data))]
        header = 'This data (%i elements) was sent in binary form:<br />' % len(data)
self.label.set_html(header + ''.join(text))
if __name__ == '__main__':
m = flx.launch(SendData, 'app')
flx.run()
|
from __future__ import division
import math
import numpy as np
def tile_images(imgs, n_col, pad=2, fill=0):
"""Make a tile of images
Args:
imgs (numpy.ndarray): A batch of images whose shape is BCHW.
n_col (int): The number of columns in a tile.
pad (int or tuple of two ints): :obj:`pad_y, pad_x`. This is the
amounts of padding in y and x directions. If this is an integer,
the amounts of padding in the two directions are the same.
The default value is 2.
fill (float, tuple or ~numpy.ndarray): The value of padded pixels.
If it is :class:`numpy.ndarray`,
its shape should be :math:`(C, 1, 1)`,
where :math:`C` is the number of channels of :obj:`img`.
Returns:
~numpy.ndarray:
An image array in CHW format.
The size of this image is
        :math:`((H + pad_{y}) \\times \\lceil B / n_{col} \\rceil,
(W + pad_{x}) \\times n_{col})`.
"""
if isinstance(pad, int):
pad = (pad, pad)
pad_y, pad_x = pad
B, C, H, W = imgs.shape
n_col = min(n_col, B)
n_row = int(math.ceil(B / n_col))
shape = (C,
(H + pad_y) * n_row,
(W + pad_x) * n_col)
tile = np.empty(shape, dtype=imgs.dtype)
tile[:] = np.array(fill).reshape((-1, 1, 1))
k = 0
for y in range(n_row):
for x in range(n_col):
if k >= B:
break
start_y = y * (H + pad_y) + pad_y // 2
start_x = x * (W + pad_x) + pad_x // 2
tile[:,
start_y: start_y + H,
start_x: start_x + W] = imgs[k]
k += 1
return tile
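# Hypothetical usage sketch (not part of the original module): tile ten
# random CHW images into four columns; with the defaults above the result
# has shape (3, (32 + 2) * 3, (32 + 2) * 4) == (3, 102, 136).
if __name__ == '__main__':
    imgs = np.random.uniform(size=(10, 3, 32, 32)).astype(np.float32)
    print(tile_images(imgs, n_col=4).shape)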
|
import os.path as op
import numpy as np
import pytest
from numpy.testing import assert_allclose
from mne.datasets import testing
from mne.io import read_raw_fif
from mne.preprocessing import regress_artifact, create_eog_epochs
data_path = testing.data_path(download=False)
raw_fname = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc_raw.fif')
@testing.requires_testing_data
def test_regress_artifact():
"""Test regressing data."""
raw = read_raw_fif(raw_fname).pick_types(meg=False, eeg=True, eog=True)
raw.load_data()
epochs = create_eog_epochs(raw)
epochs.apply_baseline((None, None))
orig_data = epochs.get_data('eeg')
orig_norm = np.linalg.norm(orig_data)
epochs_clean, betas = regress_artifact(epochs)
regress_artifact(epochs, betas=betas, copy=False) # inplace, and w/betas
assert_allclose(epochs_clean.get_data(), epochs.get_data())
clean_data = epochs_clean.get_data('eeg')
clean_norm = np.linalg.norm(clean_data)
assert orig_norm / 2 > clean_norm > orig_norm / 10
with pytest.raises(ValueError, match=r'Invalid value.*betas\.shape.*'):
regress_artifact(epochs, betas=betas[:-1])
with pytest.raises(ValueError, match='cannot be contained in'):
regress_artifact(epochs, picks='eog', picks_artifact='eog')
|
import urwid
palette = [('I say', 'default,bold', 'default', 'bold'),]
ask = urwid.Edit(('I say', u"What is your name?\n"))
reply = urwid.Text(u"")
button = urwid.Button(u'Exit')
div = urwid.Divider()
pile = urwid.Pile([ask, div, reply, div, button])
top = urwid.Filler(pile, valign='top')
def on_ask_change(edit, new_edit_text):
reply.set_text(('I say', u"Nice to meet you, %s" % new_edit_text))
def on_exit_clicked(button):
raise urwid.ExitMainLoop()
urwid.connect_signal(ask, 'change', on_ask_change)
urwid.connect_signal(button, 'click', on_exit_clicked)
urwid.MainLoop(top, palette).run()
|
from typing import List, Optional
from aioesphomeapi import (
ClimateAction,
ClimateFanMode,
ClimateInfo,
ClimateMode,
ClimateState,
ClimateSwingMode,
)
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
ATTR_HVAC_MODE,
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
CURRENT_HVAC_COOL,
CURRENT_HVAC_DRY,
CURRENT_HVAC_FAN,
CURRENT_HVAC_HEAT,
CURRENT_HVAC_IDLE,
CURRENT_HVAC_OFF,
FAN_AUTO,
FAN_DIFFUSE,
FAN_FOCUS,
FAN_HIGH,
FAN_LOW,
FAN_MEDIUM,
FAN_MIDDLE,
FAN_OFF,
FAN_ON,
HVAC_MODE_COOL,
HVAC_MODE_DRY,
HVAC_MODE_FAN_ONLY,
HVAC_MODE_HEAT,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_OFF,
PRESET_AWAY,
PRESET_HOME,
SUPPORT_FAN_MODE,
SUPPORT_PRESET_MODE,
SUPPORT_SWING_MODE,
SUPPORT_TARGET_TEMPERATURE,
SUPPORT_TARGET_TEMPERATURE_RANGE,
SWING_BOTH,
SWING_HORIZONTAL,
SWING_OFF,
SWING_VERTICAL,
)
from homeassistant.const import (
ATTR_TEMPERATURE,
PRECISION_HALVES,
PRECISION_TENTHS,
PRECISION_WHOLE,
TEMP_CELSIUS,
)
from . import (
EsphomeEntity,
esphome_map_enum,
esphome_state_property,
platform_async_setup_entry,
)
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up ESPHome climate devices based on a config entry."""
await platform_async_setup_entry(
hass,
entry,
async_add_entities,
component_key="climate",
info_type=ClimateInfo,
entity_type=EsphomeClimateEntity,
state_type=ClimateState,
)
@esphome_map_enum
def _climate_modes():
return {
ClimateMode.OFF: HVAC_MODE_OFF,
ClimateMode.AUTO: HVAC_MODE_HEAT_COOL,
ClimateMode.COOL: HVAC_MODE_COOL,
ClimateMode.HEAT: HVAC_MODE_HEAT,
ClimateMode.FAN_ONLY: HVAC_MODE_FAN_ONLY,
ClimateMode.DRY: HVAC_MODE_DRY,
}
@esphome_map_enum
def _climate_actions():
return {
ClimateAction.OFF: CURRENT_HVAC_OFF,
ClimateAction.COOLING: CURRENT_HVAC_COOL,
ClimateAction.HEATING: CURRENT_HVAC_HEAT,
ClimateAction.IDLE: CURRENT_HVAC_IDLE,
ClimateAction.DRYING: CURRENT_HVAC_DRY,
ClimateAction.FAN: CURRENT_HVAC_FAN,
}
@esphome_map_enum
def _fan_modes():
return {
ClimateFanMode.ON: FAN_ON,
ClimateFanMode.OFF: FAN_OFF,
ClimateFanMode.AUTO: FAN_AUTO,
ClimateFanMode.LOW: FAN_LOW,
ClimateFanMode.MEDIUM: FAN_MEDIUM,
ClimateFanMode.HIGH: FAN_HIGH,
ClimateFanMode.MIDDLE: FAN_MIDDLE,
ClimateFanMode.FOCUS: FAN_FOCUS,
ClimateFanMode.DIFFUSE: FAN_DIFFUSE,
}
@esphome_map_enum
def _swing_modes():
return {
ClimateSwingMode.OFF: SWING_OFF,
ClimateSwingMode.BOTH: SWING_BOTH,
ClimateSwingMode.VERTICAL: SWING_VERTICAL,
ClimateSwingMode.HORIZONTAL: SWING_HORIZONTAL,
}
class EsphomeClimateEntity(EsphomeEntity, ClimateEntity):
"""A climate implementation for ESPHome."""
@property
def _static_info(self) -> ClimateInfo:
return super()._static_info
@property
def _state(self) -> Optional[ClimateState]:
return super()._state
@property
def precision(self) -> float:
"""Return the precision of the climate device."""
        precisions = [PRECISION_WHOLE, PRECISION_HALVES, PRECISION_TENTHS]
        for prec in precisions:
if self._static_info.visual_temperature_step >= prec:
return prec
# Fall back to highest precision, tenths
return PRECISION_TENTHS
@property
def temperature_unit(self) -> str:
"""Return the unit of measurement used by the platform."""
return TEMP_CELSIUS
@property
def hvac_modes(self) -> List[str]:
"""Return the list of available operation modes."""
return [
_climate_modes.from_esphome(mode)
for mode in self._static_info.supported_modes
]
@property
def fan_modes(self):
"""Return the list of available fan modes."""
return [
_fan_modes.from_esphome(mode)
for mode in self._static_info.supported_fan_modes
]
@property
def preset_modes(self):
"""Return preset modes."""
return [PRESET_AWAY, PRESET_HOME] if self._static_info.supports_away else []
@property
def swing_modes(self):
"""Return the list of available swing modes."""
return [
_swing_modes.from_esphome(mode)
for mode in self._static_info.supported_swing_modes
]
@property
def target_temperature_step(self) -> float:
"""Return the supported step of target temperature."""
# Round to one digit because of floating point math
return round(self._static_info.visual_temperature_step, 1)
@property
def min_temp(self) -> float:
"""Return the minimum temperature."""
return self._static_info.visual_min_temperature
@property
def max_temp(self) -> float:
"""Return the maximum temperature."""
return self._static_info.visual_max_temperature
@property
def supported_features(self) -> int:
"""Return the list of supported features."""
features = 0
if self._static_info.supports_two_point_target_temperature:
features |= SUPPORT_TARGET_TEMPERATURE_RANGE
else:
features |= SUPPORT_TARGET_TEMPERATURE
if self._static_info.supports_away:
features |= SUPPORT_PRESET_MODE
if self._static_info.supported_fan_modes:
features |= SUPPORT_FAN_MODE
if self._static_info.supported_swing_modes:
features |= SUPPORT_SWING_MODE
return features
# https://github.com/PyCQA/pylint/issues/3150 for all @esphome_state_property
# pylint: disable=invalid-overridden-method
@esphome_state_property
def hvac_mode(self) -> Optional[str]:
"""Return current operation ie. heat, cool, idle."""
return _climate_modes.from_esphome(self._state.mode)
@esphome_state_property
def hvac_action(self) -> Optional[str]:
"""Return current action."""
# HA has no support feature field for hvac_action
if not self._static_info.supports_action:
return None
return _climate_actions.from_esphome(self._state.action)
@esphome_state_property
def fan_mode(self):
"""Return current fan setting."""
return _fan_modes.from_esphome(self._state.fan_mode)
@esphome_state_property
def preset_mode(self):
"""Return current preset mode."""
return PRESET_AWAY if self._state.away else PRESET_HOME
@esphome_state_property
def swing_mode(self):
"""Return current swing mode."""
return _swing_modes.from_esphome(self._state.swing_mode)
@esphome_state_property
def current_temperature(self) -> Optional[float]:
"""Return the current temperature."""
return self._state.current_temperature
@esphome_state_property
def target_temperature(self) -> Optional[float]:
"""Return the temperature we try to reach."""
return self._state.target_temperature
@esphome_state_property
def target_temperature_low(self) -> Optional[float]:
"""Return the lowbound target temperature we try to reach."""
return self._state.target_temperature_low
@esphome_state_property
def target_temperature_high(self) -> Optional[float]:
"""Return the highbound target temperature we try to reach."""
return self._state.target_temperature_high
async def async_set_temperature(self, **kwargs) -> None:
"""Set new target temperature (and operation mode if set)."""
data = {"key": self._static_info.key}
if ATTR_HVAC_MODE in kwargs:
data["mode"] = _climate_modes.from_hass(kwargs[ATTR_HVAC_MODE])
if ATTR_TEMPERATURE in kwargs:
data["target_temperature"] = kwargs[ATTR_TEMPERATURE]
if ATTR_TARGET_TEMP_LOW in kwargs:
data["target_temperature_low"] = kwargs[ATTR_TARGET_TEMP_LOW]
if ATTR_TARGET_TEMP_HIGH in kwargs:
data["target_temperature_high"] = kwargs[ATTR_TARGET_TEMP_HIGH]
await self._client.climate_command(**data)
async def async_set_hvac_mode(self, hvac_mode: str) -> None:
"""Set new target operation mode."""
await self._client.climate_command(
key=self._static_info.key, mode=_climate_modes.from_hass(hvac_mode)
)
async def async_set_preset_mode(self, preset_mode):
"""Set preset mode."""
away = preset_mode == PRESET_AWAY
await self._client.climate_command(key=self._static_info.key, away=away)
async def async_set_fan_mode(self, fan_mode: str) -> None:
"""Set new fan mode."""
await self._client.climate_command(
key=self._static_info.key, fan_mode=_fan_modes.from_hass(fan_mode)
)
async def async_set_swing_mode(self, swing_mode: str) -> None:
"""Set new swing mode."""
await self._client.climate_command(
key=self._static_info.key, swing_mode=_swing_modes.from_hass(swing_mode)
)
|
class Synapse(object):
"""
    This class represents a Synapse with its name, a list of Neurons and a list of signals.
.. note:: must be defined in the brain.yml
"""
def __init__(self, name=None, neurons=None, signals=None, enabled=True):
self.name = name
self.neurons = neurons
self.signals = signals
self.enabled = enabled
def serialize(self):
"""
        Serialize this object into a plain dict.
        :return: A dict of name, neurons, signals and enabled
:rtype: Dict
"""
return {
'name': self.name,
'neurons': [e.serialize() for e in self.neurons],
'signals': [e.serialize() for e in self.signals],
'enabled': self.enabled
}
def __str__(self):
return str(self.serialize())
def __eq__(self, other):
"""
This is used to compare 2 objects
:param other:
:return:
"""
return self.__dict__ == other.__dict__
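# Hypothetical usage sketch (not part of the original class): neurons and
# signals are expected to expose a serialize() method, so empty lists are
# enough to show the shape of the serialized synapse.
if __name__ == '__main__':
    synapse = Synapse(name="say-hello", neurons=[], signals=[])
    print(synapse.serialize())
    # {'name': 'say-hello', 'neurons': [], 'signals': [], 'enabled': True}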
|
from __future__ import print_function
from __future__ import absolute_import
from hyperopt import Trials, rand
from hyperas.ensemble import VotingModel
from hyperas.optim import get_hyperopt_model_string, base_minimizer
import numpy as np
from keras.models import model_from_yaml
import six.moves.cPickle as pickle
from six.moves import range
# Dependencies on hyperas, boto etc. are optional.
class HyperParamModel(object):
"""HyperParamModel
Computes distributed hyper-parameter optimization using Hyperas and
Spark.
"""
def __init__(self, sc, num_workers=4):
self.spark_context = sc
self.num_workers = num_workers
def compute_trials(self, model, data, max_evals, notebook_name=None):
model_string = get_hyperopt_model_string(model=model, data=data, functions=None, notebook_name=notebook_name,
verbose=False, stack=3)
hyperas_worker = HyperasWorker(model_string, max_evals)
dummy_rdd = self.spark_context.parallelize([i for i in range(1, 1000)])
dummy_rdd = dummy_rdd.repartition(self.num_workers)
trials_list = dummy_rdd.mapPartitions(
hyperas_worker._minimize).collect()
return trials_list
def minimize(self, model, data, max_evals, notebook_name=None):
global best_model_yaml, best_model_weights
trials_list = self.compute_trials(
model, data, max_evals, notebook_name)
best_val = 1e7
for trials in trials_list:
for trial in trials:
val = trial.get('result').get('loss')
if val < best_val:
best_val = val
best_model_yaml = trial.get('result').get('model')
best_model_weights = trial.get('result').get('weights')
best_model = model_from_yaml(best_model_yaml)
best_model.set_weights(pickle.loads(best_model_weights))
return best_model
def best_ensemble(self, nb_ensemble_models, model, data, max_evals, voting='hard', weights=None):
model_list = self.best_models(nb_models=nb_ensemble_models, model=model,
data=data, max_evals=max_evals)
return VotingModel(model_list, voting, weights)
def best_models(self, nb_models, model, data, max_evals):
trials_list = self.compute_trials(model, data, max_evals)
num_trials = sum(len(trials) for trials in trials_list)
if num_trials < nb_models:
nb_models = len(trials_list)
scores = []
for trials in trials_list:
scores = scores + [trial.get('result').get('loss')
for trial in trials]
cut_off = sorted(scores, reverse=True)[nb_models - 1]
model_list = []
for trials in trials_list:
for trial in trials:
if trial.get('result').get('loss') >= cut_off:
model = model_from_yaml(trial.get('result').get('model'))
model.set_weights(pickle.loads(
trial.get('result').get('weights')))
model_list.append(model)
return model_list
class HyperasWorker(object):
""" HyperasWorker
Executes hyper-parameter search on each worker and returns results.
"""
def __init__(self, bc_model, bc_max_evals):
self.model_string = bc_model
self.max_evals = bc_max_evals
def _minimize(self, dummy_iterator):
trials = Trials()
algo = rand.suggest
elem = next(dummy_iterator)
import random
random.seed(elem)
rand_seed = np.random.randint(elem)
base_minimizer(model=None, data=None, functions=None, algo=algo, max_evals=self.max_evals,
trials=trials, rseed=rand_seed, full_model_string=self.model_string, notebook_name=None,
verbose=True, stack=3)
yield trials
|
from unittest import TestCase
from django.test.utils import override_settings
from weblate.lang.models import Language
from weblate.trans.specialchars import get_special_chars
class SpecialCharsTest(TestCase):
def check_chars(self, language, count, matches, *args, **kwargs):
result = get_special_chars(language, *args, **kwargs)
chars = {x[2] for x in result}
self.assertEqual(len(chars), count)
for match in matches:
self.assertIn(match, chars)
def test_af(self):
chars = list(get_special_chars(Language(code="af")))
self.assertEqual(len(chars), 11)
def test_cs(self):
chars = list(get_special_chars(Language(code="cs")))
self.assertEqual(len(chars), 10)
def test_brx(self):
chars = list(get_special_chars(Language(code="brx")))
self.assertEqual(len(chars), 10)
def test_brx_add(self):
chars = list(get_special_chars(Language(code="brx"), "ahoj"))
self.assertEqual(len(chars), 14)
@override_settings(SPECIAL_CHARS=[chr(x) for x in range(256)])
def test_settings(self):
chars = list(get_special_chars(Language(code="cs")))
self.assertEqual(len(chars), 262)
def test_additional(self):
self.check_chars(
Language(code="cs"), 14, ["a", "h", "o", "j"], additional="ahoj"
)
def test_arrows(self):
self.check_chars(Language(code="cs"), 12, ["→", "⇒"], source="→⇒→⇒")
def test_arrows_rtl(self):
self.check_chars(
Language(code="ar", direction="rtl"), 13, ["←", "⇐"], source="→⇒→⇒"
)
|
from copy import copy
from django.conf import settings
from django.db import models, transaction
from django.db.models import Q, Sum
from django.utils.translation import gettext as _
from weblate.checks.models import CHECKS, Check
from weblate.trans.mixins import UserDisplayMixin
from weblate.trans.models.change import Change
from weblate.trans.util import split_plural
from weblate.utils import messages
from weblate.utils.antispam import report_spam
from weblate.utils.fields import JSONField
from weblate.utils.request import get_ip_address, get_user_agent_raw
from weblate.utils.state import STATE_TRANSLATED
class SuggestionManager(models.Manager):
# pylint: disable=no-init
def add(self, unit, target, request, vote=False):
"""Create new suggestion for this unit."""
from weblate.auth.models import get_anonymous
user = request.user if request else get_anonymous()
if unit.translated and unit.target == target:
return False
same_suggestions = self.filter(target=target, unit=unit)
        # Do not rely on SQL comparison as MySQL compares strings case-insensitively
for same in same_suggestions:
if same.target == target:
if same.user == user or not vote:
return False
same.add_vote(request, Vote.POSITIVE)
return False
# Create the suggestion
suggestion = self.create(
target=target,
unit=unit,
user=user,
userdetails={
"address": get_ip_address(request),
"agent": get_user_agent_raw(request),
},
)
# Record in change
Change.objects.create(
unit=unit,
suggestion=suggestion,
action=Change.ACTION_SUGGESTION,
user=user,
target=target,
author=user,
)
# Add unit vote
if vote:
suggestion.add_vote(request, Vote.POSITIVE)
# Update suggestion stats
if user is not None:
user.profile.increase_count("suggested")
return suggestion
class SuggestionQuerySet(models.QuerySet):
def order(self):
return self.order_by("-timestamp")
def filter_access(self, user):
if user.is_superuser:
return self
return self.filter(
Q(unit__translation__component__project_id__in=user.allowed_project_ids)
& (
Q(unit__translation__component__restricted=False)
| Q(unit__translation__component_id__in=user.component_permissions)
)
)
class Suggestion(models.Model, UserDisplayMixin):
unit = models.ForeignKey("trans.Unit", on_delete=models.deletion.CASCADE)
target = models.TextField()
user = models.ForeignKey(
settings.AUTH_USER_MODEL,
null=True,
blank=True,
on_delete=models.deletion.CASCADE,
)
userdetails = JSONField()
timestamp = models.DateTimeField(auto_now_add=True)
votes = models.ManyToManyField(
settings.AUTH_USER_MODEL, through="Vote", related_name="user_votes"
)
objects = SuggestionManager.from_queryset(SuggestionQuerySet)()
weblate_unsafe_delete = True
class Meta:
app_label = "trans"
verbose_name = "string suggestion"
verbose_name_plural = "string suggestions"
def __str__(self):
return "suggestion for {} by {}".format(
self.unit, self.user.username if self.user else "unknown"
)
@transaction.atomic
def accept(self, request, permission="suggestion.accept"):
if not request.user.has_perm(permission, self.unit):
messages.error(request, _("Failed to accept suggestion!"))
return
# Skip if there is no change
if self.unit.target != self.target or self.unit.state < STATE_TRANSLATED:
if self.user and not self.user.is_anonymous:
author = self.user
else:
author = request.user
self.unit.translate(
request.user,
split_plural(self.target),
STATE_TRANSLATED,
author=author,
change_action=Change.ACTION_ACCEPT,
)
# Delete the suggestion
self.delete()
def delete_log(self, user, change=Change.ACTION_SUGGESTION_DELETE, is_spam=False):
"""Delete with logging change."""
if is_spam and self.userdetails:
report_spam(
self.userdetails["address"], self.userdetails["agent"], self.target
)
Change.objects.create(
unit=self.unit, action=change, user=user, target=self.target, author=user
)
self.delete()
def get_num_votes(self):
"""Return number of votes."""
return self.vote_set.aggregate(Sum("value"))["value__sum"] or 0
def add_vote(self, request, value):
"""Add (or updates) vote for a suggestion."""
if request is None or not request.user.is_authenticated:
return
vote, created = Vote.objects.get_or_create(
suggestion=self, user=request.user, defaults={"value": value}
)
if not created or vote.value != value:
vote.value = value
vote.save()
# Automatic accepting
required_votes = self.unit.translation.component.suggestion_autoaccept
if required_votes and self.get_num_votes() >= required_votes:
self.accept(request, "suggestion.vote")
def get_checks(self):
# Build fake unit to run checks
fake_unit = copy(self.unit)
fake_unit.target = self.target
fake_unit.state = STATE_TRANSLATED
source = fake_unit.get_source_plurals()
target = fake_unit.get_target_plurals()
result = []
for check, check_obj in CHECKS.target.items():
if check_obj.check_target(source, target, fake_unit):
result.append(Check(unit=fake_unit, dismissed=False, check=check))
return result
class Vote(models.Model):
"""Suggestion voting."""
suggestion = models.ForeignKey(Suggestion, on_delete=models.deletion.CASCADE)
user = models.ForeignKey(
settings.AUTH_USER_MODEL, on_delete=models.deletion.CASCADE
)
value = models.SmallIntegerField(default=0)
weblate_unsafe_delete = True
POSITIVE = 1
NEGATIVE = -1
class Meta:
unique_together = ("suggestion", "user")
app_label = "trans"
verbose_name = "suggestion vote"
verbose_name_plural = "suggestion votes"
def __str__(self):
return f"{self.value:+d} for {self.suggestion} by {self.user.username}"
|
import unittest
import mock
from perfkitbenchmarker.providers.gcp import gcp_bigtable
from perfkitbenchmarker.providers.gcp import util
from tests import pkb_common_test_case
NAME = 'testcluster'
PROJECT = 'testproject'
ZONE = 'testzone'
VALID_JSON_BASE = """[
{{
"displayName": "not{name}",
"name": "projects/{project}/instances/not{name}",
"state": "READY"
}},
{{
"displayName": "{name}",
"name": "projects/{project}/instances/{name}",
"state": "READY"
}}
]"""
class GcpBigtableTestCase(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(GcpBigtableTestCase, self).setUp()
self.bigtable = gcp_bigtable.GcpBigtableInstance(NAME, PROJECT,
ZONE)
def testEmptyTableList(self):
with mock.patch.object(util.GcloudCommand, 'Issue',
return_value=('{}', '', 0)):
self.assertFalse(self.bigtable._Exists())
def testGcloudError(self):
with mock.patch.object(util.GcloudCommand, 'Issue',
return_value=('', '', 1)):
self.assertFalse(self.bigtable._Exists())
def testFoundTable(self):
stdout = VALID_JSON_BASE.format(project=PROJECT, name=NAME)
with mock.patch.object(util.GcloudCommand, 'Issue',
return_value=(stdout, '', 0)):
self.assertTrue(self.bigtable._Exists())
def testNotFoundTable(self):
stdout = VALID_JSON_BASE.format(project=PROJECT, name=NAME + 'nope')
with mock.patch.object(util.GcloudCommand, 'Issue',
return_value=(stdout, '', 0)):
self.assertFalse(self.bigtable._Exists())
if __name__ == '__main__':
unittest.main()
|
import datetime
import hashlib
from homeassistant.components.asterisk_mbox import (
DOMAIN as ASTERISK_DOMAIN,
SIGNAL_CDR_UPDATE,
)
from homeassistant.components.mailbox import Mailbox
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
MAILBOX_NAME = "asterisk_cdr"
async def async_get_handler(hass, config, discovery_info=None):
"""Set up the Asterix CDR platform."""
return AsteriskCDR(hass, MAILBOX_NAME)
class AsteriskCDR(Mailbox):
"""Asterisk VM Call Data Record mailbox."""
def __init__(self, hass, name):
"""Initialize Asterisk CDR."""
super().__init__(hass, name)
self.cdr = []
async_dispatcher_connect(self.hass, SIGNAL_CDR_UPDATE, self._update_callback)
@callback
def _update_callback(self, msg):
"""Update the message count in HA, if needed."""
self._build_message()
self.async_update()
def _build_message(self):
"""Build message structure."""
cdr = []
for entry in self.hass.data[ASTERISK_DOMAIN].cdr:
timestamp = datetime.datetime.strptime(
entry["time"], "%Y-%m-%d %H:%M:%S"
).timestamp()
info = {
"origtime": timestamp,
"callerid": entry["callerid"],
"duration": entry["duration"],
}
sha = hashlib.sha256(str(entry).encode("utf-8")).hexdigest()
msg = (
f"Destination: {entry['dest']}\n"
f"Application: {entry['application']}\n "
f"Context: {entry['context']}"
)
cdr.append({"info": info, "sha": sha, "text": msg})
self.cdr = cdr
async def async_get_messages(self):
"""Return a list of the current messages."""
if not self.cdr:
self._build_message()
return self.cdr
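# --- Illustration (not part of the integration) -------------------------------
# A self-contained sketch of the message structure produced by _build_message()
# above for a single CDR entry; the entry values are made-up examples.
def _example_cdr_message():
    entry = {
        "time": "2020-01-01 12:00:00",
        "callerid": "Jane Doe <1000>",
        "duration": "42",
        "dest": "voicemail",
        "application": "Dial",
        "context": "default",
    }
    timestamp = datetime.datetime.strptime(
        entry["time"], "%Y-%m-%d %H:%M:%S"
    ).timestamp()
    sha = hashlib.sha256(str(entry).encode("utf-8")).hexdigest()
    text = (
        f"Destination: {entry['dest']}\n"
        f"Application: {entry['application']}\n "
        f"Context: {entry['context']}"
    )
    info = {
        "origtime": timestamp,
        "callerid": entry["callerid"],
        "duration": entry["duration"],
    }
    return {"info": info, "sha": sha, "text": text}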
|
import numpy as np
from numpy.testing import assert_array_almost_equal
from mne.connectivity import phase_slope_index
def test_psi():
"""Test Phase Slope Index (PSI) estimation."""
sfreq = 50.
n_signals = 3
n_epochs = 10
n_times = 500
rng = np.random.RandomState(42)
data = rng.randn(n_epochs, n_signals, n_times)
# simulate time shifts
for i in range(n_epochs):
data[i, 1, 10:] = data[i, 0, :-10] # signal 0 is ahead
data[i, 2, :-10] = data[i, 0, 10:] # signal 2 is ahead
psi, freqs, times, n_epochs, n_tapers = phase_slope_index(
data, mode='fourier', sfreq=sfreq)
assert psi[1, 0, 0] < 0
assert psi[2, 0, 0] > 0
indices = (np.array([0]), np.array([1]))
psi_2, freqs, times, n_epochs, n_tapers = phase_slope_index(
data, mode='fourier', sfreq=sfreq, indices=indices)
# the measure is symmetric (sign flip)
assert_array_almost_equal(psi_2[0, 0], -psi[1, 0, 0])
cwt_freqs = np.arange(5., 20, 0.5)
psi_cwt, freqs, times, n_epochs, n_tapers = phase_slope_index(
data, mode='cwt_morlet', sfreq=sfreq, cwt_freqs=cwt_freqs,
indices=indices)
assert np.all(psi_cwt > 0)
assert psi_cwt.shape[-1] == n_times
|
from zeroconf import (
BadTypeInNameException,
InterfaceChoice,
IPVersion,
ServiceInfo,
ServiceStateChange,
)
from homeassistant.components import zeroconf
from homeassistant.components.zeroconf import CONF_DEFAULT_INTERFACE, CONF_IPV6
from homeassistant.const import (
EVENT_HOMEASSISTANT_START,
EVENT_HOMEASSISTANT_STARTED,
EVENT_HOMEASSISTANT_STOP,
)
from homeassistant.generated import zeroconf as zc_gen
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
NON_UTF8_VALUE = b"ABCDEF\x8a"
NON_ASCII_KEY = b"non-ascii-key\x8a"
PROPERTIES = {
b"macaddress": b"ABCDEF012345",
b"non-utf8-value": NON_UTF8_VALUE,
NON_ASCII_KEY: None,
}
HOMEKIT_STATUS_UNPAIRED = b"1"
HOMEKIT_STATUS_PAIRED = b"0"
def service_update_mock(zeroconf, services, handlers):
"""Call service update handler."""
for service in services:
handlers[0](zeroconf, service, f"name.{service}", ServiceStateChange.Added)
def get_service_info_mock(service_type, name):
"""Return service info for get_service_info."""
return ServiceInfo(
service_type,
name,
addresses=[b"\n\x00\x00\x14"],
port=80,
weight=0,
priority=0,
server="name.local.",
properties=PROPERTIES,
)
def get_service_info_mock_without_an_address(service_type, name):
"""Return service info for get_service_info without any addresses."""
return ServiceInfo(
service_type,
name,
addresses=[],
port=80,
weight=0,
priority=0,
server="name.local.",
properties=PROPERTIES,
)
def get_homekit_info_mock(model, pairing_status):
"""Return homekit info for get_service_info for an homekit device."""
def mock_homekit_info(service_type, name):
return ServiceInfo(
service_type,
name,
addresses=[b"\n\x00\x00\x14"],
port=80,
weight=0,
priority=0,
server="name.local.",
properties={b"md": model.encode(), b"sf": pairing_status},
)
return mock_homekit_info
def get_zeroconf_info_mock(macaddress):
"""Return info for get_service_info for an zeroconf device."""
def mock_zc_info(service_type, name):
return ServiceInfo(
service_type,
name,
addresses=[b"\n\x00\x00\x14"],
port=80,
weight=0,
priority=0,
server="name.local.",
properties={b"macaddress": macaddress.encode()},
)
return mock_zc_info
async def test_setup(hass, mock_zeroconf):
"""Test configured options for a device are loaded via config entry."""
with patch.object(
hass.config_entries.flow, "async_init"
) as mock_config_flow, patch.object(
zeroconf, "HaServiceBrowser", side_effect=service_update_mock
) as mock_service_browser:
mock_zeroconf.get_service_info.side_effect = get_service_info_mock
assert await async_setup_component(hass, zeroconf.DOMAIN, {zeroconf.DOMAIN: {}})
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
await hass.async_block_till_done()
assert len(mock_service_browser.mock_calls) == 1
expected_flow_calls = 0
for matching_components in zc_gen.ZEROCONF.values():
domains = set()
for component in matching_components:
if len(component) == 1:
domains.add(component["domain"])
expected_flow_calls += len(domains)
assert len(mock_config_flow.mock_calls) == expected_flow_calls
# Test instance is set.
assert "zeroconf" in hass.data
assert await hass.components.zeroconf.async_get_instance() is mock_zeroconf
async def test_setup_with_overly_long_url_and_name(hass, mock_zeroconf, caplog):
"""Test we still setup with long urls and names."""
with patch.object(hass.config_entries.flow, "async_init"), patch.object(
zeroconf, "HaServiceBrowser", side_effect=service_update_mock
), patch(
"homeassistant.components.zeroconf.get_url",
return_value="https://this.url.is.way.too.long/very/deep/path/that/will/make/us/go/over/the/maximum/string/length/and/would/cause/zeroconf/to/fail/to/startup/because/the/key/and/value/can/only/be/255/bytes/and/this/string/is/a/bit/longer/than/the/maximum/length/that/we/allow/for/a/value",
), patch.object(
hass.config,
"location_name",
"\u00dcBER \u00dcber German Umlaut long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string",
):
mock_zeroconf.get_service_info.side_effect = get_service_info_mock
assert await async_setup_component(hass, zeroconf.DOMAIN, {zeroconf.DOMAIN: {}})
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
assert "https://this.url.is.way.too.long" in caplog.text
assert "German Umlaut" in caplog.text
async def test_setup_with_default_interface(hass, mock_zeroconf):
"""Test default interface config."""
with patch.object(hass.config_entries.flow, "async_init"), patch.object(
zeroconf, "HaServiceBrowser", side_effect=service_update_mock
):
mock_zeroconf.get_service_info.side_effect = get_service_info_mock
assert await async_setup_component(
hass, zeroconf.DOMAIN, {zeroconf.DOMAIN: {CONF_DEFAULT_INTERFACE: True}}
)
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
await hass.async_block_till_done()
assert mock_zeroconf.called_with(interface_choice=InterfaceChoice.Default)
async def test_setup_without_default_interface(hass, mock_zeroconf):
"""Test without default interface config."""
with patch.object(hass.config_entries.flow, "async_init"), patch.object(
zeroconf, "HaServiceBrowser", side_effect=service_update_mock
):
mock_zeroconf.get_service_info.side_effect = get_service_info_mock
assert await async_setup_component(
hass, zeroconf.DOMAIN, {zeroconf.DOMAIN: {CONF_DEFAULT_INTERFACE: False}}
)
assert mock_zeroconf.called_with()
async def test_setup_without_ipv6(hass, mock_zeroconf):
"""Test without ipv6."""
with patch.object(hass.config_entries.flow, "async_init"), patch.object(
zeroconf, "HaServiceBrowser", side_effect=service_update_mock
):
mock_zeroconf.get_service_info.side_effect = get_service_info_mock
assert await async_setup_component(
hass, zeroconf.DOMAIN, {zeroconf.DOMAIN: {CONF_IPV6: False}}
)
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
await hass.async_block_till_done()
assert mock_zeroconf.called_with(ip_version=IPVersion.V4Only)
async def test_setup_with_ipv6(hass, mock_zeroconf):
"""Test without ipv6."""
with patch.object(hass.config_entries.flow, "async_init"), patch.object(
zeroconf, "HaServiceBrowser", side_effect=service_update_mock
):
mock_zeroconf.get_service_info.side_effect = get_service_info_mock
assert await async_setup_component(
hass, zeroconf.DOMAIN, {zeroconf.DOMAIN: {CONF_IPV6: True}}
)
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
await hass.async_block_till_done()
assert mock_zeroconf.called_with()
async def test_setup_with_ipv6_default(hass, mock_zeroconf):
"""Test without ipv6 as default."""
with patch.object(hass.config_entries.flow, "async_init"), patch.object(
zeroconf, "HaServiceBrowser", side_effect=service_update_mock
):
mock_zeroconf.get_service_info.side_effect = get_service_info_mock
assert await async_setup_component(hass, zeroconf.DOMAIN, {zeroconf.DOMAIN: {}})
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
await hass.async_block_till_done()
assert mock_zeroconf.called_with()
async def test_service_with_invalid_name(hass, mock_zeroconf, caplog):
"""Test we do not crash on service with an invalid name."""
with patch.object(
zeroconf, "HaServiceBrowser", side_effect=service_update_mock
) as mock_service_browser:
mock_zeroconf.get_service_info.side_effect = BadTypeInNameException
assert await async_setup_component(hass, zeroconf.DOMAIN, {zeroconf.DOMAIN: {}})
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
await hass.async_block_till_done()
assert len(mock_service_browser.mock_calls) == 1
assert "Failed to get info for device name" in caplog.text
async def test_zeroconf_match(hass, mock_zeroconf):
"""Test configured options for a device are loaded via config entry."""
def http_only_service_update_mock(zeroconf, services, handlers):
"""Call service update handler."""
handlers[0](
zeroconf,
"_http._tcp.local.",
"shelly108._http._tcp.local.",
ServiceStateChange.Added,
)
with patch.dict(
zc_gen.ZEROCONF,
{"_http._tcp.local.": [{"domain": "shelly", "name": "shelly*"}]},
clear=True,
), patch.object(
hass.config_entries.flow, "async_init"
) as mock_config_flow, patch.object(
zeroconf, "HaServiceBrowser", side_effect=http_only_service_update_mock
) as mock_service_browser:
mock_zeroconf.get_service_info.side_effect = get_zeroconf_info_mock(
"FFAADDCC11DD"
)
assert await async_setup_component(hass, zeroconf.DOMAIN, {zeroconf.DOMAIN: {}})
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
await hass.async_block_till_done()
assert len(mock_service_browser.mock_calls) == 1
assert len(mock_config_flow.mock_calls) == 1
assert mock_config_flow.mock_calls[0][1][0] == "shelly"
async def test_zeroconf_no_match(hass, mock_zeroconf):
"""Test configured options for a device are loaded via config entry."""
def http_only_service_update_mock(zeroconf, services, handlers):
"""Call service update handler."""
handlers[0](
zeroconf,
"_http._tcp.local.",
"somethingelse._http._tcp.local.",
ServiceStateChange.Added,
)
with patch.dict(
zc_gen.ZEROCONF,
{"_http._tcp.local.": [{"domain": "shelly", "name": "shelly*"}]},
clear=True,
), patch.object(
hass.config_entries.flow, "async_init"
) as mock_config_flow, patch.object(
zeroconf, "HaServiceBrowser", side_effect=http_only_service_update_mock
) as mock_service_browser:
mock_zeroconf.get_service_info.side_effect = get_zeroconf_info_mock(
"FFAADDCC11DD"
)
assert await async_setup_component(hass, zeroconf.DOMAIN, {zeroconf.DOMAIN: {}})
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
await hass.async_block_till_done()
assert len(mock_service_browser.mock_calls) == 1
assert len(mock_config_flow.mock_calls) == 0
async def test_homekit_match_partial_space(hass, mock_zeroconf):
"""Test configured options for a device are loaded via config entry."""
with patch.dict(
zc_gen.ZEROCONF,
{zeroconf.HOMEKIT_TYPE: [{"domain": "homekit_controller"}]},
clear=True,
), patch.object(
hass.config_entries.flow, "async_init"
) as mock_config_flow, patch.object(
zeroconf, "HaServiceBrowser", side_effect=service_update_mock
) as mock_service_browser:
mock_zeroconf.get_service_info.side_effect = get_homekit_info_mock(
"LIFX bulb", HOMEKIT_STATUS_UNPAIRED
)
assert await async_setup_component(hass, zeroconf.DOMAIN, {zeroconf.DOMAIN: {}})
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
await hass.async_block_till_done()
assert len(mock_service_browser.mock_calls) == 1
assert len(mock_config_flow.mock_calls) == 1
assert mock_config_flow.mock_calls[0][1][0] == "lifx"
async def test_homekit_match_partial_dash(hass, mock_zeroconf):
"""Test configured options for a device are loaded via config entry."""
with patch.dict(
zc_gen.ZEROCONF,
{zeroconf.HOMEKIT_TYPE: [{"domain": "homekit_controller"}]},
clear=True,
), patch.object(
hass.config_entries.flow, "async_init"
) as mock_config_flow, patch.object(
zeroconf, "HaServiceBrowser", side_effect=service_update_mock
) as mock_service_browser:
mock_zeroconf.get_service_info.side_effect = get_homekit_info_mock(
"Rachio-fa46ba", HOMEKIT_STATUS_UNPAIRED
)
assert await async_setup_component(hass, zeroconf.DOMAIN, {zeroconf.DOMAIN: {}})
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
await hass.async_block_till_done()
assert len(mock_service_browser.mock_calls) == 1
assert len(mock_config_flow.mock_calls) == 1
assert mock_config_flow.mock_calls[0][1][0] == "rachio"
async def test_homekit_match_full(hass, mock_zeroconf):
"""Test configured options for a device are loaded via config entry."""
with patch.dict(
zc_gen.ZEROCONF,
{zeroconf.HOMEKIT_TYPE: [{"domain": "homekit_controller"}]},
clear=True,
), patch.object(
hass.config_entries.flow, "async_init"
) as mock_config_flow, patch.object(
zeroconf, "HaServiceBrowser", side_effect=service_update_mock
) as mock_service_browser:
mock_zeroconf.get_service_info.side_effect = get_homekit_info_mock(
"BSB002", HOMEKIT_STATUS_UNPAIRED
)
assert await async_setup_component(hass, zeroconf.DOMAIN, {zeroconf.DOMAIN: {}})
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
await hass.async_block_till_done()
assert len(mock_service_browser.mock_calls) == 1
assert len(mock_config_flow.mock_calls) == 1
assert mock_config_flow.mock_calls[0][1][0] == "hue"
async def test_homekit_already_paired(hass, mock_zeroconf):
"""Test that an already paired device is sent to homekit_controller."""
with patch.dict(
zc_gen.ZEROCONF,
{zeroconf.HOMEKIT_TYPE: [{"domain": "homekit_controller"}]},
clear=True,
), patch.object(
hass.config_entries.flow, "async_init"
) as mock_config_flow, patch.object(
zeroconf, "HaServiceBrowser", side_effect=service_update_mock
) as mock_service_browser:
mock_zeroconf.get_service_info.side_effect = get_homekit_info_mock(
"tado", HOMEKIT_STATUS_PAIRED
)
assert await async_setup_component(hass, zeroconf.DOMAIN, {zeroconf.DOMAIN: {}})
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
await hass.async_block_till_done()
assert len(mock_service_browser.mock_calls) == 1
assert len(mock_config_flow.mock_calls) == 2
assert mock_config_flow.mock_calls[0][1][0] == "tado"
assert mock_config_flow.mock_calls[1][1][0] == "homekit_controller"
async def test_homekit_invalid_pairing_status(hass, mock_zeroconf):
    """Test that invalid pairing data is not sent to homekit_controller."""
with patch.dict(
zc_gen.ZEROCONF,
{zeroconf.HOMEKIT_TYPE: [{"domain": "homekit_controller"}]},
clear=True,
), patch.object(
hass.config_entries.flow, "async_init"
) as mock_config_flow, patch.object(
zeroconf, "HaServiceBrowser", side_effect=service_update_mock
) as mock_service_browser:
mock_zeroconf.get_service_info.side_effect = get_homekit_info_mock(
"tado", b"invalid"
)
assert await async_setup_component(hass, zeroconf.DOMAIN, {zeroconf.DOMAIN: {}})
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
await hass.async_block_till_done()
assert len(mock_service_browser.mock_calls) == 1
assert len(mock_config_flow.mock_calls) == 1
assert mock_config_flow.mock_calls[0][1][0] == "tado"
async def test_homekit_not_paired(hass, mock_zeroconf):
"""Test that an not paired device is sent to homekit_controller."""
with patch.dict(
zc_gen.ZEROCONF,
{zeroconf.HOMEKIT_TYPE: [{"domain": "homekit_controller"}]},
clear=True,
), patch.object(
hass.config_entries.flow, "async_init"
) as mock_config_flow, patch.object(
zeroconf, "HaServiceBrowser", side_effect=service_update_mock
) as mock_service_browser:
mock_zeroconf.get_service_info.side_effect = get_homekit_info_mock(
"this_will_not_match_any_integration", HOMEKIT_STATUS_UNPAIRED
)
assert await async_setup_component(hass, zeroconf.DOMAIN, {zeroconf.DOMAIN: {}})
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
await hass.async_block_till_done()
assert len(mock_service_browser.mock_calls) == 1
assert len(mock_config_flow.mock_calls) == 1
assert mock_config_flow.mock_calls[0][1][0] == "homekit_controller"
async def test_info_from_service_non_utf8(hass):
"""Test info_from_service handles non UTF-8 property keys and values correctly."""
service_type = "_test._tcp.local."
info = zeroconf.info_from_service(
get_service_info_mock(service_type, f"test.{service_type}")
)
raw_info = info["properties"].pop("_raw", False)
assert raw_info
assert len(raw_info) == len(PROPERTIES) - 1
assert NON_ASCII_KEY not in raw_info
assert len(info["properties"]) <= len(raw_info)
assert "non-utf8-value" not in info["properties"]
assert raw_info["non-utf8-value"] is NON_UTF8_VALUE
async def test_info_from_service_with_no_addresses(hass):
"""Test info_from_service does not throw when there are no addresses."""
service_type = "_test._tcp.local."
info = zeroconf.info_from_service(
get_service_info_mock_without_an_address(service_type, f"test.{service_type}")
)
assert info is None
async def test_get_instance(hass, mock_zeroconf):
"""Test we get an instance."""
assert await async_setup_component(hass, zeroconf.DOMAIN, {zeroconf.DOMAIN: {}})
assert await hass.components.zeroconf.async_get_instance() is mock_zeroconf
hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP)
await hass.async_block_till_done()
assert len(mock_zeroconf.ha_close.mock_calls) == 1
|
from datetime import timedelta
import pytest
import homeassistant.components.automation as automation
from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_OFF
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.async_mock import Mock, patch
from tests.common import (
assert_setup_component,
async_fire_time_changed,
async_mock_service,
mock_component,
)
@pytest.fixture
def calls(hass):
"""Track calls to a mock service."""
return async_mock_service(hass, "test", "automation")
@pytest.fixture(autouse=True)
def setup_comp(hass):
"""Initialize components."""
mock_component(hass, "group")
async def test_if_fires_using_at(hass, calls):
"""Test for firing at."""
now = dt_util.now()
trigger_dt = now.replace(hour=5, minute=0, second=0, microsecond=0) + timedelta(2)
time_that_will_not_match_right_away = trigger_dt - timedelta(minutes=1)
with patch(
"homeassistant.util.dt.utcnow",
return_value=dt_util.as_utc(time_that_will_not_match_right_away),
):
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: {
"trigger": {"platform": "time", "at": "5:00:00"},
"action": {
"service": "test.automation",
"data_template": {
"some": "{{ trigger.platform }} - {{ trigger.now.hour }}"
},
},
}
},
)
await hass.async_block_till_done()
async_fire_time_changed(hass, trigger_dt + timedelta(seconds=1))
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data["some"] == "time - 5"
@pytest.mark.parametrize(
"has_date,has_time", [(True, True), (True, False), (False, True)]
)
async def test_if_fires_using_at_input_datetime(hass, calls, has_date, has_time):
"""Test for firing at input_datetime."""
await async_setup_component(
hass,
"input_datetime",
{"input_datetime": {"trigger": {"has_date": has_date, "has_time": has_time}}},
)
now = dt_util.now()
trigger_dt = now.replace(
hour=5 if has_time else 0, minute=0, second=0, microsecond=0
) + timedelta(2)
await hass.services.async_call(
"input_datetime",
"set_datetime",
{
ATTR_ENTITY_ID: "input_datetime.trigger",
"datetime": str(trigger_dt.replace(tzinfo=None)),
},
blocking=True,
)
time_that_will_not_match_right_away = trigger_dt - timedelta(minutes=1)
some_data = "{{ trigger.platform }}-{{ trigger.now.day }}-{{ trigger.now.hour }}-{{trigger.entity_id}}"
with patch(
"homeassistant.util.dt.utcnow",
return_value=dt_util.as_utc(time_that_will_not_match_right_away),
):
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: {
"trigger": {"platform": "time", "at": "input_datetime.trigger"},
"action": {
"service": "test.automation",
"data_template": {"some": some_data},
},
}
},
)
await hass.async_block_till_done()
async_fire_time_changed(hass, trigger_dt + timedelta(seconds=1))
await hass.async_block_till_done()
assert len(calls) == 1
assert (
calls[0].data["some"]
== f"time-{trigger_dt.day}-{trigger_dt.hour}-input_datetime.trigger"
)
if has_date:
trigger_dt += timedelta(days=1)
if has_time:
trigger_dt += timedelta(hours=1)
await hass.services.async_call(
"input_datetime",
"set_datetime",
{
ATTR_ENTITY_ID: "input_datetime.trigger",
"datetime": str(trigger_dt.replace(tzinfo=None)),
},
blocking=True,
)
async_fire_time_changed(hass, trigger_dt + timedelta(seconds=1))
await hass.async_block_till_done()
assert len(calls) == 2
assert (
calls[1].data["some"]
== f"time-{trigger_dt.day}-{trigger_dt.hour}-input_datetime.trigger"
)
async def test_if_fires_using_multiple_at(hass, calls):
"""Test for firing at."""
now = dt_util.now()
trigger_dt = now.replace(hour=5, minute=0, second=0, microsecond=0) + timedelta(2)
time_that_will_not_match_right_away = trigger_dt - timedelta(minutes=1)
with patch(
"homeassistant.util.dt.utcnow",
return_value=dt_util.as_utc(time_that_will_not_match_right_away),
):
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: {
"trigger": {"platform": "time", "at": ["5:00:00", "6:00:00"]},
"action": {
"service": "test.automation",
"data_template": {
"some": "{{ trigger.platform }} - {{ trigger.now.hour }}"
},
},
}
},
)
await hass.async_block_till_done()
async_fire_time_changed(hass, trigger_dt + timedelta(seconds=1))
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data["some"] == "time - 5"
async_fire_time_changed(hass, trigger_dt + timedelta(hours=1, seconds=1))
await hass.async_block_till_done()
assert len(calls) == 2
assert calls[1].data["some"] == "time - 6"
async def test_if_not_fires_using_wrong_at(hass, calls):
"""YAML translates time values to total seconds.
    This should make the time trigger fail validation, so the automation never fires.
"""
now = dt_util.utcnow()
time_that_will_not_match_right_away = now.replace(
year=now.year + 1, hour=1, minute=0, second=0
)
with patch(
"homeassistant.util.dt.utcnow", return_value=time_that_will_not_match_right_away
):
with assert_setup_component(0, automation.DOMAIN):
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: {
"trigger": {
"platform": "time",
"at": 3605,
# Total seconds. Hour = 3600 second
},
"action": {"service": "test.automation"},
}
},
)
await hass.async_block_till_done()
async_fire_time_changed(
hass, now.replace(year=now.year + 1, hour=1, minute=0, second=5)
)
await hass.async_block_till_done()
assert len(calls) == 0
async def test_if_action_before(hass, calls):
"""Test for if action before."""
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: {
"trigger": {"platform": "event", "event_type": "test_event"},
"condition": {"condition": "time", "before": "10:00"},
"action": {"service": "test.automation"},
}
},
)
await hass.async_block_till_done()
before_10 = dt_util.now().replace(hour=8)
after_10 = dt_util.now().replace(hour=14)
with patch("homeassistant.helpers.condition.dt_util.now", return_value=before_10):
hass.bus.async_fire("test_event")
await hass.async_block_till_done()
assert len(calls) == 1
with patch("homeassistant.helpers.condition.dt_util.now", return_value=after_10):
hass.bus.async_fire("test_event")
await hass.async_block_till_done()
assert len(calls) == 1
async def test_if_action_after(hass, calls):
"""Test for if action after."""
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: {
"trigger": {"platform": "event", "event_type": "test_event"},
"condition": {"condition": "time", "after": "10:00"},
"action": {"service": "test.automation"},
}
},
)
await hass.async_block_till_done()
before_10 = dt_util.now().replace(hour=8)
after_10 = dt_util.now().replace(hour=14)
with patch("homeassistant.helpers.condition.dt_util.now", return_value=before_10):
hass.bus.async_fire("test_event")
await hass.async_block_till_done()
assert len(calls) == 0
with patch("homeassistant.helpers.condition.dt_util.now", return_value=after_10):
hass.bus.async_fire("test_event")
await hass.async_block_till_done()
assert len(calls) == 1
async def test_if_action_one_weekday(hass, calls):
"""Test for if action with one weekday."""
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: {
"trigger": {"platform": "event", "event_type": "test_event"},
"condition": {"condition": "time", "weekday": "mon"},
"action": {"service": "test.automation"},
}
},
)
await hass.async_block_till_done()
days_past_monday = dt_util.now().weekday()
monday = dt_util.now() - timedelta(days=days_past_monday)
tuesday = monday + timedelta(days=1)
with patch("homeassistant.helpers.condition.dt_util.now", return_value=monday):
hass.bus.async_fire("test_event")
await hass.async_block_till_done()
assert len(calls) == 1
with patch("homeassistant.helpers.condition.dt_util.now", return_value=tuesday):
hass.bus.async_fire("test_event")
await hass.async_block_till_done()
assert len(calls) == 1
async def test_if_action_list_weekday(hass, calls):
"""Test for action with a list of weekdays."""
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: {
"trigger": {"platform": "event", "event_type": "test_event"},
"condition": {"condition": "time", "weekday": ["mon", "tue"]},
"action": {"service": "test.automation"},
}
},
)
await hass.async_block_till_done()
days_past_monday = dt_util.now().weekday()
monday = dt_util.now() - timedelta(days=days_past_monday)
tuesday = monday + timedelta(days=1)
wednesday = tuesday + timedelta(days=1)
with patch("homeassistant.helpers.condition.dt_util.now", return_value=monday):
hass.bus.async_fire("test_event")
await hass.async_block_till_done()
assert len(calls) == 1
with patch("homeassistant.helpers.condition.dt_util.now", return_value=tuesday):
hass.bus.async_fire("test_event")
await hass.async_block_till_done()
assert len(calls) == 2
with patch("homeassistant.helpers.condition.dt_util.now", return_value=wednesday):
hass.bus.async_fire("test_event")
await hass.async_block_till_done()
assert len(calls) == 2
async def test_untrack_time_change(hass):
"""Test for removing tracked time changes."""
mock_track_time_change = Mock()
with patch(
"homeassistant.components.homeassistant.triggers.time.async_track_time_change",
return_value=mock_track_time_change,
):
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: {
"alias": "test",
"trigger": {
"platform": "time",
"at": ["5:00:00", "6:00:00", "7:00:00"],
},
"action": {"service": "test.automation", "data": {"test": "test"}},
}
},
)
await hass.async_block_till_done()
await hass.services.async_call(
automation.DOMAIN,
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: "automation.test"},
blocking=True,
)
assert len(mock_track_time_change.mock_calls) == 3
|
from chainer.backends import cuda
import numpy as np
def mask_to_bbox(mask):
"""Compute the bounding boxes around the masked regions.
This function accepts both :obj:`numpy.ndarray` and :obj:`cupy.ndarray` as
inputs.
Args:
mask (array): An array whose shape is :math:`(R, H, W)`.
:math:`R` is the number of masks.
The dtype should be :obj:`numpy.bool`.
Returns:
array:
The bounding boxes around the masked regions.
This is an array whose shape is :math:`(R, 4)`.
:math:`R` is the number of bounding boxes.
The dtype should be :obj:`numpy.float32`.
"""
R, H, W = mask.shape
xp = cuda.get_array_module(mask)
instance_index, ys, xs = xp.nonzero(mask)
bbox = xp.zeros((R, 4), dtype=np.float32)
for i in range(R):
ys_i = ys[instance_index == i]
xs_i = xs[instance_index == i]
if len(ys_i) == 0:
continue
y_min = ys_i.min()
x_min = xs_i.min()
y_max = ys_i.max() + 1
x_max = xs_i.max() + 1
bbox[i] = xp.array([y_min, x_min, y_max, x_max], dtype=np.float32)
return bbox
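# --- Usage sketch (illustrative, not part of the library) ---------------------
# A tiny demonstration of the documented behaviour using a NumPy mask;
# cuda.get_array_module simply returns NumPy for NumPy inputs, so no GPU is
# needed here.
if __name__ == '__main__':
    demo_mask = np.zeros((2, 5, 5), dtype=bool)
    demo_mask[0, 1:3, 2:4] = True  # instance 0 spans rows 1-2 and cols 2-3
    # instance 1 stays empty, so its bounding box remains all zeros
    print(mask_to_bbox(demo_mask))
    # -> [[1. 2. 3. 4.]
    #     [0. 0. 0. 0.]]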
|
def get_manager(client, hostname=None, port=None, userid=None,
password=None):
"""Get pyrabbit manager."""
import pyrabbit
opt = client.transport_options.get
def get(name, val, default):
return (val if val is not None
else opt('manager_%s' % name) or
getattr(client, name, None) or default)
host = get('hostname', hostname, 'localhost')
port = port if port is not None else opt('manager_port', 15672)
userid = get('userid', userid, 'guest')
password = get('password', password, 'guest')
return pyrabbit.Client(f'{host}:{port}', userid, password)
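# --- Usage sketch (illustrative) -----------------------------------------------
# Shows how the helper above is typically invoked with a kombu Connection.  The
# broker URL and credentials are placeholders; kombu, pyrabbit and the RabbitMQ
# management plugin are assumed to be available.
if __name__ == '__main__':
    from kombu import Connection
    connection = Connection('amqp://guest:guest@localhost:5672//')
    manager = get_manager(connection)
    # -> pyrabbit.Client('localhost:15672', 'guest', 'guest')
    print(manager.get_overview())  # any management API call works from here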
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import logging
import os
from absl import flags
from perfkitbenchmarker import events
from perfkitbenchmarker import sample
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.traces import base_collector
import six
flags.DEFINE_boolean(
'sar', False, 'Run sar (https://linux.die.net/man/1/sar) '
'on each VM to collect system performance metrics during '
'each benchmark run.')
flags.DEFINE_integer(
'sar_interval', 5, 'sar sample collection frequency, in seconds. Only '
'applicable when --sar is specified.')
flags.DEFINE_integer(
'sar_samples', None,
'Number of sar samples to collect. When undefined sar is '
    'run indefinitely. This must be set to record average '
'statistics. Only applicable when --sar is specified.')
flags.DEFINE_string(
'sar_output', None, 'Output directory for sar output. '
'Only applicable when --sar is specified. '
'Default: run temporary directory.')
flags.DEFINE_boolean('sar_publish', True,
'Whether to publish average sar statistics.')
FLAGS = flags.FLAGS
def _AddStealResults(metadata, output, samples):
"""Appends average Steal Time %'s to the samples list.
Sample data e.g.
...
Linux 4.4.0-1083-aws (ip-10-0-0-217) 05/21/2019 _x86_64_ (8 CPU)
12:12:36 AM CPU %user %nice %system %iowait %steal %idle
12:17:17 AM all 18.09 0.00 0.00 0.00 81.91 0.00
12:17:22 AM all 21.96 0.00 0.00 0.00 78.04 0.00
12:17:27 AM all 36.47 0.00 0.00 0.00 63.53 0.00
Average: all 33.73 0.00 0.00 0.00 66.27 0.00
Args:
metadata: metadata of the sample.
    output: the output of the sar command run on the VM.
samples: list of samples to return.
"""
output_lines = output.splitlines()
for line in output_lines:
line_split = line.split()
if not line_split:
continue
if line_split[0] == 'Linux':
continue
if line_split[-2] == '%steal':
continue
if line_split[0] == 'Average:':
metric = 'average_steal'
else:
metric = 'steal'
value = float(line_split[-2]) # parse %steal time
my_metadata = {'user_percent': float(line_split[3])}
my_metadata.update(metadata)
samples.append(
sample.Sample(
metric=metric, value=value, unit='%', metadata=my_metadata))
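# --- Illustration (not part of the collector) ----------------------------------
# A self-contained check of the parsing above, fed with the sample sar output
# from the docstring: header lines are skipped, each data row yields a 'steal'
# sample and the trailing 'Average:' row yields an 'average_steal' sample.
def _example_parse_steal():
  sar_output = (
      'Linux 4.4.0-1083-aws (ip-10-0-0-217) 05/21/2019 _x86_64_ (8 CPU)\n'
      '12:12:36 AM CPU %user %nice %system %iowait %steal %idle\n'
      '12:17:17 AM all 18.09 0.00 0.00 0.00 81.91 0.00\n'
      'Average: all 33.73 0.00 0.00 0.00 66.27 0.00\n')
  results = []
  _AddStealResults({'role': 'default_0'}, sar_output, results)
  return results  # steal=81.91 and average_steal=66.27, both in '%'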
class _SarCollector(base_collector.BaseCollector):
"""sar collector.
Installs sysstat and runs sar on a collection of VMs.
"""
def _CollectorName(self):
return 'sar'
def _InstallCollector(self, vm):
vm.InstallPackages('sysstat')
def _CollectorRunCommand(self, vm, collector_file):
cmd = ('sar -u {sar_interval} {sar_samples} > {output} 2>&1 & '
'echo $!').format(
output=collector_file,
sar_interval=FLAGS.sar_interval,
sar_samples=FLAGS.sar_samples if FLAGS.sar_samples else '')
return cmd
def Analyze(self, sender, benchmark_spec, samples):
"""Analyze sar file and record samples."""
def _Analyze(role, f):
"""Parse file and record samples."""
with open(os.path.join(self.output_directory, os.path.basename(f)),
'r') as fp:
output = fp.read()
metadata = {
'event': 'sar',
'sender': 'run',
'sar_interval': self.interval,
'role': role,
}
_AddStealResults(metadata, output, samples)
vm_util.RunThreaded(
_Analyze, [((k, w), {}) for k, w in six.iteritems(self._role_mapping)])
def Register(parsed_flags):
"""Registers the sar collector if FLAGS.sar is set."""
if not parsed_flags.sar:
return
output_directory = (
parsed_flags.sar_output
if parsed_flags['sar_output'].present else vm_util.GetTempDir())
logging.debug('Registering sar collector with interval %s, output to %s.',
parsed_flags.sar_interval, output_directory)
if not os.path.isdir(output_directory):
os.makedirs(output_directory)
collector = _SarCollector(
interval=parsed_flags.sar_interval, output_directory=output_directory)
events.before_phase.connect(collector.Start, events.RUN_PHASE, weak=False)
events.after_phase.connect(collector.Stop, events.RUN_PHASE, weak=False)
if parsed_flags.sar_publish:
events.samples_created.connect(
collector.Analyze, events.RUN_PHASE, weak=False)
|
from functools import partial
import logging
import voluptuous as vol
import wakeonlan
from homeassistant.const import CONF_BROADCAST_ADDRESS, CONF_BROADCAST_PORT, CONF_MAC
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DOMAIN = "wake_on_lan"
SERVICE_SEND_MAGIC_PACKET = "send_magic_packet"
WAKE_ON_LAN_SEND_MAGIC_PACKET_SCHEMA = vol.Schema(
{
vol.Required(CONF_MAC): cv.string,
vol.Optional(CONF_BROADCAST_ADDRESS): cv.string,
vol.Optional(CONF_BROADCAST_PORT): cv.port,
}
)
async def async_setup(hass, config):
"""Set up the wake on LAN component."""
async def send_magic_packet(call):
"""Send magic packet to wake up a device."""
mac_address = call.data.get(CONF_MAC)
broadcast_address = call.data.get(CONF_BROADCAST_ADDRESS)
broadcast_port = call.data.get(CONF_BROADCAST_PORT)
service_kwargs = {}
if broadcast_address is not None:
service_kwargs["ip_address"] = broadcast_address
if broadcast_port is not None:
service_kwargs["port"] = broadcast_port
_LOGGER.info(
"Send magic packet to mac %s (broadcast: %s, port: %s)",
mac_address,
broadcast_address,
broadcast_port,
)
await hass.async_add_executor_job(
partial(wakeonlan.send_magic_packet, mac_address, **service_kwargs)
)
hass.services.async_register(
DOMAIN,
SERVICE_SEND_MAGIC_PACKET,
send_magic_packet,
schema=WAKE_ON_LAN_SEND_MAGIC_PACKET_SCHEMA,
)
return True
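# --- Usage sketch (illustrative, not part of the component) -------------------
# Example service data a user might send to the service registered above, with
# placeholder values:
#
#   service: wake_on_lan.send_magic_packet
#   data:
#     mac: "00:11:22:33:44:55"
#     broadcast_address: "192.168.1.255"
#     broadcast_port: 9
#
# which, per the handler above, results in the library call
#   wakeonlan.send_magic_packet(
#       "00:11:22:33:44:55", ip_address="192.168.1.255", port=9)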
|
from cerberus import errors
from cerberus.tests import assert_fail, assert_success
def test_schema(validator):
field = 'a_dict'
subschema_field = 'address'
assert_success({field: {subschema_field: 'i live here', 'city': 'in my own town'}})
assert_fail(
schema={
field: {
'type': 'dict',
'schema': {
subschema_field: {'type': 'string'},
'city': {'type': 'string', 'required': True},
},
}
},
document={field: {subschema_field: 34}},
validator=validator,
error=(
field,
(field, 'schema'),
errors.SCHEMA,
validator.schema['a_dict']['schema'],
),
child_errors=[
(
(field, subschema_field),
(field, 'schema', subschema_field, 'type'),
errors.TYPE,
('string',),
),
(
(field, 'city'),
(field, 'schema', 'city', 'required'),
errors.REQUIRED_FIELD,
True,
),
],
)
assert field in validator.errors
assert subschema_field in validator.errors[field][-1]
assert (
errors.BasicErrorHandler.messages[errors.TYPE.code].format(
constraint=('string',)
)
in validator.errors[field][-1][subschema_field]
)
assert 'city' in validator.errors[field][-1]
assert (
errors.BasicErrorHandler.messages[errors.REQUIRED_FIELD.code]
in validator.errors[field][-1]['city']
)
def test_options_passed_to_nested_validators(validator):
validator.allow_unknown = True
assert_success(
schema={'sub_dict': {'type': 'dict', 'schema': {'foo': {'type': 'string'}}}},
document={'sub_dict': {'foo': 'bar', 'unknown': True}},
validator=validator,
)
|
from __future__ import print_function
import sys
import argparse
def main(args):
ap = argparse.ArgumentParser()
ap.add_argument('expr', nargs='?', help='name=value')
ns = ap.parse_args(args)
app = globals()['_stash']
""":type : StaSh"""
_, current_state = app.runtime.get_current_worker_and_state()
if ns.expr is None:
for k, v in current_state.aliases.items():
print('{}={}'.format(k, v[0]))
else:
if "=" in ns.expr:
name, value = ns.expr.split("=", 1)
if name == "" or value == "":
raise ValueError("alias: invalid name=value expression")
tokens, parsed = app.runtime.parser.parse(value)
# Ensure the actual form of an alias is fully expanded
tokens, _ = app.runtime.expander.alias_subs(tokens, parsed, exclude=name)
value_expanded = ' '.join(t.tok for t in tokens)
current_state.aliases[name] = (value, value_expanded)
sys.exit(0)
else:
try:
print('{}={}'.format(ns.expr, current_state.aliases[ns.expr]))
except KeyError as err:
                raise KeyError('alias: {} not found'.format(ns.expr))
if __name__ == "__main__":
main(sys.argv[1:])
|
import logging
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_send
from .const import (
ATTR_EVENT_TYPE,
ATTR_FACE_URL,
ATTR_ID,
ATTR_IS_KNOWN,
ATTR_NAME,
ATTR_PERSONS,
DATA_PERSONS,
DEFAULT_PERSON,
DOMAIN,
NETATMO_EVENT,
)
_LOGGER = logging.getLogger(__name__)
EVENT_TYPE_MAP = {
"outdoor": "",
"therm_mode": "",
}
async def handle_webhook(hass, webhook_id, request):
"""Handle webhook callback."""
try:
data = await request.json()
except ValueError as err:
_LOGGER.error("Error in data: %s", err)
return None
_LOGGER.debug("Got webhook data: %s", data)
event_type = data.get(ATTR_EVENT_TYPE)
if event_type in EVENT_TYPE_MAP:
async_send_event(hass, event_type, data)
for event_data in data.get(EVENT_TYPE_MAP[event_type], []):
async_evaluate_event(hass, event_data)
else:
async_evaluate_event(hass, data)
@callback
def async_evaluate_event(hass, event_data):
"""Evaluate events from webhook."""
event_type = event_data.get(ATTR_EVENT_TYPE)
if event_type == "person":
for person in event_data.get(ATTR_PERSONS):
person_event_data = dict(event_data)
person_event_data[ATTR_ID] = person.get(ATTR_ID)
person_event_data[ATTR_NAME] = hass.data[DOMAIN][DATA_PERSONS].get(
person_event_data[ATTR_ID], DEFAULT_PERSON
)
person_event_data[ATTR_IS_KNOWN] = person.get(ATTR_IS_KNOWN)
person_event_data[ATTR_FACE_URL] = person.get(ATTR_FACE_URL)
async_send_event(hass, event_type, person_event_data)
else:
_LOGGER.debug("%s: %s", event_type, event_data)
async_send_event(hass, event_type, event_data)
@callback
def async_send_event(hass, event_type, data):
"""Send events."""
hass.bus.async_fire(
event_type=NETATMO_EVENT, event_data={"type": event_type, "data": data}
)
async_dispatcher_send(
hass,
f"signal-{DOMAIN}-webhook-{event_type}",
{"type": event_type, "data": data},
)
|
import numpy as np
import jax.numpy as jnp
from jax import config
import tensorflow as tf
import torch
import pytest
import tensornetwork
from tensornetwork.block_sparse.charge import charge_equal
from tensornetwork import backends
config.update("jax_enable_x64", True)
np_real = [np.float32, np.float64]
np_complex = [np.complex64, np.complex128]
np_float_dtypes = np_real + np_complex
np_int = [np.int8, np.int16, np.int32, np.int64]
np_uint = [np.uint8, np.uint16, np.uint32, np.uint64]
np_not_bool = np_float_dtypes + np_int + np_uint + [None, ]
np_not_half = [np.float32, np.float64] + np_complex
np_all_dtypes = np_not_bool + [np.bool, ]
torch_supported_dtypes = np_real + np_int + [np.uint8, np.bool, None]
# torch_supported_dtypes = [np.float32, np.float64]
def safe_randn(shape, backend, dtype):
"""
  Creates a random tensor, skipping the calling test (via pytest.skip) when the
  dtype is not supported by the given backend.  Otherwise returns the Tensor
  and the NumPy array it was initialized from.
"""
np.random.seed(seed=10)
init = np.random.randn(*shape)
if dtype == np.bool:
init = np.round(init)
init = init.astype(dtype)
if dtype in np_complex:
init_i = np.random.randn(*shape)
init = init + 1.0j * init_i.astype(dtype)
if backend == "pytorch" and dtype not in torch_supported_dtypes:
pytest.skip("dtype unsupported by PyTorch")
else:
A = tensornetwork.Tensor(init, backend=backend)
return (A, init)
def safe_zeros(shape, backend, dtype):
"""
  Creates a tensor of zeros, skipping the calling test (via pytest.skip) when
  the dtype is not supported by the given backend.  Otherwise returns both the
  Tensor and the NumPy array it was initialized from.
"""
init = np.zeros(shape, dtype=dtype)
if backend == "pytorch" and dtype not in torch_supported_dtypes:
pytest.skip("dtype unsupported by PyTorch")
else:
A = tensornetwork.Tensor(init, backend=backend)
return (A, init)
def np_dtype_to_backend(backend, dtype):
"""
Converts a given np dtype to the equivalent in the given backend. Skips
the present test if the dtype is not supported in the backend.
"""
backend_obj = backends.backend_factory.get_backend(backend)
if backend_obj.name in ("numpy", "symmetric"):
return dtype
A_np = np.ones([1], dtype=dtype)
if backend_obj.name == "jax":
A = jnp.array(A_np)
elif backend_obj.name == "tensorflow":
A = tf.convert_to_tensor(A_np, dtype=dtype)
elif backend_obj.name == "pytorch":
if dtype not in torch_supported_dtypes:
pytest.skip("dtype unsupported by PyTorch")
A = torch.tensor(A_np)
else:
raise ValueError("Invalid backend ", backend)
return A.dtype
def check_contraction_dtype(backend, dtype):
"""
Skips the test if the backend cannot perform multiply-add with the given
dtype.
"""
skip = False
if backend == "tensorflow":
if dtype in [np.uint8, tf.uint8, np.uint16, tf.uint16, np.int8, tf.int8,
np.int16, tf.int16, np.uint32, tf.uint32, np.uint64,
tf.uint64]:
skip = True
if backend == "pytorch":
if dtype in [np.float16, torch.float16]:
skip = True
if skip:
pytest.skip("backend does not support multiply-add with this dtype.")
def assert_allclose(expected, actual, backend, **kwargs):
if backend.name == 'symmetric':
exp = expected.contiguous()
act = actual.contiguous()
if exp.shape != act.shape:
raise ValueError(f"expected shape = {exp.shape}, "
f"actual shape = {act.shape}")
if len(exp.flat_charges) != len(act.flat_charges):
raise ValueError("expected charges differ from actual charges")
if len(exp.flat_flows) != len(act.flat_flows):
raise ValueError(f"expected flat flows = {exp.flat_flows}"
f" differ from actual flat flows = {act.flat_flows}")
for c1, c2 in zip(exp.flat_charges, act.flat_charges):
if not charge_equal(c1, c2):
raise ValueError("expected charges differ from actual charges")
if not np.all(np.array(exp.flat_flows) == np.array(act.flat_flows)):
raise ValueError(f"expected flat flows = {exp.flat_flows}"
f" differ from actual flat flows = {act.flat_flows}")
if not np.all(np.abs(exp.data - act.data) < 1E-10):
np.testing.assert_allclose(act.data, exp.data, **kwargs)
else:
np.testing.assert_allclose(actual, expected, **kwargs)
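# --- Usage sketch (illustrative, not part of the test helpers) -----------------
# How these helpers are meant to be combined inside a parametrized test; the
# backend/dtype pair here is just an example.
def _example_helper_usage():
  backend, dtype = "numpy", np.float64
  backend_dtype = np_dtype_to_backend(backend, dtype)  # identity for numpy
  check_contraction_dtype(backend, backend_dtype)      # skips unsupported combos
  tensor, array = safe_randn((2, 3), backend, backend_dtype)
  backend_obj = backends.backend_factory.get_backend(backend)
  # Tensor keeps its backing array in .array
  assert_allclose(array, tensor.array, backend_obj)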
|
import time
from kazoo.exceptions import NoNodeError
from pyramid.view import view_config
from paasta_tools.api.views.exception import ApiFailure
from paasta_tools.long_running_service_tools import ZK_PAUSE_AUTOSCALE_PATH
from paasta_tools.utils import ZookeeperPool
@view_config(
route_name="service_autoscaler.pause.get", request_method="GET", renderer="json"
)
def get_service_autoscaler_pause(request):
with ZookeeperPool() as zk:
try:
pause_until = zk.get(ZK_PAUSE_AUTOSCALE_PATH)[0].decode("utf8")
except (NoNodeError, ValueError):
pause_until = "0"
except Exception as e:
raise ApiFailure(e, 500)
return pause_until
@view_config(
route_name="service_autoscaler.pause.post", request_method="POST", renderer="json"
)
def update_service_autoscaler_pause(request):
minutes = request.swagger_data.get("json_body")["minutes"]
current_time = time.time()
expiry_time = current_time + minutes * 60
with ZookeeperPool() as zk:
try:
zk.ensure_path(ZK_PAUSE_AUTOSCALE_PATH)
zk.set(ZK_PAUSE_AUTOSCALE_PATH, str(expiry_time).encode("utf-8"))
except Exception as e:
raise ApiFailure(e, 500)
return
@view_config(
route_name="service_autoscaler.pause.delete",
request_method="DELETE",
renderer="json",
)
def delete_service_autoscaler_pause(request):
with ZookeeperPool() as zk:
try:
zk.ensure_path(ZK_PAUSE_AUTOSCALE_PATH)
zk.delete(ZK_PAUSE_AUTOSCALE_PATH)
except Exception as e:
raise ApiFailure(e, 500)
return
|
import asyncio
from datetime import timedelta
from functools import partial
import logging
from amcrest import AmcrestError
from haffmpeg.camera import CameraMjpeg
import voluptuous as vol
from homeassistant.components.camera import (
CAMERA_SERVICE_SCHEMA,
SUPPORT_ON_OFF,
SUPPORT_STREAM,
Camera,
)
from homeassistant.components.ffmpeg import DATA_FFMPEG
from homeassistant.const import CONF_NAME, STATE_OFF, STATE_ON
from homeassistant.helpers.aiohttp_client import (
async_aiohttp_proxy_stream,
async_aiohttp_proxy_web,
async_get_clientsession,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import (
CAMERA_WEB_SESSION_TIMEOUT,
CAMERAS,
COMM_TIMEOUT,
DATA_AMCREST,
DEVICES,
SERVICE_UPDATE,
SNAPSHOT_TIMEOUT,
)
from .helpers import log_update_error, service_signal
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(seconds=15)
STREAM_SOURCE_LIST = ["snapshot", "mjpeg", "rtsp"]
_SRV_EN_REC = "enable_recording"
_SRV_DS_REC = "disable_recording"
_SRV_EN_AUD = "enable_audio"
_SRV_DS_AUD = "disable_audio"
_SRV_EN_MOT_REC = "enable_motion_recording"
_SRV_DS_MOT_REC = "disable_motion_recording"
_SRV_GOTO = "goto_preset"
_SRV_CBW = "set_color_bw"
_SRV_TOUR_ON = "start_tour"
_SRV_TOUR_OFF = "stop_tour"
_SRV_PTZ_CTRL = "ptz_control"
_ATTR_PTZ_TT = "travel_time"
_ATTR_PTZ_MOV = "movement"
_MOV = [
"zoom_out",
"zoom_in",
"right",
"left",
"up",
"down",
"right_down",
"right_up",
"left_down",
"left_up",
]
_ZOOM_ACTIONS = ["ZoomWide", "ZoomTele"]
_MOVE_1_ACTIONS = ["Right", "Left", "Up", "Down"]
_MOVE_2_ACTIONS = ["RightDown", "RightUp", "LeftDown", "LeftUp"]
_ACTION = _ZOOM_ACTIONS + _MOVE_1_ACTIONS + _MOVE_2_ACTIONS
_DEFAULT_TT = 0.2
_ATTR_PRESET = "preset"
_ATTR_COLOR_BW = "color_bw"
_CBW_COLOR = "color"
_CBW_AUTO = "auto"
_CBW_BW = "bw"
_CBW = [_CBW_COLOR, _CBW_AUTO, _CBW_BW]
_SRV_GOTO_SCHEMA = CAMERA_SERVICE_SCHEMA.extend(
{vol.Required(_ATTR_PRESET): vol.All(vol.Coerce(int), vol.Range(min=1))}
)
_SRV_CBW_SCHEMA = CAMERA_SERVICE_SCHEMA.extend(
{vol.Required(_ATTR_COLOR_BW): vol.In(_CBW)}
)
_SRV_PTZ_SCHEMA = CAMERA_SERVICE_SCHEMA.extend(
{
vol.Required(_ATTR_PTZ_MOV): vol.In(_MOV),
vol.Optional(_ATTR_PTZ_TT, default=_DEFAULT_TT): cv.small_float,
}
)
CAMERA_SERVICES = {
_SRV_EN_REC: (CAMERA_SERVICE_SCHEMA, "async_enable_recording", ()),
_SRV_DS_REC: (CAMERA_SERVICE_SCHEMA, "async_disable_recording", ()),
_SRV_EN_AUD: (CAMERA_SERVICE_SCHEMA, "async_enable_audio", ()),
_SRV_DS_AUD: (CAMERA_SERVICE_SCHEMA, "async_disable_audio", ()),
_SRV_EN_MOT_REC: (CAMERA_SERVICE_SCHEMA, "async_enable_motion_recording", ()),
_SRV_DS_MOT_REC: (CAMERA_SERVICE_SCHEMA, "async_disable_motion_recording", ()),
_SRV_GOTO: (_SRV_GOTO_SCHEMA, "async_goto_preset", (_ATTR_PRESET,)),
_SRV_CBW: (_SRV_CBW_SCHEMA, "async_set_color_bw", (_ATTR_COLOR_BW,)),
_SRV_TOUR_ON: (CAMERA_SERVICE_SCHEMA, "async_start_tour", ()),
_SRV_TOUR_OFF: (CAMERA_SERVICE_SCHEMA, "async_stop_tour", ()),
_SRV_PTZ_CTRL: (
_SRV_PTZ_SCHEMA,
"async_ptz_control",
(_ATTR_PTZ_MOV, _ATTR_PTZ_TT),
),
}
_BOOL_TO_STATE = {True: STATE_ON, False: STATE_OFF}
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up an Amcrest IP Camera."""
if discovery_info is None:
return
name = discovery_info[CONF_NAME]
device = hass.data[DATA_AMCREST][DEVICES][name]
async_add_entities([AmcrestCam(name, device, hass.data[DATA_FFMPEG])], True)
class CannotSnapshot(Exception):
"""Conditions are not valid for taking a snapshot."""
class AmcrestCommandFailed(Exception):
"""Amcrest camera command did not work."""
class AmcrestCam(Camera):
"""An implementation of an Amcrest IP camera."""
def __init__(self, name, device, ffmpeg):
"""Initialize an Amcrest camera."""
super().__init__()
self._name = name
self._api = device.api
self._ffmpeg = ffmpeg
self._ffmpeg_arguments = device.ffmpeg_arguments
self._stream_source = device.stream_source
self._resolution = device.resolution
self._token = self._auth = device.authentication
self._control_light = device.control_light
self._is_recording = False
self._motion_detection_enabled = None
self._brand = None
self._model = None
self._audio_enabled = None
self._motion_recording_enabled = None
self._color_bw = None
self._rtsp_url = None
self._snapshot_task = None
self._unsub_dispatcher = []
self._update_succeeded = False
def _check_snapshot_ok(self):
available = self.available
if not available or not self.is_on:
_LOGGER.warning(
"Attempt to take snapshot when %s camera is %s",
self.name,
"offline" if not available else "off",
)
raise CannotSnapshot
async def _async_get_image(self):
try:
# Send the request to snap a picture and return raw jpg data
# Snapshot command needs a much longer read timeout than other commands.
return await self.hass.async_add_executor_job(
partial(
self._api.snapshot,
timeout=(COMM_TIMEOUT, SNAPSHOT_TIMEOUT),
stream=False,
)
)
except AmcrestError as error:
log_update_error(_LOGGER, "get image from", self.name, "camera", error)
return None
finally:
self._snapshot_task = None
async def async_camera_image(self):
"""Return a still image response from the camera."""
_LOGGER.debug("Take snapshot from %s", self._name)
try:
# Amcrest cameras only support one snapshot command at a time.
# Hence need to wait if a previous snapshot has not yet finished.
# Also need to check that camera is online and turned on before each wait
            # and before initiating snapshot.
while self._snapshot_task:
self._check_snapshot_ok()
_LOGGER.debug("Waiting for previous snapshot from %s ...", self._name)
await self._snapshot_task
self._check_snapshot_ok()
# Run snapshot command in separate Task that can't be cancelled so
# 1) it's not possible to send another snapshot command while camera is
# still working on a previous one, and
# 2) someone will be around to catch any exceptions.
self._snapshot_task = self.hass.async_create_task(self._async_get_image())
return await asyncio.shield(self._snapshot_task)
except CannotSnapshot:
return None
async def handle_async_mjpeg_stream(self, request):
"""Return an MJPEG stream."""
# The snapshot implementation is handled by the parent class
if self._stream_source == "snapshot":
return await super().handle_async_mjpeg_stream(request)
if not self.available:
_LOGGER.warning(
"Attempt to stream %s when %s camera is offline",
self._stream_source,
self.name,
)
return None
if self._stream_source == "mjpeg":
# stream an MJPEG image stream directly from the camera
websession = async_get_clientsession(self.hass)
streaming_url = self._api.mjpeg_url(typeno=self._resolution)
stream_coro = websession.get(
streaming_url, auth=self._token, timeout=CAMERA_WEB_SESSION_TIMEOUT
)
return await async_aiohttp_proxy_web(self.hass, request, stream_coro)
# streaming via ffmpeg
streaming_url = self._rtsp_url
stream = CameraMjpeg(self._ffmpeg.binary, loop=self.hass.loop)
await stream.open_camera(streaming_url, extra_cmd=self._ffmpeg_arguments)
try:
stream_reader = await stream.get_reader()
return await async_aiohttp_proxy_stream(
self.hass,
request,
stream_reader,
self._ffmpeg.ffmpeg_stream_content_type,
)
finally:
await stream.close()
# Entity property overrides
@property
def should_poll(self) -> bool:
"""Return True if entity has to be polled for state.
False if entity pushes its state to HA.
"""
return True
@property
def name(self):
"""Return the name of this camera."""
return self._name
@property
def device_state_attributes(self):
"""Return the Amcrest-specific camera state attributes."""
attr = {}
if self._audio_enabled is not None:
attr["audio"] = _BOOL_TO_STATE.get(self._audio_enabled)
if self._motion_recording_enabled is not None:
attr["motion_recording"] = _BOOL_TO_STATE.get(
self._motion_recording_enabled
)
if self._color_bw is not None:
attr[_ATTR_COLOR_BW] = self._color_bw
return attr
@property
def available(self):
"""Return True if entity is available."""
return self._api.available
@property
def supported_features(self):
"""Return supported features."""
return SUPPORT_ON_OFF | SUPPORT_STREAM
# Camera property overrides
@property
def is_recording(self):
"""Return true if the device is recording."""
return self._is_recording
@property
def brand(self):
"""Return the camera brand."""
return self._brand
@property
def motion_detection_enabled(self):
"""Return the camera motion detection status."""
return self._motion_detection_enabled
@property
def model(self):
"""Return the camera model."""
return self._model
async def stream_source(self):
"""Return the source of the stream."""
return self._rtsp_url
@property
def is_on(self):
"""Return true if on."""
return self.is_streaming
# Other Entity method overrides
async def async_on_demand_update(self):
"""Update state."""
self.async_schedule_update_ha_state(True)
async def async_added_to_hass(self):
"""Subscribe to signals and add camera to list."""
for service, params in CAMERA_SERVICES.items():
self._unsub_dispatcher.append(
async_dispatcher_connect(
self.hass,
service_signal(service, self.entity_id),
getattr(self, params[1]),
)
)
self._unsub_dispatcher.append(
async_dispatcher_connect(
self.hass,
service_signal(SERVICE_UPDATE, self._name),
self.async_on_demand_update,
)
)
self.hass.data[DATA_AMCREST][CAMERAS].append(self.entity_id)
async def async_will_remove_from_hass(self):
"""Remove camera from list and disconnect from signals."""
self.hass.data[DATA_AMCREST][CAMERAS].remove(self.entity_id)
for unsub_dispatcher in self._unsub_dispatcher:
unsub_dispatcher()
def update(self):
"""Update entity status."""
if not self.available or self._update_succeeded:
if not self.available:
self._update_succeeded = False
return
_LOGGER.debug("Updating %s camera", self.name)
try:
if self._brand is None:
resp = self._api.vendor_information.strip()
if resp.startswith("vendor="):
self._brand = resp.split("=")[-1]
else:
self._brand = "unknown"
if self._model is None:
resp = self._api.device_type.strip()
if resp.startswith("type="):
self._model = resp.split("=")[-1]
else:
self._model = "unknown"
self.is_streaming = self._get_video()
self._is_recording = self._get_recording()
self._motion_detection_enabled = self._get_motion_detection()
self._audio_enabled = self._get_audio()
self._motion_recording_enabled = self._get_motion_recording()
self._color_bw = self._get_color_mode()
self._rtsp_url = self._api.rtsp_url(typeno=self._resolution)
except AmcrestError as error:
log_update_error(_LOGGER, "get", self.name, "camera attributes", error)
self._update_succeeded = False
else:
self._update_succeeded = True
# Other Camera method overrides
def turn_off(self):
"""Turn off camera."""
self._enable_video(False)
def turn_on(self):
"""Turn on camera."""
self._enable_video(True)
def enable_motion_detection(self):
"""Enable motion detection in the camera."""
self._enable_motion_detection(True)
def disable_motion_detection(self):
"""Disable motion detection in camera."""
self._enable_motion_detection(False)
# Additional Amcrest Camera service methods
async def async_enable_recording(self):
"""Call the job and enable recording."""
await self.hass.async_add_executor_job(self._enable_recording, True)
async def async_disable_recording(self):
"""Call the job and disable recording."""
await self.hass.async_add_executor_job(self._enable_recording, False)
async def async_enable_audio(self):
"""Call the job and enable audio."""
await self.hass.async_add_executor_job(self._enable_audio, True)
async def async_disable_audio(self):
"""Call the job and disable audio."""
await self.hass.async_add_executor_job(self._enable_audio, False)
async def async_enable_motion_recording(self):
"""Call the job and enable motion recording."""
await self.hass.async_add_executor_job(self._enable_motion_recording, True)
async def async_disable_motion_recording(self):
"""Call the job and disable motion recording."""
await self.hass.async_add_executor_job(self._enable_motion_recording, False)
async def async_goto_preset(self, preset):
"""Call the job and move camera to preset position."""
await self.hass.async_add_executor_job(self._goto_preset, preset)
async def async_set_color_bw(self, color_bw):
"""Call the job and set camera color mode."""
await self.hass.async_add_executor_job(self._set_color_bw, color_bw)
async def async_start_tour(self):
"""Call the job and start camera tour."""
await self.hass.async_add_executor_job(self._start_tour, True)
async def async_stop_tour(self):
"""Call the job and stop camera tour."""
await self.hass.async_add_executor_job(self._start_tour, False)
async def async_ptz_control(self, movement, travel_time):
"""Move or zoom camera in specified direction."""
code = _ACTION[_MOV.index(movement)]
kwargs = {"code": code, "arg1": 0, "arg2": 0, "arg3": 0}
if code in _MOVE_1_ACTIONS:
kwargs["arg2"] = 1
elif code in _MOVE_2_ACTIONS:
kwargs["arg1"] = kwargs["arg2"] = 1
try:
await self.hass.async_add_executor_job(
partial(self._api.ptz_control_command, action="start", **kwargs)
)
await asyncio.sleep(travel_time)
await self.hass.async_add_executor_job(
partial(self._api.ptz_control_command, action="stop", **kwargs)
)
except AmcrestError as error:
log_update_error(
_LOGGER, "move", self.name, f"camera PTZ {movement}", error
)
# Methods to send commands to Amcrest camera and handle errors
def _change_setting(self, value, attr, description, action="set"):
func = description.replace(" ", "_")
description = f"camera {description} to {value}"
tries = 3
while True:
try:
getattr(self, f"_set_{func}")(value)
new_value = getattr(self, f"_get_{func}")()
if new_value != value:
raise AmcrestCommandFailed
except (AmcrestError, AmcrestCommandFailed) as error:
if tries == 1:
log_update_error(_LOGGER, action, self.name, description, error)
return
log_update_error(
_LOGGER, action, self.name, description, error, logging.DEBUG
)
else:
if attr:
setattr(self, attr, new_value)
self.schedule_update_ha_state()
return
tries -= 1
def _get_video(self):
return self._api.video_enabled
def _set_video(self, enable):
self._api.video_enabled = enable
def _enable_video(self, enable):
"""Enable or disable camera video stream."""
# Given the way the camera's state is determined by
# is_streaming and is_recording, we can't leave
# recording on if video stream is being turned off.
if self.is_recording and not enable:
self._enable_recording(False)
self._change_setting(enable, "is_streaming", "video")
if self._control_light:
self._change_light()
def _get_recording(self):
return self._api.record_mode == "Manual"
def _set_recording(self, enable):
rec_mode = {"Automatic": 0, "Manual": 1}
self._api.record_mode = rec_mode["Manual" if enable else "Automatic"]
def _enable_recording(self, enable):
"""Turn recording on or off."""
# Given the way the camera's state is determined by
# is_streaming and is_recording, we can't leave
# video stream off if recording is being turned on.
if not self.is_streaming and enable:
self._enable_video(True)
self._change_setting(enable, "_is_recording", "recording")
def _get_motion_detection(self):
return self._api.is_motion_detector_on()
def _set_motion_detection(self, enable):
self._api.motion_detection = str(enable).lower()
def _enable_motion_detection(self, enable):
"""Enable or disable motion detection."""
self._change_setting(enable, "_motion_detection_enabled", "motion detection")
def _get_audio(self):
return self._api.audio_enabled
def _set_audio(self, enable):
self._api.audio_enabled = enable
def _enable_audio(self, enable):
"""Enable or disable audio stream."""
self._change_setting(enable, "_audio_enabled", "audio")
if self._control_light:
self._change_light()
def _get_indicator_light(self):
return "true" in self._api.command(
"configManager.cgi?action=getConfig&name=LightGlobal"
).content.decode("utf-8")
def _set_indicator_light(self, enable):
self._api.command(
f"configManager.cgi?action=setConfig&LightGlobal[0].Enable={str(enable).lower()}"
)
def _change_light(self):
"""Enable or disable indicator light."""
self._change_setting(
self._audio_enabled or self.is_streaming, None, "indicator light"
)
def _get_motion_recording(self):
return self._api.is_record_on_motion_detection()
def _set_motion_recording(self, enable):
self._api.motion_recording = str(enable).lower()
def _enable_motion_recording(self, enable):
"""Enable or disable motion recording."""
self._change_setting(enable, "_motion_recording_enabled", "motion recording")
def _goto_preset(self, preset):
"""Move camera position and zoom to preset."""
try:
self._api.go_to_preset(action="start", preset_point_number=preset)
except AmcrestError as error:
log_update_error(
_LOGGER, "move", self.name, f"camera to preset {preset}", error
)
def _get_color_mode(self):
return _CBW[self._api.day_night_color]
def _set_color_mode(self, cbw):
self._api.day_night_color = _CBW.index(cbw)
def _set_color_bw(self, cbw):
"""Set camera color mode."""
self._change_setting(cbw, "_color_bw", "color mode")
def _start_tour(self, start):
"""Start camera tour."""
try:
self._api.tour(start=start)
except AmcrestError as error:
log_update_error(
_LOGGER, "start" if start else "stop", self.name, "camera tour", error
)
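# Hedged sketch, not part of the integration: _MOV and _ACTION line up by
# index, so a ptz_control service call with movement "left_up" (the last
# entry of _MOV above) resolves to the camera action code "LeftUp", and the
# two-axis move sets both arg1 and arg2 before the timed start/stop pair:
#
#     code = _ACTION[_MOV.index("left_up")]                   # -> "LeftUp"
#     kwargs = {"code": code, "arg1": 1, "arg2": 1, "arg3": 0}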
|
import json
from pathlib import Path
from typing import Set
import attr
from .const import COMPONENT_DIR, TESTS_DIR
@attr.s
class Info:
"""Info about new integration."""
domain: str = attr.ib()
name: str = attr.ib()
is_new: bool = attr.ib()
codeowner: str = attr.ib(default=None)
requirement: str = attr.ib(default=None)
authentication: str = attr.ib(default=None)
discoverable: str = attr.ib(default=None)
oauth2: str = attr.ib(default=None)
files_added: Set[Path] = attr.ib(factory=set)
tests_added: Set[Path] = attr.ib(factory=set)
examples_added: Set[Path] = attr.ib(factory=set)
@property
def integration_dir(self) -> Path:
"""Return directory if integration."""
return COMPONENT_DIR / self.domain
@property
def tests_dir(self) -> Path:
"""Return test directory."""
return TESTS_DIR / self.domain
@property
def manifest_path(self) -> Path:
"""Path to the manifest."""
return COMPONENT_DIR / self.domain / "manifest.json"
def manifest(self) -> dict:
"""Return integration manifest."""
return json.loads(self.manifest_path.read_text())
def update_manifest(self, **kwargs) -> None:
"""Update the integration manifest."""
print(f"Updating {self.domain} manifest: {kwargs}")
self.manifest_path.write_text(
json.dumps({**self.manifest(), **kwargs}, indent=2)
)
@property
def strings_path(self) -> Path:
"""Path to the strings."""
return COMPONENT_DIR / self.domain / "strings.json"
def strings(self) -> dict:
"""Return integration strings."""
if not self.strings_path.exists():
return {}
return json.loads(self.strings_path.read_text())
def update_strings(self, **kwargs) -> None:
"""Update the integration strings."""
print(f"Updating {self.domain} strings: {list(kwargs)}")
self.strings_path.write_text(json.dumps({**self.strings(), **kwargs}, indent=2))
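# Hedged usage sketch (domain and keyword arguments are hypothetical): an Info
# instance exposes the integration paths and merges keyword arguments into the
# manifest on disk, assuming COMPONENT_DIR points at the components tree.
#
#     info = Info(domain="awesome_light", name="Awesome Light", is_new=True)
#     info.manifest_path                    # <COMPONENT_DIR>/awesome_light/manifest.json
#     info.update_manifest(config_flow=True)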
|
from rest_framework import serializers
from shop.conf import app_settings
from shop.models.delivery import DeliveryModel, DeliveryItemModel
from shop.modifiers.pool import cart_modifiers_pool
class DeliveryItemSerializer(serializers.ModelSerializer):
class Meta:
model = DeliveryItemModel
exclude = ['id', 'delivery', 'item']
def to_representation(self, instance):
data = app_settings.ORDER_ITEM_SERIALIZER(instance.item, context=self.context).data
data['ordered_quantity'] = data.pop('quantity', None)
data.update(super().to_representation(instance))
return data
class DeliverySerializer(serializers.ModelSerializer):
items = DeliveryItemSerializer(
many=True,
read_only=True,
)
number = serializers.CharField(source='get_number')
shipping_method = serializers.SerializerMethodField()
class Meta:
model = DeliveryModel
exclude = ['id', 'order']
def get_shipping_method(self, instance):
for shipping_modifier in cart_modifiers_pool.get_shipping_modifiers():
value, label = shipping_modifier.get_choice()
if value == shipping_modifier.identifier:
break
else:
value, label = instance.shipping_method, instance.shipping_method
return {'value': value, 'label': label}
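# Hedged sketch of what DeliveryItemSerializer.to_representation returns: the
# configured order item serializer supplies the base fields, its "quantity" is
# re-exposed as "ordered_quantity", and the delivery item's own fields (e.g.
# the delivered quantity) are merged on top. Field names are illustrative and
# depend on ORDER_ITEM_SERIALIZER.
#
#     {"product_code": "sku-123", "ordered_quantity": 3, "quantity": 2}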
|
import unittest
from optional_django import six
import mock
from react.conf import Conf
from react import render_server
from react.render import render_component
from react.render_server import RenderedComponent
from react.exceptions import ReactRenderingError, ComponentSourceFileNotFound
from .settings import Components
import json
class TestRendering(unittest.TestCase):
__test__ = True
def test_can_render_a_component_in_js(self):
component = render_component(Components.HELLO_WORLD_JS, to_static_markup=True)
self.assertEqual(str(component), '<span>Hello </span>')
def test_can_render_a_component_in_jsx(self):
component = render_component(Components.HELLO_WORLD_JSX, to_static_markup=True)
self.assertEqual(str(component), '<span>Hello </span>')
def test_can_render_a_component_requiring_another_component(self):
component = render_component(
Components.HELLO_WORLD_JSX_WRAPPER,
{
'name': 'world!',
'numbers': [1, 2, 3, 4, 5],
},
to_static_markup=True
)
self.assertEqual(str(component), '<div><span>Hello world!</span><span>10, 20, 30, 40, 50</span></div>')
def test_can_render_a_component_to_a_string_with_props(self):
component = render_component(
Components.HELLO_WORLD_JSX,
{'name': 'world!'},
)
markup = str(component)
self.assertNotEqual(markup, '<span>Hello world!</span>')
self.assertIn('Hello ', markup)
self.assertIn('world!', markup)
def test_render_component_returns_a_rendered_component(self):
component = render_component(
Components.HELLO_WORLD_JSX,
{
'name': 'world!'
},
to_static_markup=True,
)
self.assertIsInstance(component, RenderedComponent)
self.assertEqual(component.markup, '<span>Hello world!</span>')
self.assertEqual(component.markup, str(component))
if six.PY2:
self.assertEqual(component.markup, unicode(component))
def test_can_get_a_components_serialized_props(self):
component = render_component(
Components.HELLO_WORLD_JSX,
{
'name': 'world!',
},
)
self.assertEqual(component.props, '{"name": "world!"}')
def test_component_js_rendering_errors_raise_an_exception(self):
self.assertRaises(ReactRenderingError, render_component, Components.ERROR_THROWING)
self.assertRaises(ReactRenderingError, render_component, Components.ERROR_THROWING, to_static_markup=True)
def test_components_with_syntax_errors_raise_exceptions(self):
self.assertRaises(ReactRenderingError, render_component, Components.SYNTAX_ERROR)
self.assertRaises(ReactRenderingError, render_component, Components.SYNTAX_ERROR, to_static_markup=True)
def test_unserializable_props_raise_an_exception(self):
self.assertRaises(
TypeError,
render_component,
Components.HELLO_WORLD_JSX,
{'name': lambda: None}
)
self.assertRaises(
TypeError,
render_component,
Components.HELLO_WORLD_JSX,
{'name': self}
)
def test_missing_paths_throw_an_exception(self):
self.assertRaises(ComponentSourceFileNotFound, render_component, '/path/to/nothing.jsx')
# Ensure that relative paths are handled as well
self.assertRaises(ComponentSourceFileNotFound, render_component, 'path/to/nothing.jsx')
def test_render_setting_is_respected(self):
mock_settings = Conf()
mock_settings.configure(RENDER=False)
with mock.patch('react.conf.settings', mock_settings):
rendered = render_component(
Components.HELLO_WORLD_JSX,
{'name': 'world!'},
to_static_markup=True,
)
self.assertIsInstance(rendered, RenderedComponent)
self.assertEqual(rendered.markup, '')
self.assertEqual(str(rendered), '')
self.assertEqual(rendered.props, '{"name": "world!"}')
@mock.patch('requests.post')
def test_can_pass_additional_request_headers(self, requests_post_mock):
mock_json = {
'markup': '<div>Hello</div>',
}
mock_url = 'http://localhost/render'
response_mock = mock.Mock()
response_mock.status_code = 200
response_mock.text = json.dumps(mock_json)
response_mock.json = mock.Mock(return_value=mock_json)
requests_post_mock.return_value = response_mock
mock_settings = Conf()
mock_settings.configure(RENDER_URL=mock_url)
with mock.patch('react.conf.settings', mock_settings):
component = render_component(
path=Components.HELLO_WORLD_JSX,
props={'name': 'world!'},
request_headers={
'Accept-language': 'fr-FR,en-US,en',
},
)
requests_post_mock.assert_called_with(
mock_url,
data=mock.ANY,
params=mock.ANY,
headers={
'content-type': 'application/json',
'Accept-language': 'fr-FR,en-US,en',
},
)
|
import numpy as np
from tensornetwork import (connect, contract, contract_between,
flatten_edges_between, Node)
import torch
def test_basic_graphmode():
a = Node(torch.ones(10), backend="pytorch")
b = Node(torch.ones(10), backend="pytorch")
e = connect(a[0], b[0])
actual = contract(e).get_tensor()
assert actual == 10.0
def test_gradient_decent():
a = Node(
torch.autograd.Variable(torch.ones(10), requires_grad=True),
backend="pytorch")
b = Node(torch.ones(10), backend="pytorch")
e = connect(a[0], b[0])
final_tensor = contract(e).get_tensor()
opt = torch.optim.SGD([a.tensor], lr=0.001)
opt.zero_grad()
final_tensor.norm().backward()
opt.step()
np.testing.assert_allclose(final_tensor.data, 10)
np.testing.assert_allclose(a.tensor.data, 0.999 * np.ones((10,)))
assert final_tensor == 10
def test_dynamic_network_sizes():
def f(x, n):
x_slice = x[:n]
n1 = Node(x_slice, backend="pytorch")
n2 = Node(x_slice, backend="pytorch")
e = connect(n1[0], n2[0])
return contract(e).get_tensor()
x = torch.ones(10)
assert f(x, 2) == 2.
assert f(x, 3) == 3.
def test_dynamic_network_sizes_contract_between():
def f(x, n):
x_slice = x[..., :n]
n1 = Node(x_slice, backend="pytorch")
n2 = Node(x_slice, backend="pytorch")
connect(n1[0], n2[0])
connect(n1[1], n2[1])
connect(n1[2], n2[2])
return contract_between(n1, n2).get_tensor()
x = torch.ones((3, 4, 5))
assert f(x, 2) == 24.
assert f(x, 3) == 36.
def test_dynamic_network_sizes_flatten_standard():
def f(x, n):
x_slice = x[..., :n]
n1 = Node(x_slice, backend="pytorch")
n2 = Node(x_slice, backend="pytorch")
connect(n1[0], n2[0])
connect(n1[1], n2[1])
connect(n1[2], n2[2])
return contract(flatten_edges_between(n1, n2)).get_tensor()
x = torch.ones((3, 4, 5))
assert f(x, 2) == 24.
assert f(x, 3) == 36.
def test_dynamic_network_sizes_flatten_trace():
def f(x, n):
x_slice = x[..., :n]
n1 = Node(x_slice, backend="pytorch")
connect(n1[0], n1[2])
connect(n1[1], n1[3])
return contract(flatten_edges_between(n1, n1)).get_tensor()
x = torch.ones((3, 4, 3, 4, 5))
np.testing.assert_allclose(f(x, 2), np.ones((2,)) * 12)
np.testing.assert_allclose(f(x, 3), np.ones((3,)) * 12)
|
from datetime import timedelta
import logging
import voluptuous as vol
from homeassistant.const import (
DEVICE_CLASS_BATTERY,
DEVICE_CLASS_CURRENT,
DEVICE_CLASS_ENERGY,
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_ILLUMINANCE,
DEVICE_CLASS_POWER,
DEVICE_CLASS_POWER_FACTOR,
DEVICE_CLASS_PRESSURE,
DEVICE_CLASS_SIGNAL_STRENGTH,
DEVICE_CLASS_TEMPERATURE,
DEVICE_CLASS_TIMESTAMP,
DEVICE_CLASS_VOLTAGE,
)
from homeassistant.helpers.config_validation import ( # noqa: F401
PLATFORM_SCHEMA,
PLATFORM_SCHEMA_BASE,
)
from homeassistant.helpers.entity_component import EntityComponent
# mypy: allow-untyped-defs, no-check-untyped-defs
_LOGGER = logging.getLogger(__name__)
DOMAIN = "sensor"
ENTITY_ID_FORMAT = DOMAIN + ".{}"
SCAN_INTERVAL = timedelta(seconds=30)
DEVICE_CLASSES = [
DEVICE_CLASS_BATTERY, # % of battery that is left
DEVICE_CLASS_CURRENT, # current (A)
DEVICE_CLASS_ENERGY, # energy (kWh, Wh)
DEVICE_CLASS_HUMIDITY, # % of humidity in the air
DEVICE_CLASS_ILLUMINANCE, # current light level (lx/lm)
DEVICE_CLASS_SIGNAL_STRENGTH, # signal strength (dB/dBm)
DEVICE_CLASS_TEMPERATURE, # temperature (C/F)
DEVICE_CLASS_TIMESTAMP, # timestamp (ISO8601)
DEVICE_CLASS_PRESSURE, # pressure (hPa/mbar)
DEVICE_CLASS_POWER, # power (W/kW)
DEVICE_CLASS_POWER_FACTOR, # power factor (%)
DEVICE_CLASS_VOLTAGE, # voltage (V)
]
DEVICE_CLASSES_SCHEMA = vol.All(vol.Lower, vol.In(DEVICE_CLASSES))
async def async_setup(hass, config):
"""Track states and offer events for sensors."""
component = hass.data[DOMAIN] = EntityComponent(
_LOGGER, DOMAIN, hass, SCAN_INTERVAL
)
await component.async_setup(config)
return True
async def async_setup_entry(hass, entry):
"""Set up a config entry."""
return await hass.data[DOMAIN].async_setup_entry(entry)
async def async_unload_entry(hass, entry):
"""Unload a config entry."""
return await hass.data[DOMAIN].async_unload_entry(entry)
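# Hedged usage sketch (illustrative only): DEVICE_CLASSES_SCHEMA lowercases
# its input before the membership check, so mixed-case configuration values
# normalise to the canonical class name and unknown values raise vol.Invalid.
#
#     DEVICE_CLASSES_SCHEMA("Temperature")   # -> "temperature"
#     DEVICE_CLASSES_SCHEMA("loudness")      # raises vol.Invalid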
|
import numpy as np
import pandas as pd
import pytest
import xarray as xr
from xarray.core.groupby import _consolidate_slices
from . import assert_allclose, assert_equal, assert_identical, raises_regex
@pytest.fixture
def dataset():
ds = xr.Dataset(
{"foo": (("x", "y", "z"), np.random.randn(3, 4, 2))},
{"x": ["a", "b", "c"], "y": [1, 2, 3, 4], "z": [1, 2]},
)
ds["boo"] = (("z", "y"), [["f", "g", "h", "j"]] * 2)
return ds
@pytest.fixture
def array(dataset):
return dataset["foo"]
def test_consolidate_slices():
assert _consolidate_slices([slice(3), slice(3, 5)]) == [slice(5)]
assert _consolidate_slices([slice(2, 3), slice(3, 6)]) == [slice(2, 6)]
assert _consolidate_slices([slice(2, 3, 1), slice(3, 6, 1)]) == [slice(2, 6, 1)]
slices = [slice(2, 3), slice(5, 6)]
assert _consolidate_slices(slices) == slices
with pytest.raises(ValueError):
_consolidate_slices([slice(3), 4])
def test_groupby_dims_property(dataset):
assert dataset.groupby("x").dims == dataset.isel(x=1).dims
assert dataset.groupby("y").dims == dataset.isel(y=1).dims
stacked = dataset.stack({"xy": ("x", "y")})
assert stacked.groupby("xy").dims == stacked.isel(xy=0).dims
def test_multi_index_groupby_map(dataset):
# regression test for GH873
ds = dataset.isel(z=1, drop=True)[["foo"]]
expected = 2 * ds
actual = (
ds.stack(space=["x", "y"])
.groupby("space")
.map(lambda x: 2 * x)
.unstack("space")
)
assert_equal(expected, actual)
def test_multi_index_groupby_sum():
# regression test for GH873
ds = xr.Dataset(
{"foo": (("x", "y", "z"), np.ones((3, 4, 2)))},
{"x": ["a", "b", "c"], "y": [1, 2, 3, 4]},
)
expected = ds.sum("z")
actual = ds.stack(space=["x", "y"]).groupby("space").sum("z").unstack("space")
assert_equal(expected, actual)
def test_groupby_da_datetime():
# test groupby with a DataArray of dtype datetime for GH1132
# create test data
times = pd.date_range("2000-01-01", periods=4)
foo = xr.DataArray([1, 2, 3, 4], coords=dict(time=times), dims="time")
# create test index
dd = times.to_pydatetime()
reference_dates = [dd[0], dd[2]]
labels = reference_dates[0:1] * 2 + reference_dates[1:2] * 2
ind = xr.DataArray(
labels, coords=dict(time=times), dims="time", name="reference_date"
)
g = foo.groupby(ind)
actual = g.sum(dim="time")
expected = xr.DataArray(
[3, 7], coords=dict(reference_date=reference_dates), dims="reference_date"
)
assert_equal(expected, actual)
def test_groupby_duplicate_coordinate_labels():
# fix for http://stackoverflow.com/questions/38065129
array = xr.DataArray([1, 2, 3], [("x", [1, 1, 2])])
expected = xr.DataArray([3, 3], [("x", [1, 2])])
actual = array.groupby("x").sum()
assert_equal(expected, actual)
def test_groupby_input_mutation():
# regression test for GH2153
array = xr.DataArray([1, 2, 3], [("x", [2, 2, 1])])
array_copy = array.copy()
expected = xr.DataArray([3, 3], [("x", [1, 2])])
actual = array.groupby("x").sum()
assert_identical(expected, actual)
assert_identical(array, array_copy) # should not modify inputs
@pytest.mark.parametrize(
"obj",
[
xr.DataArray([1, 2, 3, 4, 5, 6], [("x", [1, 1, 1, 2, 2, 2])]),
xr.Dataset({"foo": ("x", [1, 2, 3, 4, 5, 6])}, {"x": [1, 1, 1, 2, 2, 2]}),
],
)
def test_groupby_map_shrink_groups(obj):
expected = obj.isel(x=[0, 1, 3, 4])
actual = obj.groupby("x").map(lambda f: f.isel(x=[0, 1]))
assert_identical(expected, actual)
@pytest.mark.parametrize(
"obj",
[
xr.DataArray([1, 2, 3], [("x", [1, 2, 2])]),
xr.Dataset({"foo": ("x", [1, 2, 3])}, {"x": [1, 2, 2]}),
],
)
def test_groupby_map_change_group_size(obj):
def func(group):
if group.sizes["x"] == 1:
result = group.isel(x=[0, 0])
else:
result = group.isel(x=[0])
return result
expected = obj.isel(x=[0, 0, 1])
actual = obj.groupby("x").map(func)
assert_identical(expected, actual)
def test_da_groupby_map_func_args():
def func(arg1, arg2, arg3=0):
return arg1 + arg2 + arg3
array = xr.DataArray([1, 1, 1], [("x", [1, 2, 3])])
expected = xr.DataArray([3, 3, 3], [("x", [1, 2, 3])])
actual = array.groupby("x").map(func, args=(1,), arg3=1)
assert_identical(expected, actual)
def test_ds_groupby_map_func_args():
def func(arg1, arg2, arg3=0):
return arg1 + arg2 + arg3
dataset = xr.Dataset({"foo": ("x", [1, 1, 1])}, {"x": [1, 2, 3]})
expected = xr.Dataset({"foo": ("x", [3, 3, 3])}, {"x": [1, 2, 3]})
actual = dataset.groupby("x").map(func, args=(1,), arg3=1)
assert_identical(expected, actual)
def test_da_groupby_empty():
empty_array = xr.DataArray([], dims="dim")
with pytest.raises(ValueError):
empty_array.groupby("dim")
def test_da_groupby_quantile():
array = xr.DataArray(
data=[1, 2, 3, 4, 5, 6], coords={"x": [1, 1, 1, 2, 2, 2]}, dims="x"
)
# Scalar quantile
expected = xr.DataArray(
data=[2, 5], coords={"x": [1, 2], "quantile": 0.5}, dims="x"
)
actual = array.groupby("x").quantile(0.5)
assert_identical(expected, actual)
# Vector quantile
expected = xr.DataArray(
data=[[1, 3], [4, 6]],
coords={"x": [1, 2], "quantile": [0, 1]},
dims=("x", "quantile"),
)
actual = array.groupby("x").quantile([0, 1])
assert_identical(expected, actual)
# Multiple dimensions
array = xr.DataArray(
data=[[1, 11, 26], [2, 12, 22], [3, 13, 23], [4, 16, 24], [5, 15, 25]],
coords={"x": [1, 1, 1, 2, 2], "y": [0, 0, 1]},
dims=("x", "y"),
)
actual_x = array.groupby("x").quantile(0, dim=...)
expected_x = xr.DataArray(
data=[1, 4], coords={"x": [1, 2], "quantile": 0}, dims="x"
)
assert_identical(expected_x, actual_x)
actual_y = array.groupby("y").quantile(0, dim=...)
expected_y = xr.DataArray(
data=[1, 22], coords={"y": [0, 1], "quantile": 0}, dims="y"
)
assert_identical(expected_y, actual_y)
actual_xx = array.groupby("x").quantile(0)
expected_xx = xr.DataArray(
data=[[1, 11, 22], [4, 15, 24]],
coords={"x": [1, 2], "y": [0, 0, 1], "quantile": 0},
dims=("x", "y"),
)
assert_identical(expected_xx, actual_xx)
actual_yy = array.groupby("y").quantile(0)
expected_yy = xr.DataArray(
data=[[1, 26], [2, 22], [3, 23], [4, 24], [5, 25]],
coords={"x": [1, 1, 1, 2, 2], "y": [0, 1], "quantile": 0},
dims=("x", "y"),
)
assert_identical(expected_yy, actual_yy)
times = pd.date_range("2000-01-01", periods=365)
x = [0, 1]
foo = xr.DataArray(
np.reshape(np.arange(365 * 2), (365, 2)),
coords={"time": times, "x": x},
dims=("time", "x"),
)
g = foo.groupby(foo.time.dt.month)
actual = g.quantile(0, dim=...)
expected = xr.DataArray(
data=[
0.0,
62.0,
120.0,
182.0,
242.0,
304.0,
364.0,
426.0,
488.0,
548.0,
610.0,
670.0,
],
coords={"month": np.arange(1, 13), "quantile": 0},
dims="month",
)
assert_identical(expected, actual)
actual = g.quantile(0, dim="time")[:2]
expected = xr.DataArray(
data=[[0.0, 1], [62.0, 63]],
coords={"month": [1, 2], "x": [0, 1], "quantile": 0},
dims=("month", "x"),
)
assert_identical(expected, actual)
def test_ds_groupby_quantile():
ds = xr.Dataset(
data_vars={"a": ("x", [1, 2, 3, 4, 5, 6])}, coords={"x": [1, 1, 1, 2, 2, 2]}
)
# Scalar quantile
expected = xr.Dataset(
data_vars={"a": ("x", [2, 5])}, coords={"quantile": 0.5, "x": [1, 2]}
)
actual = ds.groupby("x").quantile(0.5)
assert_identical(expected, actual)
# Vector quantile
expected = xr.Dataset(
data_vars={"a": (("x", "quantile"), [[1, 3], [4, 6]])},
coords={"x": [1, 2], "quantile": [0, 1]},
)
actual = ds.groupby("x").quantile([0, 1])
assert_identical(expected, actual)
# Multiple dimensions
ds = xr.Dataset(
data_vars={
"a": (
("x", "y"),
[[1, 11, 26], [2, 12, 22], [3, 13, 23], [4, 16, 24], [5, 15, 25]],
)
},
coords={"x": [1, 1, 1, 2, 2], "y": [0, 0, 1]},
)
actual_x = ds.groupby("x").quantile(0, dim=...)
expected_x = xr.Dataset({"a": ("x", [1, 4])}, coords={"x": [1, 2], "quantile": 0})
assert_identical(expected_x, actual_x)
actual_y = ds.groupby("y").quantile(0, dim=...)
expected_y = xr.Dataset({"a": ("y", [1, 22])}, coords={"y": [0, 1], "quantile": 0})
assert_identical(expected_y, actual_y)
actual_xx = ds.groupby("x").quantile(0)
expected_xx = xr.Dataset(
{"a": (("x", "y"), [[1, 11, 22], [4, 15, 24]])},
coords={"x": [1, 2], "y": [0, 0, 1], "quantile": 0},
)
assert_identical(expected_xx, actual_xx)
actual_yy = ds.groupby("y").quantile(0)
expected_yy = xr.Dataset(
{"a": (("x", "y"), [[1, 26], [2, 22], [3, 23], [4, 24], [5, 25]])},
coords={"x": [1, 1, 1, 2, 2], "y": [0, 1], "quantile": 0},
).transpose()
assert_identical(expected_yy, actual_yy)
times = pd.date_range("2000-01-01", periods=365)
x = [0, 1]
foo = xr.Dataset(
{"a": (("time", "x"), np.reshape(np.arange(365 * 2), (365, 2)))},
coords=dict(time=times, x=x),
)
g = foo.groupby(foo.time.dt.month)
actual = g.quantile(0, dim=...)
expected = xr.Dataset(
{
"a": (
"month",
[
0.0,
62.0,
120.0,
182.0,
242.0,
304.0,
364.0,
426.0,
488.0,
548.0,
610.0,
670.0,
],
)
},
coords={"month": np.arange(1, 13), "quantile": 0},
)
assert_identical(expected, actual)
actual = g.quantile(0, dim="time").isel(month=slice(None, 2))
expected = xr.Dataset(
data_vars={"a": (("month", "x"), [[0.0, 1], [62.0, 63]])},
coords={"month": [1, 2], "x": [0, 1], "quantile": 0},
)
assert_identical(expected, actual)
def test_da_groupby_assign_coords():
actual = xr.DataArray(
[[3, 4, 5], [6, 7, 8]], dims=["y", "x"], coords={"y": range(2), "x": range(3)}
)
actual1 = actual.groupby("x").assign_coords({"y": [-1, -2]})
actual2 = actual.groupby("x").assign_coords(y=[-1, -2])
expected = xr.DataArray(
[[3, 4, 5], [6, 7, 8]], dims=["y", "x"], coords={"y": [-1, -2], "x": range(3)}
)
assert_identical(expected, actual1)
assert_identical(expected, actual2)
repr_da = xr.DataArray(
np.random.randn(10, 20, 6, 24),
dims=["x", "y", "z", "t"],
coords={
"z": ["a", "b", "c", "a", "b", "c"],
"x": [1, 1, 1, 2, 2, 3, 4, 5, 3, 4],
"t": pd.date_range("2001-01-01", freq="M", periods=24),
"month": ("t", list(range(1, 13)) * 2),
},
)
@pytest.mark.parametrize("dim", ["x", "y", "z", "month"])
@pytest.mark.parametrize("obj", [repr_da, repr_da.to_dataset(name="a")])
def test_groupby_repr(obj, dim):
actual = repr(obj.groupby(dim))
expected = "%sGroupBy" % obj.__class__.__name__
expected += ", grouped over %r " % dim
expected += "\n%r groups with labels " % (len(np.unique(obj[dim])))
if dim == "x":
expected += "1, 2, 3, 4, 5."
elif dim == "y":
expected += "0, 1, 2, 3, 4, 5, ..., 15, 16, 17, 18, 19."
elif dim == "z":
expected += "'a', 'b', 'c'."
elif dim == "month":
expected += "1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12."
assert actual == expected
@pytest.mark.parametrize("obj", [repr_da, repr_da.to_dataset(name="a")])
def test_groupby_repr_datetime(obj):
actual = repr(obj.groupby("t.month"))
expected = "%sGroupBy" % obj.__class__.__name__
expected += ", grouped over 'month' "
expected += "\n%r groups with labels " % (len(np.unique(obj.t.dt.month)))
expected += "1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12."
assert actual == expected
def test_groupby_drops_nans():
# GH2383
# nan in 2D data variable (requires stacking)
ds = xr.Dataset(
{
"variable": (("lat", "lon", "time"), np.arange(60.0).reshape((4, 3, 5))),
"id": (("lat", "lon"), np.arange(12.0).reshape((4, 3))),
},
coords={"lat": np.arange(4), "lon": np.arange(3), "time": np.arange(5)},
)
ds["id"].values[0, 0] = np.nan
ds["id"].values[3, 0] = np.nan
ds["id"].values[-1, -1] = np.nan
grouped = ds.groupby(ds.id)
# non reduction operation
expected = ds.copy()
expected.variable.values[0, 0, :] = np.nan
expected.variable.values[-1, -1, :] = np.nan
expected.variable.values[3, 0, :] = np.nan
actual = grouped.map(lambda x: x).transpose(*ds.variable.dims)
assert_identical(actual, expected)
# reduction along grouped dimension
actual = grouped.mean()
stacked = ds.stack({"xy": ["lat", "lon"]})
expected = (
stacked.variable.where(stacked.id.notnull()).rename({"xy": "id"}).to_dataset()
)
expected["id"] = stacked.id.values
assert_identical(actual, expected.dropna("id").transpose(*actual.dims))
# reduction operation along a different dimension
actual = grouped.mean("time")
expected = ds.mean("time").where(ds.id.notnull())
assert_identical(actual, expected)
# NaN in non-dimensional coordinate
array = xr.DataArray([1, 2, 3], [("x", [1, 2, 3])])
array["x1"] = ("x", [1, 1, np.nan])
expected = xr.DataArray(3, [("x1", [1])])
actual = array.groupby("x1").sum()
assert_equal(expected, actual)
# NaT in non-dimensional coordinate
array["t"] = (
"x",
[
np.datetime64("2001-01-01"),
np.datetime64("2001-01-01"),
np.datetime64("NaT"),
],
)
expected = xr.DataArray(3, [("t", [np.datetime64("2001-01-01")])])
actual = array.groupby("t").sum()
assert_equal(expected, actual)
# test for repeated coordinate labels
array = xr.DataArray([0, 1, 2, 4, 3, 4], [("x", [np.nan, 1, 1, np.nan, 2, np.nan])])
expected = xr.DataArray([3, 3], [("x", [1, 2])])
actual = array.groupby("x").sum()
assert_equal(expected, actual)
def test_groupby_grouping_errors():
dataset = xr.Dataset({"foo": ("x", [1, 1, 1])}, {"x": [1, 2, 3]})
with raises_regex(ValueError, "None of the data falls within bins with edges"):
dataset.groupby_bins("x", bins=[0.1, 0.2, 0.3])
with raises_regex(ValueError, "None of the data falls within bins with edges"):
dataset.to_array().groupby_bins("x", bins=[0.1, 0.2, 0.3])
with raises_regex(ValueError, "All bin edges are NaN."):
dataset.groupby_bins("x", bins=[np.nan, np.nan, np.nan])
with raises_regex(ValueError, "All bin edges are NaN."):
dataset.to_array().groupby_bins("x", bins=[np.nan, np.nan, np.nan])
with raises_regex(ValueError, "Failed to group data."):
dataset.groupby(dataset.foo * np.nan)
with raises_regex(ValueError, "Failed to group data."):
dataset.to_array().groupby(dataset.foo * np.nan)
def test_groupby_reduce_dimension_error(array):
grouped = array.groupby("y")
with raises_regex(ValueError, "cannot reduce over dimensions"):
grouped.mean()
with raises_regex(ValueError, "cannot reduce over dimensions"):
grouped.mean("huh")
with raises_regex(ValueError, "cannot reduce over dimensions"):
grouped.mean(("x", "y", "asd"))
grouped = array.groupby("y", squeeze=False)
assert_identical(array, grouped.mean())
assert_identical(array.mean("x"), grouped.reduce(np.mean, "x"))
assert_allclose(array.mean(["x", "z"]), grouped.reduce(np.mean, ["x", "z"]))
def test_groupby_multiple_string_args(array):
with pytest.raises(TypeError):
array.groupby("x", "y")
def test_groupby_bins_timeseries():
ds = xr.Dataset()
ds["time"] = xr.DataArray(
pd.date_range("2010-08-01", "2010-08-15", freq="15min"), dims="time"
)
ds["val"] = xr.DataArray(np.ones(*ds["time"].shape), dims="time")
time_bins = pd.date_range(start="2010-08-01", end="2010-08-15", freq="24H")
actual = ds.groupby_bins("time", time_bins).sum()
expected = xr.DataArray(
96 * np.ones((14,)),
dims=["time_bins"],
coords={"time_bins": pd.cut(time_bins, time_bins).categories},
).to_dataset(name="val")
assert_identical(actual, expected)
def test_groupby_none_group_name():
# GH158
# xarray should not fail if a DataArray's name attribute is None
data = np.arange(10) + 10
da = xr.DataArray(data) # da.name = None
key = xr.DataArray(np.floor_divide(data, 2))
mean = da.groupby(key).mean()
assert "group" in mean.dims
# TODO: move other groupby tests from test_dataset and test_dataarray over here
|
from __future__ import print_function
import argparse
import string
import sys
import fileinput
def filter_non_printable(s):
return ''.join([c if c.isalnum() or c.isspace() or c in string.punctuation else ' ' for c in s])
def head(f, nlines):
if nlines >= 0:
for i, line in enumerate(f):
if i >= nlines:
break
print(line, end='')
else:
buf = []
line = f.readline()
while line:
buf.append(line)
if len(buf) > -nlines:
del buf[0]
line = f.readline()
for line in buf:
print(line, end='')
def main(args):
p = argparse.ArgumentParser(description=__doc__)
p.add_argument(
"-n",
"--lines",
default=10,
type=int,
help="""print the first K lines instead of 10;
if negative, print the last -K lines"""
)
p.add_argument("-q", "--quiet", "--silent", action='store_true', help="never print headers for each file")
p.add_argument("-v", "--verbose", action='store_true', help="always print headers for each file")
p.add_argument("files", action="store", nargs="*", help="files to print")
ns = p.parse_args(args)
status = 0
header_fmt = '==> {} <==\n'
if len(ns.files) == 0:
ns.files = ['-']
try:
for fname in ns.files:
if ns.verbose or (len(ns.files) > 1 and not ns.quiet):
if fname == '-':
print(header_fmt.format('standard input'), end='')
else:
print(header_fmt.format(fname), end='')
fileinput.close()
inp = fileinput.input(fname, openhook=fileinput.hook_encoded("utf-8"))
if ns.lines >= 0:
buf = []
for i, line in enumerate(inp):
if i >= ns.lines:
break
buf.append(line)
for line in buf:
print(line, end='')
else:
buf = []
                for line in inp:
buf.append(line)
if len(buf) > -ns.lines:
del buf[0]
for line in buf:
print(line, end='')
except Exception as e:
print('head :%s' % str(e))
status = 1
finally:
fileinput.close()
sys.exit(status)
if __name__ == "__main__":
main(sys.argv[1:])
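# Hedged usage sketch (assuming this module is saved as head.py): a positive
# count prints the first K lines of each file, while a negative count keeps a
# rolling buffer and prints the last -K lines, mirroring `tail`.
#
#     python head.py -n 3 notes.txt     # first three lines
#     python head.py -n -3 notes.txt    # last three lines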
|
from __future__ import absolute_import
import unittest
import logging
import os
import codecs
from lark import logger
from lark.tools.nearley import create_code_for_nearley_grammar, main as nearley_tool_main
logger.setLevel(logging.INFO)
TEST_PATH = os.path.abspath(os.path.dirname(__file__))
NEARLEY_PATH = os.path.join(TEST_PATH, 'nearley')
BUILTIN_PATH = os.path.join(NEARLEY_PATH, 'builtin')
if not os.path.exists(NEARLEY_PATH):
logger.warn("Nearley not installed. Skipping Nearley tests!")
raise ImportError("Skipping Nearley tests!")
import js2py # Ensures that js2py exists, to avoid failing tests
class TestNearley(unittest.TestCase):
def test_css(self):
fn = os.path.join(NEARLEY_PATH, 'examples/csscolor.ne')
with open(fn) as f:
grammar = f.read()
code = create_code_for_nearley_grammar(grammar, 'csscolor', BUILTIN_PATH, os.path.dirname(fn))
d = {}
exec (code, d)
parse = d['parse']
c = parse('#a199ff')
assert c['r'] == 161
assert c['g'] == 153
assert c['b'] == 255
c = parse('rgb(255, 70%, 3)')
assert c['r'] == 255
assert c['g'] == 178
assert c['b'] == 3
def test_include(self):
fn = os.path.join(NEARLEY_PATH, 'test/grammars/folder-test.ne')
with open(fn) as f:
grammar = f.read()
code = create_code_for_nearley_grammar(grammar, 'main', BUILTIN_PATH, os.path.dirname(fn))
d = {}
exec (code, d)
parse = d['parse']
parse('a')
parse('b')
def test_multi_include(self):
fn = os.path.join(NEARLEY_PATH, 'test/grammars/multi-include-test.ne')
with open(fn) as f:
grammar = f.read()
code = create_code_for_nearley_grammar(grammar, 'main', BUILTIN_PATH, os.path.dirname(fn))
d = {}
exec (code, d)
parse = d['parse']
parse('a')
parse('b')
parse('c')
def test_utf8(self):
grammar = u'main -> "±a"'
code = create_code_for_nearley_grammar(grammar, 'main', BUILTIN_PATH, './')
d = {}
exec (code, d)
parse = d['parse']
parse(u'±a')
def test_backslash(self):
grammar = r'main -> "\""'
code = create_code_for_nearley_grammar(grammar, 'main', BUILTIN_PATH, './')
d = {}
exec (code, d)
parse = d['parse']
parse(u'"')
def test_null(self):
grammar = r'main -> "a" | null'
code = create_code_for_nearley_grammar(grammar, 'main', BUILTIN_PATH, './')
d = {}
exec (code, d)
parse = d['parse']
parse('a')
parse('')
def test_utf8_2(self):
fn = os.path.join(TEST_PATH, 'grammars/unicode.ne')
nearley_tool_main(fn, 'x', NEARLEY_PATH)
def test_include_utf8(self):
fn = os.path.join(TEST_PATH, 'grammars/include_unicode.ne')
nearley_tool_main(fn, 'main', NEARLEY_PATH)
if __name__ == '__main__':
unittest.main()
|
import pytest
from homeassistant import config_entries
from homeassistant.components import hue
from homeassistant.components.hue import bridge, errors
from homeassistant.components.hue.const import (
CONF_ALLOW_HUE_GROUPS,
CONF_ALLOW_UNREACHABLE,
)
from homeassistant.exceptions import ConfigEntryNotReady
from tests.async_mock import AsyncMock, Mock, patch
async def test_bridge_setup(hass):
"""Test a successful setup."""
entry = Mock()
api = Mock(initialize=AsyncMock())
entry.data = {"host": "1.2.3.4", "username": "mock-username"}
entry.options = {CONF_ALLOW_HUE_GROUPS: False, CONF_ALLOW_UNREACHABLE: False}
hue_bridge = bridge.HueBridge(hass, entry)
with patch("aiohue.Bridge", return_value=api), patch.object(
hass.config_entries, "async_forward_entry_setup"
) as mock_forward:
assert await hue_bridge.async_setup() is True
assert hue_bridge.api is api
assert len(mock_forward.mock_calls) == 3
forward_entries = {c[1][1] for c in mock_forward.mock_calls}
assert forward_entries == {"light", "binary_sensor", "sensor"}
async def test_bridge_setup_invalid_username(hass):
"""Test we start config flow if username is no longer whitelisted."""
entry = Mock()
entry.data = {"host": "1.2.3.4", "username": "mock-username"}
entry.options = {CONF_ALLOW_HUE_GROUPS: False, CONF_ALLOW_UNREACHABLE: False}
hue_bridge = bridge.HueBridge(hass, entry)
with patch.object(
bridge, "authenticate_bridge", side_effect=errors.AuthenticationRequired
), patch.object(hass.config_entries.flow, "async_init") as mock_init:
assert await hue_bridge.async_setup() is False
assert len(mock_init.mock_calls) == 1
assert mock_init.mock_calls[0][2]["data"] == {"host": "1.2.3.4"}
async def test_bridge_setup_timeout(hass):
"""Test we retry to connect if we cannot connect."""
entry = Mock()
entry.data = {"host": "1.2.3.4", "username": "mock-username"}
entry.options = {CONF_ALLOW_HUE_GROUPS: False, CONF_ALLOW_UNREACHABLE: False}
hue_bridge = bridge.HueBridge(hass, entry)
with patch.object(
bridge, "authenticate_bridge", side_effect=errors.CannotConnect
), pytest.raises(ConfigEntryNotReady):
await hue_bridge.async_setup()
async def test_reset_if_entry_had_wrong_auth(hass):
"""Test calling reset when the entry contained wrong auth."""
entry = Mock()
entry.data = {"host": "1.2.3.4", "username": "mock-username"}
entry.options = {CONF_ALLOW_HUE_GROUPS: False, CONF_ALLOW_UNREACHABLE: False}
hue_bridge = bridge.HueBridge(hass, entry)
with patch.object(
bridge, "authenticate_bridge", side_effect=errors.AuthenticationRequired
), patch.object(bridge, "create_config_flow") as mock_create:
assert await hue_bridge.async_setup() is False
assert len(mock_create.mock_calls) == 1
assert await hue_bridge.async_reset()
async def test_reset_unloads_entry_if_setup(hass):
"""Test calling reset while the entry has been setup."""
entry = Mock()
entry.data = {"host": "1.2.3.4", "username": "mock-username"}
entry.options = {CONF_ALLOW_HUE_GROUPS: False, CONF_ALLOW_UNREACHABLE: False}
hue_bridge = bridge.HueBridge(hass, entry)
with patch.object(bridge, "authenticate_bridge", return_value=Mock()), patch(
"aiohue.Bridge", return_value=Mock()
), patch.object(hass.config_entries, "async_forward_entry_setup") as mock_forward:
assert await hue_bridge.async_setup() is True
assert len(hass.services.async_services()) == 0
assert len(mock_forward.mock_calls) == 3
with patch.object(
hass.config_entries, "async_forward_entry_unload", return_value=True
) as mock_forward:
assert await hue_bridge.async_reset()
assert len(mock_forward.mock_calls) == 3
assert len(hass.services.async_services()) == 0
async def test_handle_unauthorized(hass):
"""Test handling an unauthorized error on update."""
entry = Mock(async_setup=AsyncMock())
entry.data = {"host": "1.2.3.4", "username": "mock-username"}
entry.options = {CONF_ALLOW_HUE_GROUPS: False, CONF_ALLOW_UNREACHABLE: False}
hue_bridge = bridge.HueBridge(hass, entry)
with patch.object(bridge, "authenticate_bridge", return_value=Mock()), patch(
"aiohue.Bridge", return_value=Mock()
):
assert await hue_bridge.async_setup() is True
assert hue_bridge.authorized is True
with patch.object(bridge, "create_config_flow") as mock_create:
await hue_bridge.handle_unauthorized_error()
assert hue_bridge.authorized is False
assert len(mock_create.mock_calls) == 1
assert mock_create.mock_calls[0][1][1] == "1.2.3.4"
GROUP_RESPONSE = {
"group_1": {
"name": "Group 1",
"lights": ["1", "2"],
"type": "LightGroup",
"action": {
"on": True,
"bri": 254,
"hue": 10000,
"sat": 254,
"effect": "none",
"xy": [0.5, 0.5],
"ct": 250,
"alert": "select",
"colormode": "ct",
},
"state": {"any_on": True, "all_on": False},
}
}
SCENE_RESPONSE = {
"scene_1": {
"name": "Cozy dinner",
"lights": ["1", "2"],
"owner": "ffffffffe0341b1b376a2389376a2389",
"recycle": True,
"locked": False,
"appdata": {"version": 1, "data": "myAppData"},
"picture": "",
"lastupdated": "2015-12-03T10:09:22",
"version": 2,
}
}
async def test_hue_activate_scene(hass, mock_api):
"""Test successful hue_activate_scene."""
config_entry = config_entries.ConfigEntry(
1,
hue.DOMAIN,
"Mock Title",
{"host": "mock-host", "username": "mock-username"},
"test",
config_entries.CONN_CLASS_LOCAL_POLL,
system_options={},
options={CONF_ALLOW_HUE_GROUPS: True, CONF_ALLOW_UNREACHABLE: False},
)
hue_bridge = bridge.HueBridge(hass, config_entry)
mock_api.mock_group_responses.append(GROUP_RESPONSE)
mock_api.mock_scene_responses.append(SCENE_RESPONSE)
with patch("aiohue.Bridge", return_value=mock_api), patch.object(
hass.config_entries, "async_forward_entry_setup"
):
assert await hue_bridge.async_setup() is True
assert hue_bridge.api is mock_api
call = Mock()
call.data = {"group_name": "Group 1", "scene_name": "Cozy dinner"}
with patch("aiohue.Bridge", return_value=mock_api):
assert await hue_bridge.hue_activate_scene(call) is None
assert len(mock_api.mock_requests) == 3
assert mock_api.mock_requests[2]["json"]["scene"] == "scene_1"
assert mock_api.mock_requests[2]["path"] == "groups/group_1/action"
async def test_hue_activate_scene_group_not_found(hass, mock_api):
"""Test failed hue_activate_scene due to missing group."""
config_entry = config_entries.ConfigEntry(
1,
hue.DOMAIN,
"Mock Title",
{"host": "mock-host", "username": "mock-username"},
"test",
config_entries.CONN_CLASS_LOCAL_POLL,
system_options={},
options={CONF_ALLOW_HUE_GROUPS: True, CONF_ALLOW_UNREACHABLE: False},
)
hue_bridge = bridge.HueBridge(hass, config_entry)
mock_api.mock_group_responses.append({})
mock_api.mock_scene_responses.append(SCENE_RESPONSE)
with patch("aiohue.Bridge", return_value=mock_api), patch.object(
hass.config_entries, "async_forward_entry_setup"
):
assert await hue_bridge.async_setup() is True
assert hue_bridge.api is mock_api
call = Mock()
call.data = {"group_name": "Group 1", "scene_name": "Cozy dinner"}
with patch("aiohue.Bridge", return_value=mock_api):
assert await hue_bridge.hue_activate_scene(call) is False
async def test_hue_activate_scene_scene_not_found(hass, mock_api):
"""Test failed hue_activate_scene due to missing scene."""
config_entry = config_entries.ConfigEntry(
1,
hue.DOMAIN,
"Mock Title",
{"host": "mock-host", "username": "mock-username"},
"test",
config_entries.CONN_CLASS_LOCAL_POLL,
system_options={},
options={CONF_ALLOW_HUE_GROUPS: True, CONF_ALLOW_UNREACHABLE: False},
)
hue_bridge = bridge.HueBridge(hass, config_entry)
mock_api.mock_group_responses.append(GROUP_RESPONSE)
mock_api.mock_scene_responses.append({})
with patch("aiohue.Bridge", return_value=mock_api), patch.object(
hass.config_entries, "async_forward_entry_setup"
):
assert await hue_bridge.async_setup() is True
assert hue_bridge.api is mock_api
call = Mock()
call.data = {"group_name": "Group 1", "scene_name": "Cozy dinner"}
with patch("aiohue.Bridge", return_value=mock_api):
assert await hue_bridge.hue_activate_scene(call) is False
|
import argparse
import json
from .const import FRONTEND_DIR
from .download import DOWNLOAD_DIR, run_download_docker
from .util import get_base_arg_parser
FRONTEND_BACKEND_TRANSLATIONS = FRONTEND_DIR / "translations/backend"
def get_arguments() -> argparse.Namespace:
"""Get parsed passed in arguments."""
parser = get_base_arg_parser()
parser.add_argument(
"--skip-download", action="store_true", help="Skip downloading translations."
)
return parser.parse_args()
def run():
"""Update frontend translations with backend data.
We use the downloaded Docker files because it gives us each language in 1 file.
"""
args = get_arguments()
if not args.skip_download:
run_download_docker()
for lang_file in DOWNLOAD_DIR.glob("*.json"):
translations = json.loads(lang_file.read_text())
to_write_translations = {"component": {}}
for domain, domain_translations in translations["component"].items():
if "state" not in domain_translations:
continue
to_write_translations["component"][domain] = {
"state": domain_translations["state"]
}
(FRONTEND_BACKEND_TRANSLATIONS / lang_file.name).write_text(
json.dumps(to_write_translations, indent=2)
)
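# Hedged sketch of a written language file (domain and strings illustrative):
# only the "state" subtree of each backend component survives the copy.
#
#     {
#       "component": {
#         "binary_sensor": {"state": {"on": "Detected", "off": "Clear"}}
#       }
#     }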
|
import os
import argparse
from twisted.internet import reactor
from twisted.internet.defer import inlineCallbacks
from autobahn.twisted.wamp import ApplicationSession, ApplicationRunner
class ClientSession(ApplicationSession):
"""
An application component that subscribes and receives events, and
    stops after having received 5 events.
"""
@inlineCallbacks
def onJoin(self, details):
print("session attached")
self.received = 0
sub = yield self.subscribe(self.on_event, 'com.myapp.hello')
print("Subscribed to com.myapp.hello with {}".format(sub.id))
def on_event(self, i):
print("Got event: {}".format(i))
self.received += 1
# self.config.extra for configuration, etc. (see [A])
if self.received > self.config.extra['max_events']:
print("Received enough events; disconnecting.")
self.leave()
def onDisconnect(self):
print("disconnected")
if reactor.running:
reactor.stop()
if __name__ == '__main__':
url = os.environ.get('CBURL', 'ws://localhost:8080/ws')
realm = os.environ.get('CBREALM', 'realm1')
# parse command line parameters
parser = argparse.ArgumentParser()
parser.add_argument('-d', '--debug', action='store_true', help='Enable debug output.')
parser.add_argument('--url', dest='url', type=str, default=url, help='The router URL (default: "ws://localhost:8080/ws").')
parser.add_argument('--realm', dest='realm', type=str, default=realm, help='The realm to join (default: "realm1").')
args = parser.parse_args()
# any extra info we want to forward to our ClientSession (in self.config.extra)
extra=dict(
max_events=5, # [A] pass in additional configuration
)
#url = environ.get("AUTOBAHN_DEMO_ROUTER", "ws://192.168.0.15:8080/ws")
#if six.PY2 and type(url) == six.binary_type:
# url = url.decode('utf8')
#realm = "realm1"
runner = ApplicationRunner(url=args.url, realm=args.realm, extra=extra)
runner.run(ClientSession, auto_reconnect=True)
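# Hedged usage sketch (module name is hypothetical): the router URL and realm
# come from CBURL/CBREALM or the command line, and the extra dict is handed to
# the session as self.config.extra.
#
#     CBURL=ws://localhost:8080/ws CBREALM=realm1 python subscriber.py
#     python subscriber.py --url ws://example.invalid/ws --realm realm1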
|
from . import roomba_reported_state
from .braava import BraavaJet
from .const import BLID, DOMAIN, ROOMBA_SESSION
from .roomba import RoombaVacuum, RoombaVacuumCarpetBoost
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the iRobot Roomba vacuum cleaner."""
domain_data = hass.data[DOMAIN][config_entry.entry_id]
roomba = domain_data[ROOMBA_SESSION]
blid = domain_data[BLID]
# Get the capabilities of our unit
state = roomba_reported_state(roomba)
capabilities = state.get("cap", {})
cap_carpet_boost = capabilities.get("carpetBoost")
detected_pad = state.get("detectedPad")
if detected_pad is not None:
constructor = BraavaJet
elif cap_carpet_boost == 1:
constructor = RoombaVacuumCarpetBoost
else:
constructor = RoombaVacuum
roomba_vac = constructor(roomba, blid)
async_add_entities([roomba_vac], True)
|
import os
import sys
import random
import re
import tensorflow as tf
import cv2
import numpy as np
from datasets.dataset_utils import int64_feature, float_feature, bytes_feature
# TFRecords convertion parameters.
RANDOM_SEED = 4242
SAMPLES_PER_FILES = 200
CLASSES = {
'Pedestrian': 0,
'Cyclist': 1,
'Car': 2,
}
def _process_image(directory, split, name):
# Read the image file.
filename = os.path.join(directory, 'image_2', name + '.png')
    image_data = tf.gfile.FastGFile(filename, 'rb').read()
# Get shape
img = cv2.imread(filename)
shape = np.shape(img)
label_list = []
type_list = []
bbox_x1_list = []
bbox_y1_list = []
bbox_x2_list = []
bbox_y2_list = []
# If 'test' split, skip annotations
if re.findall(r'train', split):
# Read the txt annotation file.
filename = os.path.join(directory, 'label_2', name + '.txt')
with open(filename) as anno_file:
objects = anno_file.readlines()
for object in objects:
obj_anno = object.split(' ')
type_txt = obj_anno[0].encode('ascii')
if type_txt in CLASSES:
label_list.append(CLASSES[type_txt])
type_list.append(type_txt)
# Bounding Box
bbox_x1 = float(obj_anno[4])
bbox_y1 = float(obj_anno[5])
bbox_x2 = float(obj_anno[6])
bbox_y2 = float(obj_anno[7])
bbox_x1_list.append(bbox_x1)
bbox_y1_list.append(bbox_y1)
bbox_x2_list.append(bbox_x2)
bbox_y2_list.append(bbox_y2)
image_format = b'PNG'
example = tf.train.Example(features=tf.train.Features(feature={
'image/encoded': bytes_feature(image_data),
'image/height': int64_feature(shape[0]),
'image/width': int64_feature(shape[1]),
'image/channels': int64_feature(shape[2]),
'image/shape': int64_feature(shape),
'image/object/bbox/xmin': float_feature(bbox_x1_list),
'image/object/bbox/xmax': float_feature(bbox_x2_list),
'image/object/bbox/ymin': float_feature(bbox_y1_list),
'image/object/bbox/ymax': float_feature(bbox_y2_list),
'image/object/bbox/label': int64_feature(label_list),
'image/object/bbox/label_text': bytes_feature(type_list),
}))
return example
def _add_to_tfrecord(dataset_dir, split, name, tfrecord_writer):
"""Loads data from image and annotations files and add them to a TFRecord.
Args:
dataset_dir: Dataset directory;
split: train/val/test
name: Image name;
tfrecord_writer: The TFRecord writer to use for writing.
"""
example = _process_image(dataset_dir, split, name)
tfrecord_writer.write(example.SerializeToString())
def _get_output_filename(output_dir, name, idx):
return '%s/%s_%03d.tfrecord' % (output_dir, name, idx)
def run(kitti_root, split, output_dir, shuffling=False):
"""Runs the conversion operation.
Args:
kitti_root: KITTI dataset root dir.
split: trainval/train/val
output_dir: Output directory.
"""
if not tf.gfile.Exists(output_dir):
tf.gfile.MakeDirs(output_dir)
# Dataset filenames, and shuffling.
split_file_path = os.path.join(kitti_root,
'ImageSets',
'%s.txt'%split)
with open(split_file_path) as f:
filenames = f.readlines()
if shuffling:
random.seed(RANDOM_SEED)
random.shuffle(filenames)
# Process dataset files.
i = 0
fidx = 0
image_dir = os.path.join(kitti_root, '%sing'%split)
if split == 'val':
image_dir = os.path.join(kitti_root, '%sing' % 'train')
while i < len(filenames):
# Open new TFRecord file.
tf_filename = _get_output_filename(output_dir, split, fidx)
with tf.python_io.TFRecordWriter(tf_filename) as tfrecord_writer:
j = 0
while i < len(filenames) and j < SAMPLES_PER_FILES:
sys.stdout.write('\r>> Converting image %d/%d' % (i+1, len(filenames)))
sys.stdout.flush()
filename = filenames[i].strip()
_add_to_tfrecord(image_dir, split, filename, tfrecord_writer)
i += 1
j += 1
fidx += 1
print('\nFinished converting the KITTI dataset!')
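# Hedged usage sketch: the KITTI root, split name, and output directory below are
# illustrative assumptions, not taken from the source.
# if __name__ == '__main__':
#     run(kitti_root='/data/KITTI', split='train',
#         output_dir='/data/KITTI/tfrecords', shuffling=True)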
|
import binascii
import numpy as np
import pytest
from mock import sentinel
from arctic.store._version_store_utils import _split_arrs, checksum, version_base_or_id
def test_split_arrs_empty():
split = _split_arrs(np.empty(0), [])
    assert np.all(split == np.empty(0, dtype=object))
def test_split_arrs():
to_split = np.ones(10)
split = _split_arrs(to_split, [3])
assert len(split) == 2
assert np.all(split[0] == np.ones(3))
assert np.all(split[1] == np.ones(7))
def test_checksum():
digest = checksum('test_my_market_data_$ymB0l', {})
expected = b"""4OZ*3DO'$>XV['VW1MT4I^+7-3H,"""
assert binascii.b2a_uu(digest).strip() == expected
def test_checksum_handles_p3strs_and_binary():
digest = checksum('test_my_market_data_$ymB0l', {'key1': u'unicode',
'key2': b'binary_data'})
expected = b'4O11 ;<[email protected](JRB1.?D[ZEN!8'
assert binascii.b2a_uu(digest).strip() == expected
def test_version_base_or_id():
with pytest.raises(KeyError):
version_base_or_id({})
assert version_base_or_id({'_id': sentinel._id}) == sentinel._id
assert version_base_or_id({
'_id': sentinel._id,
'base_version_id': sentinel.base_version_id
}) == sentinel.base_version_id
|
from xml.etree import ElementTree
from weblate.trans.tests.test_views import FixtureTestCase
class SitemapTest(FixtureTestCase):
def test_sitemaps(self):
# Get root sitemap
response = self.client.get("/sitemap.xml")
self.assertContains(response, "<sitemapindex")
# Parse it
tree = ElementTree.fromstring(response.content)
sitemaps = tree.findall("{http://www.sitemaps.org/schemas/sitemap/0.9}sitemap")
for sitemap in sitemaps:
location = sitemap.find("{http://www.sitemaps.org/schemas/sitemap/0.9}loc")
response = self.client.get(location.text)
self.assertContains(response, "<urlset")
# Try if it's a valid XML
ElementTree.fromstring(response.content)
|
from datetime import timedelta
import logging
from hydrawiser.core import Hydrawiser
from requests.exceptions import ConnectTimeout, HTTPError
import voluptuous as vol
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_CONNECTIVITY,
DEVICE_CLASS_MOISTURE,
)
from homeassistant.components.sensor import DEVICE_CLASS_TIMESTAMP
from homeassistant.components.switch import DEVICE_CLASS_SWITCH
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_ACCESS_TOKEN,
CONF_SCAN_INTERVAL,
TIME_MINUTES,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_connect, dispatcher_send
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import track_time_interval
_LOGGER = logging.getLogger(__name__)
ALLOWED_WATERING_TIME = [5, 10, 15, 30, 45, 60]
ATTRIBUTION = "Data provided by hydrawise.com"
CONF_WATERING_TIME = "watering_minutes"
NOTIFICATION_ID = "hydrawise_notification"
NOTIFICATION_TITLE = "Hydrawise Setup"
DATA_HYDRAWISE = "hydrawise"
DOMAIN = "hydrawise"
DEFAULT_WATERING_TIME = 15
DEVICE_MAP_INDEX = [
"KEY_INDEX",
"ICON_INDEX",
"DEVICE_CLASS_INDEX",
"UNIT_OF_MEASURE_INDEX",
]
DEVICE_MAP = {
"auto_watering": ["Automatic Watering", None, DEVICE_CLASS_SWITCH, None],
"is_watering": ["Watering", None, DEVICE_CLASS_MOISTURE, None],
"manual_watering": ["Manual Watering", None, DEVICE_CLASS_SWITCH, None],
"next_cycle": ["Next Cycle", None, DEVICE_CLASS_TIMESTAMP, None],
"status": ["Status", None, DEVICE_CLASS_CONNECTIVITY, None],
"watering_time": ["Watering Time", "mdi:water-pump", None, TIME_MINUTES],
}
BINARY_SENSORS = ["is_watering", "status"]
SENSORS = ["next_cycle", "watering_time"]
SWITCHES = ["auto_watering", "manual_watering"]
SCAN_INTERVAL = timedelta(seconds=30)
SIGNAL_UPDATE_HYDRAWISE = "hydrawise_update"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_ACCESS_TOKEN): cv.string,
vol.Optional(CONF_SCAN_INTERVAL, default=SCAN_INTERVAL): cv.time_period,
}
)
},
extra=vol.ALLOW_EXTRA,
)
def setup(hass, config):
"""Set up the Hunter Hydrawise component."""
conf = config[DOMAIN]
access_token = conf[CONF_ACCESS_TOKEN]
scan_interval = conf.get(CONF_SCAN_INTERVAL)
try:
hydrawise = Hydrawiser(user_token=access_token)
hass.data[DATA_HYDRAWISE] = HydrawiseHub(hydrawise)
except (ConnectTimeout, HTTPError) as ex:
_LOGGER.error("Unable to connect to Hydrawise cloud service: %s", str(ex))
hass.components.persistent_notification.create(
f"Error: {ex}<br />You will need to restart hass after fixing.",
title=NOTIFICATION_TITLE,
notification_id=NOTIFICATION_ID,
)
return False
def hub_refresh(event_time):
"""Call Hydrawise hub to refresh information."""
_LOGGER.debug("Updating Hydrawise Hub component")
hass.data[DATA_HYDRAWISE].data.update_controller_info()
dispatcher_send(hass, SIGNAL_UPDATE_HYDRAWISE)
# Call the Hydrawise API to refresh updates
track_time_interval(hass, hub_refresh, scan_interval)
return True
class HydrawiseHub:
"""Representation of a base Hydrawise device."""
def __init__(self, data):
"""Initialize the entity."""
self.data = data
class HydrawiseEntity(Entity):
"""Entity class for Hydrawise devices."""
def __init__(self, data, sensor_type):
"""Initialize the Hydrawise entity."""
self.data = data
self._sensor_type = sensor_type
self._name = f"{self.data['name']} {DEVICE_MAP[self._sensor_type][DEVICE_MAP_INDEX.index('KEY_INDEX')]}"
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
async def async_added_to_hass(self):
"""Register callbacks."""
self.async_on_remove(
async_dispatcher_connect(
self.hass, SIGNAL_UPDATE_HYDRAWISE, self._update_callback
)
)
@callback
def _update_callback(self):
"""Call update method."""
self.async_schedule_update_ha_state(True)
@property
def unit_of_measurement(self):
"""Return the units of measurement."""
return DEVICE_MAP[self._sensor_type][
DEVICE_MAP_INDEX.index("UNIT_OF_MEASURE_INDEX")
]
@property
def device_state_attributes(self):
"""Return the state attributes."""
return {ATTR_ATTRIBUTION: ATTRIBUTION, "identifier": self.data.get("relay")}
@property
def device_class(self):
"""Return the device class of the sensor type."""
return DEVICE_MAP[self._sensor_type][
DEVICE_MAP_INDEX.index("DEVICE_CLASS_INDEX")
]
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return DEVICE_MAP[self._sensor_type][DEVICE_MAP_INDEX.index("ICON_INDEX")]
|
from __future__ import print_function
import optparse
from .utils import setup_logging
from ..arctic import Arctic
print = print
def main():
usage = """usage: %prog [options] [prefix ...]
Lists the libraries available in a user's database. If any prefix parameters
are given, list only libraries with names that start with one of the prefixes.
Example:
%prog --host=hostname rgautier
"""
setup_logging()
parser = optparse.OptionParser(usage=usage)
parser.add_option("--host", default='localhost', help="Hostname, or clustername. Default: localhost")
(opts, args) = parser.parse_args()
store = Arctic(opts.host)
for name in sorted(store.list_libraries()):
if (not args) or [n for n in args if name.startswith(n)]:
print(name)
if __name__ == '__main__':
main()
|
from unittest import mock
import pytest
from homeassistant.components import opnsense
from homeassistant.components.opnsense import CONF_API_SECRET, DOMAIN
from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL
from homeassistant.setup import async_setup_component
@pytest.fixture(name="mocked_opnsense")
def mocked_opnsense():
"""Mock for pyopnense.diagnostics."""
with mock.patch.object(opnsense, "diagnostics") as mocked_opn:
yield mocked_opn
async def test_get_scanner(hass, mocked_opnsense, mock_device_tracker_conf):
"""Test creating an opnsense scanner."""
interface_client = mock.MagicMock()
mocked_opnsense.InterfaceClient.return_value = interface_client
interface_client.get_arp.return_value = [
{
"hostname": "",
"intf": "igb1",
"intf_description": "LAN",
"ip": "192.168.0.123",
"mac": "ff:ff:ff:ff:ff:ff",
"manufacturer": "",
},
{
"hostname": "Desktop",
"intf": "igb1",
"intf_description": "LAN",
"ip": "192.168.0.167",
"mac": "ff:ff:ff:ff:ff:fe",
"manufacturer": "OEM",
},
]
network_insight_client = mock.MagicMock()
mocked_opnsense.NetworkInsightClient.return_value = network_insight_client
network_insight_client.get_interfaces.return_value = {"igb0": "WAN", "igb1": "LAN"}
result = await async_setup_component(
hass,
DOMAIN,
{
DOMAIN: {
CONF_URL: "https://fake_host_fun/api",
CONF_API_KEY: "fake_key",
CONF_API_SECRET: "fake_secret",
CONF_VERIFY_SSL: False,
}
},
)
await hass.async_block_till_done()
assert result
device_1 = hass.states.get("device_tracker.desktop")
assert device_1 is not None
assert device_1.state == "home"
device_2 = hass.states.get("device_tracker.ff_ff_ff_ff_ff_ff")
assert device_2.state == "home"
|
import urwid
BLACK_FGS = ('light gray', 'dark cyan', 'dark red', 'dark green',
'dark magenta', 'white', 'light blue', 'light cyan', 'light red',
'light green', 'yellow', 'light magenta')
GRAY_FGS = ('black', 'dark blue', 'dark cyan', 'dark red', 'dark green',
'dark magenta', 'white', 'light red', 'yellow',
'light magenta')
BLUE_FGS = ('light gray', 'dark cyan', 'white',
'light cyan', 'light red', 'light green', 'yellow',
'light magenta')
CYAN_FGS = ('black', 'light gray', 'dark blue', 'white', 'light cyan',
'light green', 'yellow')
BG_FGS = [
('black', BLACK_FGS),
('light gray', GRAY_FGS),
('dark blue', BLUE_FGS),
('dark cyan', CYAN_FGS),
]
body = urwid.SimpleFocusListWalker([])
for bg, fgs in BG_FGS:
spec = urwid.AttrSpec(fgs[0], bg)
def s(w):
return urwid.AttrMap(w, spec)
body.append(s(urwid.Divider()))
body.append(s(
urwid.GridFlow(
[urwid.AttrMap(urwid.Text("'{0}' on '{1}'".format(fg, bg)),
urwid.AttrSpec(fg, bg)) for fg in fgs],
35, 0, 0, 'left')))
body.append(s(urwid.Divider()))
urwid.MainLoop(urwid.ListBox(body)).run()
|
from datetime import datetime, timedelta
import unittest
import requests_mock
from homeassistant import core as ha
import homeassistant.components.google_wifi.sensor as google_wifi
from homeassistant.const import STATE_UNKNOWN
from homeassistant.setup import setup_component
from homeassistant.util import dt as dt_util
from tests.async_mock import Mock, patch
from tests.common import assert_setup_component, get_test_home_assistant
NAME = "foo"
MOCK_DATA = (
'{"software": {"softwareVersion":"initial",'
'"updateNewVersion":"initial"},'
'"system": {"uptime":86400},'
'"wan": {"localIpAddress":"initial", "online":true,'
'"ipAddress":true}}'
)
MOCK_DATA_NEXT = (
'{"software": {"softwareVersion":"next",'
'"updateNewVersion":"0.0.0.0"},'
'"system": {"uptime":172800},'
'"wan": {"localIpAddress":"next", "online":false,'
'"ipAddress":false}}'
)
MOCK_DATA_MISSING = '{"software": {},' '"system": {},' '"wan": {}}'
class TestGoogleWifiSetup(unittest.TestCase):
"""Tests for setting up the Google Wifi sensor platform."""
def setUp(self):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
self.addCleanup(self.hass.stop)
@requests_mock.Mocker()
def test_setup_minimum(self, mock_req):
"""Test setup with minimum configuration."""
resource = f"http://{google_wifi.DEFAULT_HOST}{google_wifi.ENDPOINT}"
mock_req.get(resource, status_code=200)
assert setup_component(
self.hass,
"sensor",
{"sensor": {"platform": "google_wifi", "monitored_conditions": ["uptime"]}},
)
assert_setup_component(1, "sensor")
@requests_mock.Mocker()
def test_setup_get(self, mock_req):
"""Test setup with full configuration."""
resource = f"http://localhost{google_wifi.ENDPOINT}"
mock_req.get(resource, status_code=200)
assert setup_component(
self.hass,
"sensor",
{
"sensor": {
"platform": "google_wifi",
"host": "localhost",
"name": "Test Wifi",
"monitored_conditions": [
"current_version",
"new_version",
"uptime",
"last_restart",
"local_ip",
"status",
],
}
},
)
assert_setup_component(6, "sensor")
class TestGoogleWifiSensor(unittest.TestCase):
"""Tests for Google Wifi sensor platform."""
def setUp(self):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
with requests_mock.Mocker() as mock_req:
self.setup_api(MOCK_DATA, mock_req)
self.addCleanup(self.hass.stop)
def setup_api(self, data, mock_req):
"""Set up API with fake data."""
resource = f"http://localhost{google_wifi.ENDPOINT}"
now = datetime(1970, month=1, day=1)
with patch("homeassistant.util.dt.now", return_value=now):
mock_req.get(resource, text=data, status_code=200)
conditions = google_wifi.MONITORED_CONDITIONS.keys()
self.api = google_wifi.GoogleWifiAPI("localhost", conditions)
self.name = NAME
self.sensor_dict = {}
for condition, cond_list in google_wifi.MONITORED_CONDITIONS.items():
sensor = google_wifi.GoogleWifiSensor(self.api, self.name, condition)
name = f"{self.name}_{condition}"
units = cond_list[1]
icon = cond_list[2]
self.sensor_dict[condition] = {
"sensor": sensor,
"name": name,
"units": units,
"icon": icon,
}
def fake_delay(self, ha_delay):
"""Fake delay to prevent update throttle."""
hass_now = dt_util.utcnow()
shifted_time = hass_now + timedelta(seconds=ha_delay)
self.hass.bus.fire(ha.EVENT_TIME_CHANGED, {ha.ATTR_NOW: shifted_time})
def test_name(self):
"""Test the name."""
for name in self.sensor_dict:
sensor = self.sensor_dict[name]["sensor"]
test_name = self.sensor_dict[name]["name"]
assert test_name == sensor.name
def test_unit_of_measurement(self):
"""Test the unit of measurement."""
for name in self.sensor_dict:
sensor = self.sensor_dict[name]["sensor"]
assert self.sensor_dict[name]["units"] == sensor.unit_of_measurement
def test_icon(self):
"""Test the icon."""
for name in self.sensor_dict:
sensor = self.sensor_dict[name]["sensor"]
assert self.sensor_dict[name]["icon"] == sensor.icon
@requests_mock.Mocker()
def test_state(self, mock_req):
"""Test the initial state."""
self.setup_api(MOCK_DATA, mock_req)
now = datetime(1970, month=1, day=1)
with patch("homeassistant.util.dt.now", return_value=now):
for name in self.sensor_dict:
sensor = self.sensor_dict[name]["sensor"]
self.fake_delay(2)
sensor.update()
if name == google_wifi.ATTR_LAST_RESTART:
assert "1969-12-31 00:00:00" == sensor.state
elif name == google_wifi.ATTR_UPTIME:
assert 1 == sensor.state
elif name == google_wifi.ATTR_STATUS:
assert "Online" == sensor.state
else:
assert "initial" == sensor.state
@requests_mock.Mocker()
def test_update_when_value_is_none(self, mock_req):
"""Test state gets updated to unknown when sensor returns no data."""
self.setup_api(None, mock_req)
for name in self.sensor_dict:
sensor = self.sensor_dict[name]["sensor"]
self.fake_delay(2)
sensor.update()
assert sensor.state is None
@requests_mock.Mocker()
def test_update_when_value_changed(self, mock_req):
"""Test state gets updated when sensor returns a new status."""
self.setup_api(MOCK_DATA_NEXT, mock_req)
now = datetime(1970, month=1, day=1)
with patch("homeassistant.util.dt.now", return_value=now):
for name in self.sensor_dict:
sensor = self.sensor_dict[name]["sensor"]
self.fake_delay(2)
sensor.update()
if name == google_wifi.ATTR_LAST_RESTART:
assert "1969-12-30 00:00:00" == sensor.state
elif name == google_wifi.ATTR_UPTIME:
assert 2 == sensor.state
elif name == google_wifi.ATTR_STATUS:
assert "Offline" == sensor.state
elif name == google_wifi.ATTR_NEW_VERSION:
assert "Latest" == sensor.state
elif name == google_wifi.ATTR_LOCAL_IP:
assert STATE_UNKNOWN == sensor.state
else:
assert "next" == sensor.state
@requests_mock.Mocker()
def test_when_api_data_missing(self, mock_req):
"""Test state logs an error when data is missing."""
self.setup_api(MOCK_DATA_MISSING, mock_req)
now = datetime(1970, month=1, day=1)
with patch("homeassistant.util.dt.now", return_value=now):
for name in self.sensor_dict:
sensor = self.sensor_dict[name]["sensor"]
self.fake_delay(2)
sensor.update()
assert STATE_UNKNOWN == sensor.state
def test_update_when_unavailable(self):
"""Test state updates when Google Wifi unavailable."""
self.api.update = Mock(
"google_wifi.GoogleWifiAPI.update", side_effect=self.update_side_effect()
)
for name in self.sensor_dict:
sensor = self.sensor_dict[name]["sensor"]
sensor.update()
assert sensor.state is None
def update_side_effect(self):
"""Mock representation of update function."""
self.api.data = None
self.api.available = False
|
import os.path as op
import pytest
from mne.io import read_raw_fif
from mne import pick_types
from mne.preprocessing import find_ecg_events, create_ecg_epochs
data_path = op.join(op.dirname(__file__), '..', '..', 'io', 'tests', 'data')
raw_fname = op.join(data_path, 'test_raw.fif')
event_fname = op.join(data_path, 'test-eve.fif')
proj_fname = op.join(data_path, 'test-proj.fif')
def test_find_ecg():
"""Test find ECG peaks."""
# Test if ECG analysis will work on data that is not preloaded
raw = read_raw_fif(raw_fname, preload=False).pick_types(meg=True)
raw.pick(raw.ch_names[::10] + ['MEG 2641'])
raw.info.normalize_proj()
# once with mag-trick
# once with characteristic channel
raw_bad = raw.copy().load_data()
ecg_idx = raw.ch_names.index('MEG 1531')
raw_bad._data[ecg_idx, :1] = 1e6 # this will break the detector
raw_bad.annotations.append(raw.first_samp / raw.info['sfreq'],
1. / raw.info['sfreq'], 'BAD_values')
raw_noload = raw.copy()
raw.resample(100)
for ch_name, tstart in zip(['MEG 1531', None],
[raw.times[-1] / 2, 0]):
events, ch_ECG, average_pulse, ecg = find_ecg_events(
raw, event_id=999, ch_name=ch_name, tstart=tstart,
return_ecg=True)
assert raw.n_times == ecg.shape[-1]
assert 40 < average_pulse < 60
n_events = len(events)
# with annotations
average_pulse = find_ecg_events(raw_bad, ch_name=ch_name,
tstart=tstart,
reject_by_annotation=False)[2]
assert average_pulse < 1.
average_pulse = find_ecg_events(raw_bad, ch_name=ch_name,
tstart=tstart,
reject_by_annotation=True)[2]
assert 55 < average_pulse < 60
picks = pick_types(
raw.info, meg='grad', eeg=False, stim=False,
eog=False, ecg=True, emg=False, ref_meg=False,
exclude='bads')
# There should be no ECG channels, or else preloading will not be
# tested
assert 'ecg' not in raw
ecg_epochs = create_ecg_epochs(raw_noload, picks=picks, keep_ecg=True)
assert len(ecg_epochs.events) == n_events
assert 'ECG-SYN' not in raw.ch_names
assert 'ECG-SYN' in ecg_epochs.ch_names
assert len(ecg_epochs) == 23
picks = pick_types(
ecg_epochs.info, meg=False, eeg=False, stim=False,
eog=False, ecg=True, emg=False, ref_meg=False,
exclude='bads')
assert len(picks) == 1
ecg_epochs = create_ecg_epochs(raw, ch_name='MEG 2641')
assert 'MEG 2641' in ecg_epochs.ch_names
# test with user provided ecg channel
raw.info['projs'] = list()
assert 'MEG 2641' in raw.ch_names
with pytest.warns(RuntimeWarning, match='unit for channel'):
raw.set_channel_types({'MEG 2641': 'ecg'})
create_ecg_epochs(raw)
raw.pick_types(meg=True) # remove ECG
assert 'MEG 2641' not in raw.ch_names
ecg_epochs = create_ecg_epochs(raw, keep_ecg=False)
assert len(ecg_epochs.events) == n_events
assert 'ECG-SYN' not in raw.ch_names
assert 'ECG-SYN' not in ecg_epochs.ch_names
|
import logging
import os
import time
from functools import reduce
from typing import Any
from typing import Dict
from typing import Iterable
from typing import List
from typing import Optional
from typing import Set
from typing import Tuple
import pyinotify
from kazoo.protocol.states import EventType
from kazoo.protocol.states import WatchedEvent
from kazoo.protocol.states import ZnodeStat
from kazoo.recipe.watchers import ChildrenWatch
from kazoo.recipe.watchers import DataWatch
from requests.exceptions import RequestException
from paasta_tools.deployd.common import DelayDeadlineQueueProtocol
from paasta_tools.deployd.common import get_marathon_clients_from_config
from paasta_tools.deployd.common import get_service_instances_needing_update
from paasta_tools.deployd.common import PaastaThread
from paasta_tools.deployd.common import ServiceInstance
from paasta_tools.marathon_tools import AUTOSCALING_ZK_ROOT
from paasta_tools.marathon_tools import DEFAULT_SOA_DIR
from paasta_tools.marathon_tools import deformat_job_id
from paasta_tools.marathon_tools import get_marathon_apps_with_clients
from paasta_tools.marathon_tools import MarathonServiceConfig
from paasta_tools.mesos_maintenance import get_draining_hosts
from paasta_tools.utils import AUTO_SOACONFIG_SUBDIR
from paasta_tools.utils import get_services_for_cluster
from paasta_tools.utils import list_all_instances_for_service
from paasta_tools.utils import load_system_paasta_config
from paasta_tools.utils import PATH_TO_SYSTEM_PAASTA_CONFIG_DIR
from paasta_tools.utils import SystemPaastaConfig
class PaastaWatcher(PaastaThread):
def __init__(
self,
instances_to_bounce: DelayDeadlineQueueProtocol,
cluster: str,
config: SystemPaastaConfig,
**kwargs: Any,
) -> None:
super().__init__()
self.daemon = True
self.instances_to_bounce = instances_to_bounce
self.cluster = cluster
self.config = config
self.is_ready = False
class AutoscalerWatcher(PaastaWatcher):
def __init__(
self,
instances_to_bounce: DelayDeadlineQueueProtocol,
cluster: str,
config: SystemPaastaConfig,
**kwargs: Any,
) -> None:
super().__init__(instances_to_bounce, cluster, config)
self.zk = kwargs.pop("zookeeper_client")
self.watchers: Dict[str, PaastaWatcher] = {}
def watch_folder(self, path: str, enqueue_children: bool = False) -> None:
"""recursive nonsense"""
if "autoscaling.lock" in path:
return
if path.split("/")[-1] == "instances":
self.watch_node(path, enqueue=enqueue_children)
return
self.log.info(f"Adding folder watch on {path}")
watcher = ChildrenWatch(
self.zk, path, func=self.process_folder_event, send_event=True
)
self.watchers[path] = watcher
children = watcher._client.get_children(watcher._path)
if children:
for child in children:
self.watch_folder(f"{path}/{child}", enqueue_children=enqueue_children)
def _enqueue_service_instance(self, path: str) -> None:
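        """Parse the service and instance from the ZK path and queue an immediate bounce."""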
service, instance = path.split("/")[-3:-1]
self.log.info(
f"Number of instances changed for {service}.{instance} by the autoscaler."
)
service_instance = ServiceInstance(
service=service,
instance=instance,
bounce_by=time.time(),
wait_until=time.time(),
watcher=type(self).__name__,
failures=0,
enqueue_time=time.time(),
bounce_start_time=time.time(),
)
self.instances_to_bounce.put(service_instance)
def watch_node(self, path: str, enqueue: bool = False) -> None:
self.log.info(f"Adding zk node watch on {path}")
DataWatch(self.zk, path, func=self.process_node_event, send_event=True)
if enqueue:
self._enqueue_service_instance(path)
def process_node_event(
self, data: Optional[bytes], stat: ZnodeStat, event: WatchedEvent
) -> None:
self.log.debug(f"zk node change: {event}")
if event and (
event.type == EventType.CREATED or event.type == EventType.CHANGED
):
self._enqueue_service_instance(event.path)
def process_folder_event(
self, children: Iterable[str], event: WatchedEvent
) -> None:
self.log.debug(f"Folder change: {event}")
if event and (event.type == EventType.CHILD):
fq_children = [f"{event.path}/{child}" for child in children]
for child in fq_children:
if child not in self.watchers:
self.watch_folder(child, enqueue_children=True)
def run(self) -> None:
if not self.zk.exists(AUTOSCALING_ZK_ROOT):
self.zk.ensure_path(AUTOSCALING_ZK_ROOT)
self.watch_folder(AUTOSCALING_ZK_ROOT)
self.is_ready = True
while True:
time.sleep(0.1)
class SoaFileWatcher(PaastaWatcher):
def __init__(
self,
instances_to_bounce: DelayDeadlineQueueProtocol,
cluster: str,
config: SystemPaastaConfig,
**kwargs: Any,
) -> None:
super().__init__(instances_to_bounce, cluster, config)
self.wm = pyinotify.WatchManager()
self.wm.add_watch(DEFAULT_SOA_DIR, self.mask, rec=True)
self.notifier = pyinotify.Notifier(
watch_manager=self.wm,
default_proc_fun=YelpSoaEventHandler(filewatcher=self),
)
@property
def mask(self) -> int:
boring_flags = ["IN_CLOSE_NOWRITE", "IN_OPEN", "IN_ACCESS", "IN_ATTRIB"]
return reduce(
lambda x, y: x | y,
[
v
for k, v in pyinotify.EventsCodes.OP_FLAGS.items()
if k not in boring_flags
],
)
def run(self) -> None:
self.notifier.loop(callback=self.startup_checker)
def startup_checker(self, obj: Any) -> None:
self.is_ready = True
class PublicConfigFileWatcher(PaastaWatcher):
def __init__(
self,
instances_to_bounce: DelayDeadlineQueueProtocol,
cluster: str,
config: SystemPaastaConfig,
**kwargs: Any,
) -> None:
super().__init__(instances_to_bounce, cluster, config)
self.wm = pyinotify.WatchManager()
self.wm.add_watch(PATH_TO_SYSTEM_PAASTA_CONFIG_DIR, self.mask, rec=True)
self.notifier = pyinotify.Notifier(
watch_manager=self.wm,
default_proc_fun=PublicConfigEventHandler(filewatcher=self),
)
@property
def mask(self) -> int:
boring_flags = ["IN_CLOSE_NOWRITE", "IN_OPEN", "IN_ACCESS", "IN_ATTRIB"]
return reduce(
lambda x, y: x | y,
[
v
for k, v in pyinotify.EventsCodes.OP_FLAGS.items()
if k not in boring_flags
],
)
def run(self) -> None:
self.notifier.loop(callback=self.startup_checker)
def startup_checker(self, obj: Any) -> None:
self.is_ready = True
class MaintenanceWatcher(PaastaWatcher):
def __init__(
self,
instances_to_bounce: DelayDeadlineQueueProtocol,
cluster: str,
config: SystemPaastaConfig,
**kwargs: Any,
) -> None:
super().__init__(instances_to_bounce, cluster, config)
self.draining: Set[str] = set()
self.marathon_clients = get_marathon_clients_from_config()
def get_new_draining_hosts(self) -> List[str]:
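        """Return hosts that have newly started draining and update the tracked set."""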
try:
draining_hosts = get_draining_hosts()
except RequestException as e:
self.log.error(f"Unable to get list of draining hosts from mesos: {e}")
draining_hosts = list(self.draining)
new_draining_hosts = [
host for host in draining_hosts if host not in self.draining
]
for host in new_draining_hosts:
self.draining.add(host)
hosts_finished_draining = [
host for host in self.draining if host not in draining_hosts
]
for host in hosts_finished_draining:
self.draining.remove(host)
return new_draining_hosts
def run(self) -> None:
self.is_ready = True
while True:
new_draining_hosts = self.get_new_draining_hosts()
service_instances: List[ServiceInstance] = []
if new_draining_hosts:
self.log.info(f"Found new draining hosts: {new_draining_hosts}")
service_instances = self.get_at_risk_service_instances(
new_draining_hosts
)
for service_instance in service_instances:
self.instances_to_bounce.put(service_instance)
time.sleep(self.config.get_deployd_maintenance_polling_frequency())
def get_at_risk_service_instances(
self, draining_hosts: List[str]
) -> List[ServiceInstance]:
marathon_apps_with_clients = get_marathon_apps_with_clients(
clients=self.marathon_clients.get_all_clients(), embed_tasks=True
)
at_risk_tasks = []
for app, client in marathon_apps_with_clients:
for task in app.tasks:
if task.host in draining_hosts:
at_risk_tasks.append(task)
self.log.info(f"At risk tasks: {at_risk_tasks}")
service_instances: List[ServiceInstance] = []
for task in at_risk_tasks:
app_id = task.app_id.strip("/")
service, instance, _, __ = deformat_job_id(app_id)
# check we haven't already added this instance,
# no need to add the same instance to the bounce queue
# more than once
if not any(
[
(service, instance) == (si.service, si.instance)
for si in service_instances
]
):
service_instances.append(
ServiceInstance(
service=service,
instance=instance,
bounce_by=time.time(),
wait_until=time.time(),
watcher=type(self).__name__,
failures=0,
enqueue_time=time.time(),
bounce_start_time=time.time(),
)
)
return service_instances
class PublicConfigEventHandler(pyinotify.ProcessEvent):
def my_init(self, filewatcher: PublicConfigFileWatcher) -> None:
self.filewatcher = filewatcher
self.public_config = load_system_paasta_config()
self.marathon_clients = get_marathon_clients_from_config()
@property
def log(self) -> logging.Logger:
name = ".".join([__name__, type(self).__name__])
return logging.getLogger(name)
def filter_event(self, event: pyinotify.Event) -> Optional[pyinotify.Event]:
if event.name.endswith(".json") or event.maskname == "IN_CREATE|IN_ISDIR":
return event
return None
def watch_new_folder(self, event: pyinotify.Event) -> None:
if event.maskname == "IN_CREATE|IN_ISDIR" and ".~tmp~" not in event.pathname:
self.filewatcher.wm.add_watch(
event.pathname, self.filewatcher.mask, rec=True
)
def process_default(self, event: pyinotify.Event) -> None:
self.log.debug(event)
self.watch_new_folder(event)
event = self.filter_event(event)
if event:
self.log.debug("Public config changed on disk, loading new config.")
try:
new_config = load_system_paasta_config()
except ValueError:
self.log.error("Couldn't load public config, the JSON is invalid!")
return
service_instance_configs: List[
Tuple[str, str, MarathonServiceConfig, str]
] = []
if new_config != self.public_config:
self.log.info(
"Public config has changed, now checking if it affects any services config shas."
)
self.public_config = new_config
all_service_instances = get_services_for_cluster(
cluster=self.public_config.get_cluster(),
instance_type="marathon",
soa_dir=DEFAULT_SOA_DIR,
)
service_instance_configs = get_service_instances_needing_update(
self.marathon_clients,
all_service_instances,
self.public_config.get_cluster(),
)
if service_instance_configs:
self.log.info(
f"{len(service_instance_configs)} service instances affected. Doing a staggered bounce."
)
for service, instance, config, _ in service_instance_configs:
self.filewatcher.instances_to_bounce.put(
ServiceInstance(
service=service,
instance=instance,
watcher=type(self).__name__,
bounce_by=time.time()
+ self.public_config.get_deployd_big_bounce_deadline(),
wait_until=time.time(),
enqueue_time=time.time(),
bounce_start_time=time.time(),
)
)
class YelpSoaEventHandler(pyinotify.ProcessEvent):
def my_init(self, filewatcher: SoaFileWatcher) -> None:
self.filewatcher = filewatcher
self.marathon_clients = get_marathon_clients_from_config()
@property
def log(self) -> logging.Logger:
name = ".".join([__name__, type(self).__name__])
return logging.getLogger(name)
def get_service_name_from_event(self, event: pyinotify.Event) -> str:
"""Get service_name from the file inotify event,
returns None if it is not an event we're interested in"""
starts_with = ["marathon-", "deployments.json"]
if any([event.name.startswith(x) for x in starts_with]):
dir_name = event.path.split("/")[-1]
# we also have a subdir for autotuned_defaults
if dir_name == AUTO_SOACONFIG_SUBDIR:
service_name = event.path.split("/")[-2]
else:
service_name = dir_name
elif event.name.endswith(".json") and event.path.split("/")[-1] == "secrets":
# this is needed because we put the secrets json files in a
# subdirectory so the service name would be "secrets" otherwise
service_name = event.path.split("/")[-2]
else:
service_name = None
return service_name
def watch_new_folder(self, event: pyinotify.Event) -> None:
if event.maskname == "IN_CREATE|IN_ISDIR" and ".~tmp~" not in event.pathname:
self.filewatcher.wm.add_watch(
event.pathname, self.filewatcher.mask, rec=True
)
try:
file_names = os.listdir(event.pathname)
except OSError:
return
if any(["marathon-" in file_name for file_name in file_names]):
self.log.info(f"New folder with marathon files: {event.name}.")
self.bounce_service(event.name)
def process_default(self, event: pyinotify.Event) -> None:
self.log.debug(event)
self.watch_new_folder(event)
service_name = self.get_service_name_from_event(event)
if service_name:
self.log.info(
f"Looking for things to bounce for {service_name} because {event.path}/{event.name} changed."
)
self.bounce_service(service_name)
def bounce_service(self, service_name: str) -> None:
self.log.info(
f"Checking if any marathon instances of {service_name} need bouncing."
)
instances = list_all_instances_for_service(
service=service_name,
clusters=[self.filewatcher.cluster],
instance_type="marathon",
cache=False,
)
self.log.debug(instances)
service_instance_configs = get_service_instances_needing_update(
self.marathon_clients,
[(service_name, instance) for instance in instances],
self.filewatcher.cluster,
)
for service, instance, config, app_id in service_instance_configs:
self.log.info(
f"{service}.{instance} has a new marathon app ID ({app_id}). Enqueuing it to be bounced."
)
now = time.time()
self.filewatcher.instances_to_bounce.put(
ServiceInstance(
service=service,
instance=instance,
bounce_by=now + config.get_bounce_start_deadline(),
wait_until=now,
watcher=type(self).__name__,
failures=0,
enqueue_time=time.time(),
bounce_start_time=time.time(),
)
)
|
from homeassistant.components.remote import (
ATTR_COMMAND,
DOMAIN as REMOTE_DOMAIN,
SERVICE_SEND_COMMAND,
)
from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON
from homeassistant.helpers.typing import HomeAssistantType
from tests.async_mock import patch
from tests.components.roku import UPNP_SERIAL, setup_integration
from tests.test_util.aiohttp import AiohttpClientMocker
MAIN_ENTITY_ID = f"{REMOTE_DOMAIN}.my_roku_3"
# pylint: disable=redefined-outer-name
async def test_setup(
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test setup with basic config."""
await setup_integration(hass, aioclient_mock)
assert hass.states.get(MAIN_ENTITY_ID)
async def test_unique_id(
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test unique id."""
await setup_integration(hass, aioclient_mock)
entity_registry = await hass.helpers.entity_registry.async_get_registry()
main = entity_registry.async_get(MAIN_ENTITY_ID)
assert main.unique_id == UPNP_SERIAL
async def test_main_services(
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test platform services."""
await setup_integration(hass, aioclient_mock)
with patch("homeassistant.components.roku.Roku.remote") as remote_mock:
await hass.services.async_call(
REMOTE_DOMAIN,
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: MAIN_ENTITY_ID},
blocking=True,
)
remote_mock.assert_called_once_with("poweroff")
with patch("homeassistant.components.roku.Roku.remote") as remote_mock:
await hass.services.async_call(
REMOTE_DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: MAIN_ENTITY_ID},
blocking=True,
)
remote_mock.assert_called_once_with("poweron")
with patch("homeassistant.components.roku.Roku.remote") as remote_mock:
await hass.services.async_call(
REMOTE_DOMAIN,
SERVICE_SEND_COMMAND,
{ATTR_ENTITY_ID: MAIN_ENTITY_ID, ATTR_COMMAND: ["home"]},
blocking=True,
)
remote_mock.assert_called_once_with("home")
|
import numpy as np
from scattertext.semioticsquare import SemioticSquare
from scattertext.termranking import AbsoluteFrequencyRanker
from scattertext.termscoring.RankDifference import RankDifference
class FourSquareAxes(SemioticSquare):
'''
This creates a semiotic square where the complex term is considered the "top" category, the
    neutral term is the "bottom" category, the positive deixis is the "left" category, and the
    negative deixis is the "right" category.
'''
def __init__(self,
term_doc_matrix,
left_categories,
right_categories,
top_categories,
bottom_categories,
left_category_name=None,
right_category_name=None,
top_category_name=None,
bottom_category_name=None,
x_scorer=RankDifference(),
y_scorer=RankDifference(),
term_ranker=AbsoluteFrequencyRanker,
labels=None):
for param in [left_categories, right_categories, top_categories, bottom_categories]:
assert type(param) == list
assert set(param) - set(term_doc_matrix.get_categories()) == set()
assert len(param) > 0
self.term_doc_matrix_ = term_doc_matrix
self._labels = labels
self.left_category_name_ = left_category_name if left_category_name is not None else left_categories[0]
self.right_category_name_ = right_category_name if right_category_name is not None else right_categories[0]
self.top_category_name_ = top_category_name if top_category_name is not None else top_categories[0]
self.bottom_category_name_ = bottom_category_name if bottom_category_name is not None else bottom_categories[0]
self.x_scorer_ = x_scorer
self.y_scorer_ = y_scorer
self.term_ranker_ = term_ranker
self.left_categories_, self.right_categories_, self.top_categories_, self.bottom_categories_ \
= left_categories, right_categories, top_categories, bottom_categories
self.axes = self._build_axes()
self.lexicons = self._build_lexicons()
def _get_y_baseline(self):
return self.y_scorer_.get_default_score()
def _get_x_baseline(self):
return self.x_scorer_.get_default_score()
def _get_all_categories(self):
return self.left_categories_ + self.right_categories_ + self.top_categories_ + self.bottom_categories_
def _build_axes(self, scorer=None):
tdf = self.term_ranker_(self.term_doc_matrix_).get_ranks()
tdf.columns = [c[:-5] for c in tdf.columns]
tdf = tdf[self._get_all_categories()]
counts = tdf.sum(axis=1)
tdf['x'] = self.x_scorer_.get_scores(tdf[self.left_categories_].sum(axis=1),
tdf[self.right_categories_].sum(axis=1))
tdf['x'][np.isnan(tdf['x'])] = self.x_scorer_.get_default_score()
tdf['y'] = self.y_scorer_.get_scores(tdf[self.top_categories_].sum(axis=1),
tdf[self.bottom_categories_].sum(axis=1))
tdf['y'][np.isnan(tdf['y'])] = self.y_scorer_.get_default_score()
tdf['counts'] = counts
return tdf[['x', 'y', 'counts']]
def get_labels(self):
a = self._get_default_a_label()
b = self._get_default_b_label()
default_labels = {'a': a,
'not_a': '' if a == '' else 'Not ' + a,
'b': b,
'not_b': '' if b == '' else 'Not ' + b,
'a_and_b': self.top_category_name_,
'not_a_and_not_b': self.bottom_category_name_,
'a_and_not_b': self.left_category_name_,
'b_and_not_a': self.right_category_name_}
labels = self._labels
if labels is None:
labels = {}
return {name + '_label': labels.get(name, default_labels[name])
for name in default_labels}
def _get_default_b_label(self):
return ''
def _get_default_a_label(self):
return ''
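# Illustrative sketch (the `corpus` object and category names are assumptions, not from
# the source): the term_doc_matrix must contain all four category lists.
# four_square = FourSquareAxes(term_doc_matrix=corpus,
#                              left_categories=['positive'],
#                              right_categories=['negative'],
#                              top_categories=['subjective'],
#                              bottom_categories=['objective'])
# four_square.axes          # DataFrame of per-term 'x', 'y' scores and 'counts'
# four_square.get_labels()  # quadrant label dict keyed like 'a_label', 'not_a_label', ...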
|
import re
from kalliope.core import Utils
from kalliope.core.ConfigurationManager import YAMLLoader
from kalliope.core.Models.Dna import Dna
class InvalidDNAException(Exception):
pass
VALID_DNA_MODULE_TYPE = ["neuron", "stt", "tts", "trigger", "signal"]
class DnaLoader(object):
def __init__(self, file_path):
"""
Load a DNA file and check the content of this one
        :param file_path: path to the DNA file to load
"""
self.file_path = file_path
if self.file_path is None:
            raise InvalidDNAException("[DnaLoader] You must set a file path")
self.yaml_config = YAMLLoader.get_config(self.file_path)
self.dna = self._load_dna()
def get_yaml_config(self):
"""
Class Methods which loads default or the provided YAML file and return it as a String
:return: The loaded DNA YAML file
:rtype: String
"""
return self.yaml_config
def get_dna(self):
"""
Return the loaded DNA object if this one is valid
:return:
"""
return self.dna
def _load_dna(self):
"""
        Return a DNA object from the loaded yaml file
:return:
"""
new_dna = None
if self._check_dna_file(self.yaml_config):
new_dna = Dna()
new_dna.name = self.yaml_config["name"]
new_dna.module_type = self.yaml_config["type"]
new_dna.author = self.yaml_config["author"]
new_dna.kalliope_supported_version = self.yaml_config["kalliope_supported_version"]
new_dna.tags = self.yaml_config["tags"]
return new_dna
@staticmethod
def _check_dna_file(dna_file):
"""
Check the content of a DNA file
:param dna_file: the dna to check
:return: True if ok, False otherwise
"""
success_loading = True
if "name" not in dna_file:
Utils.print_danger("The DNA of does not contains a \"name\" tag")
success_loading = False
if "type" not in dna_file:
Utils.print_danger("The DNA of does not contains a \"type\" tag")
success_loading = False
else:
# we have a type, check that is a valid one
if dna_file["type"] not in VALID_DNA_MODULE_TYPE:
Utils.print_danger("The DNA type %s is not valid" % dna_file["type"])
Utils.print_danger("The DNA type must be one of the following: %s" % VALID_DNA_MODULE_TYPE)
success_loading = False
if "kalliope_supported_version" not in dna_file:
Utils.print_danger("The DNA of does not contains a \"kalliope_supported_version\" tag")
success_loading = False
else:
# kalliope_supported_version must be a non empty list
if not isinstance(dna_file["kalliope_supported_version"], list):
Utils.print_danger("kalliope_supported_version is not a list")
success_loading = False
else:
if not dna_file["kalliope_supported_version"]:
Utils.print_danger("kalliope_supported_version cannot be empty")
success_loading = False
else:
for supported_version in dna_file["kalliope_supported_version"]:
# check if major version is provided
                        if not re.search(r'^[\d]*[.][\d]*$', str(supported_version)):
Utils.print_danger("kalliope_supported_version cannot handle this format of version %s. "
"Only major version should be provided" % supported_version)
success_loading = False
return success_loading
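# Illustrative DNA file that would satisfy _check_dna_file (all values below are assumptions):
#   name: "my_neuron"
#   type: "neuron"                        # must be one of VALID_DNA_MODULE_TYPE
#   author: "someone"
#   kalliope_supported_version:
#     - "0.5"                             # major.minor only, per the regex above
#   tags:
#     - "example"
# Loading it:
#   dna = DnaLoader("/path/to/dna.yml").get_dna()   # hypothetical path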
|
import urwid
def exit_on_q(key):
if key in ('q', 'Q'):
raise urwid.ExitMainLoop()
class QuestionBox(urwid.Filler):
def keypress(self, size, key):
if key != 'enter':
return super(QuestionBox, self).keypress(size, key)
self.original_widget = urwid.Text(
u"Nice to meet you,\n%s.\n\nPress Q to exit." %
edit.edit_text)
edit = urwid.Edit(u"What is your name?\n")
fill = QuestionBox(edit)
loop = urwid.MainLoop(fill, unhandled_input=exit_on_q)
loop.run()
|
from __future__ import division, print_function, absolute_import
import sys
import os
from plumbum import local
from .termsize import get_terminal_size
from .progress import Progress
def readline(message=""):
"""Gets a line of input from the user (stdin)"""
sys.stdout.write(message)
sys.stdout.flush()
return sys.stdin.readline()
def ask(question, default=None):
"""
Presents the user with a yes/no question.
:param question: The question to ask
:param default: If ``None``, the user must answer. If ``True`` or ``False``, lack of response is
interpreted as the default option
:returns: the user's choice
"""
question = question.rstrip().rstrip("?").rstrip() + "?"
if default is None:
question += " (y/n) "
elif default:
question += " [Y/n] "
else:
question += " [y/N] "
while True:
try:
answer = readline(question).strip().lower()
except EOFError:
answer = None
if answer in ("y", "yes"):
return True
elif answer in ("n", "no"):
return False
elif not answer and default is not None:
return default
else:
sys.stdout.write("Invalid response, please try again\n")
def choose(question, options, default=None):
"""Prompts the user with a question and a set of options, from which the user needs to choose.
:param question: The question to ask
:param options: A set of options. It can be a list (of strings or two-tuples, mapping text
to returned-object) or a dict (mapping text to returned-object).``
:param default: If ``None``, the user must answer. Otherwise, lack of response is interpreted
as this answer
:returns: The user's choice
Example::
ans = choose("What is your favorite color?", ["blue", "yellow", "green"], default = "yellow")
# `ans` will be one of "blue", "yellow" or "green"
ans = choose("What is your favorite color?",
{"blue" : 0x0000ff, "yellow" : 0xffff00 , "green" : 0x00ff00}, default = 0x00ff00)
# this will display "blue", "yellow" and "green" but return a numerical value
"""
if hasattr(options, "items"):
options = options.items()
sys.stdout.write(question.rstrip() + "\n")
choices = {}
defindex = None
for i, item in enumerate(options):
i = i + 1 # python2.5
if isinstance(item, (tuple, list)) and len(item) == 2:
text = item[0]
val = item[1]
else:
text = item
val = item
choices[i] = val
if default is not None and default == val:
defindex = i
sys.stdout.write("(%d) %s\n" % (i, text))
if default is not None:
if defindex is None:
msg = "Choice [%s]: " % (default, )
else:
msg = "Choice [%d]: " % (defindex, )
else:
msg = "Choice: "
while True:
try:
choice = readline(msg).strip()
except EOFError:
choice = ""
        if not choice and default is not None:
return default
try:
choice = int(choice)
if choice not in choices:
raise ValueError()
except ValueError:
sys.stdout.write("Invalid choice, please try again\n")
continue
return choices[choice]
def prompt(question,
type=str,
default=NotImplemented,
validator=lambda val: True):
"""
Presents the user with a validated question, keeps asking if validation does not pass.
:param question: The question to ask
:param type: The type of the answer, defaults to str
:param default: The default choice
:param validator: An extra validator called after type conversion, can raise ValueError or return False to trigger a retry.
:returns: the user's choice
"""
question = question.rstrip(" \t:")
if default is not NotImplemented:
question += " [%s]" % (default, )
question += ": "
while True:
try:
ans = readline(question).strip()
except EOFError:
ans = ""
if not ans:
if default is not NotImplemented:
#sys.stdout.write("\b%s\n" % (default,))
return default
else:
continue
try:
ans = type(ans)
except (TypeError, ValueError) as ex:
sys.stdout.write("Invalid value (%s), please try again\n" % (ex, ))
continue
try:
valid = validator(ans)
except ValueError as ex:
sys.stdout.write("%s, please try again\n" % (ex, ))
continue
if not valid:
sys.stdout.write(
"Value not in specified range, please try again\n")
continue
return ans
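# Illustrative call (the prompt text, default, and range are assumptions, not from the
# source): keep asking until the answer parses as an int that passes the validator; an
# empty response returns the default.
# port = prompt("Port to listen on", type=int, default=8080,
#               validator=lambda v: 1 <= v <= 65535)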
def hexdump(data_or_stream, bytes_per_line=16, aggregate=True):
"""Convert the given bytes (or a stream with a buffering ``read()`` method) to hexdump-formatted lines,
with possible aggregation of identical lines. Returns a generator of formatted lines.
"""
if hasattr(data_or_stream, "read"):
def read_chunk():
while True:
buf = data_or_stream.read(bytes_per_line)
if not buf:
break
yield buf
else:
def read_chunk():
for i in range(0, len(data_or_stream), bytes_per_line):
yield data_or_stream[i:i + bytes_per_line]
prev = None
skipped = False
for i, chunk in enumerate(read_chunk()):
hexd = " ".join("%02x" % (ord(ch), ) for ch in chunk)
text = "".join(ch if 32 <= ord(ch) < 127 else "." for ch in chunk)
if aggregate and prev == chunk:
skipped = True
continue
prev = chunk
if skipped:
yield "*"
yield "%06x | %s| %s" % (i * bytes_per_line,
hexd.ljust(bytes_per_line * 3, " "), text)
skipped = False
def pager(rows, pagercmd=None): # pragma: no cover
"""Opens a pager (e.g., ``less``) to display the given text. Requires a terminal.
:param rows: a ``bytes`` or a list/iterator of "rows" (``bytes``)
:param pagercmd: the pager program to run. Defaults to ``less -RSin``
"""
if not pagercmd:
pagercmd = local["less"]["-RSin"]
if hasattr(rows, "splitlines"):
rows = rows.splitlines()
pg = pagercmd.popen(stdout=None, stderr=None)
try:
for row in rows:
line = "%s\n" % (row, )
try:
pg.stdin.write(line)
pg.stdin.flush()
except IOError:
break
pg.stdin.close()
pg.wait()
finally:
try:
rows.close()
except Exception:
pass
if pg and pg.poll() is None:
try:
pg.terminate()
except Exception:
pass
os.system("reset")
|
import os
import sys
import time
import cherrypy
starttime = time.time()
class Root:
@cherrypy.expose
def index(self):
return 'Hello World'
@cherrypy.expose
def mtimes(self):
return repr(cherrypy.engine.publish('Autoreloader', 'mtimes'))
@cherrypy.expose
def pid(self):
return str(os.getpid())
@cherrypy.expose
def start(self):
return repr(starttime)
@cherrypy.expose
def exit(self):
# This handler might be called before the engine is STARTED if an
# HTTP worker thread handles it before the HTTP server returns
# control to engine.start. We avoid that race condition here
# by waiting for the Bus to be STARTED.
cherrypy.engine.wait(state=cherrypy.engine.states.STARTED)
cherrypy.engine.exit()
@cherrypy.engine.subscribe('start', priority=100)
def unsub_sig():
cherrypy.log('unsubsig: %s' % cherrypy.config.get('unsubsig', False))
if cherrypy.config.get('unsubsig', False):
cherrypy.log('Unsubscribing the default cherrypy signal handler')
cherrypy.engine.signal_handler.unsubscribe()
try:
from signal import signal, SIGTERM
except ImportError:
pass
else:
def old_term_handler(signum=None, frame=None):
cherrypy.log('I am an old SIGTERM handler.')
sys.exit(0)
cherrypy.log('Subscribing the new one.')
signal(SIGTERM, old_term_handler)
@cherrypy.engine.subscribe('start', priority=6)
def starterror():
if cherrypy.config.get('starterror', False):
1 / 0
@cherrypy.engine.subscribe('start', priority=6)
def log_test_case_name():
if cherrypy.config.get('test_case_name', False):
cherrypy.log('STARTED FROM: %s' %
cherrypy.config.get('test_case_name'))
cherrypy.tree.mount(Root(), '/', {'/': {}})
|
from pytest import mark
from cerberus import errors
from cerberus.tests import assert_fail, assert_success
@mark.parametrize('constraint', (('Graham Chapman', 'Eric Idle'), 'Terry Gilliam'))
def test_contains_succeeds(constraint):
assert_success(
schema={'actors': {'contains': constraint}},
document={'actors': ('Graham Chapman', 'Eric Idle', 'Terry Gilliam')},
)
@mark.parametrize('constraint', (('Graham Chapman', 'Eric Idle'), 'Terry Gilliam'))
def test_contains_fails(validator, constraint):
assert_fail(
document={
            'actors': ('Eric idle', 'Terry Jones', 'John Cleese', 'Michael Palin')
},
schema={'actors': {'contains': constraint}},
validator=validator,
)
assert errors.MISSING_MEMBERS in validator.document_error_tree['actors']
missing_actors = validator.document_error_tree['actors'][
errors.MISSING_MEMBERS
].info[0]
assert any(x in missing_actors for x in ('Eric Idle', 'Terry Gilliam'))
|
import os.path as op
import numpy as np
from numpy.testing import assert_array_almost_equal, assert_allclose
from scipy.signal import lfilter
from mne import io
from mne.time_frequency.ar import _yule_walker, fit_iir_model_raw
from mne.utils import requires_version, run_tests_if_main
raw_fname = op.join(op.dirname(__file__), '..', '..', 'io', 'tests', 'data',
'test_raw.fif')
# 0.7 attempts to import nonexistent TimeSeries from Pandas 0.20
@requires_version('patsy', '0.4')
@requires_version('statsmodels', '0.8')
def test_yule_walker():
"""Test Yule-Walker against statsmodels."""
from statsmodels.regression.linear_model import yule_walker as sm_yw
d = np.random.randn(100)
sm_rho, sm_sigma = sm_yw(d, order=2)
rho, sigma = _yule_walker(d[np.newaxis], order=2)
assert_array_almost_equal(sm_sigma, sigma)
assert_array_almost_equal(sm_rho, rho)
def test_ar_raw():
"""Test fitting AR model on raw data."""
raw = io.read_raw_fif(raw_fname).crop(0, 2).load_data()
raw.pick_types(meg='grad')
# pick MEG gradiometers
for order in (2, 5, 10):
coeffs = fit_iir_model_raw(raw, order)[1][1:]
assert coeffs.shape == (order,)
assert_allclose(-coeffs[0], 1., atol=0.5)
# let's make sure we're doing something reasonable: first, white noise
rng = np.random.RandomState(0)
raw._data = rng.randn(*raw._data.shape)
raw._data *= 1e-15
for order in (2, 5, 10):
coeffs = fit_iir_model_raw(raw, order)[1]
assert_allclose(coeffs, [1.] + [0.] * order, atol=2e-2)
# Now let's try pink noise
iir = [1, -1, 0.2]
raw._data = lfilter([1.], iir, raw._data)
for order in (2, 5, 10):
coeffs = fit_iir_model_raw(raw, order)[1]
assert_allclose(coeffs, iir + [0.] * (order - 2), atol=5e-2)
run_tests_if_main()
|