import os
import pickle
import shutil
# the name of the S3 bucket to post findings to
FINDINGS_S3_BUCKET = 'deeposm'
# set in Dockerfile as env variable
GEO_DATA_DIR = os.environ.get("GEO_DATA_DIR")
# where training data gets cached/retrieved
RAW_LABEL_DATA_DIR = os.path.join(GEO_DATA_DIR, "openstreetmap")
NAIP_DATA_DIR = os.path.join(GEO_DATA_DIR, "naip")
CACHE_PATH = GEO_DATA_DIR + '/generated/'
LABELS_DATA_DIR = os.path.join(CACHE_PATH, "way_bitmaps")
LABEL_CACHE_DIRECTORY = os.path.join(CACHE_PATH, "training_labels")
IMAGE_CACHE_DIRECTORY = os.path.join(CACHE_PATH, "training_images")
METADATA_PATH = 'training_metadata.pickle'
def cache_paths(raster_data_paths):
"""Cache a list of naip image paths, to pass on to the train_neural_net script."""
    # pickle requires a binary-mode file handle
    with open(CACHE_PATH + 'raster_data_paths.pickle', 'wb') as outfile:
pickle.dump(raster_data_paths, outfile)
def create_cache_directories():
    """Wipe any stale cached data and recreate the cache directory tree."""
    shutil.rmtree(CACHE_PATH, ignore_errors=True)
    shutil.rmtree(RAW_LABEL_DATA_DIR, ignore_errors=True)
    for directory in (CACHE_PATH,
                      LABELS_DATA_DIR,
                      LABEL_CACHE_DIRECTORY,
                      IMAGE_CACHE_DIRECTORY,
                      RAW_LABEL_DATA_DIR):
        os.makedirs(directory, exist_ok=True)
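# A minimal usage sketch (illustrative, not part of the original module): a
# training-data script would typically reset the cache tree once, then record
# the NAIP raster paths it plans to train on, e.g.
#
#   create_cache_directories()
#   cache_paths(['/data/naip/m_12345.tif'])  # hypothetical raster path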
|
from collections import namedtuple
from pyhap.const import HAP_REPR_AID, HAP_REPR_CHARS, HAP_REPR_IID, HAP_REPR_VALUE
import pytest
from homeassistant.components.fan import (
ATTR_DIRECTION,
ATTR_OSCILLATING,
ATTR_SPEED,
ATTR_SPEED_LIST,
DIRECTION_FORWARD,
DIRECTION_REVERSE,
DOMAIN,
SPEED_HIGH,
SPEED_LOW,
SPEED_OFF,
SUPPORT_DIRECTION,
SUPPORT_OSCILLATE,
SUPPORT_SET_SPEED,
)
from homeassistant.components.homekit.const import ATTR_VALUE
from homeassistant.components.homekit.util import HomeKitSpeedMapping
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_SUPPORTED_FEATURES,
EVENT_HOMEASSISTANT_START,
STATE_OFF,
STATE_ON,
STATE_UNKNOWN,
)
from homeassistant.core import CoreState
from homeassistant.helpers import entity_registry
from tests.async_mock import Mock
from tests.common import async_mock_service
from tests.components.homekit.common import patch_debounce
@pytest.fixture(scope="module")
def cls():
"""Patch debounce decorator during import of type_fans."""
patcher = patch_debounce()
patcher.start()
_import = __import__("homeassistant.components.homekit.type_fans", fromlist=["Fan"])
patcher_tuple = namedtuple("Cls", ["fan"])
yield patcher_tuple(fan=_import.Fan)
patcher.stop()
async def test_fan_basic(hass, hk_driver, cls, events):
"""Test fan with char state."""
entity_id = "fan.demo"
hass.states.async_set(entity_id, STATE_ON, {ATTR_SUPPORTED_FEATURES: 0})
await hass.async_block_till_done()
acc = cls.fan(hass, hk_driver, "Fan", entity_id, 1, None)
hk_driver.add_accessory(acc)
assert acc.aid == 1
assert acc.category == 3 # Fan
assert acc.char_active.value == 1
# If there are no speed_list values, then HomeKit speed is unsupported
assert acc.char_speed is None
await acc.run_handler()
await hass.async_block_till_done()
assert acc.char_active.value == 1
hass.states.async_set(entity_id, STATE_OFF, {ATTR_SUPPORTED_FEATURES: 0})
await hass.async_block_till_done()
assert acc.char_active.value == 0
hass.states.async_set(entity_id, STATE_UNKNOWN)
await hass.async_block_till_done()
assert acc.char_active.value == 0
hass.states.async_remove(entity_id)
await hass.async_block_till_done()
assert acc.char_active.value == 0
# Set from HomeKit
call_turn_on = async_mock_service(hass, DOMAIN, "turn_on")
call_turn_off = async_mock_service(hass, DOMAIN, "turn_off")
char_active_iid = acc.char_active.to_HAP()[HAP_REPR_IID]
hk_driver.set_characteristics(
{
HAP_REPR_CHARS: [
{
HAP_REPR_AID: acc.aid,
HAP_REPR_IID: char_active_iid,
HAP_REPR_VALUE: 1,
},
]
},
"mock_addr",
)
await hass.async_block_till_done()
assert call_turn_on
assert call_turn_on[0].data[ATTR_ENTITY_ID] == entity_id
assert len(events) == 1
assert events[-1].data[ATTR_VALUE] is None
hass.states.async_set(entity_id, STATE_ON)
await hass.async_block_till_done()
hk_driver.set_characteristics(
{
HAP_REPR_CHARS: [
{
HAP_REPR_AID: acc.aid,
HAP_REPR_IID: char_active_iid,
HAP_REPR_VALUE: 0,
},
]
},
"mock_addr",
)
await hass.async_block_till_done()
assert call_turn_off
assert call_turn_off[0].data[ATTR_ENTITY_ID] == entity_id
assert len(events) == 2
assert events[-1].data[ATTR_VALUE] is None
async def test_fan_direction(hass, hk_driver, cls, events):
"""Test fan with direction."""
entity_id = "fan.demo"
hass.states.async_set(
entity_id,
STATE_ON,
{ATTR_SUPPORTED_FEATURES: SUPPORT_DIRECTION, ATTR_DIRECTION: DIRECTION_FORWARD},
)
await hass.async_block_till_done()
acc = cls.fan(hass, hk_driver, "Fan", entity_id, 1, None)
hk_driver.add_accessory(acc)
assert acc.char_direction.value == 0
await acc.run_handler()
await hass.async_block_till_done()
assert acc.char_direction.value == 0
hass.states.async_set(entity_id, STATE_ON, {ATTR_DIRECTION: DIRECTION_REVERSE})
await hass.async_block_till_done()
assert acc.char_direction.value == 1
# Set from HomeKit
call_set_direction = async_mock_service(hass, DOMAIN, "set_direction")
char_direction_iid = acc.char_direction.to_HAP()[HAP_REPR_IID]
hk_driver.set_characteristics(
{
HAP_REPR_CHARS: [
{
HAP_REPR_AID: acc.aid,
HAP_REPR_IID: char_direction_iid,
HAP_REPR_VALUE: 0,
},
]
},
"mock_addr",
)
await hass.async_block_till_done()
assert call_set_direction[0]
assert call_set_direction[0].data[ATTR_ENTITY_ID] == entity_id
assert call_set_direction[0].data[ATTR_DIRECTION] == DIRECTION_FORWARD
assert len(events) == 1
assert events[-1].data[ATTR_VALUE] == DIRECTION_FORWARD
hk_driver.set_characteristics(
{
HAP_REPR_CHARS: [
{
HAP_REPR_AID: acc.aid,
HAP_REPR_IID: char_direction_iid,
HAP_REPR_VALUE: 1,
},
]
},
"mock_addr",
)
await hass.async_add_executor_job(acc.char_direction.client_update_value, 1)
await hass.async_block_till_done()
assert call_set_direction[1]
assert call_set_direction[1].data[ATTR_ENTITY_ID] == entity_id
assert call_set_direction[1].data[ATTR_DIRECTION] == DIRECTION_REVERSE
assert len(events) == 2
assert events[-1].data[ATTR_VALUE] == DIRECTION_REVERSE
async def test_fan_oscillate(hass, hk_driver, cls, events):
"""Test fan with oscillate."""
entity_id = "fan.demo"
hass.states.async_set(
entity_id,
STATE_ON,
{ATTR_SUPPORTED_FEATURES: SUPPORT_OSCILLATE, ATTR_OSCILLATING: False},
)
await hass.async_block_till_done()
acc = cls.fan(hass, hk_driver, "Fan", entity_id, 1, None)
hk_driver.add_accessory(acc)
assert acc.char_swing.value == 0
await acc.run_handler()
await hass.async_block_till_done()
assert acc.char_swing.value == 0
hass.states.async_set(entity_id, STATE_ON, {ATTR_OSCILLATING: True})
await hass.async_block_till_done()
assert acc.char_swing.value == 1
# Set from HomeKit
call_oscillate = async_mock_service(hass, DOMAIN, "oscillate")
char_swing_iid = acc.char_swing.to_HAP()[HAP_REPR_IID]
hk_driver.set_characteristics(
{
HAP_REPR_CHARS: [
{
HAP_REPR_AID: acc.aid,
HAP_REPR_IID: char_swing_iid,
HAP_REPR_VALUE: 0,
},
]
},
"mock_addr",
)
await hass.async_add_executor_job(acc.char_swing.client_update_value, 0)
await hass.async_block_till_done()
assert call_oscillate[0]
assert call_oscillate[0].data[ATTR_ENTITY_ID] == entity_id
assert call_oscillate[0].data[ATTR_OSCILLATING] is False
assert len(events) == 1
assert events[-1].data[ATTR_VALUE] is False
hk_driver.set_characteristics(
{
HAP_REPR_CHARS: [
{
HAP_REPR_AID: acc.aid,
HAP_REPR_IID: char_swing_iid,
HAP_REPR_VALUE: 1,
},
]
},
"mock_addr",
)
await hass.async_add_executor_job(acc.char_swing.client_update_value, 1)
await hass.async_block_till_done()
assert call_oscillate[1]
assert call_oscillate[1].data[ATTR_ENTITY_ID] == entity_id
assert call_oscillate[1].data[ATTR_OSCILLATING] is True
assert len(events) == 2
assert events[-1].data[ATTR_VALUE] is True
async def test_fan_speed(hass, hk_driver, cls, events):
"""Test fan with speed."""
entity_id = "fan.demo"
speed_list = [SPEED_OFF, SPEED_LOW, SPEED_HIGH]
hass.states.async_set(
entity_id,
STATE_ON,
{
ATTR_SUPPORTED_FEATURES: SUPPORT_SET_SPEED,
ATTR_SPEED: SPEED_OFF,
ATTR_SPEED_LIST: speed_list,
},
)
await hass.async_block_till_done()
acc = cls.fan(hass, hk_driver, "Fan", entity_id, 1, None)
hk_driver.add_accessory(acc)
# Initial value can be anything but 0. If it is 0, it might cause HomeKit to set the
# speed to 100 when turning on a fan on a freshly booted up server.
assert acc.char_speed.value != 0
await acc.run_handler()
await hass.async_block_till_done()
assert (
acc.speed_mapping.speed_ranges == HomeKitSpeedMapping(speed_list).speed_ranges
)
acc.speed_mapping.speed_to_homekit = Mock(return_value=42)
acc.speed_mapping.speed_to_states = Mock(return_value="ludicrous")
hass.states.async_set(entity_id, STATE_ON, {ATTR_SPEED: SPEED_HIGH})
await hass.async_block_till_done()
acc.speed_mapping.speed_to_homekit.assert_called_with(SPEED_HIGH)
assert acc.char_speed.value == 42
# Set from HomeKit
call_set_speed = async_mock_service(hass, DOMAIN, "set_speed")
char_speed_iid = acc.char_speed.to_HAP()[HAP_REPR_IID]
char_active_iid = acc.char_active.to_HAP()[HAP_REPR_IID]
hk_driver.set_characteristics(
{
HAP_REPR_CHARS: [
{
HAP_REPR_AID: acc.aid,
HAP_REPR_IID: char_speed_iid,
HAP_REPR_VALUE: 42,
},
]
},
"mock_addr",
)
await hass.async_add_executor_job(acc.char_speed.client_update_value, 42)
await hass.async_block_till_done()
acc.speed_mapping.speed_to_states.assert_called_with(42)
assert acc.char_speed.value == 42
assert acc.char_active.value == 1
assert call_set_speed[0]
assert call_set_speed[0].data[ATTR_ENTITY_ID] == entity_id
assert call_set_speed[0].data[ATTR_SPEED] == "ludicrous"
assert len(events) == 1
assert events[-1].data[ATTR_VALUE] == "ludicrous"
# Verify speed is preserved from off to on
hass.states.async_set(entity_id, STATE_OFF, {ATTR_SPEED: SPEED_OFF})
await hass.async_block_till_done()
assert acc.char_speed.value == 42
assert acc.char_active.value == 0
hk_driver.set_characteristics(
{
HAP_REPR_CHARS: [
{
HAP_REPR_AID: acc.aid,
HAP_REPR_IID: char_active_iid,
HAP_REPR_VALUE: 1,
},
]
},
"mock_addr",
)
await hass.async_block_till_done()
assert acc.char_speed.value == 42
assert acc.char_active.value == 1
async def test_fan_set_all_one_shot(hass, hk_driver, cls, events):
    """Test setting all fan characteristics in a single request."""
entity_id = "fan.demo"
speed_list = [SPEED_OFF, SPEED_LOW, SPEED_HIGH]
hass.states.async_set(
entity_id,
STATE_ON,
{
ATTR_SUPPORTED_FEATURES: SUPPORT_SET_SPEED
| SUPPORT_OSCILLATE
| SUPPORT_DIRECTION,
ATTR_SPEED: SPEED_OFF,
ATTR_OSCILLATING: False,
ATTR_DIRECTION: DIRECTION_FORWARD,
ATTR_SPEED_LIST: speed_list,
},
)
await hass.async_block_till_done()
acc = cls.fan(hass, hk_driver, "Fan", entity_id, 1, None)
hk_driver.add_accessory(acc)
# Initial value can be anything but 0. If it is 0, it might cause HomeKit to set the
# speed to 100 when turning on a fan on a freshly booted up server.
assert acc.char_speed.value != 0
await acc.run_handler()
await hass.async_block_till_done()
assert (
acc.speed_mapping.speed_ranges == HomeKitSpeedMapping(speed_list).speed_ranges
)
acc.speed_mapping.speed_to_homekit = Mock(return_value=42)
acc.speed_mapping.speed_to_states = Mock(return_value="ludicrous")
hass.states.async_set(
entity_id,
STATE_OFF,
{
ATTR_SUPPORTED_FEATURES: SUPPORT_SET_SPEED
| SUPPORT_OSCILLATE
| SUPPORT_DIRECTION,
ATTR_SPEED: SPEED_OFF,
ATTR_OSCILLATING: False,
ATTR_DIRECTION: DIRECTION_FORWARD,
ATTR_SPEED_LIST: speed_list,
},
)
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == STATE_OFF
# Set from HomeKit
call_set_speed = async_mock_service(hass, DOMAIN, "set_speed")
call_oscillate = async_mock_service(hass, DOMAIN, "oscillate")
call_set_direction = async_mock_service(hass, DOMAIN, "set_direction")
call_turn_on = async_mock_service(hass, DOMAIN, "turn_on")
call_turn_off = async_mock_service(hass, DOMAIN, "turn_off")
char_active_iid = acc.char_active.to_HAP()[HAP_REPR_IID]
char_direction_iid = acc.char_direction.to_HAP()[HAP_REPR_IID]
char_swing_iid = acc.char_swing.to_HAP()[HAP_REPR_IID]
char_speed_iid = acc.char_speed.to_HAP()[HAP_REPR_IID]
hk_driver.set_characteristics(
{
HAP_REPR_CHARS: [
{
HAP_REPR_AID: acc.aid,
HAP_REPR_IID: char_active_iid,
HAP_REPR_VALUE: 1,
},
{
HAP_REPR_AID: acc.aid,
HAP_REPR_IID: char_speed_iid,
HAP_REPR_VALUE: 42,
},
{
HAP_REPR_AID: acc.aid,
HAP_REPR_IID: char_swing_iid,
HAP_REPR_VALUE: 1,
},
{
HAP_REPR_AID: acc.aid,
HAP_REPR_IID: char_direction_iid,
HAP_REPR_VALUE: 1,
},
]
},
"mock_addr",
)
await hass.async_block_till_done()
acc.speed_mapping.speed_to_states.assert_called_with(42)
assert not call_turn_on
assert call_set_speed[0]
assert call_set_speed[0].data[ATTR_ENTITY_ID] == entity_id
assert call_set_speed[0].data[ATTR_SPEED] == "ludicrous"
assert call_oscillate[0]
assert call_oscillate[0].data[ATTR_ENTITY_ID] == entity_id
assert call_oscillate[0].data[ATTR_OSCILLATING] is True
assert call_set_direction[0]
assert call_set_direction[0].data[ATTR_ENTITY_ID] == entity_id
assert call_set_direction[0].data[ATTR_DIRECTION] == DIRECTION_REVERSE
assert len(events) == 3
assert events[0].data[ATTR_VALUE] is True
assert events[1].data[ATTR_VALUE] == DIRECTION_REVERSE
assert events[2].data[ATTR_VALUE] == "ludicrous"
hass.states.async_set(
entity_id,
STATE_ON,
{
ATTR_SUPPORTED_FEATURES: SUPPORT_SET_SPEED
| SUPPORT_OSCILLATE
| SUPPORT_DIRECTION,
ATTR_SPEED: SPEED_OFF,
ATTR_OSCILLATING: False,
ATTR_DIRECTION: DIRECTION_FORWARD,
ATTR_SPEED_LIST: speed_list,
},
)
await hass.async_block_till_done()
hk_driver.set_characteristics(
{
HAP_REPR_CHARS: [
{
HAP_REPR_AID: acc.aid,
HAP_REPR_IID: char_active_iid,
HAP_REPR_VALUE: 1,
},
{
HAP_REPR_AID: acc.aid,
HAP_REPR_IID: char_speed_iid,
HAP_REPR_VALUE: 42,
},
{
HAP_REPR_AID: acc.aid,
HAP_REPR_IID: char_swing_iid,
HAP_REPR_VALUE: 1,
},
{
HAP_REPR_AID: acc.aid,
HAP_REPR_IID: char_direction_iid,
HAP_REPR_VALUE: 1,
},
]
},
"mock_addr",
)
    # Turn on should not be called if it's already on
    # and we set a fan speed
await hass.async_block_till_done()
acc.speed_mapping.speed_to_states.assert_called_with(42)
assert len(events) == 6
assert call_set_speed[1]
assert call_set_speed[1].data[ATTR_ENTITY_ID] == entity_id
assert call_set_speed[1].data[ATTR_SPEED] == "ludicrous"
assert call_oscillate[1]
assert call_oscillate[1].data[ATTR_ENTITY_ID] == entity_id
assert call_oscillate[1].data[ATTR_OSCILLATING] is True
assert call_set_direction[1]
assert call_set_direction[1].data[ATTR_ENTITY_ID] == entity_id
assert call_set_direction[1].data[ATTR_DIRECTION] == DIRECTION_REVERSE
assert events[-3].data[ATTR_VALUE] is True
assert events[-2].data[ATTR_VALUE] == DIRECTION_REVERSE
assert events[-1].data[ATTR_VALUE] == "ludicrous"
hk_driver.set_characteristics(
{
HAP_REPR_CHARS: [
{
HAP_REPR_AID: acc.aid,
HAP_REPR_IID: char_active_iid,
HAP_REPR_VALUE: 0,
},
{
HAP_REPR_AID: acc.aid,
HAP_REPR_IID: char_speed_iid,
HAP_REPR_VALUE: 42,
},
{
HAP_REPR_AID: acc.aid,
HAP_REPR_IID: char_swing_iid,
HAP_REPR_VALUE: 1,
},
{
HAP_REPR_AID: acc.aid,
HAP_REPR_IID: char_direction_iid,
HAP_REPR_VALUE: 1,
},
]
},
"mock_addr",
)
await hass.async_block_till_done()
assert len(events) == 7
assert call_turn_off
assert call_turn_off[0].data[ATTR_ENTITY_ID] == entity_id
assert len(call_set_speed) == 2
assert len(call_oscillate) == 2
assert len(call_set_direction) == 2
async def test_fan_restore(hass, hk_driver, cls, events):
    """Test setting up an entity from state in the entity registry."""
hass.state = CoreState.not_running
registry = await entity_registry.async_get_registry(hass)
registry.async_get_or_create(
"fan",
"generic",
"1234",
suggested_object_id="simple",
)
registry.async_get_or_create(
"fan",
"generic",
"9012",
suggested_object_id="all_info_set",
capabilities={"speed_list": ["off", "low", "medium", "high"]},
supported_features=SUPPORT_SET_SPEED | SUPPORT_OSCILLATE | SUPPORT_DIRECTION,
device_class="mock-device-class",
)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START, {})
await hass.async_block_till_done()
acc = cls.fan(hass, hk_driver, "Fan", "fan.simple", 2, None)
assert acc.category == 3
assert acc.char_active is not None
assert acc.char_direction is None
assert acc.char_speed is None
assert acc.char_swing is None
acc = cls.fan(hass, hk_driver, "Fan", "fan.all_info_set", 2, None)
assert acc.category == 3
assert acc.char_active is not None
assert acc.char_direction is not None
assert acc.char_speed is not None
assert acc.char_swing is not None
|
from __future__ import print_function
import os
import re
import sys
import tensorflow as tf
def write_config():
  """Retrieve compile and link information from tensorflow and write to .bazelrc."""
cflags = tf.sysconfig.get_compile_flags()
inc_regex = re.compile("^-I")
opt_regex = re.compile("^-D")
include_list = []
opt_list = []
for arg in cflags:
if inc_regex.match(arg):
include_list.append(arg)
elif opt_regex.match(arg):
opt_list.append(arg)
else:
print("WARNING: Unexpected cflag item {}".format(arg))
if len(include_list) != 1:
print("ERROR: Expected a single include directory in " +
"tf.sysconfig.get_compile_flags()")
    sys.exit(1)
library_regex = re.compile("^-l")
libdir_regex = re.compile("^-L")
library_list = []
libdir_list = []
lib = tf.sysconfig.get_link_flags()
for arg in lib:
if library_regex.match(arg):
library_list.append(arg)
elif libdir_regex.match(arg):
libdir_list.append(arg)
else:
print("WARNING: Unexpected link flag item {}".format(arg))
  if len(library_list) != 1 or len(libdir_list) != 1:
    print("ERROR: Expected exactly one lib and one libdir in " +
          "tf.sysconfig.get_link_flags()")
    sys.exit(1)
try:
with open(".bazelrc", "w") as bazel_rc:
for opt in opt_list:
bazel_rc.write('build --copt="{}"\n'.format(opt))
bazel_rc.write('build --action_env TF_HEADER_DIR="{}"\n'
.format(include_list[0][2:]))
bazel_rc.write('build --action_env TF_SHARED_LIBRARY_DIR="{}"\n'
.format(libdir_list[0][2:]))
library_name = library_list[0][2:]
if library_name.startswith(":"):
library_name = library_name[1:]
else:
library_name = "lib" + library_name + ".so"
bazel_rc.write('build --action_env TF_SHARED_LIBRARY_NAME="{}"\n'
.format(library_name))
  except OSError:
    print("ERROR: Failed to write .bazelrc")
    sys.exit(1)
def compile_bazel():
write_config()
if os.system('rm -f tensorflow_gcs_config/*.so && bazel build -c dbg //tensorflow_gcs_config:_gcs_config_ops.so && cp bazel-bin/tensorflow_gcs_config/_gcs_config_ops.so tensorflow_gcs_config/') != 0:
raise Exception('Failed to build C extension.')
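# For reference, the .bazelrc that write_config() produces contains lines of
# roughly this shape (paths and names below are illustrative placeholders, not
# actual output):
#
#   build --copt="-D_GLIBCXX_USE_CXX11_ABI=0"
#   build --action_env TF_HEADER_DIR="/usr/lib/python3/site-packages/tensorflow/include"
#   build --action_env TF_SHARED_LIBRARY_DIR="/usr/lib/python3/site-packages/tensorflow"
#   build --action_env TF_SHARED_LIBRARY_NAME="libtensorflow_framework.so.2"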
|
from google_nest_sdm.device import Device
from google_nest_sdm.event import EventMessage
from .common import async_setup_sdm_platform
PLATFORM = "sensor"
THERMOSTAT_TYPE = "sdm.devices.types.THERMOSTAT"
async def async_setup_sensor(hass, devices=None, structures=None):
    """Set up the platform and prerequisites."""
    return await async_setup_sdm_platform(
        hass, PLATFORM, devices or {}, structures or {}
    )
async def test_thermostat_device(hass):
"""Test a thermostat with temperature and humidity sensors."""
devices = {
"some-device-id": Device.MakeDevice(
{
"name": "some-device-id",
"type": THERMOSTAT_TYPE,
"traits": {
"sdm.devices.traits.Info": {
"customName": "My Sensor",
},
"sdm.devices.traits.Temperature": {
"ambientTemperatureCelsius": 25.1,
},
"sdm.devices.traits.Humidity": {
"ambientHumidityPercent": 35.0,
},
},
},
auth=None,
)
}
await async_setup_sensor(hass, devices)
temperature = hass.states.get("sensor.my_sensor_temperature")
assert temperature is not None
assert temperature.state == "25.1"
humidity = hass.states.get("sensor.my_sensor_humidity")
assert humidity is not None
assert humidity.state == "35.0"
registry = await hass.helpers.entity_registry.async_get_registry()
entry = registry.async_get("sensor.my_sensor_temperature")
assert entry.unique_id == "some-device-id-temperature"
assert entry.original_name == "My Sensor Temperature"
assert entry.domain == "sensor"
device_registry = await hass.helpers.device_registry.async_get_registry()
device = device_registry.async_get(entry.device_id)
assert device.name == "My Sensor"
assert device.model == "Thermostat"
assert device.identifiers == {("nest", "some-device-id")}
async def test_no_devices(hass):
"""Test no devices returned by the api."""
await async_setup_sensor(hass)
temperature = hass.states.get("sensor.my_sensor_temperature")
assert temperature is None
humidity = hass.states.get("sensor.my_sensor_humidity")
assert humidity is None
async def test_device_no_sensor_traits(hass):
    """Test a device without any sensor traits."""
devices = {
"some-device-id": Device.MakeDevice(
{
"name": "some-device-id",
"type": THERMOSTAT_TYPE,
"traits": {},
},
auth=None,
)
}
await async_setup_sensor(hass, devices)
temperature = hass.states.get("sensor.my_sensor_temperature")
assert temperature is None
humidity = hass.states.get("sensor.my_sensor_humidity")
assert humidity is None
async def test_device_name_from_structure(hass):
"""Test a device without a custom name, inferring name from structure."""
devices = {
"some-device-id": Device.MakeDevice(
{
"name": "some-device-id",
"type": THERMOSTAT_TYPE,
"traits": {
"sdm.devices.traits.Temperature": {
"ambientTemperatureCelsius": 25.2,
},
},
"parentRelations": [
{"parent": "some-structure-id", "displayName": "Some Room"}
],
},
auth=None,
)
}
await async_setup_sensor(hass, devices)
temperature = hass.states.get("sensor.some_room_temperature")
assert temperature is not None
assert temperature.state == "25.2"
async def test_event_updates_sensor(hass):
"""Test a pubsub message received by subscriber to update temperature."""
devices = {
"some-device-id": Device.MakeDevice(
{
"name": "some-device-id",
"type": THERMOSTAT_TYPE,
"traits": {
"sdm.devices.traits.Info": {
"customName": "My Sensor",
},
"sdm.devices.traits.Temperature": {
"ambientTemperatureCelsius": 25.1,
},
},
},
auth=None,
)
}
subscriber = await async_setup_sensor(hass, devices)
temperature = hass.states.get("sensor.my_sensor_temperature")
assert temperature is not None
assert temperature.state == "25.1"
# Simulate a pubsub message received by the subscriber with a trait update
event = EventMessage(
{
"eventId": "some-event-id",
"timestamp": "2019-01-01T00:00:01Z",
"resourceUpdate": {
"name": "some-device-id",
"traits": {
"sdm.devices.traits.Temperature": {
"ambientTemperatureCelsius": 26.2,
},
},
},
},
auth=None,
)
subscriber.receive_event(event)
await hass.async_block_till_done() # Process dispatch/update signal
temperature = hass.states.get("sensor.my_sensor_temperature")
assert temperature is not None
assert temperature.state == "26.2"
async def test_device_with_unknown_type(hass):
    """Test a device with an unknown device type."""
devices = {
"some-device-id": Device.MakeDevice(
{
"name": "some-device-id",
"type": "some-unknown-type",
"traits": {
"sdm.devices.traits.Info": {
"customName": "My Sensor",
},
"sdm.devices.traits.Temperature": {
"ambientTemperatureCelsius": 25.1,
},
},
},
auth=None,
)
}
await async_setup_sensor(hass, devices)
temperature = hass.states.get("sensor.my_sensor_temperature")
assert temperature is not None
assert temperature.state == "25.1"
registry = await hass.helpers.entity_registry.async_get_registry()
entry = registry.async_get("sensor.my_sensor_temperature")
assert entry.unique_id == "some-device-id-temperature"
assert entry.original_name == "My Sensor Temperature"
assert entry.domain == "sensor"
device_registry = await hass.helpers.device_registry.async_get_registry()
device = device_registry.async_get(entry.device_id)
assert device.name == "My Sensor"
assert device.model is None
assert device.identifiers == {("nest", "some-device-id")}
|
import logging
from deluge_client import DelugeRPCClient, FailedToReconnectException
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_HOST,
CONF_MONITORED_VARIABLES,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_USERNAME,
DATA_RATE_KILOBYTES_PER_SECOND,
STATE_IDLE,
)
from homeassistant.exceptions import PlatformNotReady
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
_THROTTLED_REFRESH = None
DEFAULT_NAME = "Deluge"
DEFAULT_PORT = 58846
DHT_UPLOAD = 1000
DHT_DOWNLOAD = 1000
SENSOR_TYPES = {
"current_status": ["Status", None],
"download_speed": ["Down Speed", DATA_RATE_KILOBYTES_PER_SECOND],
"upload_speed": ["Up Speed", DATA_RATE_KILOBYTES_PER_SECOND],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Required(CONF_USERNAME): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_MONITORED_VARIABLES, default=[]): vol.All(
cv.ensure_list, [vol.In(SENSOR_TYPES)]
),
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Deluge sensors."""
name = config[CONF_NAME]
host = config[CONF_HOST]
username = config[CONF_USERNAME]
password = config[CONF_PASSWORD]
port = config[CONF_PORT]
deluge_api = DelugeRPCClient(host, port, username, password)
try:
deluge_api.connect()
except ConnectionRefusedError as err:
_LOGGER.error("Connection to Deluge Daemon failed")
raise PlatformNotReady from err
dev = []
for variable in config[CONF_MONITORED_VARIABLES]:
dev.append(DelugeSensor(variable, deluge_api, name))
add_entities(dev)
class DelugeSensor(Entity):
"""Representation of a Deluge sensor."""
def __init__(self, sensor_type, deluge_client, client_name):
"""Initialize the sensor."""
self._name = SENSOR_TYPES[sensor_type][0]
self.client = deluge_client
self.type = sensor_type
self.client_name = client_name
self._state = None
self._unit_of_measurement = SENSOR_TYPES[sensor_type][1]
self.data = None
self._available = False
@property
def name(self):
"""Return the name of the sensor."""
return f"{self.client_name} {self._name}"
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def available(self):
"""Return true if device is available."""
return self._available
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement
    def update(self):
        """Get the latest data from Deluge and update the state."""
try:
self.data = self.client.call(
"core.get_session_status",
[
"upload_rate",
"download_rate",
"dht_upload_rate",
"dht_download_rate",
],
)
self._available = True
except FailedToReconnectException:
_LOGGER.error("Connection to Deluge Daemon Lost")
self._available = False
return
upload = self.data[b"upload_rate"] - self.data[b"dht_upload_rate"]
download = self.data[b"download_rate"] - self.data[b"dht_download_rate"]
if self.type == "current_status":
if self.data:
if upload > 0 and download > 0:
self._state = "Up/Down"
elif upload > 0 and download == 0:
self._state = "Seeding"
elif upload == 0 and download > 0:
self._state = "Downloading"
else:
self._state = STATE_IDLE
else:
self._state = None
if self.data:
if self.type == "download_speed":
kb_spd = float(download)
kb_spd = kb_spd / 1024
self._state = round(kb_spd, 2 if kb_spd < 0.1 else 1)
elif self.type == "upload_speed":
kb_spd = float(upload)
kb_spd = kb_spd / 1024
self._state = round(kb_spd, 2 if kb_spd < 0.1 else 1)
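# Note on the speed sensors above: Deluge reports session rates in bytes per
# second, so update() divides by 1024 to get KB/s and rounds to 2 decimal
# places for very small rates (< 0.1 KB/s) and 1 decimal place otherwise.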
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
import functools
from absl._collections_abc import abc
from absl.flags import _argument_parser
from absl.flags import _exceptions
from absl.flags import _helpers
import six
@functools.total_ordering
class Flag(object):
"""Information about a command-line flag.
'Flag' objects define the following fields:
.name - the name for this flag;
.default - the default value for this flag;
.default_unparsed - the unparsed default value for this flag.
.default_as_str - default value as repr'd string, e.g., "'true'" (or None);
.value - the most recent parsed value of this flag; set by parse();
.help - a help string or None if no help is available;
.short_name - the single letter alias for this flag (or None);
.boolean - if 'true', this flag does not accept arguments;
.present - true if this flag was parsed from command line flags;
.parser - an ArgumentParser object;
.serializer - an ArgumentSerializer object;
.allow_override - the flag may be redefined without raising an error, and
newly defined flag overrides the old one.
.allow_override_cpp - use the flag from C++ if available; the flag
definition is replaced by the C++ flag after init;
.allow_hide_cpp - use the Python flag despite having a C++ flag with
the same name (ignore the C++ flag);
.using_default_value - the flag value has not been set by user;
.allow_overwrite - the flag may be parsed more than once without raising
an error, the last set value will be used;
.allow_using_method_names - whether this flag can be defined even if it has
a name that conflicts with a FlagValues method.
The only public method of a 'Flag' object is parse(), but it is
typically only called by a 'FlagValues' object. The parse() method is
a thin wrapper around the 'ArgumentParser' parse() method. The parsed
value is saved in .value, and the .present attribute is updated. If
this flag was already present, an Error is raised.
parse() is also called during __init__ to parse the default value and
initialize the .value attribute. This enables other python modules to
safely use flags even if the __main__ module neglects to parse the
command line arguments. The .present attribute is cleared after
__init__ parsing. If the default value is set to None, then the
__init__ parsing step is skipped and the .value attribute is
initialized to None.
Note: The default value is also presented to the user in the help
string, so it is important that it be a legal value for this flag.
"""
def __init__(self, parser, serializer, name, default, help_string,
short_name=None, boolean=False, allow_override=False,
allow_override_cpp=False, allow_hide_cpp=False,
allow_overwrite=True, allow_using_method_names=False):
self.name = name
if not help_string:
help_string = '(no help available)'
self.help = help_string
self.short_name = short_name
self.boolean = boolean
self.present = 0
self.parser = parser
self.serializer = serializer
self.allow_override = allow_override
self.allow_override_cpp = allow_override_cpp
self.allow_hide_cpp = allow_hide_cpp
self.allow_overwrite = allow_overwrite
self.allow_using_method_names = allow_using_method_names
self.using_default_value = True
self._value = None
self.validators = []
if self.allow_hide_cpp and self.allow_override_cpp:
raise _exceptions.Error(
"Can't have both allow_hide_cpp (means use Python flag) and "
'allow_override_cpp (means use C++ flag after InitGoogle)')
self._set_default(default)
@property
def value(self):
return self._value
@value.setter
def value(self, value):
self._value = value
def __hash__(self):
return hash(id(self))
def __eq__(self, other):
return self is other
def __lt__(self, other):
if isinstance(other, Flag):
return id(self) < id(other)
return NotImplemented
def __getstate__(self):
raise TypeError("can't pickle Flag objects")
def __copy__(self):
raise TypeError('%s does not support shallow copies. '
'Use copy.deepcopy instead.' % type(self).__name__)
def __deepcopy__(self, memo):
result = object.__new__(type(self))
result.__dict__ = copy.deepcopy(self.__dict__, memo)
return result
def _get_parsed_value_as_string(self, value):
"""Returns parsed flag value as string."""
if value is None:
return None
if self.serializer:
return repr(self.serializer.serialize(value))
if self.boolean:
if value:
return repr('true')
else:
return repr('false')
return repr(_helpers.str_or_unicode(value))
def parse(self, argument):
"""Parses string and sets flag value.
Args:
argument: str or the correct flag value type, argument to be parsed.
"""
if self.present and not self.allow_overwrite:
raise _exceptions.IllegalFlagValueError(
'flag --%s=%s: already defined as %s' % (
self.name, argument, self.value))
self.value = self._parse(argument)
self.present += 1
def _parse(self, argument):
"""Internal parse function.
It returns the parsed value, and does not modify class states.
Args:
argument: str or the correct flag value type, argument to be parsed.
Returns:
The parsed value.
"""
try:
return self.parser.parse(argument)
except (TypeError, ValueError) as e: # Recast as IllegalFlagValueError.
raise _exceptions.IllegalFlagValueError(
'flag --%s=%s: %s' % (self.name, argument, e))
def unparse(self):
self.value = self.default
self.using_default_value = True
self.present = 0
def serialize(self):
"""Serializes the flag."""
return self._serialize(self.value)
def _serialize(self, value):
"""Internal serialize function."""
if value is None:
return ''
if self.boolean:
if value:
return '--%s' % self.name
else:
return '--no%s' % self.name
else:
if not self.serializer:
raise _exceptions.Error(
'Serializer not present for flag %s' % self.name)
return '--%s=%s' % (self.name, self.serializer.serialize(value))
def _set_default(self, value):
"""Changes the default value (and current value too) for this Flag."""
self.default_unparsed = value
if value is None:
self.default = None
else:
self.default = self._parse_from_default(value)
self.default_as_str = self._get_parsed_value_as_string(self.default)
if self.using_default_value:
self.value = self.default
# This is split out so that aliases can skip regular parsing of the default
# value.
def _parse_from_default(self, value):
return self._parse(value)
def flag_type(self):
"""Returns a str that describes the type of the flag.
NOTE: we use strings, and not the types.*Type constants because
our flags can have more exotic types, e.g., 'comma separated list
of strings', 'whitespace separated list of strings', etc.
"""
return self.parser.flag_type()
def _create_xml_dom_element(self, doc, module_name, is_key=False):
"""Returns an XML element that contains this flag's information.
This is information that is relevant to all flags (e.g., name,
    meaning, etc.). If you define a flag that has some other pieces of
    info, then please override _extra_xml_dom_elements.
Please do NOT override this method.
Args:
doc: minidom.Document, the DOM document it should create nodes from.
      module_name: str, the name of the module that defines this flag.
is_key: boolean, True iff this flag is key for main module.
Returns:
A minidom.Element instance.
"""
element = doc.createElement('flag')
if is_key:
element.appendChild(_helpers.create_xml_dom_element(doc, 'key', 'yes'))
element.appendChild(_helpers.create_xml_dom_element(
doc, 'file', module_name))
# Adds flag features that are relevant for all flags.
element.appendChild(_helpers.create_xml_dom_element(doc, 'name', self.name))
if self.short_name:
element.appendChild(_helpers.create_xml_dom_element(
doc, 'short_name', self.short_name))
if self.help:
element.appendChild(_helpers.create_xml_dom_element(
doc, 'meaning', self.help))
# The default flag value can either be represented as a string like on the
# command line, or as a Python object. We serialize this value in the
# latter case in order to remain consistent.
if self.serializer and not isinstance(self.default, str):
if self.default is not None:
default_serialized = self.serializer.serialize(self.default)
else:
default_serialized = ''
else:
default_serialized = self.default
element.appendChild(_helpers.create_xml_dom_element(
doc, 'default', default_serialized))
value_serialized = self._serialize_value_for_xml(self.value)
element.appendChild(_helpers.create_xml_dom_element(
doc, 'current', value_serialized))
element.appendChild(_helpers.create_xml_dom_element(
doc, 'type', self.flag_type()))
# Adds extra flag features this flag may have.
for e in self._extra_xml_dom_elements(doc):
element.appendChild(e)
return element
def _serialize_value_for_xml(self, value):
"""Returns the serialized value, for use in an XML help text."""
return value
def _extra_xml_dom_elements(self, doc):
"""Returns extra info about this flag in XML.
"Extra" means "not already included by _create_xml_dom_element above."
Args:
doc: minidom.Document, the DOM document it should create nodes from.
Returns:
A list of minidom.Element.
"""
# Usually, the parser knows the extra details about the flag, so
# we just forward the call to it.
return self.parser._custom_xml_dom_elements(doc) # pylint: disable=protected-access
class BooleanFlag(Flag):
"""Basic boolean flag.
Boolean flags do not take any arguments, and their value is either
True (1) or False (0). The false value is specified on the command
line by prepending the word 'no' to either the long or the short flag
name.
For example, if a Boolean flag was created whose long name was
'update' and whose short name was 'x', then this flag could be
explicitly unset through either --noupdate or --nox.
"""
def __init__(self, name, default, help, short_name=None, **args): # pylint: disable=redefined-builtin
p = _argument_parser.BooleanParser()
super(BooleanFlag, self).__init__(
p, None, name, default, help, short_name, 1, **args)
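# Illustrative example of the behaviour described in the BooleanFlag docstring
# (flag names hypothetical): a flag defined with name 'update' and short_name
# 'x' is set to True with --update or --x and explicitly cleared with
# --noupdate or --nox.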
class EnumFlag(Flag):
"""Basic enum flag; its value can be any string from list of enum_values."""
def __init__(self, name, default, help, enum_values, # pylint: disable=redefined-builtin
short_name=None, case_sensitive=True, **args):
p = _argument_parser.EnumParser(enum_values, case_sensitive)
g = _argument_parser.ArgumentSerializer()
super(EnumFlag, self).__init__(
p, g, name, default, help, short_name, **args)
self.help = '<%s>: %s' % ('|'.join(enum_values), self.help)
def _extra_xml_dom_elements(self, doc):
elements = []
for enum_value in self.parser.enum_values:
elements.append(_helpers.create_xml_dom_element(
doc, 'enum_value', enum_value))
return elements
class EnumClassFlag(Flag):
"""Basic enum flag; its value is an enum class's member."""
def __init__(
self,
name,
default,
help, # pylint: disable=redefined-builtin
enum_class,
short_name=None,
case_sensitive=False,
**args):
p = _argument_parser.EnumClassParser(
enum_class, case_sensitive=case_sensitive)
g = _argument_parser.EnumClassSerializer(lowercase=not case_sensitive)
super(EnumClassFlag, self).__init__(
p, g, name, default, help, short_name, **args)
self.help = '<%s>: %s' % ('|'.join(p.member_names), self.help)
def _extra_xml_dom_elements(self, doc):
elements = []
for enum_value in self.parser.enum_class.__members__.keys():
elements.append(_helpers.create_xml_dom_element(
doc, 'enum_value', enum_value))
return elements
class MultiFlag(Flag):
  """A flag that can appear multiple times on the command-line.
The value of such a flag is a list that contains the individual values
from all the appearances of that flag on the command-line.
See the __doc__ for Flag for most behavior of this class. Only
differences in behavior are described here:
* The default value may be either a single value or an iterable of values.
A single value is transformed into a single-item list of that value.
* The value of the flag is always a list, even if the option was
only supplied once, and even if the default value is a single
value
"""
def __init__(self, *args, **kwargs):
super(MultiFlag, self).__init__(*args, **kwargs)
self.help += ';\n repeat this option to specify a list of values'
def parse(self, arguments):
"""Parses one or more arguments with the installed parser.
Args:
arguments: a single argument or a list of arguments (typically a
list of default values); a single argument is converted
internally into a list containing one item.
"""
new_values = self._parse(arguments)
if self.present:
self.value.extend(new_values)
else:
self.value = new_values
self.present += len(new_values)
def _parse(self, arguments):
if (isinstance(arguments, abc.Iterable) and
not isinstance(arguments, six.string_types)):
arguments = list(arguments)
if not isinstance(arguments, list):
# Default value may be a list of values. Most other arguments
# will not be, so convert them into a single-item list to make
# processing simpler below.
arguments = [arguments]
return [super(MultiFlag, self)._parse(item) for item in arguments]
def _serialize(self, value):
"""See base class."""
if not self.serializer:
raise _exceptions.Error(
'Serializer not present for flag %s' % self.name)
if value is None:
return ''
serialized_items = [
super(MultiFlag, self)._serialize(value_item) for value_item in value
]
return '\n'.join(serialized_items)
def flag_type(self):
"""See base class."""
return 'multi ' + self.parser.flag_type()
def _extra_xml_dom_elements(self, doc):
elements = []
if hasattr(self.parser, 'enum_values'):
for enum_value in self.parser.enum_values:
elements.append(_helpers.create_xml_dom_element(
doc, 'enum_value', enum_value))
return elements
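# Illustrative example of MultiFlag accumulation (flag name hypothetical):
# passing '--language en --language fr' on the command line calls parse() once
# per occurrence, so the flag's .value ends up as ['en', 'fr'].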
class MultiEnumClassFlag(MultiFlag):
"""A multi_enum_class flag.
See the __doc__ for MultiFlag for most behaviors of this class. In addition,
this class knows how to handle enum.Enum instances as values for this flag
type.
"""
def __init__(self,
name,
default,
help_string,
enum_class,
case_sensitive=False,
**args):
p = _argument_parser.EnumClassParser(
enum_class, case_sensitive=case_sensitive)
g = _argument_parser.EnumClassListSerializer(
list_sep=',', lowercase=not case_sensitive)
super(MultiEnumClassFlag, self).__init__(
p, g, name, default, help_string, **args)
self.help = (
'<%s>: %s;\n repeat this option to specify a list of values' %
('|'.join(p.member_names), help_string or '(no help available)'))
def _extra_xml_dom_elements(self, doc):
elements = []
for enum_value in self.parser.enum_class.__members__.keys():
elements.append(_helpers.create_xml_dom_element(
doc, 'enum_value', enum_value))
return elements
def _serialize_value_for_xml(self, value):
"""See base class."""
if value is not None:
value_serialized = self.serializer.serialize(value)
else:
value_serialized = ''
return value_serialized
|
from warnings import warn
from .utils import Serialize
from .lexer import TerminalDef
###{standalone
class LexerConf(Serialize):
__serialize_fields__ = 'terminals', 'ignore', 'g_regex_flags', 'use_bytes', 'lexer_type'
__serialize_namespace__ = TerminalDef,
def __init__(self, terminals, re_module, ignore=(), postlex=None, callbacks=None, g_regex_flags=0, skip_validation=False, use_bytes=False):
self.terminals = terminals
self.terminals_by_name = {t.name: t for t in self.terminals}
assert len(self.terminals) == len(self.terminals_by_name)
self.ignore = ignore
self.postlex = postlex
self.callbacks = callbacks or {}
self.g_regex_flags = g_regex_flags
self.re_module = re_module
self.skip_validation = skip_validation
self.use_bytes = use_bytes
self.lexer_type = None
@property
def tokens(self):
warn("LexerConf.tokens is deprecated. Use LexerConf.terminals instead", DeprecationWarning)
return self.terminals
def _deserialize(self):
self.terminals_by_name = {t.name: t for t in self.terminals}
class ParserConf(Serialize):
__serialize_fields__ = 'rules', 'start', 'parser_type'
def __init__(self, rules, callbacks, start):
assert isinstance(start, list)
self.rules = rules
self.callbacks = callbacks
self.start = start
self.parser_type = None
###}
|
import time
import threading
import asyncio
from flexx import flx
class MyComponent1(flx.Component):
foo = flx.Property(0, settable=True)
@flx.reaction('foo')
def on_foo(self, *events):
for ev in events:
print('foo changed to', ev.new_value)
# Create component in main thread
comp = MyComponent1()
# Start server in its own thread
def start_flexx():
flx.create_server(loop=asyncio.new_event_loop())
flx.start()
t = threading.Thread(target=start_flexx)
t.start()
# Manipulate component from main thread
# (the component's on_foo() gets called from other thread)
for i in range(5, 9):
time.sleep(1)
comp.set_foo(i)
# Stop event loop (this is thread-safe) and wait for thread to end
flx.stop()
t.join()
|
import unittest
import numpy as np
import chainer
from chainer.backends import cuda
from chainer.functions import relu
from chainer import testing
from chainermn import create_communicator
from chainercv.links.model.mobilenet import TFConv2DBNActiv
from chainercv.utils.testing import attr
def _add_one(x):
return x + 1
@testing.parameterize(*testing.product({
'dilate': [1, 2],
'pad': [1, 'SAME'],
'args_style': ['explicit', 'None', 'omit'],
'activ': ['relu', 'add_one', None],
}))
class TestTFConv2DBNActiv(unittest.TestCase):
in_channels = 1
out_channels = 1
ksize = 3
stride = 1
pad = 1
def setUp(self):
if self.activ == 'relu':
activ = relu
elif self.activ == 'add_one':
activ = _add_one
elif self.activ is None:
activ = None
self.x = np.random.uniform(
-1, 1, (5, self.in_channels, 5, 5)).astype(np.float32)
self.gy = np.random.uniform(
-1, 1, (5, self.out_channels, 5, 5)).astype(np.float32)
# Convolution is the identity function.
initialW = np.array([[0, 0, 0], [0, 1, 0], [0, 0, 0]],
dtype=np.float32).reshape((1, 1, 3, 3))
bn_kwargs = {'decay': 0.8}
initial_bias = 0
if self.args_style == 'explicit':
self.l = TFConv2DBNActiv(
self.in_channels, self.out_channels, self.ksize,
self.stride, self.pad, self.dilate,
initialW=initialW, initial_bias=initial_bias,
activ=activ, bn_kwargs=bn_kwargs)
elif self.args_style == 'None':
self.l = TFConv2DBNActiv(
None, self.out_channels, self.ksize, self.stride, self.pad,
self.dilate, initialW=initialW, initial_bias=initial_bias,
activ=activ, bn_kwargs=bn_kwargs)
elif self.args_style == 'omit':
self.l = TFConv2DBNActiv(
self.out_channels, self.ksize, stride=self.stride,
pad=self.pad, dilate=self.dilate, initialW=initialW,
initial_bias=initial_bias, activ=activ, bn_kwargs=bn_kwargs)
def check_forward(self, x_data):
x = chainer.Variable(x_data)
        # Make the batch normalization the identity function.
self.l.bn.avg_var[:] = 1
self.l.bn.avg_mean[:] = 0
with chainer.using_config('train', False):
y = self.l(x)
self.assertIsInstance(y, chainer.Variable)
self.assertIsInstance(y.array, self.l.xp.ndarray)
if self.dilate == 1:
_x_data = x_data
elif self.dilate == 2:
_x_data = x_data[:, :, 1:-1, 1:-1]
if self.activ == 'relu':
np.testing.assert_almost_equal(
cuda.to_cpu(y.array), np.maximum(cuda.to_cpu(_x_data), 0),
decimal=4
)
elif self.activ == 'add_one':
np.testing.assert_almost_equal(
cuda.to_cpu(y.array), cuda.to_cpu(_x_data) + 1,
decimal=4
)
elif self.activ is None:
np.testing.assert_almost_equal(
cuda.to_cpu(y.array), cuda.to_cpu(_x_data),
decimal=4
)
def test_forward_cpu(self):
self.check_forward(self.x)
@attr.gpu
def test_forward_gpu(self):
self.l.to_gpu()
self.check_forward(cuda.to_gpu(self.x))
def check_backward(self, x_data, y_grad):
x = chainer.Variable(x_data)
y = self.l(x)
if self.dilate == 1:
y.grad = y_grad
elif self.dilate == 2:
y.grad = y_grad[:, :, 1:-1, 1:-1]
y.backward()
def test_backward_cpu(self):
self.check_backward(self.x, self.gy)
@attr.gpu
def test_backward_gpu(self):
self.l.to_gpu()
self.check_backward(cuda.to_gpu(self.x), cuda.to_gpu(self.gy))
@attr.mpi
class TestTFConv2DMultiNodeBNActiv(unittest.TestCase):
in_channels = 1
out_channels = 1
ksize = 3
stride = 1
pad = 1
dilate = 1
def setUp(self):
self.x = np.random.uniform(
-1, 1, (5, self.in_channels, 5, 5)).astype(np.float32)
self.gy = np.random.uniform(
-1, 1, (5, self.out_channels, 5, 5)).astype(np.float32)
# Convolution is the identity function.
initialW = np.array([[0, 0, 0], [0, 1, 0], [0, 0, 0]],
dtype=np.float32).reshape((1, 1, 3, 3))
bn_kwargs = {'decay': 0.8, 'comm': create_communicator('naive')}
initial_bias = 0
activ = relu
self.l = TFConv2DBNActiv(
self.in_channels, self.out_channels, self.ksize, self.stride,
self.pad, self.dilate, initialW=initialW,
initial_bias=initial_bias, activ=activ, bn_kwargs=bn_kwargs)
def check_forward(self, x_data):
x = chainer.Variable(x_data)
        # Make the batch normalization the identity function.
self.l.bn.avg_var[:] = 1
self.l.bn.avg_mean[:] = 0
with chainer.using_config('train', False):
y = self.l(x)
self.assertIsInstance(y, chainer.Variable)
self.assertIsInstance(y.array, self.l.xp.ndarray)
np.testing.assert_almost_equal(
cuda.to_cpu(y.array), np.maximum(cuda.to_cpu(x_data), 0),
decimal=4
)
def test_multi_node_batch_normalization_forward_cpu(self):
self.check_forward(self.x)
@attr.gpu
def test_multi_node_batch_normalization_forward_gpu(self):
self.l.to_gpu()
self.check_forward(cuda.to_gpu(self.x))
def check_backward(self, x_data, y_grad):
x = chainer.Variable(x_data)
y = self.l(x)
y.grad = y_grad
y.backward()
def test_multi_node_batch_normalization_backward_cpu(self):
self.check_backward(self.x, self.gy)
@attr.gpu
def test_multi_node_batch_normalization_backward_gpu(self):
self.l.to_gpu()
self.check_backward(cuda.to_gpu(self.x), cuda.to_gpu(self.gy))
testing.run_module(__name__, __file__)
|
import logging
import sys
from functools import wraps
from time import sleep
from pymongo.errors import (AutoReconnect, OperationFailure, DuplicateKeyError, ServerSelectionTimeoutError,
BulkWriteError)
from .hooks import log_exception as _log_exception
logger = logging.getLogger(__name__)
_MAX_RETRIES = 15
def _get_host(store):
ret = {}
if store:
try:
if isinstance(store, (list, tuple)):
store = store[0]
ret['l'] = store._arctic_lib.get_name()
ret['mnodes'] = ["{}:{}".format(h, p) for h, p in store._collection.database.client.nodes]
ret['mhost'] = "{}".format(store._arctic_lib.arctic.mongo_host)
except Exception:
# Sometimes get_name(), for example, fails if we're not connected to MongoDB.
pass
return ret
_in_retry = False
_retry_count = 0
def mongo_retry(f):
"""
Catch-all decorator that handles AutoReconnect and OperationFailure
errors from PyMongo
"""
log_all_exceptions = 'arctic' in f.__module__ if f.__module__ else False
@wraps(f)
def f_retry(*args, **kwargs):
global _retry_count, _in_retry
top_level = not _in_retry
_in_retry = True
try:
while True:
try:
return f(*args, **kwargs)
except (DuplicateKeyError, ServerSelectionTimeoutError, BulkWriteError) as e:
# Re-raise errors that won't go away.
_handle_error(f, e, _retry_count, **_get_host(args))
raise
except (OperationFailure, AutoReconnect) as e:
_retry_count += 1
_handle_error(f, e, _retry_count, **_get_host(args))
except Exception as e:
if log_all_exceptions:
_log_exception(f.__name__, e, _retry_count, **_get_host(args))
raise
finally:
if top_level:
_in_retry = False
_retry_count = 0
return f_retry
def _handle_error(f, e, retry_count, **kwargs):
if retry_count > _MAX_RETRIES:
logger.error('Too many retries %s [%s], raising' % (f.__name__, e))
e.traceback = sys.exc_info()[2]
raise
log_fn = logger.warning if retry_count > 2 else logger.debug
log_fn('%s %s [%s], retrying %i' % (type(e), f.__name__, e, retry_count))
# Log operation failure errors
_log_exception(f.__name__, e, retry_count, **kwargs)
# if 'unauthorized' in str(e):
# raise
sleep(0.01 * min((3 ** retry_count), 50)) # backoff...
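# A hedged usage sketch (not part of this module): any function that touches
# MongoDB can be wrapped so transient AutoReconnect/OperationFailure errors are
# retried with the backoff above (0.01 * 3**retry_count seconds, capped at
# 0.5 s per sleep, for at most _MAX_RETRIES attempts), e.g.
#
#   @mongo_retry
#   def read_symbol(library, symbol):  # hypothetical helper
#       return library.read(symbol)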
|
import re
import os
from setuptools import setup, find_packages
def fpath(name):
return os.path.join(os.path.dirname(__file__), name)
def read(fname):
return open(fpath(fname)).read()
def desc():
return read('README.md')
# grep flasgger/__init__.py since python 3.x cannot
# import it before using 2to3
file_text = read(fpath('flasgger/__init__.py'))
def grep(attrname):
pattern = r"{0}\W*=\W*'([^']+)'".format(attrname)
strval, = re.findall(pattern, file_text)
return strval
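# Illustrative only: given a line such as __version__ = '0.9.5' in
# flasgger/__init__.py (version string hypothetical), grep('__version__')
# returns '0.9.5' via the regex capture group.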
setup(
name='flasgger',
version=grep('__version__'),
url='https://github.com/flasgger/flasgger/',
license='MIT',
author=grep('__author__'),
author_email=grep('__email__'),
description='Extract swagger specs from your flask project',
long_description=desc(),
long_description_content_type="text/markdown",
packages=find_packages(
exclude=[
'tests', 'tests.*',
'examples', 'examples.*',
'demo_app', 'demo_app.*',
'etc', 'etc.*'
]
),
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=[
'Flask>=0.10',
'PyYAML>=3.0',
'jsonschema>=3.0.1',
'mistune',
'six>=1.10.0'
],
classifiers=[
'Intended Audience :: Developers',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
]
)
|
import datetime
import dill
from nose.tools import raises
import numpy as np
import os
import random
import sys
import warnings
sys.path = [os.path.abspath(os.path.dirname(__file__))] + sys.path
sys.path = [os.path.abspath(os.path.dirname(os.path.dirname(__file__)))] + sys.path
os.environ['is_test_suite'] = 'True'
from auto_ml import Predictor
import utils_testing as utils
@raises(ValueError)
def test_bad_val_in_column_descriptions():
np.random.seed(0)
df_titanic_train, df_titanic_test = utils.get_titanic_binary_classification_dataset()
column_descriptions = {
'survived': 'output'
, 'sex': 'categorical'
, 'embarked': 'categorical'
, 'pclass': 'categorical'
, 'fare': 'this_is_a_bad_value'
}
with warnings.catch_warnings(record=True) as w:
ml_predictor = Predictor(type_of_estimator='classifier', column_descriptions=column_descriptions)
print('we should be throwing a warning for the user to give them useful feedback')
assert len(w) == 1
assert True
@raises(ValueError)
def test_missing_output_col_in_column_descriptions():
np.random.seed(0)
df_titanic_train, df_titanic_test = utils.get_titanic_binary_classification_dataset()
column_descriptions = {
# 'survived': 'output'
'sex': 'categorical'
, 'embarked': 'categorical'
, 'pclass': 'categorical'
}
ml_predictor = Predictor(type_of_estimator='classifier', column_descriptions=column_descriptions)
@raises(ValueError)
def test_bad_val_for_type_of_estimator():
np.random.seed(0)
df_titanic_train, df_titanic_test = utils.get_titanic_binary_classification_dataset()
column_descriptions = {
# 'survived': 'output'
'sex': 'categorical'
, 'embarked': 'categorical'
, 'pclass': 'categorical'
}
ml_predictor = Predictor(type_of_estimator='invalid_type_of_estimator', column_descriptions=column_descriptions)
def test_nans_in_output_column():
np.random.seed(0)
df_titanic_train, df_titanic_test = utils.get_titanic_binary_classification_dataset()
column_descriptions = {
'survived': 'output'
, 'sex': 'categorical'
, 'embarked': 'categorical'
, 'pclass': 'categorical'
}
ml_predictor = Predictor(type_of_estimator='classifier', column_descriptions=column_descriptions)
ml_predictor.train(df_titanic_train)
test_score = ml_predictor.score(df_titanic_test, df_titanic_test.survived)
print('test_score')
print(test_score)
assert -0.215 < test_score < -0.13
def test_verify_features_finds_missing_prediction_features():
np.random.seed(0)
df_titanic_train, df_titanic_test = utils.get_titanic_binary_classification_dataset()
column_descriptions = {
'survived': 'output'
, 'sex': 'categorical'
, 'embarked': 'categorical'
, 'pclass': 'categorical'
}
ml_predictor = Predictor(type_of_estimator='classifier', column_descriptions=column_descriptions)
ml_predictor.train(df_titanic_train, verify_features=True)
file_name = ml_predictor.save(str(random.random()))
with open(file_name, 'rb') as read_file:
saved_ml_pipeline = dill.load(read_file)
os.remove(file_name)
try:
keras_file_name = file_name[:-5] + '_keras_deep_learning_model.h5'
os.remove(keras_file_name)
except:
pass
# Remove the "age" column from our prediction data
df_titanic_test = df_titanic_test.drop('age', axis=1)
missing_features = saved_ml_pipeline.named_steps['final_model'].verify_features(df_titanic_test)
print('missing_features')
print(missing_features)
print("len(missing_features['prediction_not_training'])")
print(len(missing_features['prediction_not_training']))
print("len(missing_features['training_not_prediction'])")
print(len(missing_features['training_not_prediction']))
assert len(missing_features['prediction_not_training']) == 0
assert len(missing_features['training_not_prediction']) == 1
def test_verify_features_finds_missing_training_features():
np.random.seed(0)
df_titanic_train, df_titanic_test = utils.get_titanic_binary_classification_dataset()
column_descriptions = {
'survived': 'output'
, 'sex': 'categorical'
, 'embarked': 'categorical'
, 'pclass': 'categorical'
}
# Remove the "sibsp" column from our training data
df_titanic_train = df_titanic_train.drop('sibsp', axis=1)
ml_predictor = Predictor(type_of_estimator='classifier', column_descriptions=column_descriptions)
ml_predictor.train(df_titanic_train, verify_features=True)
file_name = ml_predictor.save(str(random.random()))
with open(file_name, 'rb') as read_file:
saved_ml_pipeline = dill.load(read_file)
os.remove(file_name)
try:
keras_file_name = file_name[:-5] + '_keras_deep_learning_model.h5'
os.remove(keras_file_name)
except:
pass
missing_features = saved_ml_pipeline.named_steps['final_model'].verify_features(df_titanic_test)
print('missing_features')
print(missing_features)
print("len(missing_features['prediction_not_training'])")
print(len(missing_features['prediction_not_training']))
print("len(missing_features['training_not_prediction'])")
print(len(missing_features['training_not_prediction']))
assert len(missing_features['prediction_not_training']) == 1
assert len(missing_features['training_not_prediction']) == 0
def test_verify_features_finds_no_missing_features_when_none_are_missing():
np.random.seed(0)
df_titanic_train, df_titanic_test = utils.get_titanic_binary_classification_dataset()
column_descriptions = {
'survived': 'output'
, 'sex': 'categorical'
, 'embarked': 'categorical'
, 'pclass': 'categorical'
}
ml_predictor = Predictor(type_of_estimator='classifier', column_descriptions=column_descriptions)
ml_predictor.train(df_titanic_train, verify_features=True)
file_name = ml_predictor.save(str(random.random()))
with open(file_name, 'rb') as read_file:
saved_ml_pipeline = dill.load(read_file)
os.remove(file_name)
missing_features = saved_ml_pipeline.named_steps['final_model'].verify_features(df_titanic_test)
print('missing_features')
print(missing_features)
print("len(missing_features['prediction_not_training'])")
print(len(missing_features['prediction_not_training']))
print("len(missing_features['training_not_prediction'])")
print(len(missing_features['training_not_prediction']))
assert len(missing_features['prediction_not_training']) == 0
assert len(missing_features['training_not_prediction']) == 0
def test_unexpected_datetime_column_handled_without_errors():
df_titanic_train, df_titanic_test = utils.get_titanic_binary_classification_dataset()
column_descriptions = {
'survived': 'output'
, 'sex': 'categorical'
, 'embarked': 'categorical'
, 'pclass': 'categorical'
}
ml_predictor = Predictor(type_of_estimator='classifier', column_descriptions=column_descriptions)
ml_predictor.train(df_titanic_train)
test_dict = df_titanic_test.sample(frac=0.1).to_dict('records')[0]
test_dict['unexpected_column'] = datetime.date.today()
test_dict['another_unexpected_column'] = datetime.datetime.today()
ml_predictor.predict(test_dict)
# We want to make sure the above does not throw an error
assert True
def test_unmarked_categorical_column_throws_warning():
df_titanic_train, df_titanic_test = utils.get_titanic_binary_classification_dataset()
column_descriptions = {
'survived': 'output'
# This is the column we are "forgetting" to mark as categorical
# , 'sex': 'categorical'
, 'embarked': 'categorical'
, 'pclass': 'categorical'
}
ml_predictor = Predictor(type_of_estimator='classifier', column_descriptions=column_descriptions)
with warnings.catch_warnings(record=True) as caught_w:
ml_predictor.train(df_titanic_train)
print('We should throw a warning to give the user useful feedback on the unlabeled categorical column')
assert len(caught_w) == 1
ml_predictor.predict(df_titanic_test)
# We want to make sure the above does not throw an error
assert True
|
import sys
from acme.errors import ClientError
from flask import current_app
from lemur.extensions import sentry, metrics
from lemur.plugins import lemur_aws as aws, ExpirationNotificationPlugin
from lemur.plugins.bases import DestinationPlugin, ExportDestinationPlugin, SourcePlugin
from lemur.plugins.lemur_aws import iam, s3, elb, ec2, sns
def get_region_from_dns(dns):
# XXX.REGION.elb.amazonaws.com
if dns.endswith(".elb.amazonaws.com"):
return dns.split(".")[-4]
else:
# NLBs have a different DNS pattern: XXXX.elb.REGION.amazonaws.com
return dns.split(".")[-3]
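# Worked example (hypothetical DNS names): a classic ELB name such as
# "my-clb.us-east-1.elb.amazonaws.com" splits into
# ["my-clb", "us-east-1", "elb", "amazonaws", "com"], so index -4 is "us-east-1".
# An NLB name such as "my-nlb-abc123.elb.eu-west-1.amazonaws.com" fails the
# ".elb.amazonaws.com" suffix check, and index -3 yields "eu-west-1".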
def format_elb_cipher_policy_v2(policy):
"""
Attempts to format cipher policy information for elbv2 into a common format.
:param policy:
:return:
"""
ciphers = []
name = None
for descr in policy["SslPolicies"]:
name = descr["Name"]
for cipher in descr["Ciphers"]:
ciphers.append(cipher["Name"])
return dict(name=name, ciphers=ciphers)
def format_elb_cipher_policy(policy):
"""
Attempts to format cipher policy information into a common format.
:param policy:
:return:
"""
ciphers = []
name = None
for descr in policy["PolicyDescriptions"]:
for attr in descr["PolicyAttributeDescriptions"]:
if attr["AttributeName"] == "Reference-Security-Policy":
name = attr["AttributeValue"]
continue
if attr["AttributeValue"] == "true":
ciphers.append(attr["AttributeName"])
return dict(name=name, ciphers=ciphers)
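# Illustrative sketch (hypothetical response data, not taken from AWS documentation): given
# policy = {"PolicyDescriptions": [{"PolicyAttributeDescriptions": [
#     {"AttributeName": "Reference-Security-Policy", "AttributeValue": "ELBSecurityPolicy-2016-08"},
#     {"AttributeName": "ECDHE-RSA-AES128-GCM-SHA256", "AttributeValue": "true"},
#     {"AttributeName": "DES-CBC3-SHA", "AttributeValue": "false"}]}]}
# format_elb_cipher_policy(policy) returns
# {"name": "ELBSecurityPolicy-2016-08", "ciphers": ["ECDHE-RSA-AES128-GCM-SHA256"]}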
def get_elb_endpoints(account_number, region, elb_dict):
"""
Retrieves endpoint information from elb response data.
:param account_number:
:param region:
:param elb_dict:
:return:
"""
endpoints = []
for listener in elb_dict["ListenerDescriptions"]:
if not listener["Listener"].get("SSLCertificateId"):
continue
if listener["Listener"]["SSLCertificateId"] == "Invalid-Certificate":
continue
endpoint = dict(
name=elb_dict["LoadBalancerName"],
dnsname=elb_dict["DNSName"],
type="elb",
port=listener["Listener"]["LoadBalancerPort"],
certificate_name=iam.get_name_from_arn(
listener["Listener"]["SSLCertificateId"]
),
)
if listener["PolicyNames"]:
policy = elb.describe_load_balancer_policies(
elb_dict["LoadBalancerName"],
listener["PolicyNames"],
account_number=account_number,
region=region,
)
endpoint["policy"] = format_elb_cipher_policy(policy)
current_app.logger.debug("Found new endpoint. Endpoint: {}".format(endpoint))
endpoints.append(endpoint)
return endpoints
def get_elb_endpoints_v2(account_number, region, elb_dict):
"""
Retrieves endpoint information from elbv2 response data.
:param account_number:
:param region:
:param elb_dict:
:return:
"""
endpoints = []
listeners = elb.describe_listeners_v2(
account_number=account_number,
region=region,
LoadBalancerArn=elb_dict["LoadBalancerArn"],
)
for listener in listeners["Listeners"]:
if not listener.get("Certificates"):
continue
for certificate in listener["Certificates"]:
endpoint = dict(
name=elb_dict["LoadBalancerName"],
dnsname=elb_dict["DNSName"],
type="elbv2",
port=listener["Port"],
certificate_name=iam.get_name_from_arn(certificate["CertificateArn"]),
)
if listener["SslPolicy"]:
policy = elb.describe_ssl_policies_v2(
[listener["SslPolicy"]], account_number=account_number, region=region
)
endpoint["policy"] = format_elb_cipher_policy_v2(policy)
endpoints.append(endpoint)
return endpoints
class AWSSourcePlugin(SourcePlugin):
title = "AWS"
slug = "aws-source"
description = "Discovers all SSL certificates and ELB endpoints in an AWS account"
version = aws.VERSION
author = "Kevin Glisson"
author_url = "https://github.com/netflix/lemur"
options = [
{
"name": "accountNumber",
"type": "str",
"required": True,
"validation": "/^[0-9]{12,12}$/",
"helpMessage": "Must be a valid AWS account number!",
},
{
"name": "regions",
"type": "str",
"helpMessage": "Comma separated list of regions to search in, if no region is specified we look in all regions.",
},
]
def get_certificates(self, options, **kwargs):
cert_data = iam.get_all_certificates(
account_number=self.get_option("accountNumber", options)
)
return [
dict(
body=c["CertificateBody"],
chain=c.get("CertificateChain"),
name=c["ServerCertificateMetadata"]["ServerCertificateName"],
)
for c in cert_data
]
def get_endpoints(self, options, **kwargs):
endpoints = []
account_number = self.get_option("accountNumber", options)
regions = self.get_option("regions", options)
if not regions:
regions = ec2.get_regions(account_number=account_number)
else:
regions = "".join(regions.split()).split(",")
for region in regions:
elbs = elb.get_all_elbs(account_number=account_number, region=region)
current_app.logger.info({
"message": "Describing classic load balancers",
"account_number": account_number,
"region": region,
"number_of_load_balancers": len(elbs)
})
for e in elbs:
endpoints.extend(get_elb_endpoints(account_number, region, e))
# fetch advanced ELBs
elbs_v2 = elb.get_all_elbs_v2(account_number=account_number, region=region)
current_app.logger.info({
"message": "Describing advanced load balancers",
"account_number": account_number,
"region": region,
"number_of_load_balancers": len(elbs_v2)
})
for e in elbs_v2:
endpoints.extend(get_elb_endpoints_v2(account_number, region, e))
return endpoints
def update_endpoint(self, endpoint, certificate):
options = endpoint.source.options
account_number = self.get_option("accountNumber", options)
# relies on the fact that region is included in DNS name
region = get_region_from_dns(endpoint.dnsname)
arn = iam.create_arn_from_cert(account_number, region, certificate.name)
if endpoint.type == "elbv2":
listener_arn = elb.get_listener_arn_from_endpoint(
endpoint.name,
endpoint.port,
account_number=account_number,
region=region,
)
elb.attach_certificate_v2(
listener_arn,
endpoint.port,
[{"CertificateArn": arn}],
account_number=account_number,
region=region,
)
else:
elb.attach_certificate(
endpoint.name,
endpoint.port,
arn,
account_number=account_number,
region=region,
)
def clean(self, certificate, options, **kwargs):
account_number = self.get_option("accountNumber", options)
iam.delete_cert(certificate.name, account_number=account_number)
def get_certificate_by_name(self, certificate_name, options):
account_number = self.get_option("accountNumber", options)
# certificate name may contain path, in which case we remove it
if "/" in certificate_name:
certificate_name = certificate_name.split('/')[-1]
try:
cert = iam.get_certificate(certificate_name, account_number=account_number)
if cert:
return dict(
body=cert["CertificateBody"],
chain=cert.get("CertificateChain"),
name=cert["ServerCertificateMetadata"]["ServerCertificateName"],
)
except ClientError:
current_app.logger.warning(
"get_elb_certificate_failed: Unable to get certificate for {0}".format(certificate_name))
sentry.captureException()
metrics.send(
"get_elb_certificate_failed", "counter", 1,
metric_tags={"certificate_name": certificate_name, "account_number": account_number}
)
return None
def get_endpoint_certificate_names(self, endpoint):
options = endpoint.source.options
account_number = self.get_option("accountNumber", options)
region = get_region_from_dns(endpoint.dnsname)
certificate_names = []
if endpoint.type == "elb":
elb_details = elb.get_elbs(account_number=account_number,
region=region,
LoadBalancerNames=[endpoint.name],)
for lb_description in elb_details["LoadBalancerDescriptions"]:
for listener_description in lb_description["ListenerDescriptions"]:
listener = listener_description.get("Listener")
if not listener.get("SSLCertificateId"):
continue
certificate_names.append(iam.get_name_from_arn(listener.get("SSLCertificateId")))
elif endpoint.type == "elbv2":
listeners = elb.describe_listeners_v2(
account_number=account_number,
region=region,
LoadBalancerArn=elb.get_load_balancer_arn_from_endpoint(endpoint.name,
account_number=account_number,
region=region),
)
for listener in listeners["Listeners"]:
if not listener.get("Certificates"):
continue
for certificate in listener["Certificates"]:
certificate_names.append(iam.get_name_from_arn(certificate["CertificateArn"]))
return certificate_names
class AWSDestinationPlugin(DestinationPlugin):
title = "AWS"
slug = "aws-destination"
description = "Allow the uploading of certificates to AWS IAM"
version = aws.VERSION
sync_as_source = True
sync_as_source_name = AWSSourcePlugin.slug
author = "Kevin Glisson"
author_url = "https://github.com/netflix/lemur"
options = [
{
"name": "accountNumber",
"type": "str",
"required": True,
"validation": "[0-9]{12}",
"helpMessage": "Must be a valid AWS account number!",
},
{
"name": "path",
"type": "str",
"default": "/",
"helpMessage": "Path to upload certificate.",
},
]
def upload(self, name, body, private_key, cert_chain, options, **kwargs):
try:
iam.upload_cert(
name,
body,
private_key,
self.get_option("path", options),
cert_chain=cert_chain,
account_number=self.get_option("accountNumber", options),
)
except ClientError:
sentry.captureException()
def deploy(self, elb_name, account, region, certificate):
pass
def clean(self, certificate, options, **kwargs):
account_number = self.get_option("accountNumber", options)
iam.delete_cert(certificate.name, account_number=account_number)
class S3DestinationPlugin(ExportDestinationPlugin):
title = "AWS-S3"
slug = "aws-s3"
description = "Allow the uploading of certificates to Amazon S3"
author = "Mikhail Khodorovskiy, Harm Weites <[email protected]>"
author_url = "https://github.com/Netflix/lemur"
additional_options = [
{
"name": "bucket",
"type": "str",
"required": True,
"validation": "[0-9a-z.-]{3,63}",
"helpMessage": "Must be a valid S3 bucket name!",
},
{
"name": "accountNumber",
"type": "str",
"required": True,
"validation": "[0-9]{12}",
"helpMessage": "A valid AWS account number with permission to access S3",
},
{
"name": "region",
"type": "str",
"default": "us-east-1",
"required": False,
"helpMessage": "Region bucket exists",
"available": ["us-east-1", "us-west-2", "eu-west-1"],
},
{
"name": "encrypt",
"type": "bool",
"required": False,
"helpMessage": "Enable server side encryption",
"default": True,
},
{
"name": "prefix",
"type": "str",
"required": False,
"helpMessage": "Must be a valid S3 object prefix!",
},
]
def __init__(self, *args, **kwargs):
super(S3DestinationPlugin, self).__init__(*args, **kwargs)
def upload(self, name, body, private_key, chain, options, **kwargs):
files = self.export(body, private_key, chain, options)
for ext, passphrase, data in files:
s3.put(
self.get_option("bucket", options),
self.get_option("region", options),
"{prefix}/{name}.{extension}".format(
prefix=self.get_option("prefix", options), name=name, extension=ext
),
data,
self.get_option("encrypt", options),
account_number=self.get_option("accountNumber", options),
)
def upload_acme_token(self, token_path, token, options, **kwargs):
"""
This is called from the acme http challenge
:param self:
:param token_path:
:param token:
:param options:
:param kwargs:
:return:
"""
current_app.logger.debug("S3 destination plugin is started to upload HTTP-01 challenge")
function = f"{__name__}.{sys._getframe().f_code.co_name}"
account_number = self.get_option("accountNumber", options)
bucket_name = self.get_option("bucket", options)
prefix = self.get_option("prefix", options)
region = self.get_option("region", options)
filename = token_path.split("/")[-1]
if not prefix.endswith("/"):
prefix += "/"
response = s3.put(bucket_name=bucket_name,
region_name=region,
prefix=prefix + filename,
data=token,
encrypt=False,
account_number=account_number)
res = "Success" if response else "Failure"
log_data = {
"function": function,
"message": "upload acme token challenge",
"result": res,
"bucket_name": bucket_name,
"filename": filename
}
current_app.logger.info(log_data)
metrics.send(f"{function}", "counter", 1, metric_tags={"result": res,
"bucket_name": bucket_name,
"filename": filename})
return response
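# Sketch of the resulting object key (assuming the standard ACME HTTP-01 layout and a
# hypothetical bucket configuration): with token_path "/.well-known/acme-challenge/<token>"
# and prefix ".well-known/acme-challenge/", the data is written to the object key
# ".well-known/acme-challenge/<token>" in the configured bucket, where the CA can fetch it
# over HTTP.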
def delete_acme_token(self, token_path, options, **kwargs):
current_app.logger.debug("S3 destination plugin is started to delete HTTP-01 challenge")
function = f"{__name__}.{sys._getframe().f_code.co_name}"
account_number = self.get_option("accountNumber", options)
bucket_name = self.get_option("bucket", options)
prefix = self.get_option("prefix", options)
filename = token_path.split("/")[-1]
# keep the object key consistent with upload_acme_token above
if not prefix.endswith("/"):
prefix += "/"
response = s3.delete(bucket_name=bucket_name,
prefixed_object_name=prefix + filename,
account_number=account_number)
res = "Success" if response else "Failure"
log_data = {
"function": function,
"message": "delete acme token challenge",
"result": res,
"bucket_name": bucket_name,
"filename": filename
}
current_app.logger.info(log_data)
metrics.send(f"{function}", "counter", 1, metric_tags={"result": res,
"bucket_name": bucket_name,
"filename": filename})
return response
class SNSNotificationPlugin(ExpirationNotificationPlugin):
title = "AWS SNS"
slug = "aws-sns"
description = "Sends notifications to AWS SNS"
version = aws.VERSION
author = "Jasmine Schladen <[email protected]>"
author_url = "https://github.com/Netflix/lemur"
additional_options = [
{
"name": "accountNumber",
"type": "str",
"required": True,
"validation": "[0-9]{12}",
"helpMessage": "A valid AWS account number with permission to access the SNS topic",
},
{
"name": "region",
"type": "str",
"required": True,
"validation": "[0-9a-z\\-]{1,25}",
"helpMessage": "Region in which the SNS topic is located, e.g. \"us-east-1\"",
},
{
"name": "topicName",
"type": "str",
"required": True,
# base topic name is 1-256 characters (alphanumeric plus underscore and hyphen)
"validation": "^[a-zA-Z0-9_\\-]{1,256}$",
"helpMessage": "The name of the topic to use for expiration notifications",
}
]
def send(self, notification_type, message, excluded_targets, options, **kwargs):
"""
While we receive an `excluded_targets` parameter here, it is unused, as the SNS topic is pre-configured in the
plugin configuration and can't reasonably be changed dynamically.
"""
topic_arn = f"arn:aws:sns:{self.get_option('region', options)}:" \
f"{self.get_option('accountNumber', options)}:" \
f"{self.get_option('topicName', options)}"
current_app.logger.info(f"Publishing {notification_type} notification to topic {topic_arn}")
sns.publish(topic_arn, message, notification_type, region_name=self.get_option("region", options))
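# Illustrative ARN (hypothetical account and topic): with region "us-east-1", accountNumber
# "123456789012" and topicName "lemur-expiration", topic_arn becomes
# "arn:aws:sns:us-east-1:123456789012:lemur-expiration".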
|
from xknx.devices import Climate as XknxClimate, Device as XknxDevice
from homeassistant.helpers.entity import Entity
from .const import DOMAIN
class KnxEntity(Entity):
"""Representation of a KNX entity."""
def __init__(self, device: XknxDevice):
"""Set up device."""
self._device = device
@property
def name(self):
"""Return the name of the KNX device."""
return self._device.name
@property
def available(self):
"""Return True if entity is available."""
return self.hass.data[DOMAIN].connected
@property
def should_poll(self):
"""No polling needed within KNX."""
return False
async def async_update(self):
"""Request a state update from KNX bus."""
await self._device.sync()
async def after_update_callback(self, device: XknxDevice):
"""Call after device was updated."""
self.async_write_ha_state()
async def async_added_to_hass(self) -> None:
"""Store register state change callback."""
self._device.register_device_updated_cb(self.after_update_callback)
if isinstance(self._device, XknxClimate):
self._device.mode.register_device_updated_cb(self.after_update_callback)
async def async_will_remove_from_hass(self) -> None:
"""Disconnect device object when removed."""
self._device.unregister_device_updated_cb(self.after_update_callback)
if isinstance(self._device, XknxClimate):
self._device.mode.unregister_device_updated_cb(self.after_update_callback)
|
import collections
import itertools
import posixpath
import socket
import xml.etree.ElementTree as ET
from http import client
from radicale import app, httputils, pathutils, rights, storage, xmlutils
from radicale.log import logger
def xml_propfind(base_prefix, path, xml_request, allowed_items, user,
encoding):
"""Read and answer PROPFIND requests.
Read rfc4918-9.1 for info.
The collections parameter is a list of collections that are to be included
in the output.
"""
# A client may choose not to submit a request body. An empty PROPFIND
# request body MUST be treated as if it were an 'allprop' request.
top_element = (xml_request[0] if xml_request is not None else
ET.Element(xmlutils.make_clark("D:allprop")))
props = ()
allprop = False
propname = False
if top_element.tag == xmlutils.make_clark("D:allprop"):
allprop = True
elif top_element.tag == xmlutils.make_clark("D:propname"):
propname = True
elif top_element.tag == xmlutils.make_clark("D:prop"):
props = [prop.tag for prop in top_element]
if xmlutils.make_clark("D:current-user-principal") in props and not user:
# Ask for authentication
# Returning the DAV:unauthenticated pseudo-principal as specified in
# RFC 5397 doesn't seem to work with DAVx5.
return client.FORBIDDEN, None
# Writing answer
multistatus = ET.Element(xmlutils.make_clark("D:multistatus"))
for item, permission in allowed_items:
write = permission == "w"
multistatus.append(xml_propfind_response(
base_prefix, path, item, props, user, encoding, write=write,
allprop=allprop, propname=propname))
return client.MULTI_STATUS, multistatus
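# Illustrative request (hypothetical client body, namespaces per xmlutils.make_clark):
#   <D:propfind xmlns:D="DAV:">
#     <D:prop><D:getetag/><D:displayname/></D:prop>
#   </D:propfind>
# Here top_element.tag is the Clark form of D:prop, so allprop and propname stay False and
# props becomes the two requested property tags, which xml_propfind_response then answers
# with 200 or 404 propstat blocks per property.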
def xml_propfind_response(base_prefix, path, item, props, user, encoding,
write=False, propname=False, allprop=False):
"""Build and return a PROPFIND response."""
if propname and allprop or (props and (propname or allprop)):
raise ValueError("Only use one of props, propname and allprops")
is_collection = isinstance(item, storage.BaseCollection)
if is_collection:
is_leaf = item.get_meta("tag") in ("VADDRESSBOOK", "VCALENDAR")
collection = item
else:
collection = item.collection
response = ET.Element(xmlutils.make_clark("D:response"))
href = ET.Element(xmlutils.make_clark("D:href"))
if is_collection:
# Some clients expect collections to end with /
uri = pathutils.unstrip_path(item.path, True)
else:
uri = pathutils.unstrip_path(
posixpath.join(collection.path, item.href))
href.text = xmlutils.make_href(base_prefix, uri)
response.append(href)
if propname or allprop:
props = []
# Should list all properties that can be retrieved by the code below
props.append(xmlutils.make_clark("D:principal-collection-set"))
props.append(xmlutils.make_clark("D:current-user-principal"))
props.append(xmlutils.make_clark("D:current-user-privilege-set"))
props.append(xmlutils.make_clark("D:supported-report-set"))
props.append(xmlutils.make_clark("D:resourcetype"))
props.append(xmlutils.make_clark("D:owner"))
if is_collection and collection.is_principal:
props.append(xmlutils.make_clark("C:calendar-user-address-set"))
props.append(xmlutils.make_clark("D:principal-URL"))
props.append(xmlutils.make_clark("CR:addressbook-home-set"))
props.append(xmlutils.make_clark("C:calendar-home-set"))
if not is_collection or is_leaf:
props.append(xmlutils.make_clark("D:getetag"))
props.append(xmlutils.make_clark("D:getlastmodified"))
props.append(xmlutils.make_clark("D:getcontenttype"))
props.append(xmlutils.make_clark("D:getcontentlength"))
if is_collection:
if is_leaf:
props.append(xmlutils.make_clark("D:displayname"))
props.append(xmlutils.make_clark("D:sync-token"))
if collection.get_meta("tag") == "VCALENDAR":
props.append(xmlutils.make_clark("CS:getctag"))
props.append(
xmlutils.make_clark("C:supported-calendar-component-set"))
meta = item.get_meta()
for tag in meta:
if tag == "tag":
continue
clark_tag = xmlutils.make_clark(tag)
if clark_tag not in props:
props.append(clark_tag)
responses = collections.defaultdict(list)
if propname:
for tag in props:
responses[200].append(ET.Element(tag))
props = ()
for tag in props:
element = ET.Element(tag)
is404 = False
if tag == xmlutils.make_clark("D:getetag"):
if not is_collection or is_leaf:
element.text = item.etag
else:
is404 = True
elif tag == xmlutils.make_clark("D:getlastmodified"):
if not is_collection or is_leaf:
element.text = item.last_modified
else:
is404 = True
elif tag == xmlutils.make_clark("D:principal-collection-set"):
child_element = ET.Element(xmlutils.make_clark("D:href"))
child_element.text = xmlutils.make_href(base_prefix, "/")
element.append(child_element)
elif (tag in (xmlutils.make_clark("C:calendar-user-address-set"),
xmlutils.make_clark("D:principal-URL"),
xmlutils.make_clark("CR:addressbook-home-set"),
xmlutils.make_clark("C:calendar-home-set")) and
collection.is_principal and is_collection):
child_element = ET.Element(xmlutils.make_clark("D:href"))
child_element.text = xmlutils.make_href(base_prefix, path)
element.append(child_element)
elif tag == xmlutils.make_clark("C:supported-calendar-component-set"):
human_tag = xmlutils.make_human_tag(tag)
if is_collection and is_leaf:
meta = item.get_meta(human_tag)
if meta:
components = meta.split(",")
else:
components = ("VTODO", "VEVENT", "VJOURNAL")
for component in components:
comp = ET.Element(xmlutils.make_clark("C:comp"))
comp.set("name", component)
element.append(comp)
else:
is404 = True
elif tag == xmlutils.make_clark("D:current-user-principal"):
if user:
child_element = ET.Element(xmlutils.make_clark("D:href"))
child_element.text = xmlutils.make_href(
base_prefix, "/%s/" % user)
element.append(child_element)
else:
element.append(ET.Element(
xmlutils.make_clark("D:unauthenticated")))
elif tag == xmlutils.make_clark("D:current-user-privilege-set"):
privileges = ["D:read"]
if write:
privileges.append("D:all")
privileges.append("D:write")
privileges.append("D:write-properties")
privileges.append("D:write-content")
for human_tag in privileges:
privilege = ET.Element(xmlutils.make_clark("D:privilege"))
privilege.append(ET.Element(
xmlutils.make_clark(human_tag)))
element.append(privilege)
elif tag == xmlutils.make_clark("D:supported-report-set"):
# These 3 reports are not implemented
reports = ["D:expand-property",
"D:principal-search-property-set",
"D:principal-property-search"]
if is_collection and is_leaf:
reports.append("D:sync-collection")
if item.get_meta("tag") == "VADDRESSBOOK":
reports.append("CR:addressbook-multiget")
reports.append("CR:addressbook-query")
elif item.get_meta("tag") == "VCALENDAR":
reports.append("C:calendar-multiget")
reports.append("C:calendar-query")
for human_tag in reports:
supported_report = ET.Element(
xmlutils.make_clark("D:supported-report"))
report_element = ET.Element(xmlutils.make_clark("D:report"))
report_element.append(
ET.Element(xmlutils.make_clark(human_tag)))
supported_report.append(report_element)
element.append(supported_report)
elif tag == xmlutils.make_clark("D:getcontentlength"):
if not is_collection or is_leaf:
element.text = str(len(item.serialize().encode(encoding)))
else:
is404 = True
elif tag == xmlutils.make_clark("D:owner"):
# return an empty element if no owner is available (rfc3744-5.1)
if collection.owner:
child_element = ET.Element(xmlutils.make_clark("D:href"))
child_element.text = xmlutils.make_href(
base_prefix, "/%s/" % collection.owner)
element.append(child_element)
elif is_collection:
if tag == xmlutils.make_clark("D:getcontenttype"):
if is_leaf:
element.text = xmlutils.MIMETYPES[item.get_meta("tag")]
else:
is404 = True
elif tag == xmlutils.make_clark("D:resourcetype"):
if item.is_principal:
child_element = ET.Element(
xmlutils.make_clark("D:principal"))
element.append(child_element)
if is_leaf:
if item.get_meta("tag") == "VADDRESSBOOK":
child_element = ET.Element(
xmlutils.make_clark("CR:addressbook"))
element.append(child_element)
elif item.get_meta("tag") == "VCALENDAR":
child_element = ET.Element(
xmlutils.make_clark("C:calendar"))
element.append(child_element)
child_element = ET.Element(xmlutils.make_clark("D:collection"))
element.append(child_element)
elif tag == xmlutils.make_clark("RADICALE:displayname"):
# Only for internal use by the web interface
displayname = item.get_meta("D:displayname")
if displayname is not None:
element.text = displayname
else:
is404 = True
elif tag == xmlutils.make_clark("D:displayname"):
displayname = item.get_meta("D:displayname")
if not displayname and is_leaf:
displayname = item.path
if displayname is not None:
element.text = displayname
else:
is404 = True
elif tag == xmlutils.make_clark("CS:getctag"):
if is_leaf:
element.text = item.etag
else:
is404 = True
elif tag == xmlutils.make_clark("D:sync-token"):
if is_leaf:
element.text, _ = item.sync()
else:
is404 = True
else:
human_tag = xmlutils.make_human_tag(tag)
meta = item.get_meta(human_tag)
if meta is not None:
element.text = meta
else:
is404 = True
# Not for collections
elif tag == xmlutils.make_clark("D:getcontenttype"):
element.text = xmlutils.get_content_type(item, encoding)
elif tag == xmlutils.make_clark("D:resourcetype"):
# resourcetype must be returned empty for non-collection elements
pass
else:
is404 = True
responses[404 if is404 else 200].append(element)
for status_code, childs in responses.items():
if not childs:
continue
propstat = ET.Element(xmlutils.make_clark("D:propstat"))
response.append(propstat)
prop = ET.Element(xmlutils.make_clark("D:prop"))
prop.extend(childs)
propstat.append(prop)
status = ET.Element(xmlutils.make_clark("D:status"))
status.text = xmlutils.make_response(status_code)
propstat.append(status)
return response
class ApplicationPropfindMixin:
def _collect_allowed_items(self, items, user):
"""Get items from request that user is allowed to access."""
for item in items:
if isinstance(item, storage.BaseCollection):
path = pathutils.unstrip_path(item.path, True)
if item.get_meta("tag"):
permissions = rights.intersect(
self._rights.authorization(user, path), "rw")
target = "collection with tag %r" % item.path
else:
permissions = rights.intersect(
self._rights.authorization(user, path), "RW")
target = "collection %r" % item.path
else:
path = pathutils.unstrip_path(item.collection.path, True)
permissions = rights.intersect(
self._rights.authorization(user, path), "rw")
target = "item %r from %r" % (item.href, item.collection.path)
if rights.intersect(permissions, "Ww"):
permission = "w"
status = "write"
elif rights.intersect(permissions, "Rr"):
permission = "r"
status = "read"
else:
permission = ""
status = "NO"
logger.debug(
"%s has %s access to %s",
repr(user) if user else "anonymous user", status, target)
if permission:
yield item, permission
def do_PROPFIND(self, environ, base_prefix, path, user):
"""Manage PROPFIND request."""
access = app.Access(self._rights, user, path)
if not access.check("r"):
return httputils.NOT_ALLOWED
try:
xml_content = self._read_xml_request_body(environ)
except RuntimeError as e:
logger.warning(
"Bad PROPFIND request on %r: %s", path, e, exc_info=True)
return httputils.BAD_REQUEST
except socket.timeout:
logger.debug("Client timed out", exc_info=True)
return httputils.REQUEST_TIMEOUT
with self._storage.acquire_lock("r", user):
items = self._storage.discover(
path, environ.get("HTTP_DEPTH", "0"))
# take root item for rights checking
item = next(items, None)
if not item:
return httputils.NOT_FOUND
if not access.check("r", item):
return httputils.NOT_ALLOWED
# put item back
items = itertools.chain([item], items)
allowed_items = self._collect_allowed_items(items, user)
headers = {"DAV": httputils.DAV_HEADERS,
"Content-Type": "text/xml; charset=%s" % self._encoding}
status, xml_answer = xml_propfind(
base_prefix, path, xml_content, allowed_items, user,
self._encoding)
if status == client.FORBIDDEN and xml_answer is None:
return httputils.NOT_ALLOWED
return status, headers, self._xml_response(xml_answer)
|
import os
import os.path
import sys
import contextlib
# Import side-effects are an evil thing, but here it's okay so scripts using
# colors work on Windows as well.
try:
import colorama
except ImportError:
colorama = None
else:
colorama.init()
use_color = os.name != 'nt' or colorama
fg_colors = {
'reset': 0,
'bold': 1,
'black': 30,
'red': 31,
'green': 32,
'yellow': 33,
'blue': 34,
'magenta': 35,
'cyan': 36,
'white': 37,
}
bg_colors = {name: col + 10 for name, col in fg_colors.items()}
ON_CI = 'CI' in os.environ
def _esc(code):
"""Get an ANSI color code based on a color number."""
return '\033[{}m'.format(code)
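# Example: _esc(31) returns '\033[31m' (red foreground) and _esc(fg_colors['reset'])
# returns '\033[0m', so print_col below emits '\033[31m' + text + '\033[0m'.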
def print_col(text, color, file=sys.stdout):
"""Print a colorized text."""
if use_color:
fg = _esc(fg_colors[color.lower()])
reset = _esc(fg_colors['reset'])
print(''.join([fg, text, reset]), file=file, flush=True)
else:
print(text, file=file, flush=True)
def print_error(text):
print_col(text, 'red', file=sys.stderr)
def print_title(text):
"""Print a title."""
print()
print_col("==================== {} ====================".format(text),
'yellow')
def print_subtitle(text):
"""Print a subtitle."""
print_col("------ {} ------".format(text), 'cyan')
def change_cwd():
"""Change the scripts cwd if it was started inside the script folder."""
cwd = os.getcwd()
if os.path.split(cwd)[1] == 'scripts':
os.chdir(os.path.join(cwd, os.pardir))
@contextlib.contextmanager
def gha_group(name):
"""Print a GitHub Actions group.
Gets ignored if not on CI.
"""
if ON_CI:
print('::group::' + name)
yield
print('::endgroup::')
else:
yield
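# Usage sketch: wrapping a build step so its output is collapsed into a group on GitHub
# Actions and printed as-is locally:
#     with gha_group('pytest'):
#         print_subtitle('running tests')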
def gha_error(message):
"""Print a GitHub Actions error.
Should only be called on CI.
"""
assert ON_CI
print('::error::' + message)
|
from datetime import timedelta
import pytest
import homeassistant.components.automation as automation
from homeassistant.components.remote import DOMAIN
from homeassistant.const import CONF_PLATFORM, STATE_OFF, STATE_ON
from homeassistant.helpers import device_registry
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.async_mock import patch
from tests.common import (
MockConfigEntry,
async_get_device_automation_capabilities,
async_get_device_automations,
async_mock_service,
mock_device_registry,
mock_registry,
)
@pytest.fixture
def device_reg(hass):
"""Return an empty, loaded, registry."""
return mock_device_registry(hass)
@pytest.fixture
def entity_reg(hass):
"""Return an empty, loaded, registry."""
return mock_registry(hass)
@pytest.fixture
def calls(hass):
"""Track calls to a mock service."""
return async_mock_service(hass, "test", "automation")
async def test_get_conditions(hass, device_reg, entity_reg):
"""Test we get the expected conditions from a remote."""
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device_entry.id)
expected_conditions = [
{
"condition": "device",
"domain": DOMAIN,
"type": "is_off",
"device_id": device_entry.id,
"entity_id": f"{DOMAIN}.test_5678",
},
{
"condition": "device",
"domain": DOMAIN,
"type": "is_on",
"device_id": device_entry.id,
"entity_id": f"{DOMAIN}.test_5678",
},
]
conditions = await async_get_device_automations(hass, "condition", device_entry.id)
assert conditions == expected_conditions
async def test_get_condition_capabilities(hass, device_reg, entity_reg):
"""Test we get the expected capabilities from a remote condition."""
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device_entry.id)
expected_capabilities = {
"extra_fields": [
{"name": "for", "optional": True, "type": "positive_time_period_dict"}
]
}
conditions = await async_get_device_automations(hass, "condition", device_entry.id)
for condition in conditions:
capabilities = await async_get_device_automation_capabilities(
hass, "condition", condition
)
assert capabilities == expected_capabilities
async def test_if_state(hass, calls):
"""Test for turn_on and turn_off conditions."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
ent1, ent2, ent3 = platform.ENTITIES
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {"platform": "event", "event_type": "test_event1"},
"condition": [
{
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": ent1.entity_id,
"type": "is_on",
}
],
"action": {
"service": "test.automation",
"data_template": {
"some": "is_on {{ trigger.%s }}"
% "}} - {{ trigger.".join(("platform", "event.event_type"))
},
},
},
{
"trigger": {"platform": "event", "event_type": "test_event2"},
"condition": [
{
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": ent1.entity_id,
"type": "is_off",
}
],
"action": {
"service": "test.automation",
"data_template": {
"some": "is_off {{ trigger.%s }}"
% "}} - {{ trigger.".join(("platform", "event.event_type"))
},
},
},
]
},
)
await hass.async_block_till_done()
assert hass.states.get(ent1.entity_id).state == STATE_ON
assert len(calls) == 0
hass.bus.async_fire("test_event1")
hass.bus.async_fire("test_event2")
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data["some"] == "is_on event - test_event1"
hass.states.async_set(ent1.entity_id, STATE_OFF)
hass.bus.async_fire("test_event1")
hass.bus.async_fire("test_event2")
await hass.async_block_till_done()
assert len(calls) == 2
assert calls[1].data["some"] == "is_off event - test_event2"
async def test_if_fires_on_for_condition(hass, calls):
"""Test for firing if condition is on with delay."""
point1 = dt_util.utcnow()
point2 = point1 + timedelta(seconds=10)
point3 = point2 + timedelta(seconds=10)
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
ent1, ent2, ent3 = platform.ENTITIES
with patch("homeassistant.core.dt_util.utcnow") as mock_utcnow:
mock_utcnow.return_value = point1
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {"platform": "event", "event_type": "test_event1"},
"condition": {
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": ent1.entity_id,
"type": "is_off",
"for": {"seconds": 5},
},
"action": {
"service": "test.automation",
"data_template": {
"some": "is_off {{ trigger.%s }}"
% "}} - {{ trigger.".join(
("platform", "event.event_type")
)
},
},
}
]
},
)
await hass.async_block_till_done()
assert hass.states.get(ent1.entity_id).state == STATE_ON
assert len(calls) == 0
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 0
# Time travel 10 secs into the future
mock_utcnow.return_value = point2
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 0
hass.states.async_set(ent1.entity_id, STATE_OFF)
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 0
# Time travel 20 secs into the future
mock_utcnow.return_value = point3
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data["some"] == "is_off event - test_event1"
|
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
from django.http import HttpResponsePermanentRedirect
from django.shortcuts import get_object_or_404
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
from django.views.generic.base import TemplateView
import django_comments as comments
from zinnia.flags import TRACKBACK
from zinnia.flags import get_user_flagger
from zinnia.models.entry import Entry
from zinnia.signals import trackback_was_posted
from zinnia.spam_checker import check_is_spam
class EntryTrackback(TemplateView):
"""
View for handling trackbacks on the entries.
"""
content_type = 'text/xml'
template_name = 'zinnia/entry_trackback.xml'
@method_decorator(csrf_exempt)
def dispatch(self, *args, **kwargs):
"""
Decorate the view dispatcher with csrf_exempt.
"""
return super(EntryTrackback, self).dispatch(*args, **kwargs)
def get_object(self):
"""
Retrieve the Entry trackbacked.
"""
return get_object_or_404(Entry.published, pk=self.kwargs['pk'])
def get(self, request, *args, **kwargs):
"""
GET only does a permanent redirect to the Entry.
"""
entry = self.get_object()
return HttpResponsePermanentRedirect(entry.get_absolute_url())
def post(self, request, *args, **kwargs):
"""
Check if a URL is provided and if trackbacks
are enabled on the Entry.
If so, the URL is registered once as a trackback.
"""
url = request.POST.get('url')
if not url:
return self.get(request, *args, **kwargs)
entry = self.get_object()
site = Site.objects.get_current()
if not entry.trackbacks_are_open:
return self.render_to_response(
{'error': 'Trackback is not enabled for %s' % entry.title})
title = request.POST.get('title') or url
excerpt = request.POST.get('excerpt') or title
blog_name = request.POST.get('blog_name') or title
ip_address = request.META.get('REMOTE_ADDR', None)
trackback_klass = comments.get_model()
trackback_datas = {
'content_type': ContentType.objects.get_for_model(Entry),
'object_pk': entry.pk,
'site': site,
'user_url': url,
'user_name': blog_name,
'ip_address': ip_address,
'comment': excerpt
}
trackback = trackback_klass(**trackback_datas)
if check_is_spam(trackback, entry, request):
return self.render_to_response(
{'error': 'Trackback considered like spam'})
trackback_defaults = {'comment': trackback_datas.pop('comment')}
trackback, created = trackback_klass.objects.get_or_create(
defaults=trackback_defaults,
**trackback_datas)
if created:
trackback.flags.create(user=get_user_flagger(), flag=TRACKBACK)
trackback_was_posted.send(trackback.__class__,
trackback=trackback,
entry=entry)
else:
return self.render_to_response(
{'error': 'Trackback is already registered'})
return self.render_to_response({})
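# Hedged usage sketch (hypothetical values): a trackback client POSTs form data such as
#     url=https://example.com/post/42, title=..., excerpt=..., blog_name=...
# to the entry's trackback endpoint; the view above registers the URL once and renders the
# zinnia/entry_trackback.xml template, or includes an 'error' key when the trackback is
# refused (closed, spam, or already registered).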
|
import os
import os.path as op
import pytest
import numpy as np
from numpy.testing import (assert_array_equal, assert_equal, assert_allclose,
assert_array_less)
import mne
from mne.datasets import testing
from mne import read_trans, write_trans
from mne.io import read_info
from mne.utils import _TempDir, run_tests_if_main
from mne.transforms import (invert_transform, _get_trans,
rotation, rotation3d, rotation_angles, _find_trans,
combine_transforms, apply_trans, translation,
get_ras_to_neuromag_trans, _pol_to_cart,
quat_to_rot, rot_to_quat, _angle_between_quats,
_find_vector_rotation, _sph_to_cart, _cart_to_sph,
_topo_to_sph, _average_quats,
_SphericalSurfaceWarp as SphericalSurfaceWarp,
rotation3d_align_z_axis, _read_fs_xfm,
_write_fs_xfm, _quat_real, _fit_matched_points,
_quat_to_euler, _euler_to_quat,
_quat_to_affine)
data_path = testing.data_path(download=False)
fname = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc-trans.fif')
fname_eve = op.join(data_path, 'MEG', 'sample',
'sample_audvis_trunc_raw-eve.fif')
base_dir = op.join(op.dirname(__file__), '..', 'io', 'tests', 'data')
fname_trans = op.join(base_dir, 'sample-audvis-raw-trans.txt')
test_fif_fname = op.join(base_dir, 'test_raw.fif')
ctf_fname = op.join(base_dir, 'test_ctf_raw.fif')
hp_fif_fname = op.join(base_dir, 'test_chpi_raw_sss.fif')
def test_tps():
"""Test TPS warping."""
az = np.linspace(0., 2 * np.pi, 20, endpoint=False)
pol = np.linspace(0, np.pi, 12)[1:-1]
sph = np.array(np.meshgrid(1, az, pol, indexing='ij'))
sph.shape = (3, -1)
assert_equal(sph.shape[1], 200)
source = _sph_to_cart(sph.T)
destination = source.copy()
destination *= 2
destination[:, 0] += 1
# fit with 100 points
warp = SphericalSurfaceWarp()
assert 'no ' in repr(warp)
warp.fit(source[::3], destination[::2])
assert 'oct5' in repr(warp)
destination_est = warp.transform(source)
assert_allclose(destination_est, destination, atol=1e-3)
@testing.requires_testing_data
def test_get_trans():
"""Test converting '-trans.txt' to '-trans.fif'."""
trans = read_trans(fname)
trans = invert_transform(trans) # starts out as head->MRI, so invert
trans_2 = _get_trans(fname_trans)[0]
assert trans.__eq__(trans_2, atol=1e-5)
@testing.requires_testing_data
def test_io_trans():
"""Test reading and writing of trans files."""
tempdir = _TempDir()
os.mkdir(op.join(tempdir, 'sample'))
pytest.raises(RuntimeError, _find_trans, 'sample', subjects_dir=tempdir)
trans0 = read_trans(fname)
fname1 = op.join(tempdir, 'sample', 'test-trans.fif')
trans0.save(fname1)
assert fname1 == _find_trans('sample', subjects_dir=tempdir)
trans1 = read_trans(fname1)
# check all properties
assert trans0 == trans1
# check reading non -trans.fif files
pytest.raises(IOError, read_trans, fname_eve)
# check warning on bad filenames
fname2 = op.join(tempdir, 'trans-test-bad-name.fif')
with pytest.warns(RuntimeWarning, match='-trans.fif'):
write_trans(fname2, trans0)
def test_get_ras_to_neuromag_trans():
"""Test the coordinate transformation from ras to neuromag."""
# create model points in neuromag-like space
rng = np.random.RandomState(0)
anterior = [0, 1, 0]
left = [-1, 0, 0]
right = [.8, 0, 0]
up = [0, 0, 1]
rand_pts = rng.uniform(-1, 1, (3, 3))
pts = np.vstack((anterior, left, right, up, rand_pts))
# change coord system
rx, ry, rz, tx, ty, tz = rng.uniform(-2 * np.pi, 2 * np.pi, 6)
trans = np.dot(translation(tx, ty, tz), rotation(rx, ry, rz))
pts_changed = apply_trans(trans, pts)
# transform back into original space
nas, lpa, rpa = pts_changed[:3]
hsp_trans = get_ras_to_neuromag_trans(nas, lpa, rpa)
pts_restored = apply_trans(hsp_trans, pts_changed)
err = "Neuromag transformation failed"
assert_allclose(pts_restored, pts, atol=1e-6, err_msg=err)
def _cartesian_to_sphere(x, y, z):
"""Convert using old function."""
hypotxy = np.hypot(x, y)
r = np.hypot(hypotxy, z)
elev = np.arctan2(z, hypotxy)
az = np.arctan2(y, x)
return az, elev, r
def _sphere_to_cartesian(theta, phi, r):
"""Convert using old function."""
z = r * np.sin(phi)
rcos_phi = r * np.cos(phi)
x = rcos_phi * np.cos(theta)
y = rcos_phi * np.sin(theta)
return x, y, z
def test_sph_to_cart():
"""Test conversion between sphere and cartesian."""
# Simple test, expected value (11, 0, 0)
r, theta, phi = 11., 0., np.pi / 2.
z = r * np.cos(phi)
rsin_phi = r * np.sin(phi)
x = rsin_phi * np.cos(theta)
y = rsin_phi * np.sin(theta)
coord = _sph_to_cart(np.array([[r, theta, phi]]))[0]
assert_allclose(coord, (x, y, z), atol=1e-7)
assert_allclose(coord, (r, 0, 0), atol=1e-7)
rng = np.random.RandomState(0)
# round-trip test
coords = rng.randn(10, 3)
assert_allclose(_sph_to_cart(_cart_to_sph(coords)), coords, atol=1e-5)
# equivalence tests to old versions
for coord in coords:
sph = _cart_to_sph(coord[np.newaxis])
cart = _sph_to_cart(sph)
sph_old = np.array(_cartesian_to_sphere(*coord))
cart_old = _sphere_to_cartesian(*sph_old)
sph_old[1] = np.pi / 2. - sph_old[1] # new convention
assert_allclose(sph[0], sph_old[[2, 0, 1]], atol=1e-7)
assert_allclose(cart[0], cart_old, atol=1e-7)
assert_allclose(cart[0], coord, atol=1e-7)
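# Note on the equivalence checks above: the old helpers used (azimuth, elevation, r)
# ordering while _cart_to_sph returns (r, azimuth, polar angle) with
# polar = pi/2 - elevation, hence the pi/2 adjustment and the sph_old[[2, 0, 1]]
# re-indexing.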
def _polar_to_cartesian(theta, r):
"""Transform polar coordinates to cartesian."""
x = r * np.cos(theta)
y = r * np.sin(theta)
return x, y
def test_polar_to_cartesian():
"""Test helper transform function from polar to cartesian."""
r = 1
theta = np.pi
# expected values are (-1, 0)
x = r * np.cos(theta)
y = r * np.sin(theta)
coord = _pol_to_cart(np.array([[r, theta]]))[0]
# np.pi is an approx since pi is irrational
assert_allclose(coord, (x, y), atol=1e-7)
assert_allclose(coord, (-1, 0), atol=1e-7)
assert_allclose(coord, _polar_to_cartesian(theta, r), atol=1e-7)
rng = np.random.RandomState(0)
r = rng.randn(10)
theta = rng.rand(10) * (2 * np.pi)
polar = np.array((r, theta)).T
assert_allclose([_polar_to_cartesian(p[1], p[0]) for p in polar],
_pol_to_cart(polar), atol=1e-7)
def _topo_to_phi_theta(theta, radius):
"""Convert using old function."""
sph_phi = (0.5 - radius) * 180
sph_theta = -theta
return sph_phi, sph_theta
def test_topo_to_sph():
"""Test topo to sphere conversion."""
rng = np.random.RandomState(0)
angles = rng.rand(10) * 360
radii = rng.rand(10)
angles[0] = 30
radii[0] = 0.25
# new way
sph = _topo_to_sph(np.array([angles, radii]).T)
new = _sph_to_cart(sph)
new[:, [0, 1]] = new[:, [1, 0]] * [-1, 1]
# old way
for ii, (angle, radius) in enumerate(zip(angles, radii)):
sph_phi, sph_theta = _topo_to_phi_theta(angle, radius)
if ii == 0:
assert_allclose(_topo_to_phi_theta(angle, radius), [45, -30])
azimuth = sph_theta / 180.0 * np.pi
elevation = sph_phi / 180.0 * np.pi
assert_allclose(sph[ii], [1., azimuth, np.pi / 2. - elevation],
atol=1e-7)
r = np.ones_like(radius)
x, y, z = _sphere_to_cartesian(azimuth, elevation, r)
pos = [-y, x, z]
if ii == 0:
expected = np.array([1. / 2., np.sqrt(3) / 2., 1.])
expected /= np.sqrt(2)
assert_allclose(pos, expected, atol=1e-7)
assert_allclose(pos, new[ii], atol=1e-7)
def test_rotation():
"""Test conversion between rotation angles and transformation matrix."""
tests = [(0, 0, 1), (.5, .5, .5), (np.pi, 0, -1.5)]
for rot in tests:
x, y, z = rot
m = rotation3d(x, y, z)
m4 = rotation(x, y, z)
assert_array_equal(m, m4[:3, :3])
back = rotation_angles(m)
assert_equal(back, rot)
back4 = rotation_angles(m4)
assert_equal(back4, rot)
def test_rotation3d_align_z_axis():
"""Test rotation3d_align_z_axis."""
# The more complex z axis fails the assert presumably due to tolerance
inp_zs = [[0, 0, 1], [0, 1, 0], [1, 0, 0], [0, 0, -1],
[-0.75071668, -0.62183808, 0.22302888]]
exp_res = [[[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]],
[[1., 0., 0.], [0., 0., 1.], [0., -1., 0.]],
[[0., 0., 1.], [0., 1., 0.], [-1., 0., 0.]],
[[1., 0., 0.], [0., -1., 0.], [0., 0., -1.]],
[[0.53919688, -0.38169517, -0.75071668],
[-0.38169517, 0.683832, -0.62183808],
[0.75071668, 0.62183808, 0.22302888]]]
for res, z in zip(exp_res, inp_zs):
assert_allclose(res, rotation3d_align_z_axis(z), atol=1e-7)
@testing.requires_testing_data
def test_combine():
"""Test combining transforms."""
trans = read_trans(fname)
inv = invert_transform(trans)
combine_transforms(trans, inv, trans['from'], trans['from'])
pytest.raises(RuntimeError, combine_transforms, trans, inv,
trans['to'], trans['from'])
pytest.raises(RuntimeError, combine_transforms, trans, inv,
trans['from'], trans['to'])
pytest.raises(RuntimeError, combine_transforms, trans, trans,
trans['from'], trans['to'])
def test_quaternions():
"""Test quaternion calculations."""
rots = [np.eye(3)]
for fname in [test_fif_fname, ctf_fname, hp_fif_fname]:
rots += [read_info(fname)['dev_head_t']['trans'][:3, :3]]
# nasty numerical cases
rots += [np.array([
[-0.99978541, -0.01873462, -0.00898756],
[-0.01873462, 0.62565561, 0.77987608],
[-0.00898756, 0.77987608, -0.62587152],
])]
rots += [np.array([
[0.62565561, -0.01873462, 0.77987608],
[-0.01873462, -0.99978541, -0.00898756],
[0.77987608, -0.00898756, -0.62587152],
])]
rots += [np.array([
[-0.99978541, -0.00898756, -0.01873462],
[-0.00898756, -0.62587152, 0.77987608],
[-0.01873462, 0.77987608, 0.62565561],
])]
for rot in rots:
assert_allclose(rot, quat_to_rot(rot_to_quat(rot)),
rtol=1e-5, atol=1e-5)
rot = rot[np.newaxis, np.newaxis, :, :]
assert_allclose(rot, quat_to_rot(rot_to_quat(rot)),
rtol=1e-5, atol=1e-5)
# let's make sure our angle function works in some reasonable way
for ii in range(3):
for jj in range(3):
a = np.zeros(3)
b = np.zeros(3)
a[ii] = 1.
b[jj] = 1.
expected = np.pi if ii != jj else 0.
assert_allclose(_angle_between_quats(a, b), expected, atol=1e-5)
y_180 = np.array([[-1, 0, 0], [0, 1, 0], [0, 0, -1.]])
assert_allclose(_angle_between_quats(rot_to_quat(y_180),
np.zeros(3)), np.pi)
h_180_attitude_90 = np.array([[0, 1, 0], [1, 0, 0], [0, 0, -1.]])
assert_allclose(_angle_between_quats(rot_to_quat(h_180_attitude_90),
np.zeros(3)), np.pi)
def test_vector_rotation():
"""Test basic rotation matrix math."""
x = np.array([1., 0., 0.])
y = np.array([0., 1., 0.])
rot = _find_vector_rotation(x, y)
assert_array_equal(rot,
[[0, -1, 0], [1, 0, 0], [0, 0, 1]])
quat_1 = rot_to_quat(rot)
quat_2 = rot_to_quat(np.eye(3))
assert_allclose(_angle_between_quats(quat_1, quat_2), np.pi / 2.)
def test_average_quats():
"""Test averaging of quaternions."""
sq2 = 1. / np.sqrt(2.)
quats = np.array([[0, sq2, sq2],
[0, sq2, sq2],
[0, sq2, 0],
[0, 0, sq2],
[sq2, 0, 0]], float)
# In MATLAB:
# quats = [[0, sq2, sq2, 0]; [0, sq2, sq2, 0];
# [0, sq2, 0, sq2]; [0, 0, sq2, sq2]; [sq2, 0, 0, sq2]];
expected = [quats[0],
quats[0],
[0, 0.788675134594813, 0.577350269189626],
[0, 0.657192299694123, 0.657192299694123],
[0.100406058540540, 0.616329446922803, 0.616329446922803]]
# Averaging the first two should give the same thing:
for lim, ex in enumerate(expected):
assert_allclose(_average_quats(quats[:lim + 1]), ex, atol=1e-7)
quats[1] *= -1 # same quaternion (hidden value is zero here)!
rot_0, rot_1 = quat_to_rot(quats[:2])
assert_allclose(rot_0, rot_1, atol=1e-7)
for lim, ex in enumerate(expected):
assert_allclose(_average_quats(quats[:lim + 1]), ex, atol=1e-7)
# Assert some symmetry
count = 0
extras = [[sq2, sq2, 0]] + list(np.eye(3))
for quat in np.concatenate((quats, expected, extras)):
if np.isclose(_quat_real(quat), 0., atol=1e-7): # can flip sign
count += 1
angle = _angle_between_quats(quat, -quat)
assert_allclose(angle, 0., atol=1e-7)
rot_0, rot_1 = quat_to_rot(np.array((quat, -quat)))
assert_allclose(rot_0, rot_1, atol=1e-7)
assert count == 4 + len(extras)
@testing.requires_testing_data
def test_fs_xfm():
"""Test reading and writing of Freesurfer transforms."""
for subject in ('fsaverage', 'sample'):
fname = op.join(data_path, 'subjects', subject, 'mri', 'transforms',
'talairach.xfm')
xfm, kind = _read_fs_xfm(fname)
if subject == 'fsaverage':
assert_allclose(xfm, np.eye(4), atol=1e-5) # fsaverage is in MNI
assert kind == 'MNI Transform File'
tempdir = _TempDir()
fname_out = op.join(tempdir, 'out.xfm')
_write_fs_xfm(fname_out, xfm, kind)
xfm_read, kind_read = _read_fs_xfm(fname_out)
assert kind_read == kind
assert_allclose(xfm, xfm_read, rtol=1e-5, atol=1e-5)
# Some wacky one
xfm[:3] = np.random.RandomState(0).randn(3, 4)
_write_fs_xfm(fname_out, xfm, 'foo')
xfm_read, kind_read = _read_fs_xfm(fname_out)
assert kind_read == 'foo'
assert_allclose(xfm, xfm_read, rtol=1e-5, atol=1e-5)
# degenerate conditions
with open(fname_out, 'w') as fid:
fid.write('foo')
with pytest.raises(ValueError, match='Failed to find'):
_read_fs_xfm(fname_out)
_write_fs_xfm(fname_out, xfm[:2], 'foo')
with pytest.raises(ValueError, match='Could not find'):
_read_fs_xfm(fname_out)
@pytest.fixture()
def quats():
"""Make some unit quats."""
quats = np.random.RandomState(0).randn(5, 3)
quats[:, 0] = 0 # identity
quats /= 2 * np.linalg.norm(quats, axis=1, keepdims=True) # some real part
return quats
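# Note on the scaling above: each imaginary part is normalized to length 0.5, so the
# implied real component is sqrt(1 - 0.25) ~= 0.866 and every row is a valid unit
# quaternion in the 3-element (imaginary-part) representation used by these helpers.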
def _check_fit_matched_points(
p, x, weights, do_scale, angtol=1e-5, dtol=1e-5, stol=1e-7):
__tracebackhide__ = True
mne.coreg._ALLOW_ANALITICAL = False
try:
params = mne.coreg.fit_matched_points(
p, x, weights=weights, scale=do_scale, out='params')
finally:
mne.coreg._ALLOW_ANALITICAL = True
quat_an, scale_an = _fit_matched_points(p, x, weights, scale=do_scale)
assert len(params) == 6 + int(do_scale)
q_co = _euler_to_quat(params[:3])
translate_co = params[3:6]
angle = np.rad2deg(_angle_between_quats(quat_an[:3], q_co))
dist = np.linalg.norm(quat_an[3:] - translate_co)
assert 0 <= angle < angtol, 'angle'
assert 0 <= dist < dtol, 'dist'
if do_scale:
scale_co = params[6]
assert_allclose(scale_an, scale_co, rtol=stol, err_msg='scale')
# errs
trans = _quat_to_affine(quat_an)
trans[:3, :3] *= scale_an
weights = np.ones(1) if weights is None else weights
err_an = np.linalg.norm(
weights[:, np.newaxis] * apply_trans(trans, p) - x)
trans = mne.coreg._trans_from_params((True, True, do_scale), params)
err_co = np.linalg.norm(
weights[:, np.newaxis] * apply_trans(trans, p) - x)
if err_an > 1e-14:
assert err_an < err_co * 1.5
return quat_an, scale_an
@pytest.mark.parametrize('scaling', [0.25, 1])
@pytest.mark.parametrize('do_scale', (True, False))
def test_fit_matched_points(quats, scaling, do_scale):
"""Test analytical least-squares matched point fitting."""
if scaling != 1 and not do_scale:
return # no need to test this, it will not be good
rng = np.random.RandomState(0)
fro = rng.randn(10, 3)
translation = rng.randn(3)
for qi, quat in enumerate(quats):
to = scaling * np.dot(quat_to_rot(quat), fro.T).T + translation
for corrupted in (False, True):
# mess up a point
if corrupted:
to[0, 2] += 100
weights = np.ones(len(to))
weights[0] = 0
else:
weights = None
est, scale_est = _check_fit_matched_points(
fro, to, weights=weights, do_scale=do_scale)
assert_allclose(scale_est, scaling, rtol=1e-5)
assert_allclose(est[:3], quat, atol=1e-14)
assert_allclose(est[3:], translation, atol=1e-14)
# if we don't adjust for the corruption above, it should get worse
angle = dist = None
for weighted in (False, True):
if not weighted:
weights = None
dist_bounds = (5, 20)
if scaling == 1:
angle_bounds = (5, 95)
angtol, dtol, stol = 1, 15, 3
else:
angle_bounds = (5, 105)
angtol, dtol, stol = 20, 15, 3
else:
weights = np.ones(len(to))
weights[0] = 10 # weighted=True here means "make it worse"
angle_bounds = (angle, 180) # unweighted values as new min
dist_bounds = (dist, 100)
if scaling == 1:
# XXX this angtol is not great but there is a hard to
# identify linalg/angle calculation bug on Travis...
angtol, dtol, stol = 180, 70, 3
else:
angtol, dtol, stol = 50, 70, 3
est, scale_est = _check_fit_matched_points(
fro, to, weights=weights, do_scale=do_scale,
angtol=angtol, dtol=dtol, stol=stol)
assert not np.allclose(est[:3], quat, atol=1e-5)
assert not np.allclose(est[3:], translation, atol=1e-5)
angle = np.rad2deg(_angle_between_quats(est[:3], quat))
assert_array_less(angle_bounds[0], angle)
assert_array_less(angle, angle_bounds[1])
dist = np.linalg.norm(est[3:] - translation)
assert_array_less(dist_bounds[0], dist)
assert_array_less(dist, dist_bounds[1])
def test_euler(quats):
"""Test euler transformations."""
euler = _quat_to_euler(quats)
quats_2 = _euler_to_quat(euler)
assert_allclose(quats, quats_2, atol=1e-14)
quat_rot = quat_to_rot(quats)
euler_rot = np.array([rotation(*e)[:3, :3] for e in euler])
assert_allclose(quat_rot, euler_rot, atol=1e-14)
run_tests_if_main()
|
from contextlib import contextmanager
from json import dumps
from types import SimpleNamespace
from os import getpid
from httpobs.conf import (API_CACHED_RESULT_TIME,
DATABASE_CA_CERT,
DATABASE_DB,
DATABASE_HOST,
DATABASE_PASSWORD,
DATABASE_PORT,
DATABASE_SSL_MODE,
DATABASE_USER,
SCANNER_ABORT_SCAN_TIME)
from httpobs.scanner import (ALGORITHM_VERSION,
STATE_ABORTED,
STATE_FAILED,
STATE_FINISHED,
STATE_PENDING,
STATE_STARTING)
from httpobs.scanner.analyzer import NUM_TESTS
from httpobs.scanner.grader import get_grade_and_likelihood_for_score, MINIMUM_SCORE_FOR_EXTRA_CREDIT
import psycopg2
import psycopg2.extras
import psycopg2.pool
import sys
class SimpleDatabaseConnection:
def __init__(self):
self._initialized_pid = getpid()
self._connected = True
self._connect()
def _connect(self):
try:
self._conn = psycopg2.connect(database=DATABASE_DB,
host=DATABASE_HOST,
password=DATABASE_PASSWORD,
port=DATABASE_PORT,
sslmode=DATABASE_SSL_MODE,
sslrootcert=DATABASE_CA_CERT,
user=DATABASE_USER)
if not self._connected:
print('INFO: Connected to PostgreSQL', file=sys.stderr)
self._connected = True
except Exception as e:
print(e, file=sys.stderr)
self._conn = SimpleNamespace(closed=1)
if self._connected:
print('WARNING: Disconnected from PostgreSQL', file=sys.stderr)
self._connected = False
@property
def conn(self):
# TLS connections cannot be shared across workers; you'll get a decryption failed or bad mac error
# What we will do is detect if we're running in a different PID and reconnect if so
# TODO: use celery's worker init stuff instead?
if self._initialized_pid != getpid():
self.__init__()
# If the connection is closed, try to reconnect and raise an IOError if it's unsuccessful
if self._conn.closed:
self._connect()
if self._conn.closed:
raise IOError
return self._conn
# Create an initial database connection on startup
db = SimpleDatabaseConnection()
@contextmanager
def get_cursor():
try:
yield db.conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
try:
db.conn.commit()
except:
db.conn.rollback()
except:
raise IOError
# Print out a warning on startup if we can't connect to PostgreSQL
try:
with get_cursor() as _: # noqa
pass
except IOError:
print('WARNING: Unable to connect to PostgreSQL.', file=sys.stderr)
def insert_scan(site_id: int, hidden: bool = False) -> dict:
with get_cursor() as cur:
cur.execute("""INSERT INTO scans (site_id, state, start_time, algorithm_version, tests_quantity, hidden)
VALUES (%s, %s, NOW(), %s, %s, %s)
RETURNING *""",
(site_id, STATE_PENDING, ALGORITHM_VERSION, NUM_TESTS, hidden))
return dict(cur.fetchone())
def insert_scan_grade(scan_id, scan_grade, scan_score) -> dict:
with get_cursor() as cur:
cur.execute("""UPDATE scans
SET (grade, score) =
(%s, %s)
WHERE id = %s
RETURNING *""",
(scan_grade, scan_score, scan_id))
return dict(cur.fetchone())
# TODO: Separate out some of this logic so it doesn't need to be duplicated in local.scan()
def insert_test_results(site_id: int,
scan_id: int,
tests: list,
response_headers: dict,
status_code: int = None) -> dict:
with get_cursor() as cur:
tests_failed = tests_passed = 0
score_with_extra_credit = uncurved_score = 100
for test in tests:
name = test.pop('name')
expectation = test.pop('expectation')
passed = test.pop('pass')
result = test.pop('result')
score_modifier = test.pop('score_modifier')
# Keep track of how many tests passed or failed
if passed:
tests_passed += 1
else:
tests_failed += 1
# And keep track of the score
score_with_extra_credit += score_modifier
if score_modifier < 0:
uncurved_score += score_modifier
# Insert test result to the database
cur.execute("""INSERT INTO tests (site_id, scan_id, name, expectation, result, pass, output, score_modifier)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s)""",
(site_id, scan_id, name, expectation, result, passed, dumps(test), score_modifier))
# Only record the full score if the uncurved score already receives an A
score = score_with_extra_credit if uncurved_score >= MINIMUM_SCORE_FOR_EXTRA_CREDIT else uncurved_score
        # Compute the final score, grade, and likelihood indicator
score, grade, likelihood_indicator = get_grade_and_likelihood_for_score(score)
# Update the scans table
cur.execute("""UPDATE scans
SET (end_time, tests_failed, tests_passed, grade, score, likelihood_indicator,
state, response_headers, status_code) =
(NOW(), %s, %s, %s, %s, %s, %s, %s, %s)
WHERE id = %s
RETURNING *""",
(tests_failed, tests_passed, grade, score, likelihood_indicator, STATE_FINISHED,
dumps(response_headers), status_code, scan_id))
row = dict(cur.fetchone())
return row
def periodic_maintenance() -> int:
"""
    Update all scans that are stuck. The hard time limit for celery is 1129 seconds, so anything that isn't aborted,
    finished, or failed by then should simply be marked as aborted.
:return: the number of scans that were closed out
"""
with get_cursor() as cur:
# Mark all scans that have been sitting unfinished for at least SCANNER_ABORT_SCAN_TIME as ABORTED
cur.execute("""UPDATE scans
SET (state, end_time) = (%s, NOW())
WHERE state != %s
AND state != %s
AND state != %s
AND start_time < NOW() - INTERVAL '%s seconds';""",
(STATE_ABORTED, STATE_ABORTED, STATE_FAILED, STATE_FINISHED, SCANNER_ABORT_SCAN_TIME))
return cur.rowcount
def refresh_materialized_views() -> None:
"""
    Refresh every materialized view used for grade statistics
:return: None
"""
with get_cursor() as cur:
# Update the various materialized views
cur.execute("REFRESH MATERIALIZED VIEW CONCURRENTLY latest_scans;")
cur.execute("REFRESH MATERIALIZED VIEW CONCURRENTLY earliest_scans;")
cur.execute("REFRESH MATERIALIZED VIEW CONCURRENTLY grade_distribution;")
cur.execute("REFRESH MATERIALIZED VIEW CONCURRENTLY grade_distribution_all_scans;")
cur.execute("REFRESH MATERIALIZED VIEW CONCURRENTLY scan_score_difference_distribution;")
cur.execute("REFRESH MATERIALIZED VIEW CONCURRENTLY scan_score_difference_distribution_summation;")
return None
def select_star_from(table: str) -> dict:
# Select all the rows in a given table. Note that this is specifically not parameterized.
with get_cursor() as cur:
cur.execute('SELECT * FROM {table}'.format(table=table))
return dict(cur.fetchall())
def select_scan_host_history(site_id: int) -> list:
# Get all of the site's historic scans
with get_cursor() as cur:
cur.execute("""SELECT id, grade, score, end_time FROM scans
WHERE site_id = %s
AND state = %s
ORDER BY end_time ASC;""",
(site_id, STATE_FINISHED))
if cur.rowcount > 0:
return([
{
'scan_id': row['id'],
'grade': row['grade'],
'score': row['score'],
'end_time': row['end_time'],
'end_time_unix_timestamp': int(row['end_time'].timestamp())
} for row in cur.fetchall()])
else:
return []
def select_scan_scanner_statistics(verbose: bool = False) -> dict:
# Get all the scanner statistics while minimizing the number of cursors needed
with get_cursor() as cur:
        # Get the grade distribution across the latest scans (periodically refreshed)
cur.execute('SELECT * FROM grade_distribution;')
grade_distribution = dict(cur.fetchall())
# Get the grade distribution across all scans (periodically refreshed)
cur.execute('SELECT * FROM grade_distribution_all_scans;')
grade_distribution_all_scans = dict(cur.fetchall())
# And the summation of grade differences
cur.execute('SELECT * FROM scan_score_difference_distribution_summation;')
scan_score_difference_distribution_summation = dict(cur.fetchall())
        # And the most recent scan (its id doubles as the total scan count)
cur.execute("""SELECT id, start_time FROM scans ORDER BY id DESC LIMIT 1;""")
most_recent_scan = list(cur.fetchall())
# Stats we only need if verbose is true, as these take a while to collect
if verbose:
# Get the scanner stats
cur.execute('SELECT state, COUNT(*) as quantity FROM scans GROUP BY state;')
states = dict(cur.fetchall())
# Get the recent scan count
cur.execute("""SELECT DATE_TRUNC('hour', end_time) AS hour, COUNT(*) as num_scans
FROM scans
WHERE (end_time < DATE_TRUNC('hour', NOW()))
                             AND (end_time >= DATE_TRUNC('hour', NOW()) - INTERVAL '24 hours')
                             AND state = %s
GROUP BY hour
ORDER BY hour DESC;""",
(STATE_FINISHED,))
recent_scans = dict(cur.fetchall()).items()
else:
recent_scans = {}
states = {}
return {
'grade_distribution': grade_distribution,
'grade_distribution_all_scans': grade_distribution_all_scans,
'most_recent_scan_datetime': most_recent_scan[0][1],
'recent_scans': recent_scans,
'scan_count': most_recent_scan[0][0],
'scan_score_difference_distribution_summation': scan_score_difference_distribution_summation,
'states': states,
}
def select_scan_recent_finished_scans(num_scans=10, min_score=0, max_score=100) -> dict:
# Used for /api/v1/getRecentScans
# Fix from: https://gist.github.com/april/61efa9ff197828bf5ab13e5a00be9138
with get_cursor() as cur:
cur.execute("""SELECT sites.domain, s2.grade
FROM
(SELECT DISTINCT ON (s1.site_id) s1.site_id, s1.grade, s1.end_time
FROM
(SELECT site_id, grade, end_time
FROM scans
WHERE state = %s
AND NOT hidden
AND score >= %s
AND score <= %s
ORDER BY end_time
DESC LIMIT %s) s1
ORDER BY s1.site_id, s1.end_time DESC) s2
INNER JOIN sites ON (sites.id = s2.site_id)
ORDER BY s2.end_time DESC LIMIT %s;""",
(STATE_FINISHED, min_score, max_score, num_scans * 2, num_scans))
return dict(cur.fetchall())
def select_scan_recent_scan(site_id: int, recent_in_seconds=API_CACHED_RESULT_TIME) -> dict:
with get_cursor() as cur:
cur.execute("""SELECT * FROM scans
WHERE site_id = %s
AND start_time >= NOW() - INTERVAL '%s seconds'
ORDER BY start_time DESC
LIMIT 1""",
(site_id, recent_in_seconds))
if cur.rowcount > 0:
return dict(cur.fetchone())
return {}
def select_site_headers(hostname: str) -> dict:
# Return the site's headers
with get_cursor() as cur:
cur.execute("""SELECT public_headers, private_headers, cookies FROM sites
WHERE domain = %s
ORDER BY creation_time DESC
LIMIT 1""",
(hostname,))
# If it has headers, merge the public and private headers together
if cur.rowcount > 0:
row = cur.fetchone()
headers = {} if row.get('public_headers') is None else row.get('public_headers')
private_headers = {} if row.get('private_headers') is None else row.get('private_headers')
headers.update(private_headers)
return {
'cookies': {} if row.get('cookies') is None else row.get('cookies'),
'headers': headers
}
else:
return {}
def select_site_id(hostname: str) -> int:
# See if the site exists already
with get_cursor() as cur:
cur.execute("""SELECT id FROM sites
WHERE domain = %s
ORDER BY creation_time DESC
LIMIT 1""",
(hostname,))
if cur.rowcount > 0:
return cur.fetchone()['id']
# If not, let's create the site
with get_cursor() as cur:
cur.execute("""INSERT INTO sites (domain, creation_time)
VALUES (%s, NOW())
RETURNING id""", (hostname,))
return cur.fetchone()['id']
def select_test_results(scan_id: int) -> dict:
tests = {}
with get_cursor() as cur:
cur.execute("SELECT * FROM tests WHERE scan_id = %s", (scan_id,))
# Grab every test and stuff it into the tests dictionary
if cur.rowcount > 1:
for test in cur:
tests[test['name']] = dict(test)
return tests
def update_scan_state(scan_id, state: str, error=None) -> dict:
if error:
with get_cursor() as cur:
cur.execute("""UPDATE scans
SET (state, end_time, error) = (%s, NOW(), %s)
WHERE id = %s
RETURNING *""",
(state, error, scan_id))
row = dict(cur.fetchone())
else:
with get_cursor() as cur:
cur.execute("""UPDATE scans
SET state = %s
WHERE id = %s
RETURNING *""",
(state, scan_id))
row = dict(cur.fetchone())
return row
def update_scans_dequeue_scans(num_to_dequeue: int = 0) -> dict:
with get_cursor() as cur:
cur.execute("""UPDATE scans
SET state = %s
FROM (
SELECT sites.domain, scans.site_id, scans.id AS scan_id, scans.state
FROM scans
INNER JOIN sites ON scans.site_id = sites.id
WHERE state = %s
LIMIT %s
FOR UPDATE) sub
WHERE scans.id = sub.scan_id
RETURNING sub.domain, sub.site_id, sub.scan_id""",
(STATE_STARTING, STATE_PENDING, num_to_dequeue))
return cur.fetchall()
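# Illustrative sketch of how a scanner worker could chain the helpers above.
# The `run_scan` callable is a placeholder standing in for the real scanner;
# the actual task wiring lives elsewhere in httpobs.
def _example_worker_pass(run_scan) -> int:
    finished = 0
    # update_scans_dequeue_scans() already flips PENDING scans to STARTING
    for domain, site_id, scan_id in update_scans_dequeue_scans(num_to_dequeue=5):
        try:
            tests, response_headers, status_code = run_scan(domain)
            insert_test_results(site_id, scan_id, tests, response_headers, status_code)
            finished += 1
        except Exception as err:
            update_scan_state(scan_id, STATE_FAILED, error=str(err))
    return finished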
|
from crispy_forms.helper import FormHelper
from django import forms
from weblate.lang.models import Language, Plural
class LanguageForm(forms.ModelForm):
class Meta:
model = Language
exclude = []
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_tag = False
@staticmethod
def get_field_doc(field):
return ("admin/languages", f"language-{field.name}")
class PluralForm(forms.ModelForm):
class Meta:
model = Plural
fields = ["number", "formula"]
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_tag = False
@staticmethod
def get_field_doc(field):
return ("admin/languages", f"plural-{field.name}")
|
from homeassistant.components.cover import (
ATTR_CURRENT_POSITION,
ATTR_CURRENT_TILT_POSITION,
ATTR_POSITION,
ATTR_TILT_POSITION,
)
from homeassistant.const import (
SERVICE_CLOSE_COVER,
SERVICE_CLOSE_COVER_TILT,
SERVICE_OPEN_COVER,
SERVICE_OPEN_COVER_TILT,
SERVICE_SET_COVER_POSITION,
SERVICE_SET_COVER_TILT_POSITION,
STATE_CLOSED,
STATE_OPEN,
)
from homeassistant.core import State
from tests.common import async_mock_service
async def test_reproducing_states(hass, caplog):
"""Test reproducing Cover states."""
hass.states.async_set("cover.entity_close", STATE_CLOSED, {})
hass.states.async_set(
"cover.entity_close_attr",
STATE_CLOSED,
{ATTR_CURRENT_POSITION: 0, ATTR_CURRENT_TILT_POSITION: 0},
)
hass.states.async_set(
"cover.entity_close_tilt", STATE_CLOSED, {ATTR_CURRENT_TILT_POSITION: 50}
)
hass.states.async_set("cover.entity_open", STATE_OPEN, {})
hass.states.async_set(
"cover.entity_slightly_open", STATE_OPEN, {ATTR_CURRENT_POSITION: 50}
)
hass.states.async_set(
"cover.entity_open_attr",
STATE_OPEN,
{ATTR_CURRENT_POSITION: 100, ATTR_CURRENT_TILT_POSITION: 0},
)
hass.states.async_set(
"cover.entity_open_tilt",
STATE_OPEN,
{ATTR_CURRENT_POSITION: 50, ATTR_CURRENT_TILT_POSITION: 50},
)
hass.states.async_set(
"cover.entity_entirely_open",
STATE_OPEN,
{ATTR_CURRENT_POSITION: 100, ATTR_CURRENT_TILT_POSITION: 100},
)
close_calls = async_mock_service(hass, "cover", SERVICE_CLOSE_COVER)
open_calls = async_mock_service(hass, "cover", SERVICE_OPEN_COVER)
close_tilt_calls = async_mock_service(hass, "cover", SERVICE_CLOSE_COVER_TILT)
open_tilt_calls = async_mock_service(hass, "cover", SERVICE_OPEN_COVER_TILT)
position_calls = async_mock_service(hass, "cover", SERVICE_SET_COVER_POSITION)
position_tilt_calls = async_mock_service(
hass, "cover", SERVICE_SET_COVER_TILT_POSITION
)
# These calls should do nothing as entities already in desired state
await hass.helpers.state.async_reproduce_state(
[
State("cover.entity_close", STATE_CLOSED),
State(
"cover.entity_close_attr",
STATE_CLOSED,
{ATTR_CURRENT_POSITION: 0, ATTR_CURRENT_TILT_POSITION: 0},
),
State(
"cover.entity_close_tilt",
STATE_CLOSED,
{ATTR_CURRENT_TILT_POSITION: 50},
),
State("cover.entity_open", STATE_OPEN),
State(
"cover.entity_slightly_open", STATE_OPEN, {ATTR_CURRENT_POSITION: 50}
),
State(
"cover.entity_open_attr",
STATE_OPEN,
{ATTR_CURRENT_POSITION: 100, ATTR_CURRENT_TILT_POSITION: 0},
),
State(
"cover.entity_open_tilt",
STATE_OPEN,
{ATTR_CURRENT_POSITION: 50, ATTR_CURRENT_TILT_POSITION: 50},
),
State(
"cover.entity_entirely_open",
STATE_OPEN,
{ATTR_CURRENT_POSITION: 100, ATTR_CURRENT_TILT_POSITION: 100},
),
]
)
assert len(close_calls) == 0
assert len(open_calls) == 0
assert len(close_tilt_calls) == 0
assert len(open_tilt_calls) == 0
assert len(position_calls) == 0
assert len(position_tilt_calls) == 0
# Test invalid state is handled
await hass.helpers.state.async_reproduce_state(
[State("cover.entity_close", "not_supported")]
)
assert "not_supported" in caplog.text
assert len(close_calls) == 0
assert len(open_calls) == 0
assert len(close_tilt_calls) == 0
assert len(open_tilt_calls) == 0
assert len(position_calls) == 0
assert len(position_tilt_calls) == 0
# Make sure correct services are called
await hass.helpers.state.async_reproduce_state(
[
State("cover.entity_close", STATE_OPEN),
State(
"cover.entity_close_attr",
STATE_OPEN,
{ATTR_CURRENT_POSITION: 50, ATTR_CURRENT_TILT_POSITION: 50},
),
State(
"cover.entity_close_tilt",
STATE_CLOSED,
{ATTR_CURRENT_TILT_POSITION: 100},
),
State("cover.entity_open", STATE_CLOSED),
State("cover.entity_slightly_open", STATE_OPEN, {}),
State("cover.entity_open_attr", STATE_CLOSED, {}),
State(
"cover.entity_open_tilt", STATE_OPEN, {ATTR_CURRENT_TILT_POSITION: 0}
),
State(
"cover.entity_entirely_open",
STATE_CLOSED,
{ATTR_CURRENT_POSITION: 0, ATTR_CURRENT_TILT_POSITION: 0},
),
# Should not raise
State("cover.non_existing", "on"),
],
)
valid_close_calls = [
{"entity_id": "cover.entity_open"},
{"entity_id": "cover.entity_open_attr"},
{"entity_id": "cover.entity_entirely_open"},
]
assert len(close_calls) == 3
for call in close_calls:
assert call.domain == "cover"
assert call.data in valid_close_calls
valid_close_calls.remove(call.data)
valid_open_calls = [
{"entity_id": "cover.entity_close"},
{"entity_id": "cover.entity_slightly_open"},
{"entity_id": "cover.entity_open_tilt"},
]
assert len(open_calls) == 3
for call in open_calls:
assert call.domain == "cover"
assert call.data in valid_open_calls
valid_open_calls.remove(call.data)
valid_close_tilt_calls = [
{"entity_id": "cover.entity_open_tilt"},
{"entity_id": "cover.entity_entirely_open"},
]
assert len(close_tilt_calls) == 2
for call in close_tilt_calls:
assert call.domain == "cover"
assert call.data in valid_close_tilt_calls
valid_close_tilt_calls.remove(call.data)
assert len(open_tilt_calls) == 1
assert open_tilt_calls[0].domain == "cover"
assert open_tilt_calls[0].data == {"entity_id": "cover.entity_close_tilt"}
assert len(position_calls) == 1
assert position_calls[0].domain == "cover"
assert position_calls[0].data == {
"entity_id": "cover.entity_close_attr",
ATTR_POSITION: 50,
}
assert len(position_tilt_calls) == 1
assert position_tilt_calls[0].domain == "cover"
assert position_tilt_calls[0].data == {
"entity_id": "cover.entity_close_attr",
ATTR_TILT_POSITION: 50,
}
|
from datetime import timedelta
import logging
from typing import Optional
import async_timeout
from asyncpysupla import SuplaAPI
import voluptuous as vol
from homeassistant.const import CONF_ACCESS_TOKEN
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.discovery import async_load_platform
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
)
_LOGGER = logging.getLogger(__name__)
DOMAIN = "supla"
CONF_SERVER = "server"
CONF_SERVERS = "servers"
SCAN_INTERVAL = timedelta(seconds=10)
SUPLA_FUNCTION_HA_CMP_MAP = {
"CONTROLLINGTHEROLLERSHUTTER": "cover",
"CONTROLLINGTHEGATE": "cover",
"LIGHTSWITCH": "switch",
}
SUPLA_FUNCTION_NONE = "NONE"
SUPLA_SERVERS = "supla_servers"
SUPLA_COORDINATORS = "supla_coordinators"
SERVER_CONFIG = vol.Schema(
{
vol.Required(CONF_SERVER): cv.string,
vol.Required(CONF_ACCESS_TOKEN): cv.string,
}
)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{vol.Required(CONF_SERVERS): vol.All(cv.ensure_list, [SERVER_CONFIG])}
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, base_config):
"""Set up the Supla component."""
server_confs = base_config[DOMAIN][CONF_SERVERS]
hass.data[DOMAIN] = {SUPLA_SERVERS: {}, SUPLA_COORDINATORS: {}}
session = async_get_clientsession(hass)
for server_conf in server_confs:
server_address = server_conf[CONF_SERVER]
server = SuplaAPI(server_address, server_conf[CONF_ACCESS_TOKEN], session)
# Test connection
try:
srv_info = await server.get_server_info()
if srv_info.get("authenticated"):
hass.data[DOMAIN][SUPLA_SERVERS][server_conf[CONF_SERVER]] = server
else:
_LOGGER.error(
"Server: %s not configured. API call returned: %s",
server_address,
srv_info,
)
return False
except OSError:
_LOGGER.exception(
"Server: %s not configured. Error on Supla API access: ", server_address
)
return False
await discover_devices(hass, base_config)
return True
async def discover_devices(hass, hass_config):
"""
Run periodically to discover new devices.
Currently it is only run at startup.
"""
component_configs = {}
for server_name, server in hass.data[DOMAIN][SUPLA_SERVERS].items():
        async def _fetch_channels(server=server):
            # Bind the current server as a default argument; otherwise every
            # closure would share the loop variable and periodic refreshes for
            # all coordinators would query the last configured server.
async with async_timeout.timeout(SCAN_INTERVAL.total_seconds()):
channels = {
channel["id"]: channel
for channel in await server.get_channels(
include=["iodevice", "state", "connected"]
)
}
return channels
coordinator = DataUpdateCoordinator(
hass,
_LOGGER,
name=f"{DOMAIN}-{server_name}",
update_method=_fetch_channels,
update_interval=SCAN_INTERVAL,
)
await coordinator.async_refresh()
hass.data[DOMAIN][SUPLA_COORDINATORS][server_name] = coordinator
for channel_id, channel in coordinator.data.items():
channel_function = channel["function"]["name"]
if channel_function == SUPLA_FUNCTION_NONE:
_LOGGER.debug(
"Ignored function: %s, channel id: %s",
channel_function,
channel["id"],
)
continue
component_name = SUPLA_FUNCTION_HA_CMP_MAP.get(channel_function)
if component_name is None:
_LOGGER.warning(
"Unsupported function: %s, channel id: %s",
channel_function,
channel["id"],
)
continue
channel["server_name"] = server_name
component_configs.setdefault(component_name, []).append(
{
"channel_id": channel_id,
"server_name": server_name,
"function_name": channel["function"]["name"],
}
)
# Load discovered devices
for component_name, config in component_configs.items():
await async_load_platform(hass, component_name, DOMAIN, config, hass_config)
class SuplaChannel(CoordinatorEntity):
"""Base class of a Supla Channel (an equivalent of HA's Entity)."""
def __init__(self, config, server, coordinator):
"""Init from config, hookup[ server and coordinator."""
super().__init__(coordinator)
self.server_name = config["server_name"]
self.channel_id = config["channel_id"]
self.server = server
@property
def channel_data(self):
"""Return channel data taken from coordinator."""
return self.coordinator.data.get(self.channel_id)
@property
def unique_id(self) -> str:
"""Return a unique ID."""
return "supla-{}-{}".format(
self.channel_data["iodevice"]["gUIDString"].lower(),
self.channel_data["channelNumber"],
)
@property
def name(self) -> Optional[str]:
"""Return the name of the device."""
return self.channel_data["caption"]
@property
def available(self) -> bool:
"""Return True if entity is available."""
if self.channel_data is None:
return False
state = self.channel_data.get("state")
if state is None:
return False
return state.get("connected")
async def async_action(self, action, **add_pars):
"""
Run server action.
Actions are currently hardcoded in components.
Supla's API enables autodiscovery
"""
_LOGGER.debug(
"Executing action %s on channel %d, params: %s",
action,
self.channel_data["id"],
add_pars,
)
await self.server.execute_action(self.channel_data["id"], action, **add_pars)
# Update state
await self.coordinator.async_request_refresh()
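# Illustrative sketch of how a platform module could consume the discovery
# payload produced by discover_devices() above. A real platform would register
# a domain-specific subclass of SuplaChannel; the base class is used here only
# to keep the example self-contained.
async def _example_async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    if discovery_info is None:
        return
    entities = []
    for device in discovery_info:
        server_name = device["server_name"]
        server = hass.data[DOMAIN][SUPLA_SERVERS][server_name]
        coordinator = hass.data[DOMAIN][SUPLA_COORDINATORS][server_name]
        entities.append(SuplaChannel(device, server, coordinator))
    async_add_entities(entities)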
|
import sys
import os, os.path
from io import open
import glob, time
from lark import Lark
from lark.indenter import Indenter
# __path__ = os.path.dirname(__file__)
class PythonIndenter(Indenter):
NL_type = '_NEWLINE'
OPEN_PAREN_types = ['LPAR', 'LSQB', 'LBRACE']
CLOSE_PAREN_types = ['RPAR', 'RSQB', 'RBRACE']
INDENT_type = '_INDENT'
DEDENT_type = '_DEDENT'
tab_len = 8
kwargs = dict(rel_to=__file__, postlex=PythonIndenter(), start='file_input')
python_parser2 = Lark.open('python2.lark', parser='lalr', **kwargs)
python_parser3 = Lark.open('python3.lark',parser='lalr', **kwargs)
python_parser2_earley = Lark.open('python2.lark', parser='earley', lexer='standard', **kwargs)
try:
xrange
except NameError:
chosen_parser = python_parser3
else:
chosen_parser = python_parser2
def _read(fn, *args):
kwargs = {'encoding': 'iso-8859-1'}
with open(fn, *args, **kwargs) as f:
return f.read()
def _get_lib_path():
if os.name == 'nt':
if 'PyPy' in sys.version:
return os.path.join(sys.prefix, 'lib-python', sys.winver)
else:
return os.path.join(sys.prefix, 'Lib')
else:
return [x for x in sys.path if x.endswith('%s.%s' % sys.version_info[:2])][0]
def test_python_lib():
path = _get_lib_path()
start = time.time()
files = glob.glob(path+'/*.py')
for f in files:
print( f )
chosen_parser.parse(_read(os.path.join(path, f)) + '\n')
end = time.time()
print( "test_python_lib (%d files), time: %s secs"%(len(files), end-start) )
def test_earley_equals_lalr():
path = _get_lib_path()
files = glob.glob(path+'/*.py')
for f in files:
print( f )
tree1 = python_parser2.parse(_read(os.path.join(path, f)) + '\n')
tree2 = python_parser2_earley.parse(_read(os.path.join(path, f)) + '\n')
assert tree1 == tree2
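# Illustrative sketch (not one of the timed tests above): parse a tiny inline
# program with whichever parser was selected for the running interpreter.
def _example_parse_snippet():
    source = "def add(a, b):\n    return a + b\n"
    return chosen_parser.parse(source + '\n')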
if __name__ == '__main__':
test_python_lib()
# test_earley_equals_lalr()
# python_parser3.parse(_read(sys.argv[1]) + '\n')
|
from flexx.util.testing import run_tests_if_main, skipif, skip, raises
from flexx.event.both_tester import run_in_both
from flexx import event
from flexx.event._dict import isidentifier
loop = event.loop
class Foo(event.Component):
def __init__(self):
super().__init__()
self.r = []
print('init')
@event.reaction('!foo')
def on_foo(self, *events):
self.r.append(len(events))
print(len(events))
## Tests for both
@run_in_both()
def test_calllater():
"""
xx
called later
called later
xx
called with 3
called with 4
xx
called with 3 and 4
called with 5 and 6
"""
def x1():
print('called later')
def x2(i):
print('called with', i)
def x3(i, j):
print('called with', i, 'and', j)
loop.call_soon(x1)
loop.call_soon(x1)
print('xx')
loop.iter()
loop.call_soon(x2, 3)
loop.call_soon(x2, 4)
print('xx')
loop.iter()
loop.call_soon(x3, 3, 4)
loop.call_soon(x3, 5, 6)
print('xx')
loop.iter()
@run_in_both(Foo)
def test_iter():
"""
init
-
true
1
false
init
-
true
2
false
"""
foo = Foo()
foo.emit('foo', {})
print('-')
print(loop.has_pending())
loop.iter()
print(loop.has_pending())
foo = Foo()
foo.emit('foo', {})
foo.emit('foo', {})
print('-')
print(loop.has_pending())
loop.iter()
print(loop.has_pending())
@run_in_both()
def test_iter_fail():
"""
1
ok
1
? AttributeError
"""
# Failing func call
res = []
def fail():
print('1')
raise AttributeError('xx')
try:
fail()
print('bad')
except AttributeError:
print('ok')
# When handled by the loop, error is printed, but no fail
loop.call_soon(fail)
loop.iter()
@run_in_both(Foo)
def test_context():
"""
init
1
init
2
"""
foo = Foo()
with loop:
foo.emit('foo', {})
foo = Foo()
with loop:
foo.emit('foo', {})
foo.emit('foo', {})
assert not loop.can_mutate()
@run_in_both(Foo)
def test_loop_reset():
"""
init
-
1
"""
foo = Foo()
foo.emit('foo', {})
foo.emit('foo', {})
foo.emit('foo', {})
    loop._process_calls()  # run the pending call-later that stops capturing events
loop.reset()
loop.iter()
print('-')
foo.emit('foo', {})
foo.emit('foo', {})
    loop._process_calls()  # run the pending call-later that stops capturing events
loop.reset()
foo.emit('foo', {})
loop.iter()
@run_in_both(Foo)
def test_loop_cannot_call_iter():
"""
? Cannot call
"""
def callback():
try:
loop.iter()
except RuntimeError as err:
print(err)
loop.call_soon(callback)
loop.iter()
## Tests for only Python
def test_loop_asyncio():
import asyncio
aio_loop = asyncio.new_event_loop()
loop.integrate(aio_loop, reset=False)
res = []
def callback():
res.append(1)
loop.call_soon(callback)
aio_loop.stop()
aio_loop.run_forever()
assert len(res) == 1
# Now run wrong loop
aio_loop = asyncio.new_event_loop()
# loop.integrate(aio_loop, reset=False) -> dont do this (yet)
loop.call_soon(callback)
aio_loop.stop()
aio_loop.run_forever()
assert len(res) == 1
loop.integrate(aio_loop, reset=False) # but do it now
aio_loop.stop()
aio_loop.run_forever()
aio_loop.stop()
aio_loop.run_forever()
assert len(res) == 2
def xx_disabled_test_integrate():
res = []
def calllater(f):
res.append(f)
ori = event.loop._call_soon_func
foo = Foo()
event.loop.integrate(calllater)
foo.emit('foo', {})
foo.emit('foo', {})
assert len(res) == 1 and res[0].__name__ == 'iter'
with raises(ValueError):
event.loop.integrate('not a callable')
event.loop._call_soon_func = ori
run_tests_if_main()
|
import re
from django.utils.translation import gettext_lazy as _
from .format import BaseFormatCheck
ANGULARJS_INTERPOLATION_MATCH = re.compile(
r"""
{{ # start symbol
\s* # ignore whitespace
((.+?))
\s* # ignore whitespace
}} # end symbol
""",
re.VERBOSE,
)
WHITESPACE = re.compile(r"\s+")
class AngularJSInterpolationCheck(BaseFormatCheck):
"""Check for AngularJS interpolation string."""
check_id = "angularjs_format"
name = _("AngularJS interpolation string")
description = _("AngularJS interpolation strings do not match source")
regexp = ANGULARJS_INTERPOLATION_MATCH
def cleanup_string(self, text):
return WHITESPACE.sub("", text)
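# Illustrative behaviour of the pattern above (a sketch, not part of the check
# API): because the expression is wrapped in a doubled group, ``findall``
# yields a 2-tuple per placeholder.
#
#     ANGULARJS_INTERPOLATION_MATCH.findall("Hello {{ user.name }}!")
#     # -> [("user.name", "user.name")]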
|
import numpy as np
from tensornetwork.block_sparse.index import Index
from tensornetwork.block_sparse.blocksparsetensor import (BlockSparseTensor,
ChargeArray,
tensordot)
from tensornetwork.block_sparse.utils import (intersect, flatten,
get_real_dtype, _randn, _random)
from tensornetwork.block_sparse.blocksparse_utils import (
_find_transposed_diagonal_sparse_blocks, _find_diagonal_sparse_blocks,
compute_num_nonzero, compute_sparse_lookup)
from typing import List, Union, Any, Tuple, Type, Optional, Text, Sequence
from tensornetwork.block_sparse.initialization import empty_like
def norm(tensor: BlockSparseTensor) -> float:
"""
The norm of the tensor.
"""
return np.linalg.norm(tensor.data)
def diag(tensor: ChargeArray) -> Any:
"""
Return a diagonal `BlockSparseTensor` from a `ChargeArray`, or
return the diagonal of a `BlockSparseTensor` as a `ChargeArray`.
For input of type `BlockSparseTensor`:
The full diagonal is obtained from finding the diagonal blocks of the
`BlockSparseTensor`, taking the diagonal elements of those and packing
the result into a ChargeArray. Note that the computed diagonal elements
are usually different from the diagonal elements obtained from
converting the `BlockSparseTensor` to dense storage and taking the diagonal.
Note that the flow of the resulting 1d `ChargeArray` object is `False`.
Args:
tensor: A `ChargeArray`.
Returns:
      ChargeArray: A 1d `ChargeArray` containing the diagonal of `tensor`,
or a diagonal matrix of type `BlockSparseTensor` containing `tensor`
on its diagonal.
"""
if tensor.ndim > 2:
raise ValueError("`diag` currently only implemented for matrices, "
"found `ndim={}".format(tensor.ndim))
if not isinstance(tensor, BlockSparseTensor):
if tensor.ndim > 1:
raise ValueError(
"`diag` currently only implemented for `ChargeArray` with ndim=1, "
"found `ndim={}`".format(tensor.ndim))
flat_charges = tensor._charges + tensor._charges
flat_flows = list(tensor._flows) + list(np.logical_not(tensor._flows))
flat_order = list(tensor.flat_order) + list(
np.asarray(tensor.flat_order) + len(tensor._charges))
tr_partition = len(tensor._order[0])
blocks, charges, shapes = _find_transposed_diagonal_sparse_blocks(
flat_charges, flat_flows, tr_partition, flat_order)
data = np.zeros(
np.int64(np.sum(np.prod(shapes, axis=0))), dtype=tensor.dtype)
lookup, unique, labels = compute_sparse_lookup(tensor._charges,
tensor._flows, charges)
for n, block in enumerate(blocks):
label = labels[np.nonzero(unique == charges[n])[0][0]]
data[block] = np.ravel(
np.diag(tensor.data[np.nonzero(lookup == label)[0]]))
order = [
tensor._order[0],
list(np.asarray(tensor._order[0]) + len(tensor._charges))
]
new_charges = [tensor._charges[0].copy(), tensor._charges[0].copy()]
return BlockSparseTensor(
data,
charges=new_charges,
flows=list(tensor._flows) + list(np.logical_not(tensor._flows)),
order=order,
check_consistency=False)
flat_charges = tensor._charges
flat_flows = tensor._flows
flat_order = tensor.flat_order
tr_partition = len(tensor._order[0])
sparse_blocks, charges, block_shapes = _find_transposed_diagonal_sparse_blocks( #pylint: disable=line-too-long
flat_charges, flat_flows, tr_partition, flat_order)
shapes = np.min(block_shapes, axis=0)
if len(sparse_blocks) > 0:
data = np.concatenate([
np.diag(np.reshape(tensor.data[sparse_blocks[n]], block_shapes[:, n]))
for n in range(len(sparse_blocks))
])
charge_labels = np.concatenate([
np.full(shapes[n], fill_value=n, dtype=np.int16)
for n in range(len(sparse_blocks))
])
else:
data = np.empty(0, dtype=tensor.dtype)
charge_labels = np.empty(0, dtype=np.int16)
newcharges = [charges[charge_labels]]
flows = [False]
return ChargeArray(data, newcharges, flows)
def reshape(tensor: ChargeArray, shape: Sequence[Union[Index,
int]]) -> ChargeArray:
"""
  Reshape `tensor` into `shape`.
`ChargeArray.reshape` works the same as the dense
version, with the notable exception that the tensor can only be
reshaped into a form compatible with its elementary shape.
The elementary shape is the shape determined by ChargeArray._charges.
For example, while the following reshaping is possible for regular
dense numpy tensor,
```
A = np.random.rand(6,6,6)
np.reshape(A, (2,3,6,6))
```
the same code for ChargeArray
```
q1 = U1Charge(np.random.randint(0,10,6))
q2 = U1Charge(np.random.randint(0,10,6))
q3 = U1Charge(np.random.randint(0,10,6))
i1 = Index(charges=q1,flow=False)
i2 = Index(charges=q2,flow=True)
i3 = Index(charges=q3,flow=False)
A = ChargeArray.randn(indices=[i1,i2,i3])
print(A.shape) #prints (6,6,6)
A.reshape((2,3,6,6)) #raises ValueError
```
raises a `ValueError` since (2,3,6,6)
is incompatible with the elementary shape (6,6,6) of the tensor.
Args:
tensor: A symmetric tensor.
shape: The new shape. Can either be a list of `Index`
or a list of `int`.
Returns:
ChargeArray: A new tensor reshaped into `shape`
"""
return tensor.reshape(shape)
def conj(tensor: ChargeArray) -> ChargeArray:
"""
Return the complex conjugate of `tensor` in a new
`ChargeArray`.
Args:
tensor: A `ChargeArray` object.
Returns:
ChargeArray
"""
return tensor.conj()
def transpose(tensor: ChargeArray,
order: Sequence[int] = np.asarray([1, 0]),
shuffle: Optional[bool] = False) -> ChargeArray:
"""
Transpose the tensor into the new order `order`. If `shuffle=False`
no data-reshuffling is done.
Args:
order: The new order of indices.
shuffle: If `True`, reshuffle data.
Returns:
ChargeArray: The transposed tensor.
"""
return tensor.transpose(order, shuffle)
def svd(matrix: BlockSparseTensor,
full_matrices: Optional[bool] = True,
compute_uv: Optional[bool] = True,
hermitian: Optional[bool] = False) -> Any:
"""
Compute the singular value decomposition of `matrix`.
  The matrix is factorized into `u * s * vh`, with
`u` and `vh` the left and right singular vectors of `matrix`,
and `s` its singular values.
Args:
matrix: A matrix (i.e. an order-2 tensor) of type `BlockSparseTensor`
full_matrices: If `True`, expand `u` and `v` to square matrices
If `False` return the "economic" svd, i.e. `u.shape[1]=s.shape[0]`
and `v.shape[0]=s.shape[1]`
compute_uv: If `True`, return `u` and `v`.
hermitian: If `True`, assume hermiticity of `matrix`.
Returns:
If `compute_uv` is `True`: Three BlockSparseTensors `U,S,V`.
If `compute_uv` is `False`: A BlockSparseTensors `S` containing the
singular values.
"""
if matrix.ndim != 2:
raise NotImplementedError("svd currently supports only tensors of order 2.")
flat_charges = matrix._charges
flat_flows = matrix._flows
flat_order = matrix.flat_order
tr_partition = len(matrix._order[0])
blocks, charges, shapes = _find_transposed_diagonal_sparse_blocks(
flat_charges, flat_flows, tr_partition, flat_order)
u_blocks = []
singvals = []
v_blocks = []
for n, block in enumerate(blocks):
out = np.linalg.svd(
np.reshape(matrix.data[block], shapes[:, n]), full_matrices, compute_uv,
hermitian)
if compute_uv:
u_blocks.append(out[0])
singvals.append(out[1])
v_blocks.append(out[2])
else:
singvals.append(out)
tmp_labels = [
np.full(len(singvals[n]), fill_value=n, dtype=np.int16)
for n in range(len(singvals))
]
if len(tmp_labels) > 0:
left_singval_charge_labels = np.concatenate(tmp_labels)
else:
left_singval_charge_labels = np.empty(0, dtype=np.int16)
left_singval_charge = charges[left_singval_charge_labels]
if len(singvals) > 0:
all_singvals = np.concatenate(singvals)
else:
all_singvals = np.empty(0, dtype=get_real_dtype(matrix.dtype))
S = ChargeArray(all_singvals, [left_singval_charge], [False])
if compute_uv:
#define the new charges on the two central bonds
tmp_left_labels = [
np.full(u_blocks[n].shape[1], fill_value=n, dtype=np.int16)
for n in range(len(u_blocks))
]
if len(tmp_left_labels) > 0:
left_charge_labels = np.concatenate(tmp_left_labels)
else:
left_charge_labels = np.empty(0, dtype=np.int16)
tmp_right_labels = [
np.full(v_blocks[n].shape[0], fill_value=n, dtype=np.int16)
for n in range(len(v_blocks))
]
if len(tmp_right_labels) > 0:
right_charge_labels = np.concatenate(tmp_right_labels)
else:
right_charge_labels = np.empty(0, dtype=np.int16)
new_left_charge = charges[left_charge_labels]
new_right_charge = charges[right_charge_labels]
charges_u = [new_left_charge
] + [matrix._charges[o] for o in matrix._order[0]]
order_u = [[0]] + [list(np.arange(1, len(matrix._order[0]) + 1))]
flows_u = [True] + [matrix._flows[o] for o in matrix._order[0]]
charges_v = [new_right_charge
] + [matrix._charges[o] for o in matrix._order[1]]
flows_v = [False] + [matrix._flows[o] for o in matrix._order[1]]
order_v = [[0]] + [list(np.arange(1, len(matrix._order[1]) + 1))]
# We fill in data into the transposed U
# note that transposing is essentially free
if len(u_blocks) > 0:
all_u_blocks = np.concatenate([np.ravel(u.T) for u in u_blocks])
all_v_blocks = np.concatenate([np.ravel(v) for v in v_blocks])
else:
all_u_blocks = np.empty(0, dtype=matrix.dtype)
all_v_blocks = np.empty(0, dtype=matrix.dtype)
return BlockSparseTensor(
all_u_blocks,
charges=charges_u,
flows=flows_u,
order=order_u,
check_consistency=False).transpose((1, 0)), S, BlockSparseTensor(
all_v_blocks,
charges=charges_v,
flows=flows_v,
order=order_v,
check_consistency=False)
return S
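def _example_svd_roundtrip(matrix: BlockSparseTensor) -> float:
  """Illustrative sketch (not part of the public API): reconstruct a rank-2
  `BlockSparseTensor` from its reduced SVD factors and return the norm of the
  reconstruction error. Only `svd`, `diag`, `tensordot` and `norm` from this
  module are used."""
  u, s, vh = svd(matrix, full_matrices=False)
  reconstructed = tensordot(u, tensordot(diag(s), vh, ([1], [0])), ([1], [0]))
  return norm(reconstructed - matrix)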
def qr(matrix: BlockSparseTensor, mode: Text = 'reduced') -> Any:
"""
Compute the qr decomposition of an `M` by `N` matrix `matrix`.
The matrix is factorized into `q*r`, with
`q` an orthogonal matrix and `r` an upper triangular matrix.
Args:
matrix: A matrix (i.e. a rank-2 tensor) of type `BlockSparseTensor`
mode : Can take values {'reduced', 'complete', 'r', 'raw'}.
If K = min(M, N), then
* 'reduced' : returns q, r with dimensions (M, K), (K, N) (default)
* 'complete' : returns q, r with dimensions (M, M), (M, N)
* 'r' : returns r only with dimensions (K, N)
Returns:
(BlockSparseTensor,BlockSparseTensor): If mode = `reduced` or `complete`
BlockSparseTensor: If mode = `r`.
"""
if matrix.ndim != 2:
raise NotImplementedError("qr currently supports only rank-2 tensors.")
if mode not in ('reduced', 'complete', 'raw', 'r'):
raise ValueError('unknown value {} for input `mode`'.format(mode))
if mode == 'raw':
    raise NotImplementedError('mode `raw` currently not supported')
flat_charges = matrix._charges
flat_flows = matrix._flows
flat_order = matrix.flat_order
tr_partition = len(matrix._order[0])
blocks, charges, shapes = _find_transposed_diagonal_sparse_blocks(
flat_charges, flat_flows, tr_partition, flat_order)
q_blocks = []
r_blocks = []
for n, block in enumerate(blocks):
out = np.linalg.qr(np.reshape(matrix.data[block], shapes[:, n]), mode)
if mode in ('reduced', 'complete'):
q_blocks.append(out[0])
r_blocks.append(out[1])
else:
r_blocks.append(out)
tmp_r_charge_labels = [
np.full(r_blocks[n].shape[0], fill_value=n, dtype=np.int16)
for n in range(len(r_blocks))
]
if len(tmp_r_charge_labels) > 0:
left_r_charge_labels = np.concatenate(tmp_r_charge_labels)
else:
left_r_charge_labels = np.empty(0, dtype=np.int16)
left_r_charge = charges[left_r_charge_labels]
charges_r = [left_r_charge] + [matrix._charges[o] for o in matrix._order[1]]
flows_r = [False] + [matrix._flows[o] for o in matrix._order[1]]
order_r = [[0]] + [list(np.arange(1, len(matrix._order[1]) + 1))]
if len(r_blocks) > 0:
all_r_blocks = np.concatenate([np.ravel(r) for r in r_blocks])
else:
all_r_blocks = np.empty(0, dtype=matrix.dtype)
R = BlockSparseTensor(
all_r_blocks,
charges=charges_r,
flows=flows_r,
order=order_r,
check_consistency=False)
if mode in ('reduced', 'complete'):
tmp_right_q_charge_labels = [
np.full(q_blocks[n].shape[1], fill_value=n, dtype=np.int16)
for n in range(len(q_blocks))
]
if len(tmp_right_q_charge_labels) > 0:
right_q_charge_labels = np.concatenate(tmp_right_q_charge_labels)
else:
right_q_charge_labels = np.empty(0, dtype=np.int16)
right_q_charge = charges[right_q_charge_labels]
charges_q = [
right_q_charge,
] + [matrix._charges[o] for o in matrix._order[0]]
order_q = [[0]] + [list(np.arange(1, len(matrix._order[0]) + 1))]
flows_q = [True] + [matrix._flows[o] for o in matrix._order[0]]
if len(q_blocks) > 0:
all_q_blocks = np.concatenate([np.ravel(q.T) for q in q_blocks])
else:
all_q_blocks = np.empty(0, dtype=matrix.dtype)
return BlockSparseTensor(
all_q_blocks,
charges=charges_q,
flows=flows_q,
order=order_q,
check_consistency=False).transpose((1, 0)), R
return R
def eigh(matrix: BlockSparseTensor,
UPLO: Optional[Text] = 'L') -> Tuple[ChargeArray, BlockSparseTensor]:
"""
Compute the eigen decomposition of a hermitian `M` by `M` matrix `matrix`.
Args:
matrix: A matrix (i.e. a rank-2 tensor) of type `BlockSparseTensor`
Returns:
(ChargeArray,BlockSparseTensor): The eigenvalues and eigenvectors
"""
if matrix.ndim != 2:
raise NotImplementedError("eigh currently supports only rank-2 tensors.")
flat_charges = matrix._charges
flat_flows = matrix._flows
flat_order = matrix.flat_order
tr_partition = len(matrix._order[0])
blocks, charges, shapes = _find_transposed_diagonal_sparse_blocks(
flat_charges, flat_flows, tr_partition, flat_order)
eigvals = []
v_blocks = []
for n, block in enumerate(blocks):
e, v = np.linalg.eigh(np.reshape(matrix.data[block], shapes[:, n]), UPLO)
eigvals.append(e)
v_blocks.append(v)
tmp_labels = [
np.full(len(eigvals[n]), fill_value=n, dtype=np.int16)
for n in range(len(eigvals))
]
if len(tmp_labels) > 0:
eigvalscharge_labels = np.concatenate(tmp_labels)
else:
eigvalscharge_labels = np.empty(0, dtype=np.int16)
eigvalscharge = charges[eigvalscharge_labels]
if len(eigvals) > 0:
all_eigvals = np.concatenate(eigvals)
else:
all_eigvals = np.empty(0, dtype=get_real_dtype(matrix.dtype))
E = ChargeArray(all_eigvals, [eigvalscharge], [False])
charges_v = [eigvalscharge] + [matrix._charges[o] for o in matrix._order[0]]
order_v = [[0]] + [list(np.arange(1, len(matrix._order[0]) + 1))]
flows_v = [True] + [matrix._flows[o] for o in matrix._order[0]]
if len(v_blocks) > 0:
all_v_blocks = np.concatenate([np.ravel(v.T) for v in v_blocks])
else:
all_v_blocks = np.empty(0, dtype=matrix.dtype)
V = BlockSparseTensor(
all_v_blocks,
charges=charges_v,
flows=flows_v,
order=order_v,
check_consistency=False).transpose()
return E, V #pytype: disable=bad-return-type
def eig(matrix: BlockSparseTensor) -> Tuple[ChargeArray, BlockSparseTensor]:
"""
Compute the eigen decomposition of an `M` by `M` matrix `matrix`.
Args:
matrix: A matrix (i.e. a rank-2 tensor) of type `BlockSparseTensor`
Returns:
(ChargeArray,BlockSparseTensor): The eigenvalues and eigenvectors
"""
if matrix.ndim != 2:
raise NotImplementedError("eig currently supports only rank-2 tensors.")
flat_charges = matrix._charges
flat_flows = matrix._flows
flat_order = matrix.flat_order
tr_partition = len(matrix._order[0])
blocks, charges, shapes = _find_transposed_diagonal_sparse_blocks(
flat_charges, flat_flows, tr_partition, flat_order)
eigvals = []
v_blocks = []
for n, block in enumerate(blocks):
e, v = np.linalg.eig(np.reshape(matrix.data[block], shapes[:, n]))
eigvals.append(e)
v_blocks.append(v)
tmp_labels = [
np.full(len(eigvals[n]), fill_value=n, dtype=np.int16)
for n in range(len(eigvals))
]
if len(tmp_labels) > 0:
eigvalscharge_labels = np.concatenate(tmp_labels)
else:
eigvalscharge_labels = np.empty(0, dtype=np.int16)
eigvalscharge = charges[eigvalscharge_labels]
if len(eigvals) > 0:
all_eigvals = np.concatenate(eigvals)
else:
all_eigvals = np.empty(0, dtype=get_real_dtype(matrix.dtype))
E = ChargeArray(all_eigvals, [eigvalscharge], [False])
charges_v = [eigvalscharge] + [matrix._charges[o] for o in matrix._order[0]]
order_v = [[0]] + [list(np.arange(1, len(matrix._order[0]) + 1))]
flows_v = [True] + [matrix._flows[o] for o in matrix._order[0]]
if len(v_blocks) > 0:
all_v_blocks = np.concatenate([np.ravel(v.T) for v in v_blocks])
else:
all_v_blocks = np.empty(0, dtype=matrix.dtype)
V = BlockSparseTensor(
all_v_blocks,
charges=charges_v,
flows=flows_v,
order=order_v,
check_consistency=False).transpose()
return E, V #pytype: disable=bad-return-type
def inv(matrix: BlockSparseTensor) -> BlockSparseTensor:
"""
Compute the matrix inverse of `matrix`.
Returns:
BlockSparseTensor: The inverse of `matrix`.
"""
if matrix.ndim != 2:
raise ValueError("`inv` can only be taken for matrices, "
"found tensor.ndim={}".format(matrix.ndim))
flat_charges = matrix._charges
flat_flows = matrix._flows
flat_order = matrix.flat_order
tr_partition = len(matrix._order[0])
blocks, _, shapes = _find_transposed_diagonal_sparse_blocks(
flat_charges, flat_flows, tr_partition, flat_order)
data = np.empty(np.sum(np.prod(shapes, axis=0)), dtype=matrix.dtype)
for n, block in enumerate(blocks):
data[block] = np.ravel(
np.linalg.inv(np.reshape(matrix.data[block], shapes[:, n])).T)
#pylint: disable=line-too-long
return BlockSparseTensor(
data=data,
charges=matrix._charges,
flows=np.logical_not(matrix._flows),
order=matrix._order,
check_consistency=False).transpose((1, 0)) #pytype: disable=bad-return-type
def sqrt(
tensor: Union[BlockSparseTensor, ChargeArray]
) -> Union[ChargeArray, BlockSparseTensor]:
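  """
  Return the elementwise square root of `tensor` as a new tensor of the same
  type; only the `data` array changes, the block-sparse structure is reused.
  """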
obj = tensor.__new__(type(tensor))
obj.__init__(
np.sqrt(tensor.data),
charges=tensor._charges,
flows=tensor._flows,
order=tensor._order,
check_consistency=False)
return obj
def eye(column_index: Index,
row_index: Optional[Index] = None,
dtype: Optional[Type[np.number]] = None) -> BlockSparseTensor:
"""
Return an identity matrix.
Args:
column_index: The column index of the matrix.
row_index: The row index of the matrix.
dtype: The dtype of the matrix.
Returns:
BlockSparseTensor
"""
if row_index is None:
row_index = column_index.copy().flip_flow()
if dtype is None:
dtype = np.float64
blocks, _, shapes = _find_diagonal_sparse_blocks(
column_index.flat_charges + row_index.flat_charges,
column_index.flat_flows + row_index.flat_flows,
len(column_index.flat_charges))
data = np.empty(np.int64(np.sum(np.prod(shapes, axis=0))), dtype=dtype)
for n, block in enumerate(blocks):
data[block] = np.ravel(np.eye(shapes[0, n], shapes[1, n], dtype=dtype))
order = [list(np.arange(0, len(column_index.flat_charges)))] + [
list(
np.arange(
len(column_index.flat_charges),
len(column_index.flat_charges) + len(row_index.flat_charges)))
]
return BlockSparseTensor(
data=data,
charges=column_index.flat_charges + row_index.flat_charges,
flows=column_index.flat_flows + row_index.flat_flows,
order=order,
check_consistency=False)
def trace(tensor: BlockSparseTensor,
axes: Optional[Sequence[int]] = None) -> BlockSparseTensor:
"""
Compute the trace of a matrix or tensor. If input has `ndim>2`, take
the trace over the last two dimensions.
Args:
tensor: A `BlockSparseTensor`.
axes: The axes over which the trace should be computed.
Defaults to the last two indices of the tensor.
Returns:
BlockSparseTensor: The result of taking the trace.
"""
if tensor.ndim > 1:
if axes is None:
axes = (tensor.ndim - 2, tensor.ndim - 1)
if len(axes) != 2:
raise ValueError(f"`len(axes)` has to be 2, found `axes = {axes}`")
if not np.array_equal(tensor.flows[axes[0]],
np.logical_not(tensor.flows[axes[1]])):
raise ValueError(
f"trace indices for axes {axes} have non-matching flows.")
sparse_shape = tensor.sparse_shape
if sparse_shape[axes[0]].copy().flip_flow() != sparse_shape[axes[1]]:
raise ValueError(f"trace indices for axes {axes} are not matching")
#flatten the shape of `tensor`
out = tensor.reshape(
flatten([[tensor._charges[n].dim for n in o] for o in tensor._order]))
_, _, labels0 = np.intersect1d(
tensor._order[axes[0]], flatten(out._order), return_indices=True)
_, _, labels1 = np.intersect1d(
tensor._order[axes[1]], flatten(out._order), return_indices=True)
a0 = list(labels0[np.argsort(tensor._order[axes[0]])])
a1 = list(labels1[np.argsort(tensor._order[axes[1]])])
while len(a0) > 0:
i = a0.pop(0)
j = a1.pop(0)
identity = eye(
Index([out._charges[out._order[i][0]]],
[not out._flows[out._order[i][0]]]))
#pylint: disable=line-too-long
out = tensordot(out, identity, ([i, j], [0, 1])) # pytype: disable=wrong-arg-types
a0ar = np.asarray(a0)
mask_min = a0ar > np.min([i, j])
mask_max = a0ar > np.max([i, j])
a0ar[np.logical_and(mask_min, mask_max)] -= 2
a0ar[np.logical_xor(mask_min, mask_max)] -= 1
a1ar = np.asarray(a1)
mask_min = a1ar > np.min([i, j])
mask_max = a1ar > np.max([i, j])
a1ar[np.logical_and(mask_min, mask_max)] -= 2
a1ar[np.logical_xor(mask_min, mask_max)] -= 1
a0 = list(a0ar)
a1 = list(a1ar)
if out.ndim == 0:
return out.item()
return out # pytype: disable=bad-return-type
raise ValueError("trace can only be taken for tensors with ndim > 1")
def pinv(matrix: BlockSparseTensor,
rcond: Optional[float] = 1E-15,
hermitian: Optional[bool] = False) -> BlockSparseTensor:
"""
Compute the Moore-Penrose pseudo inverse of `matrix`.
Args:
rcond: Pseudo inverse cutoff.
Returns:
BlockSparseTensor: The pseudo inverse of `matrix`.
"""
if matrix.ndim != 2:
raise ValueError("`pinv` can only be taken for matrices, "
"found tensor.ndim={}".format(matrix.ndim))
flat_charges = matrix._charges
flat_flows = matrix._flows
flat_order = matrix.flat_order
tr_partition = len(matrix._order[0])
blocks, _, shapes = _find_transposed_diagonal_sparse_blocks(
flat_charges, flat_flows, tr_partition, flat_order)
data = np.empty(np.sum(np.prod(shapes, axis=0)), dtype=matrix.dtype)
for n, block in enumerate(blocks):
data[block] = np.ravel(
np.linalg.pinv(
np.reshape(matrix.data[block], shapes[:, n]),
rcond=rcond,
hermitian=hermitian).T)
#pylint: disable=line-too-long
return BlockSparseTensor(
data=data,
charges=matrix._charges,
flows=np.logical_not(matrix._flows),
order=matrix._order,
check_consistency=False).transpose((1, 0)) #pytype: disable=bad-return-type
def abs(tensor: BlockSparseTensor) -> BlockSparseTensor: #pylint: disable=redefined-builtin
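  """Return the elementwise absolute value of `tensor` (acts on `data` only)."""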
result = empty_like(tensor)
result.data = np.abs(tensor.data)
return result
def sign(tensor: BlockSparseTensor) -> BlockSparseTensor:
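  """Return the elementwise sign of `tensor` (acts on `data` only)."""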
result = empty_like(tensor)
result.data = np.sign(tensor.data)
return result
|
import keras.layers
from matchzoo.engine.base_model import BaseModel
from matchzoo.engine.param_table import ParamTable
from matchzoo.engine import hyper_spaces
class DenseBaseline(BaseModel):
"""
A simple densely connected baseline model.
Examples:
>>> model = DenseBaseline()
>>> model.params['mlp_num_layers'] = 2
>>> model.params['mlp_num_units'] = 300
>>> model.params['mlp_num_fan_out'] = 128
>>> model.params['mlp_activation_func'] = 'relu'
>>> model.guess_and_fill_missing_params(verbose=0)
>>> model.build()
>>> model.compile()
"""
@classmethod
def get_default_params(cls) -> ParamTable:
""":return: model default parameters."""
params = super().get_default_params(with_multi_layer_perceptron=True)
params['mlp_num_units'] = 256
params.get('mlp_num_units').hyper_space = \
hyper_spaces.quniform(16, 512)
params.get('mlp_num_layers').hyper_space = \
hyper_spaces.quniform(1, 5)
return params
def build(self):
"""Model structure."""
x_in = self._make_inputs()
x = keras.layers.concatenate(x_in)
x = self._make_multi_layer_perceptron_layer()(x)
x_out = self._make_output_layer()(x)
self._backend = keras.models.Model(inputs=x_in, outputs=x_out)
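# Illustrative sketch of inspecting the tunable space declared above (a
# REPL-style note; running it requires a working matchzoo installation):
#
#     params = DenseBaseline.get_default_params()
#     params['mlp_num_units']                  # 256 by default
#     params.get('mlp_num_units').hyper_space  # quniform(16, 512), used by tuners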
|
from collections import Counter
from scattertext.features.FeatsFromSpacyDoc import FeatsFromSpacyDoc
class UseFullDocAsMetadata(FeatsFromSpacyDoc):
def get_feats(self, doc):
return Counter()
def get_doc_metadata(self, doc):
'''
Parameters
----------
        doc : spacy.tokens.Doc
Returns
-------
Counter str -> count
'''
return Counter({str(doc): 1})
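# Illustrative usage sketch (assumes spaCy is installed; the model name is a
# placeholder): each parsed document becomes a single metadata feature keyed by
# its full text.
#
#     nlp = spacy.load('en_core_web_sm')
#     feats = UseFullDocAsMetadata()
#     feats.get_doc_metadata(nlp("The whole doc."))  # Counter({'The whole doc.': 1})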
|
from datetime import timedelta
from homeassistant.components.rflink import CONF_RECONNECT_INTERVAL
from homeassistant.const import (
EVENT_STATE_CHANGED,
STATE_OFF,
STATE_ON,
STATE_UNAVAILABLE,
)
import homeassistant.core as ha
import homeassistant.util.dt as dt_util
from tests.async_mock import patch
from tests.common import async_fire_time_changed
from tests.components.rflink.test_init import mock_rflink
DOMAIN = "binary_sensor"
CONFIG = {
"rflink": {
"port": "/dev/ttyABC0",
"ignore_devices": ["ignore_wildcard_*", "ignore_sensor"],
},
DOMAIN: {
"platform": "rflink",
"devices": {
"test": {"name": "test", "device_class": "door"},
"test2": {
"name": "test2",
"device_class": "motion",
"off_delay": 30,
"force_update": True,
},
},
},
}
async def test_default_setup(hass, monkeypatch):
"""Test all basic functionality of the rflink sensor component."""
# setup mocking rflink module
event_callback, create, _, _ = await mock_rflink(hass, CONFIG, DOMAIN, monkeypatch)
# make sure arguments are passed
assert create.call_args_list[0][1]["ignore"]
# test default state of sensor loaded from config
config_sensor = hass.states.get("binary_sensor.test")
assert config_sensor
assert config_sensor.state == STATE_OFF
assert config_sensor.attributes["device_class"] == "door"
# test on event for config sensor
event_callback({"id": "test", "command": "on"})
await hass.async_block_till_done()
assert hass.states.get("binary_sensor.test").state == STATE_ON
# test off event for config sensor
event_callback({"id": "test", "command": "off"})
await hass.async_block_till_done()
assert hass.states.get("binary_sensor.test").state == STATE_OFF
# test allon event for config sensor
event_callback({"id": "test", "command": "allon"})
await hass.async_block_till_done()
assert hass.states.get("binary_sensor.test").state == STATE_ON
# test alloff event for config sensor
event_callback({"id": "test", "command": "alloff"})
await hass.async_block_till_done()
assert hass.states.get("binary_sensor.test").state == STATE_OFF
async def test_entity_availability(hass, monkeypatch):
"""If Rflink device is disconnected, entities should become unavailable."""
    # Make sure Rflink mock does not 'recover' too quickly from the
# disconnect or else the unavailability cannot be measured
config = CONFIG
failures = [True, True]
config[CONF_RECONNECT_INTERVAL] = 60
# Create platform and entities
_, _, _, disconnect_callback = await mock_rflink(
hass, config, DOMAIN, monkeypatch, failures=failures
)
# Entities are available by default
assert hass.states.get("binary_sensor.test").state == STATE_OFF
# Mock a disconnect of the Rflink device
disconnect_callback()
# Wait for dispatch events to propagate
await hass.async_block_till_done()
# Entity should be unavailable
assert hass.states.get("binary_sensor.test").state == STATE_UNAVAILABLE
# Reconnect the Rflink device
disconnect_callback()
# Wait for dispatch events to propagate
await hass.async_block_till_done()
# Entities should be available again
assert hass.states.get("binary_sensor.test").state == STATE_OFF
async def test_off_delay(hass, legacy_patchable_time, monkeypatch):
"""Test off_delay option."""
# setup mocking rflink module
event_callback, create, _, _ = await mock_rflink(hass, CONFIG, DOMAIN, monkeypatch)
# make sure arguments are passed
assert create.call_args_list[0][1]["ignore"]
events = []
on_event = {"id": "test2", "command": "on"}
@ha.callback
def callback(event):
"""Verify event got called."""
events.append(event)
hass.bus.async_listen(EVENT_STATE_CHANGED, callback)
now = dt_util.utcnow()
# fake time and turn on sensor
future = now + timedelta(seconds=0)
with patch(("homeassistant.helpers.event.dt_util.utcnow"), return_value=future):
async_fire_time_changed(hass, future)
event_callback(on_event)
await hass.async_block_till_done()
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test2")
assert state.state == STATE_ON
assert len(events) == 1
# fake time and turn on sensor again
future = now + timedelta(seconds=15)
with patch(("homeassistant.helpers.event.dt_util.utcnow"), return_value=future):
async_fire_time_changed(hass, future)
event_callback(on_event)
await hass.async_block_till_done()
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test2")
assert state.state == STATE_ON
assert len(events) == 2
# fake time and verify sensor still on (de-bounce)
future = now + timedelta(seconds=35)
with patch(("homeassistant.helpers.event.dt_util.utcnow"), return_value=future):
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test2")
assert state.state == STATE_ON
assert len(events) == 2
# fake time and verify sensor is off
future = now + timedelta(seconds=45)
with patch(("homeassistant.helpers.event.dt_util.utcnow"), return_value=future):
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test2")
assert state.state == STATE_OFF
assert len(events) == 3
|
import numpy as np
from scattertext.termsignificance import LogOddsRatioUninformativeDirichletPrior
class LogOddsUninformativePriorScore:
@staticmethod
def get_score(cat_word_counts, not_cat_word_counts, alpha_w=0.01):
X = LogOddsUninformativePriorScore. \
_turn_counts_into_matrix(cat_word_counts, not_cat_word_counts)
p_vals = LogOddsRatioUninformativeDirichletPrior(alpha_w).get_p_vals(X)
scores = LogOddsUninformativePriorScore._turn_pvals_into_scores(p_vals)
return scores
@staticmethod
def get_delta_hats(cat_word_counts, not_cat_word_counts, alpha_w=0.01):
return (LogOddsRatioUninformativeDirichletPrior(alpha_w)
.get_log_odds_with_prior(LogOddsUninformativePriorScore
._turn_counts_into_matrix(cat_word_counts,
not_cat_word_counts)))
@staticmethod
def get_thresholded_score(cat_word_counts, not_cat_word_counts,
alpha_w=0.01,
threshold=0.1):
scores = (LogOddsRatioUninformativeDirichletPrior(alpha_w)
.get_p_values_from_counts(cat_word_counts, not_cat_word_counts)) * 2 - 1
# scores = (np.min(np.array([1 - scores, scores]), axis=0) <= threshold) * scores
return scores * ((scores < - (1. - (threshold * 2)))
| (scores > (1. - (threshold * 2))))
@staticmethod
def _turn_counts_into_matrix(cat_word_counts, not_cat_word_counts):
return np.array([cat_word_counts, not_cat_word_counts]).T
@staticmethod
def _turn_pvals_into_scores(p_vals):
# return np.max(np.array([1 - p_vals, p_vals]), axis=0)
return -((p_vals - 0.5) * 2)
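# Illustrative usage sketch (added for clarity, not part of the original module):
# score terms from two aligned count vectors. The counts are assumed to be
# same-length 1-d numpy arrays, as implied by _turn_counts_into_matrix; the
# resulting scores lie in [-1, 1].
def _example_get_score():
    cat_word_counts = np.array([10, 2, 0, 5])
    not_cat_word_counts = np.array([1, 2, 8, 5])
    return LogOddsUninformativePriorScore.get_score(cat_word_counts, not_cat_word_counts)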
|
from tensornetwork.network_operations import get_all_edges, get_subgraph_dangling
from tensornetwork.network_components import AbstractNode, Edge
from typing import (Any, Callable, Dict, List, Set, Tuple, Iterable, Text)
# `opt_einsum` algorithm method typing
Algorithm = Callable[[List[Set[Edge]], Set[Edge], Dict[Edge, Any]],
List[Tuple[int, int]]]
def multi_remove(elems: List[Any], indices: List[int]) -> List[Any]:
"""Remove multiple indicies in a list at once."""
return [i for j, i in enumerate(elems) if j not in indices]
def get_path(
nodes: Iterable[AbstractNode],
algorithm: Algorithm) -> Tuple[List[Tuple[int, int]], List[AbstractNode]]:
"""Calculates the contraction paths using `opt_einsum` methods.
Args:
nodes: An iterable of nodes.
algorithm: `opt_einsum` method to use for calculating the contraction path.
Returns:
The optimal contraction path as returned by `opt_einsum`.
"""
nodes = list(nodes)
input_sets = [set(node.edges) for node in nodes]
output_set = get_subgraph_dangling(nodes)
size_dict = {edge: edge.dimension for edge in get_all_edges(nodes)}
return algorithm(input_sets, output_set, size_dict), nodes
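# Illustrative sketch (an assumption, not part of the original module): a typical
# `algorithm` argument is one of the `opt_einsum` path finders, which accept
# (input_sets, output_set, size_dict) and return a pairwise contraction path.
def _greedy_path_example(nodes: Iterable[AbstractNode]):
  """Sketch: compute a contraction path with opt_einsum's greedy heuristic."""
  import opt_einsum  # assumed available; tensornetwork builds on opt_einsum
  return get_path(nodes, opt_einsum.paths.greedy)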
|
import os
import uuid
import pytest
from homeassistant import data_entry_flow
from homeassistant.auth import AuthManager, auth_store, models as auth_models
from homeassistant.auth.providers import command_line
from homeassistant.const import CONF_TYPE
from tests.async_mock import AsyncMock
@pytest.fixture
def store(hass):
"""Mock store."""
return auth_store.AuthStore(hass)
@pytest.fixture
def provider(hass, store):
"""Mock provider."""
return command_line.CommandLineAuthProvider(
hass,
store,
{
CONF_TYPE: "command_line",
command_line.CONF_COMMAND: os.path.join(
os.path.dirname(__file__), "test_command_line_cmd.sh"
),
command_line.CONF_ARGS: [],
command_line.CONF_META: False,
},
)
@pytest.fixture
def manager(hass, store, provider):
"""Mock manager."""
return AuthManager(hass, store, {(provider.type, provider.id): provider}, {})
async def test_create_new_credential(manager, provider):
"""Test that we create a new credential."""
credentials = await provider.async_get_or_create_credentials(
{"username": "good-user", "password": "good-pass"}
)
assert credentials.is_new is True
user = await manager.async_get_or_create_user(credentials)
assert user.is_active
async def test_match_existing_credentials(store, provider):
"""See if we match existing users."""
existing = auth_models.Credentials(
id=uuid.uuid4(),
auth_provider_type="command_line",
auth_provider_id=None,
data={"username": "good-user"},
is_new=False,
)
provider.async_credentials = AsyncMock(return_value=[existing])
credentials = await provider.async_get_or_create_credentials(
{"username": "good-user", "password": "irrelevant"}
)
assert credentials is existing
async def test_invalid_username(provider):
"""Test we raise if incorrect user specified."""
with pytest.raises(command_line.InvalidAuthError):
await provider.async_validate_login("bad-user", "good-pass")
async def test_invalid_password(provider):
"""Test we raise if incorrect password specified."""
with pytest.raises(command_line.InvalidAuthError):
await provider.async_validate_login("good-user", "bad-pass")
async def test_good_auth(provider):
"""Test nothing is raised with good credentials."""
await provider.async_validate_login("good-user", "good-pass")
async def test_good_auth_with_meta(manager, provider):
"""Test metadata is added upon successful authentication."""
provider.config[command_line.CONF_ARGS] = ["--with-meta"]
provider.config[command_line.CONF_META] = True
await provider.async_validate_login("good-user", "good-pass")
credentials = await provider.async_get_or_create_credentials(
{"username": "good-user", "password": "good-pass"}
)
assert credentials.is_new is True
user = await manager.async_get_or_create_user(credentials)
assert user.name == "Bob"
assert user.is_active
async def test_utf_8_username_password(provider):
"""Test that we create a new credential."""
credentials = await provider.async_get_or_create_credentials(
{"username": "ßßß", "password": "äöü"}
)
assert credentials.is_new is True
async def test_login_flow_validates(provider):
"""Test login flow."""
flow = await provider.async_login_flow({})
result = await flow.async_step_init()
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
result = await flow.async_step_init(
{"username": "bad-user", "password": "bad-pass"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"]["base"] == "invalid_auth"
result = await flow.async_step_init(
{"username": "good-user", "password": "good-pass"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["data"]["username"] == "good-user"
async def test_strip_username(provider):
"""Test authentication works with username with whitespace around."""
flow = await provider.async_login_flow({})
result = await flow.async_step_init(
{"username": "\t\ngood-user ", "password": "good-pass"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["data"]["username"] == "good-user"
|
import time
from Handler import Handler
try:
from influxdb.client import InfluxDBClient
except ImportError:
InfluxDBClient = None
class InfluxdbHandler(Handler):
"""
Sending data to Influxdb using batched format
"""
def __init__(self, config=None):
"""
        Create a new instance of the InfluxdbHandler
"""
# Initialize Handler
Handler.__init__(self, config)
if not InfluxDBClient:
self.log.error('influxdb.client.InfluxDBClient import failed. '
'Handler disabled')
self.enabled = False
return
# Initialize Options
if self.config['ssl'] == "True":
self.ssl = True
else:
self.ssl = False
self.hostname = self.config['hostname']
self.port = int(self.config['port'])
self.username = self.config['username']
self.password = self.config['password']
self.database = self.config['database']
self.batch_size = int(self.config['batch_size'])
self.metric_max_cache = int(self.config['cache_size'])
self.batch_count = 0
self.time_precision = self.config['time_precision']
# Initialize Data
self.batch = {}
self.influx = None
self.batch_timestamp = time.time()
self.time_multiplier = 1
# Connect
self._connect()
def get_default_config_help(self):
"""
Returns the help text for the configuration options for this handler
"""
config = super(InfluxdbHandler, self).get_default_config_help()
config.update({
'hostname': 'Hostname',
'port': 'Port',
            'ssl': 'Set to True to use HTTPS instead of HTTP',
'batch_size': 'How many metrics to store before sending to the'
' influxdb server',
'cache_size': 'How many values to store in cache in case of'
' influxdb failure',
'username': 'Username for connection',
'password': 'Password for connection',
'database': 'Database name',
            'time_precision': 'time precision in seconds (s), milliseconds (ms)'
                              ' or microseconds (u)',
})
return config
def get_default_config(self):
"""
Return the default config for the handler
"""
config = super(InfluxdbHandler, self).get_default_config()
config.update({
'hostname': 'localhost',
'port': 8086,
'ssl': False,
'username': 'root',
'password': 'root',
'database': 'graphite',
'batch_size': 1,
'cache_size': 20000,
'time_precision': 's',
})
return config
def __del__(self):
"""
Destroy instance of the InfluxdbHandler class
"""
self._close()
def process(self, metric):
if self.batch_count <= self.metric_max_cache:
# Add the data to the batch
self.batch.setdefault(metric.path, []).append([metric.timestamp,
metric.value])
self.batch_count += 1
# If there are sufficient metrics, then pickle and send
if self.batch_count >= self.batch_size and (
time.time() - self.batch_timestamp) > 2**self.time_multiplier:
# Log
self.log.debug(
"InfluxdbHandler: Sending batch sizeof : %d/%d after %fs",
self.batch_count,
self.batch_size,
(time.time() - self.batch_timestamp))
# reset the batch timer
self.batch_timestamp = time.time()
# Send pickled batch
self._send()
else:
self.log.debug(
"InfluxdbHandler: not sending batch of %d as timestamp is %f",
self.batch_count,
(time.time() - self.batch_timestamp))
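    # Illustrative note (added for clarity, not part of the original handler):
    # with the defaults above (batch_size=1) metrics are flushed one at a time,
    # but never more often than once every 2**time_multiplier seconds; a failed
    # send in _send() bumps time_multiplier (capped at 5), giving an exponential
    # back-off of up to 32 seconds between attempts.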
def _send(self):
"""
        Send data to Influxdb. Data that cannot be sent will be kept queued.
"""
# Check to see if we have a valid socket. If not, try to connect.
try:
if self.influx is None:
self.log.debug("InfluxdbHandler: Socket is not connected. "
"Reconnecting.")
self._connect()
if self.influx is None:
self.log.debug("InfluxdbHandler: Reconnect failed.")
else:
# build metrics data
metrics = []
for path in self.batch:
metrics.append({
"points": self.batch[path],
"name": path,
"columns": ["time", "value"]})
# Send data to influxdb
self.log.debug("InfluxdbHandler: writing %d series of data",
len(metrics))
self.influx.write_points(metrics,
time_precision=self.time_precision)
# empty batch buffer
self.batch = {}
self.batch_count = 0
self.time_multiplier = 1
except Exception:
self._close()
if self.time_multiplier < 5:
self.time_multiplier += 1
self._throttle_error(
"InfluxdbHandler: Error sending metrics, waiting for %ds.",
2**self.time_multiplier)
raise
def _connect(self):
"""
Connect to the influxdb server
"""
try:
# Open Connection
self.influx = InfluxDBClient(self.hostname, self.port,
self.username, self.password,
self.database, self.ssl)
# Log
self.log.debug("InfluxdbHandler: Established connection to "
"%s:%d/%s.",
self.hostname, self.port, self.database)
except Exception as ex:
# Log Error
self._throttle_error("InfluxdbHandler: Failed to connect to "
"%s:%d/%s. %s",
self.hostname, self.port, self.database, ex)
# Close Socket
self._close()
return
def _close(self):
"""
        Close the connection; a no-op since InfluxDB uses stateless HTTP.
"""
self.influx = None
|
from vine import transform
from kombu.asynchronous.aws.connection import AsyncAWSQueryConnection
from .ext import boto3
from .message import AsyncMessage
from .queue import AsyncQueue
__all__ = ('AsyncSQSConnection',)
class AsyncSQSConnection(AsyncAWSQueryConnection):
"""Async SQS Connection."""
def __init__(self, sqs_connection, debug=0, region=None, **kwargs):
if boto3 is None:
raise ImportError('boto3 is not installed')
AsyncAWSQueryConnection.__init__(
self,
sqs_connection,
region_name=region, debug=debug,
**kwargs
)
def create_queue(self, queue_name,
visibility_timeout=None, callback=None):
params = {'QueueName': queue_name}
if visibility_timeout:
params['DefaultVisibilityTimeout'] = format(
visibility_timeout, 'd',
)
return self.get_object('CreateQueue', params,
callback=callback)
def delete_queue(self, queue, force_deletion=False, callback=None):
return self.get_status('DeleteQueue', None, queue.id,
callback=callback)
def get_queue_url(self, queue):
res = self.sqs_connection.get_queue_url(QueueName=queue)
return res['QueueUrl']
def get_queue_attributes(self, queue, attribute='All', callback=None):
return self.get_object(
'GetQueueAttributes', {'AttributeName': attribute},
queue.id, callback=callback,
)
def set_queue_attribute(self, queue, attribute, value, callback=None):
return self.get_status(
'SetQueueAttribute',
{'Attribute.Name': attribute, 'Attribute.Value': value},
queue.id, callback=callback,
)
def receive_message(self, queue, queue_url,
number_messages=1, visibility_timeout=None,
attributes=None, wait_time_seconds=None,
callback=None):
params = {'MaxNumberOfMessages': number_messages}
if visibility_timeout:
params['VisibilityTimeout'] = visibility_timeout
if attributes:
attrs = {}
for idx, attr in enumerate(attributes):
attrs['AttributeName.' + str(idx + 1)] = attr
params.update(attrs)
if wait_time_seconds is not None:
params['WaitTimeSeconds'] = wait_time_seconds
return self.get_list(
'ReceiveMessage', params, [('Message', AsyncMessage)],
queue_url, callback=callback, parent=queue,
)
def delete_message(self, queue, receipt_handle, callback=None):
return self.delete_message_from_handle(
queue, receipt_handle, callback,
)
def delete_message_batch(self, queue, messages, callback=None):
params = {}
for i, m in enumerate(messages):
prefix = 'DeleteMessageBatchRequestEntry.{}'.format(i + 1)
params.update({
f'{prefix}.Id': m.id,
f'{prefix}.ReceiptHandle': m.receipt_handle,
})
return self.get_object(
'DeleteMessageBatch', params, queue.id,
verb='POST', callback=callback,
)
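    # Illustrative note (added for clarity, not part of the original class): for
    # two messages the loop above yields flattened SQS Query API parameters like
    #   {'DeleteMessageBatchRequestEntry.1.Id': ...,
    #    'DeleteMessageBatchRequestEntry.1.ReceiptHandle': ...,
    #    'DeleteMessageBatchRequestEntry.2.Id': ...,
    #    'DeleteMessageBatchRequestEntry.2.ReceiptHandle': ...}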
def delete_message_from_handle(self, queue, receipt_handle,
callback=None):
return self.get_status(
'DeleteMessage', {'ReceiptHandle': receipt_handle},
queue, callback=callback,
)
def send_message(self, queue, message_content,
delay_seconds=None, callback=None):
params = {'MessageBody': message_content}
if delay_seconds:
params['DelaySeconds'] = int(delay_seconds)
return self.get_object(
'SendMessage', params, queue.id,
verb='POST', callback=callback,
)
def send_message_batch(self, queue, messages, callback=None):
params = {}
for i, msg in enumerate(messages):
prefix = 'SendMessageBatchRequestEntry.{}'.format(i + 1)
params.update({
f'{prefix}.Id': msg[0],
f'{prefix}.MessageBody': msg[1],
f'{prefix}.DelaySeconds': msg[2],
})
return self.get_object(
'SendMessageBatch', params, queue.id,
verb='POST', callback=callback,
)
def change_message_visibility(self, queue, receipt_handle,
visibility_timeout, callback=None):
return self.get_status(
'ChangeMessageVisibility',
{'ReceiptHandle': receipt_handle,
'VisibilityTimeout': visibility_timeout},
queue.id, callback=callback,
)
def change_message_visibility_batch(self, queue, messages, callback=None):
params = {}
for i, t in enumerate(messages):
pre = 'ChangeMessageVisibilityBatchRequestEntry.{}'.format(i + 1)
params.update({
f'{pre}.Id': t[0].id,
f'{pre}.ReceiptHandle': t[0].receipt_handle,
f'{pre}.VisibilityTimeout': t[1],
})
return self.get_object(
'ChangeMessageVisibilityBatch', params, queue.id,
verb='POST', callback=callback,
)
def get_all_queues(self, prefix='', callback=None):
params = {}
if prefix:
params['QueueNamePrefix'] = prefix
return self.get_list(
'ListQueues', params, [('QueueUrl', AsyncQueue)],
callback=callback,
)
def get_queue(self, queue_name, callback=None):
# TODO Does not support owner_acct_id argument
return self.get_all_queues(
queue_name,
transform(self._on_queue_ready, callback, queue_name),
)
lookup = get_queue
def _on_queue_ready(self, name, queues):
return next(
(q for q in queues if q.url.endswith(name)), None,
)
def get_dead_letter_source_queues(self, queue, callback=None):
return self.get_list(
'ListDeadLetterSourceQueues', {'QueueUrl': queue.url},
[('QueueUrl', AsyncQueue)],
callback=callback,
)
def add_permission(self, queue, label, aws_account_id, action_name,
callback=None):
return self.get_status(
'AddPermission',
{'Label': label,
'AWSAccountId': aws_account_id,
'ActionName': action_name},
queue.id, callback=callback,
)
def remove_permission(self, queue, label, callback=None):
return self.get_status(
'RemovePermission', {'Label': label}, queue.id, callback=callback,
)
|
from homeassistant.components.websocket_api.messages import (
_cached_event_message as lru_event_cache,
cached_event_message,
message_to_json,
)
from homeassistant.const import EVENT_STATE_CHANGED
from homeassistant.core import callback
async def test_cached_event_message(hass):
"""Test that we cache event messages."""
events = []
@callback
def _event_listener(event):
events.append(event)
hass.bus.async_listen(EVENT_STATE_CHANGED, _event_listener)
hass.states.async_set("light.window", "on")
hass.states.async_set("light.window", "off")
await hass.async_block_till_done()
assert len(events) == 2
lru_event_cache.cache_clear()
msg0 = cached_event_message(2, events[0])
assert msg0 == cached_event_message(2, events[0])
msg1 = cached_event_message(2, events[1])
assert msg1 == cached_event_message(2, events[1])
assert msg0 != msg1
cache_info = lru_event_cache.cache_info()
assert cache_info.hits == 2
assert cache_info.misses == 2
assert cache_info.currsize == 2
cached_event_message(2, events[1])
cache_info = lru_event_cache.cache_info()
assert cache_info.hits == 3
assert cache_info.misses == 2
assert cache_info.currsize == 2
async def test_cached_event_message_with_different_idens(hass):
"""Test that we cache event messages when the subscrition idens differ."""
events = []
@callback
def _event_listener(event):
events.append(event)
hass.bus.async_listen(EVENT_STATE_CHANGED, _event_listener)
hass.states.async_set("light.window", "on")
await hass.async_block_till_done()
assert len(events) == 1
lru_event_cache.cache_clear()
msg0 = cached_event_message(2, events[0])
msg1 = cached_event_message(3, events[0])
msg2 = cached_event_message(4, events[0])
assert msg0 != msg1
assert msg0 != msg2
cache_info = lru_event_cache.cache_info()
assert cache_info.hits == 2
assert cache_info.misses == 1
assert cache_info.currsize == 1
async def test_message_to_json(caplog):
"""Test we can serialize websocket messages."""
json_str = message_to_json({"id": 1, "message": "xyz"})
assert json_str == '{"id": 1, "message": "xyz"}'
json_str2 = message_to_json({"id": 1, "message": _Unserializeable()})
assert (
json_str2
== '{"id": 1, "type": "result", "success": false, "error": {"code": "unknown_error", "message": "Invalid JSON in response"}}'
)
assert "Unable to serialize to JSON" in caplog.text
class _Unserializeable:
"""A class that cannot be serialized."""
|
from unittest import TestCase
import pandas as pd
from scattertext.TermDocMatrixFromFrequencies import TermDocMatrixFromFrequencies
from scattertext.termcompaction.ClassPercentageCompactor import ClassPercentageCompactor
class TestClassPercentageCompactor(TestCase):
def test_compact(self):
term_doc_mat = TermDocMatrixFromFrequencies(pd.DataFrame({
'term': ['a', 'a b', 'a c', 'c', 'b', 'e b', 'e'],
'A freq': [6, 3, 3, 3, 50000, 0, 0],
'B freq': [600000, 3, 30, 3, 50, 1, 1],
}).set_index('term')).build()
new_tdm = ClassPercentageCompactor(term_count=10000).compact(term_doc_mat)
self.assertEqual(term_doc_mat.get_terms(), ['a', 'a b', 'a c', 'c', 'b', 'e b', 'e'])
self.assertEqual(set(new_tdm.get_terms()),
{'a','b'})
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
try:
import faulthandler
except ImportError:
faulthandler = None
from absl import app
from absl import flags
FLAGS = flags.FLAGS
flags.DEFINE_boolean('faulthandler_sigsegv', False, 'raise SIGSEGV')
flags.DEFINE_boolean('raise_exception', False, 'Raise MyException from main.')
flags.DEFINE_boolean(
'raise_usage_error', False, 'Raise app.UsageError from main.')
flags.DEFINE_integer(
    'usage_error_exitcode', None, 'The exitcode if app.UsageError is raised.')
flags.DEFINE_string(
'str_flag_with_unicode_args', u'thumb:\U0001F44D', u'smile:\U0001F604')
flags.DEFINE_boolean('print_init_callbacks', False,
'print init callbacks and exit')
class MyException(Exception):
pass
class MyExceptionHandler(app.ExceptionHandler):
def __init__(self, message):
self.message = message
def handle(self, exc):
sys.stdout.write('MyExceptionHandler: {}\n'.format(self.message))
def real_main(argv):
"""The main function."""
if os.environ.get('APP_TEST_PRINT_ARGV', False):
sys.stdout.write('argv: {}\n'.format(' '.join(argv)))
if FLAGS.raise_exception:
raise MyException
if FLAGS.raise_usage_error:
if FLAGS.usage_error_exitcode is not None:
raise app.UsageError('Error!', FLAGS.usage_error_exitcode)
else:
raise app.UsageError('Error!')
if FLAGS.faulthandler_sigsegv:
faulthandler._sigsegv() # pylint: disable=protected-access
sys.exit(1) # Should not reach here.
if FLAGS.print_init_callbacks:
app.call_after_init(lambda: _callback_results.append('during real_main'))
for value in _callback_results:
print('callback: {}'.format(value))
sys.exit(0)
# Ensure that we have a random C++ flag in flags.FLAGS; this shows
# us that app.run() did the right thing in conjunction with C++ flags.
helper_type = os.environ['APP_TEST_HELPER_TYPE']
if helper_type == 'clif':
if 'heap_check_before_constructors' in flags.FLAGS:
print('PASS: C++ flag present and helper_type is {}'.format(helper_type))
sys.exit(0)
else:
print('FAILED: C++ flag absent but helper_type is {}'.format(helper_type))
sys.exit(1)
elif helper_type == 'pure_python':
if 'heap_check_before_constructors' in flags.FLAGS:
print('FAILED: C++ flag present but helper_type is pure_python')
sys.exit(1)
else:
print('PASS: C++ flag absent and helper_type is pure_python')
sys.exit(0)
else:
print('Unexpected helper_type "{}"'.format(helper_type))
sys.exit(1)
def custom_main(argv):
print('Function called: custom_main.')
real_main(argv)
def main(argv):
print('Function called: main.')
real_main(argv)
flags_parser_argv_sentinel = object()
def flags_parser_main(argv):
print('Function called: main_with_flags_parser.')
if argv is not flags_parser_argv_sentinel:
sys.exit(
'FAILED: main function should be called with the return value of '
'flags_parser, but found: {}'.format(argv))
def flags_parser(argv):
print('Function called: flags_parser.')
if os.environ.get('APP_TEST_FLAGS_PARSER_PARSE_FLAGS', None):
FLAGS(argv)
return flags_parser_argv_sentinel
# Holds results from callbacks triggered by `app.call_after_init`.
_callback_results = []
if __name__ == '__main__':
kwargs = {'main': main}
main_function_name = os.environ.get('APP_TEST_CUSTOM_MAIN_FUNC', None)
if main_function_name:
kwargs['main'] = globals()[main_function_name]
custom_argv = os.environ.get('APP_TEST_CUSTOM_ARGV', None)
if custom_argv:
kwargs['argv'] = custom_argv.split(' ')
if os.environ.get('APP_TEST_USE_CUSTOM_PARSER', None):
kwargs['flags_parser'] = flags_parser
app.call_after_init(lambda: _callback_results.append('before app.run'))
app.install_exception_handler(MyExceptionHandler('first'))
app.install_exception_handler(MyExceptionHandler('second'))
app.run(**kwargs)
sys.exit('This is not reachable.')
|
from nikola.plugin_categories import Taxonomy
class Indexes(Taxonomy):
"""Classify for the blog's main index."""
name = "classify_indexes"
classification_name = "index"
overview_page_variable_name = None
more_than_one_classifications_per_post = False
has_hierarchy = False
show_list_as_index = True
template_for_single_list = "index.tmpl"
template_for_classification_overview = None
apply_to_posts = True
apply_to_pages = False
omit_empty_classifications = False
path_handler_docstrings = {
'index_index': False,
'index': """Link to a numbered index.
Example:
link://index/3 => /index-3.html""",
'index_atom': """Link to a numbered Atom index.
Example:
link://index_atom/3 => /index-3.atom""",
'index_rss': """A link to the RSS feed path.
Example:
link://rss => /blog/rss.xml""",
}
def set_site(self, site):
"""Set Nikola site."""
# Redirect automatically generated 'index_rss' path handler to 'rss' for compatibility with old rss plugin
site.register_path_handler('rss', lambda name, lang: site.path_handlers['index_rss'](name, lang))
site.path_handlers['rss'].__doc__ = """A link to the RSS feed path.
Example:
link://rss => /blog/rss.xml
""".strip()
return super().set_site(site)
def get_implicit_classifications(self, lang):
"""Return a list of classification strings which should always appear in posts_per_classification."""
return [""]
def classify(self, post, lang):
"""Classify the given post for the given language."""
return [""]
def get_classification_friendly_name(self, classification, lang, only_last_component=False):
"""Extract a friendly name from the classification."""
return self.site.config["BLOG_TITLE"](lang)
def get_path(self, classification, lang, dest_type='page'):
"""Return a path for the given classification."""
if dest_type == 'rss':
return [
self.site.config['RSS_PATH'](lang),
self.site.config['RSS_FILENAME_BASE'](lang)
], 'auto'
if dest_type == 'feed':
return [
self.site.config['ATOM_PATH'](lang),
self.site.config['ATOM_FILENAME_BASE'](lang)
], 'auto'
page_number = None
if dest_type == 'page':
# Interpret argument as page number
try:
page_number = int(classification)
except (ValueError, TypeError):
pass
return [self.site.config['INDEX_PATH'](lang)], 'always', page_number
def provide_context_and_uptodate(self, classification, lang, node=None):
"""Provide data for the context and the uptodate list for the list of the given classifiation."""
kw = {
"show_untranslated_posts": self.site.config["SHOW_UNTRANSLATED_POSTS"],
}
context = {
"title": self.site.config["INDEXES_TITLE"](lang) or self.site.config["BLOG_TITLE"](lang),
"description": self.site.config["BLOG_DESCRIPTION"](lang),
"pagekind": ["main_index", "index"],
"featured": [p for p in self.site.posts if p.post_status == 'featured' and
(lang in p.translated_to or kw["show_untranslated_posts"])],
}
kw.update(context)
return context, kw
def should_generate_classification_page(self, classification, post_list, lang):
"""Only generates list of posts for classification if this function returns True."""
return not self.site.config["DISABLE_INDEXES"]
def should_generate_atom_for_classification_page(self, classification, post_list, lang):
"""Only generates Atom feed for list of posts for classification if this function returns True."""
return not self.site.config["DISABLE_MAIN_ATOM_FEED"]
def should_generate_rss_for_classification_page(self, classification, post_list, lang):
"""Only generates RSS feed for list of posts for classification if this function returns True."""
return not self.site.config["DISABLE_MAIN_RSS_FEED"]
|
import asyncio
import io
import re
import voluptuous as vol
from homeassistant.components.image_processing import (
ATTR_CONFIDENCE,
ATTR_ENTITY_ID,
CONF_CONFIDENCE,
CONF_ENTITY_ID,
CONF_NAME,
CONF_SOURCE,
PLATFORM_SCHEMA,
ImageProcessingEntity,
)
from homeassistant.const import CONF_REGION
from homeassistant.core import callback, split_entity_id
import homeassistant.helpers.config_validation as cv
from homeassistant.util.async_ import run_callback_threadsafe
RE_ALPR_PLATE = re.compile(r"^plate\d*:")
RE_ALPR_RESULT = re.compile(r"- (\w*)\s*confidence: (\d*.\d*)")
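# Illustrative note (an assumption, not part of the original module): the two
# regexes above target the plain-text output of the `alpr` binary, which looks
# roughly like
#     plate0: 2 results
#         - AB123CD     confidence: 95.21
#         - AB123C0     confidence: 81.70
# each "plateN:" header is counted as one vehicle and each "- ... confidence:"
# line becomes a candidate plate with its confidence.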
EVENT_FOUND_PLATE = "image_processing.found_plate"
ATTR_PLATE = "plate"
ATTR_PLATES = "plates"
ATTR_VEHICLES = "vehicles"
OPENALPR_REGIONS = [
"au",
"auwide",
"br",
"eu",
"fr",
"gb",
"kr",
"kr2",
"mx",
"sg",
"us",
"vn2",
]
CONF_ALPR_BIN = "alpr_bin"
DEFAULT_BINARY = "alpr"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_REGION): vol.All(vol.Lower, vol.In(OPENALPR_REGIONS)),
vol.Optional(CONF_ALPR_BIN, default=DEFAULT_BINARY): cv.string,
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the OpenALPR local platform."""
command = [config[CONF_ALPR_BIN], "-c", config[CONF_REGION], "-"]
confidence = config[CONF_CONFIDENCE]
entities = []
for camera in config[CONF_SOURCE]:
entities.append(
OpenAlprLocalEntity(
camera[CONF_ENTITY_ID], command, confidence, camera.get(CONF_NAME)
)
)
async_add_entities(entities)
class ImageProcessingAlprEntity(ImageProcessingEntity):
"""Base entity class for ALPR image processing."""
def __init__(self):
"""Initialize base ALPR entity."""
self.plates = {}
self.vehicles = 0
@property
def state(self):
"""Return the state of the entity."""
confidence = 0
plate = None
        # search for the plate with the highest confidence
for i_pl, i_co in self.plates.items():
if i_co > confidence:
confidence = i_co
plate = i_pl
return plate
@property
def device_class(self):
"""Return the class of this device, from component DEVICE_CLASSES."""
return "alpr"
@property
def state_attributes(self):
"""Return device specific state attributes."""
return {ATTR_PLATES: self.plates, ATTR_VEHICLES: self.vehicles}
def process_plates(self, plates, vehicles):
"""Send event with new plates and store data."""
run_callback_threadsafe(
self.hass.loop, self.async_process_plates, plates, vehicles
).result()
@callback
def async_process_plates(self, plates, vehicles):
"""Send event with new plates and store data.
        Plates are a dict in the following format:
{ 'plate': confidence }
This method must be run in the event loop.
"""
plates = {
plate: confidence
for plate, confidence in plates.items()
if confidence >= self.confidence
}
new_plates = set(plates) - set(self.plates)
# Send events
for i_plate in new_plates:
self.hass.async_add_job(
self.hass.bus.async_fire,
EVENT_FOUND_PLATE,
{
ATTR_PLATE: i_plate,
ATTR_ENTITY_ID: self.entity_id,
ATTR_CONFIDENCE: plates.get(i_plate),
},
)
# Update entity store
self.plates = plates
self.vehicles = vehicles
class OpenAlprLocalEntity(ImageProcessingAlprEntity):
"""OpenALPR local api entity."""
def __init__(self, camera_entity, command, confidence, name=None):
"""Initialize OpenALPR local API."""
super().__init__()
self._cmd = command
self._camera = camera_entity
self._confidence = confidence
if name:
self._name = name
else:
self._name = f"OpenAlpr {split_entity_id(camera_entity)[1]}"
@property
def confidence(self):
"""Return minimum confidence for send events."""
return self._confidence
@property
def camera_entity(self):
"""Return camera entity id from process pictures."""
return self._camera
@property
def name(self):
"""Return the name of the entity."""
return self._name
async def async_process_image(self, image):
"""Process image.
This method is a coroutine.
"""
result = {}
vehicles = 0
alpr = await asyncio.create_subprocess_exec(
*self._cmd,
loop=self.hass.loop,
stdin=asyncio.subprocess.PIPE,
stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.DEVNULL,
)
# Send image
stdout, _ = await alpr.communicate(input=image)
stdout = io.StringIO(str(stdout, "utf-8"))
while True:
line = stdout.readline()
if not line:
break
new_plates = RE_ALPR_PLATE.search(line)
new_result = RE_ALPR_RESULT.search(line)
# Found new vehicle
if new_plates:
vehicles += 1
continue
# Found plate result
if new_result:
try:
result.update({new_result.group(1): float(new_result.group(2))})
except ValueError:
continue
self.async_process_plates(result, vehicles)
|
from functools import partial
from ...utils import verbose
from ..utils import (has_dataset, _data_path, _data_path_doc,
_get_version, _version_doc)
has_somato_data = partial(has_dataset, name='somato')
@verbose
def data_path(path=None, force_update=False, update_path=True, download=True,
verbose=None): # noqa: D103
return _data_path(path=path, force_update=force_update,
update_path=update_path, name='somato',
download=download)
data_path.__doc__ = _data_path_doc.format(name='somato',
conf='MNE_DATASETS_SOMATO_PATH')
def get_version(): # noqa: D103
return _get_version('somato')
get_version.__doc__ = _version_doc.format(name='somato')
|
import json
from aiohttp import ClientConnectionError
from py_nightscout.models import SGV, ServerStatus
from homeassistant.components.nightscout.const import DOMAIN
from homeassistant.const import CONF_URL
from tests.async_mock import patch
from tests.common import MockConfigEntry
GLUCOSE_READINGS = [
SGV.new_from_json_dict(
json.loads(
'{"_id":"5f2b01f5c3d0ac7c4090e223","device":"xDrip-LimiTTer","date":1596654066533,"dateString":"2020-08-05T19:01:06.533Z","sgv":169,"delta":-5.257,"direction":"FortyFiveDown","type":"sgv","filtered":182823.5157,"unfiltered":182823.5157,"rssi":100,"noise":1,"sysTime":"2020-08-05T19:01:06.533Z","utcOffset":-180}'
)
)
]
SERVER_STATUS = ServerStatus.new_from_json_dict(
json.loads(
'{"status":"ok","name":"nightscout","version":"13.0.1","serverTime":"2020-08-05T18:14:02.032Z","serverTimeEpoch":1596651242032,"apiEnabled":true,"careportalEnabled":true,"boluscalcEnabled":true,"settings":{},"extendedSettings":{},"authorized":null}'
)
)
SERVER_STATUS_STATUS_ONLY = ServerStatus.new_from_json_dict(
json.loads(
'{"status":"ok","name":"nightscout","version":"14.0.4","serverTime":"2020-09-25T21:03:59.315Z","serverTimeEpoch":1601067839315,"apiEnabled":true,"careportalEnabled":true,"boluscalcEnabled":true,"settings":{"units":"mg/dl","timeFormat":12,"nightMode":false,"editMode":true,"showRawbg":"never","customTitle":"Nightscout","theme":"default","alarmUrgentHigh":true,"alarmUrgentHighMins":[30,60,90,120],"alarmHigh":true,"alarmHighMins":[30,60,90,120],"alarmLow":true,"alarmLowMins":[15,30,45,60],"alarmUrgentLow":true,"alarmUrgentLowMins":[15,30,45],"alarmUrgentMins":[30,60,90,120],"alarmWarnMins":[30,60,90,120],"alarmTimeagoWarn":true,"alarmTimeagoWarnMins":15,"alarmTimeagoUrgent":true,"alarmTimeagoUrgentMins":30,"alarmPumpBatteryLow":false,"language":"en","scaleY":"log","showPlugins":"dbsize delta direction upbat","showForecast":"ar2","focusHours":3,"heartbeat":60,"baseURL":"","authDefaultRoles":"status-only","thresholds":{"bgHigh":260,"bgTargetTop":180,"bgTargetBottom":80,"bgLow":55},"insecureUseHttp":true,"secureHstsHeader":false,"secureHstsHeaderIncludeSubdomains":false,"secureHstsHeaderPreload":false,"secureCsp":false,"deNormalizeDates":false,"showClockDelta":false,"showClockLastTime":false,"bolusRenderOver":1,"frameUrl1":"","frameUrl2":"","frameUrl3":"","frameUrl4":"","frameUrl5":"","frameUrl6":"","frameUrl7":"","frameUrl8":"","frameName1":"","frameName2":"","frameName3":"","frameName4":"","frameName5":"","frameName6":"","frameName7":"","frameName8":"","DEFAULT_FEATURES":["bgnow","delta","direction","timeago","devicestatus","upbat","errorcodes","profile","dbsize"],"alarmTypes":["predict"],"enable":["careportal","boluscalc","food","bwp","cage","sage","iage","iob","cob","basal","ar2","rawbg","pushover","bgi","pump","openaps","treatmentnotify","bgnow","delta","direction","timeago","devicestatus","upbat","errorcodes","profile","dbsize","ar2"]},"extendedSettings":{"devicestatus":{"advanced":true,"days":1}},"authorized":null}'
)
)
async def init_integration(hass) -> MockConfigEntry:
"""Set up the Nightscout integration in Home Assistant."""
entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_URL: "https://some.url:1234"},
)
with patch(
"homeassistant.components.nightscout.NightscoutAPI.get_sgvs",
return_value=GLUCOSE_READINGS,
), patch(
"homeassistant.components.nightscout.NightscoutAPI.get_server_status",
return_value=SERVER_STATUS,
):
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
return entry
async def init_integration_unavailable(hass) -> MockConfigEntry:
"""Set up the Nightscout integration in Home Assistant."""
entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_URL: "https://some.url:1234"},
)
with patch(
"homeassistant.components.nightscout.NightscoutAPI.get_sgvs",
side_effect=ClientConnectionError(),
), patch(
"homeassistant.components.nightscout.NightscoutAPI.get_server_status",
return_value=SERVER_STATUS,
):
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
return entry
async def init_integration_empty_response(hass) -> MockConfigEntry:
"""Set up the Nightscout integration in Home Assistant."""
entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_URL: "https://some.url:1234"},
)
with patch(
"homeassistant.components.nightscout.NightscoutAPI.get_sgvs", return_value=[]
), patch(
"homeassistant.components.nightscout.NightscoutAPI.get_server_status",
return_value=SERVER_STATUS,
):
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
return entry
|
import logging
from functools import partial
import re
import numpy as np
from gensim import interfaces, matutils, utils
from gensim.utils import deprecated
logger = logging.getLogger(__name__)
def resolve_weights(smartirs):
"""Check the validity of `smartirs` parameters.
Parameters
----------
smartirs : str
`smartirs` or SMART (System for the Mechanical Analysis and Retrieval of Text)
Information Retrieval System, a mnemonic scheme for denoting tf-idf weighting
variants in the vector space model. The mnemonic for representing a combination
        of weights takes the form ddd, where the letters represent the term weighting of the document vector.
        For more information, visit `SMART Information Retrieval System
        <https://en.wikipedia.org/wiki/SMART_Information_Retrieval_System>`_.
Returns
-------
str of (local_letter, global_letter, normalization_letter)
local_letter : str
Term frequency weighing, one of:
* `b` - binary,
* `t` or `n` - raw,
* `a` - augmented,
* `l` - logarithm,
* `d` - double logarithm,
* `L` - log average.
global_letter : str
Document frequency weighting, one of:
* `x` or `n` - none,
* `f` - idf,
* `t` - zero-corrected idf,
* `p` - probabilistic idf.
normalization_letter : str
Document normalization, one of:
* `x` or `n` - none,
* `c` - cosine,
* `u` - pivoted unique,
* `b` - pivoted character length.
Raises
------
ValueError
        If `smartirs` is not a string of length 3 or one of its decomposed values
        doesn't fit the list of permissible values.
See Also
--------
~gensim.sklearn_api.tfidf.TfIdfTransformer, TfidfModel : Classes that also use the SMART scheme.
"""
if isinstance(smartirs, str) and re.match(r"...\....", smartirs):
match = re.match(r"(?P<ddd>...)\.(?P<qqq>...)", smartirs)
raise ValueError(
"The notation {ddd}.{qqq} specifies two term-weighting schemes, "
"one for collection documents ({ddd}) and one for queries ({qqq}). "
"You must train two separate tf-idf models.".format(
ddd=match.group("ddd"),
qqq=match.group("qqq"),
)
)
if not isinstance(smartirs, str) or len(smartirs) != 3:
raise ValueError("Expected a string of length 3 got " + smartirs)
w_tf, w_df, w_n = smartirs
if w_tf not in 'btnaldL':
raise ValueError("Expected term frequency weight to be one of 'btnaldL', got {}".format(w_tf))
if w_df not in 'xnftp':
raise ValueError("Expected inverse document frequency weight to be one of 'xnftp', got {}".format(w_df))
if w_n not in 'xncub':
raise ValueError("Expected normalization weight to be one of 'xncub', got {}".format(w_n))
# resolve aliases
if w_tf == "t":
w_tf = "n"
if w_df == "x":
w_df = "n"
if w_n == "x":
w_n = "n"
return w_tf + w_df + w_n
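# Illustrative example (added for clarity, not part of the original module):
# resolve_weights canonicalizes SMART mnemonics, mapping the 't' and 'x'
# aliases onto 'n' while leaving canonical schemes untouched.
def _resolve_weights_example():
    assert resolve_weights("txx") == "nnn"  # raw tf, no idf, no normalization
    assert resolve_weights("ltc") == "ltc"  # already canonical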
def df2idf(docfreq, totaldocs, log_base=2.0, add=0.0):
r"""Compute inverse-document-frequency for a term with the given document frequency `docfreq`:
:math:`idf = add + log_{log\_base} \frac{totaldocs}{docfreq}`
Parameters
----------
docfreq : {int, float}
Document frequency.
totaldocs : int
Total number of documents.
log_base : float, optional
Base of logarithm.
add : float, optional
Offset.
Returns
-------
float
Inverse document frequency.
"""
return add + np.log(float(totaldocs) / docfreq) / np.log(log_base)
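# Worked example (illustrative, not part of the original module): with the
# default base-2 logarithm, a term occurring in 10 of 100 documents receives
# idf = log2(100 / 10), roughly 3.32.
def _df2idf_example():
    assert abs(df2idf(10, 100) - np.log2(10.0)) < 1e-12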
def precompute_idfs(wglobal, dfs, total_docs):
"""Pre-compute the inverse document frequency mapping for all terms.
Parameters
----------
wglobal : function
Custom function for calculating the "global" weighting function.
See for example the SMART alternatives under :func:`~gensim.models.tfidfmodel.smartirs_wglobal`.
dfs : dict
Dictionary mapping `term_id` into how many documents did that term appear in.
total_docs : int
Total number of documents.
Returns
-------
dict of (int, float)
Inverse document frequencies in the format `{term_id_1: idfs_1, term_id_2: idfs_2, ...}`.
"""
    # not strictly necessary and could be computed on the fly in TfidfModel.__getitem__.
# this method is here just to speed things up a little.
return {termid: wglobal(df, total_docs) for termid, df in dfs.items()}
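# Illustrative example (not part of the original module): precomputing idfs for
# a toy document-frequency mapping over 100 documents; a term present in every
# document gets zero weight, while rarer terms get larger weights.
def _precompute_idfs_example():
    idfs = precompute_idfs(df2idf, {0: 10, 1: 50, 2: 100}, 100)
    assert idfs[2] == 0.0
    assert idfs[0] > idfs[1] > 0.0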
def smartirs_wlocal(tf, local_scheme):
"""Calculate local term weight for a term using the weighting scheme specified in `local_scheme`.
Parameters
----------
tf : int
Term frequency.
    local_scheme : {'b', 'n', 'a', 'l', 'd', 'L'}
Local transformation scheme.
Returns
-------
float
Calculated local weight.
"""
if local_scheme == "n":
return tf
elif local_scheme == "l":
return 1 + np.log2(tf)
elif local_scheme == "d":
return 1 + np.log2(1 + np.log2(tf))
elif local_scheme == "a":
return 0.5 + (0.5 * tf / tf.max(axis=0))
elif local_scheme == "b":
return tf.astype('bool').astype('int')
elif local_scheme == "L":
return (1 + np.log2(tf)) / (1 + np.log2(tf.mean(axis=0)))
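# Illustrative example (not part of the original module): the 'l' (logarithm)
# scheme maps raw term frequencies [1, 2, 4] to 1 + log2(tf) = [1, 2, 3]; the
# array-oriented schemes ('a', 'b', 'L') expect a numpy array of counts.
def _smartirs_wlocal_example():
    tfs = np.array([1, 2, 4])
    assert np.allclose(smartirs_wlocal(tfs, "l"), [1.0, 2.0, 3.0])
    assert np.array_equal(smartirs_wlocal(tfs, "b"), np.array([1, 1, 1]))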
def smartirs_wglobal(docfreq, totaldocs, global_scheme):
"""Calculate global document weight based on the weighting scheme specified in `global_scheme`.
Parameters
----------
docfreq : int
Document frequency.
totaldocs : int
Total number of documents.
global_scheme : {'n', 'f', 't', 'p'}
Global transformation scheme.
Returns
-------
float
Calculated global weight.
"""
if global_scheme == "n":
return 1.0
elif global_scheme == "f":
return np.log2(1.0 * totaldocs / docfreq)
elif global_scheme == "t":
return np.log2((totaldocs + 1.0) / docfreq)
elif global_scheme == "p":
return max(0, np.log2((1.0 * totaldocs - docfreq) / docfreq))
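# Illustrative example (not part of the original module): with 100 documents,
# the plain idf scheme 'f' gives log2(100 / 10), roughly 3.32, for a term seen
# in 10 documents, while 'n' applies no global weighting at all.
def _smartirs_wglobal_example():
    assert smartirs_wglobal(10, 100, "n") == 1.0
    assert abs(smartirs_wglobal(10, 100, "f") - np.log2(10.0)) < 1e-12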
@deprecated("Function will be removed in 4.0.0")
def smartirs_normalize(x, norm_scheme, return_norm=False):
"""Normalize a vector using the normalization scheme specified in `norm_scheme`.
Parameters
----------
x : numpy.ndarray
The tf-idf vector.
norm_scheme : {'n', 'c'}
Document length normalization scheme.
return_norm : bool, optional
Return the length of `x` as well?
Returns
-------
numpy.ndarray
Normalized array.
float (only if return_norm is set)
Norm of `x`.
"""
if norm_scheme == "n":
if return_norm:
_, length = matutils.unitvec(x, return_norm=return_norm)
return x, length
else:
return x
elif norm_scheme == "c":
return matutils.unitvec(x, return_norm=return_norm)
class TfidfModel(interfaces.TransformationABC):
"""Objects of this class realize the transformation between word-document co-occurrence matrix (int)
into a locally/globally weighted TF-IDF matrix (positive floats).
Examples
--------
.. sourcecode:: pycon
>>> import gensim.downloader as api
>>> from gensim.models import TfidfModel
>>> from gensim.corpora import Dictionary
>>>
>>> dataset = api.load("text8")
>>> dct = Dictionary(dataset) # fit dictionary
>>> corpus = [dct.doc2bow(line) for line in dataset] # convert corpus to BoW format
>>>
>>> model = TfidfModel(corpus) # fit model
>>> vector = model[corpus[0]] # apply model to the first corpus document
"""
def __init__(self, corpus=None, id2word=None, dictionary=None, wlocal=utils.identity,
wglobal=df2idf, normalize=True, smartirs=None, pivot=None, slope=0.25):
r"""Compute TF-IDF by multiplying a local component (term frequency) with a global component
(inverse document frequency), and normalizing the resulting documents to unit length.
Formula for non-normalized weight of term :math:`i` in document :math:`j` in a corpus of :math:`D` documents
.. math:: weight_{i,j} = frequency_{i,j} * log_2 \frac{D}{document\_freq_{i}}
or, more generally
.. math:: weight_{i,j} = wlocal(frequency_{i,j}) * wglobal(document\_freq_{i}, D)
so you can plug in your own custom :math:`wlocal` and :math:`wglobal` functions.
Parameters
----------
corpus : iterable of iterable of (int, int), optional
Input corpus
id2word : {dict, :class:`~gensim.corpora.Dictionary`}, optional
Mapping token - id, that was used for converting input data to bag of words format.
dictionary : :class:`~gensim.corpora.Dictionary`
            If `dictionary` is specified, it must be a `corpora.Dictionary` object and it will be used
            to directly construct the inverse document frequency mapping (then `corpus`, if specified, is ignored).
        wlocal : callable, optional
Function for local weighting, default for `wlocal` is :func:`~gensim.utils.identity`
(other options: :func:`numpy.sqrt`, `lambda tf: 0.5 + (0.5 * tf / tf.max())`, etc.).
wglobal : callable, optional
Function for global weighting, default is :func:`~gensim.models.tfidfmodel.df2idf`.
normalize : {bool, callable}, optional
Normalize document vectors to unit euclidean length? You can also inject your own function into `normalize`.
smartirs : str, optional
SMART (System for the Mechanical Analysis and Retrieval of Text) Information Retrieval System,
a mnemonic scheme for denoting tf-idf weighting variants in the vector space model.
The mnemonic for representing a combination of weights takes the form XYZ,
            for example 'ntc', 'bpn' and so on, where the letters represent the term weighting of the document vector.
Term frequency weighing:
* `b` - binary,
* `t` or `n` - raw,
* `a` - augmented,
* `l` - logarithm,
* `d` - double logarithm,
* `L` - log average.
Document frequency weighting:
* `x` or `n` - none,
* `f` - idf,
* `t` - zero-corrected idf,
* `p` - probabilistic idf.
Document normalization:
* `x` or `n` - none,
* `c` - cosine,
* `u` - pivoted unique,
* `b` - pivoted character length.
Default is 'nfc'.
For more information visit `SMART Information Retrieval System
<https://en.wikipedia.org/wiki/SMART_Information_Retrieval_System>`_.
pivot : float or None, optional
In information retrieval, TF-IDF is biased against long documents [1]_. Pivoted document length
normalization solves this problem by changing the norm of a document to `slope * old_norm + (1.0 -
slope) * pivot`.
You can either set the `pivot` by hand, or you can let Gensim figure it out automatically with the following
two steps:
* Set either the `u` or `b` document normalization in the `smartirs` parameter.
* Set either the `corpus` or `dictionary` parameter. The `pivot` will be automatically determined from
the properties of the `corpus` or `dictionary`.
If `pivot` is None and you don't follow steps 1 and 2, then pivoted document length normalization will be
disabled. Default is None.
See also the blog post at https://rare-technologies.com/pivoted-document-length-normalisation/.
slope : float, optional
In information retrieval, TF-IDF is biased against long documents [1]_. Pivoted document length
normalization solves this problem by changing the norm of a document to `slope * old_norm + (1.0 -
slope) * pivot`.
Setting the `slope` to 0.0 uses only the `pivot` as the norm, and setting the `slope` to 1.0 effectively
disables pivoted document length normalization. Singhal [2]_ suggests setting the `slope` between 0.2 and
0.3 for best results. Default is 0.25.
See also the blog post at https://rare-technologies.com/pivoted-document-length-normalisation/.
See Also
--------
~gensim.sklearn_api.tfidf.TfIdfTransformer : Class that also uses the SMART scheme.
resolve_weights : Function that also uses the SMART scheme.
References
----------
.. [1] Singhal, A., Buckley, C., & Mitra, M. (1996). `Pivoted Document Length
Normalization <http://singhal.info/pivoted-dln.pdf>`_. *SIGIR Forum*, 51, 176–184.
.. [2] Singhal, A. (2001). `Modern information retrieval: A brief overview <http://singhal.info/ieee2001.pdf>`_.
*IEEE Data Eng. Bull.*, 24(4), 35–43.
"""
self.id2word = id2word
self.wlocal, self.wglobal, self.normalize = wlocal, wglobal, normalize
self.num_docs, self.num_nnz, self.idfs = None, None, None
self.smartirs = resolve_weights(smartirs) if smartirs is not None else None
self.slope = slope
self.pivot = pivot
self.eps = 1e-12
if smartirs:
n_tf, n_df, n_n = self.smartirs
self.wlocal = partial(smartirs_wlocal, local_scheme=n_tf)
self.wglobal = partial(smartirs_wglobal, global_scheme=n_df)
if dictionary:
# user supplied a Dictionary object, which already contains all the
# statistics we need to construct the IDF mapping. we can skip the
# step that goes through the corpus (= an optimization).
if corpus:
logger.warning(
"constructor received both corpus and explicit inverse document frequencies; ignoring the corpus"
)
self.num_docs, self.num_nnz = dictionary.num_docs, dictionary.num_nnz
self.cfs = dictionary.cfs.copy()
self.dfs = dictionary.dfs.copy()
self.term_lens = {termid: len(term) for termid, term in dictionary.items()}
self.idfs = precompute_idfs(self.wglobal, self.dfs, self.num_docs)
if not id2word:
self.id2word = dictionary
elif corpus:
self.initialize(corpus)
else:
# NOTE: everything is left uninitialized; presumably the model will
# be initialized in some other way
pass
# If smartirs is not None, override pivot and normalize
if not smartirs:
return
if self.pivot is not None:
if n_n in 'ub':
logger.warning("constructor received pivot; ignoring smartirs[2]")
return
if n_n in 'ub' and callable(self.normalize):
logger.warning("constructor received smartirs; ignoring normalize")
if n_n in 'ub' and not dictionary and not corpus:
logger.warning("constructor received no corpus or dictionary; ignoring smartirs[2]")
elif n_n == "u":
self.pivot = 1.0 * self.num_nnz / self.num_docs
elif n_n == "b":
self.pivot = 1.0 * sum(
self.cfs[termid] * (self.term_lens[termid] + 1.0) for termid in dictionary.keys()
) / self.num_docs
@classmethod
def load(cls, *args, **kwargs):
"""Load a previously saved TfidfModel class. Handles backwards compatibility from
older TfidfModel versions which did not use pivoted document normalization.
"""
model = super(TfidfModel, cls).load(*args, **kwargs)
if not hasattr(model, 'pivot'):
model.pivot = None
logger.info('older version of %s loaded without pivot arg', cls.__name__)
logger.info('Setting pivot to %s.', model.pivot)
if not hasattr(model, 'slope'):
model.slope = 0.65
logger.info('older version of %s loaded without slope arg', cls.__name__)
logger.info('Setting slope to %s.', model.slope)
if not hasattr(model, 'smartirs'):
model.smartirs = None
logger.info('older version of %s loaded without smartirs arg', cls.__name__)
logger.info('Setting smartirs to %s.', model.smartirs)
return model
def __str__(self):
return "TfidfModel(num_docs=%s, num_nnz=%s)" % (self.num_docs, self.num_nnz)
def initialize(self, corpus):
"""Compute inverse document weights, which will be used to modify term frequencies for documents.
Parameters
----------
corpus : iterable of iterable of (int, int)
Input corpus.
"""
logger.info("collecting document frequencies")
dfs = {}
numnnz, docno = 0, -1
for docno, bow in enumerate(corpus):
if docno % 10000 == 0:
logger.info("PROGRESS: processing document #%i", docno)
numnnz += len(bow)
for termid, _ in bow:
dfs[termid] = dfs.get(termid, 0) + 1
# keep some stats about the training corpus
self.num_docs = docno + 1
self.num_nnz = numnnz
self.cfs = None
self.dfs = dfs
self.term_lengths = None
# and finally compute the idf weights
logger.info(
"calculating IDF weights for %i documents and %i features (%i matrix non-zeros)",
self.num_docs, max(dfs.keys()) + 1 if dfs else 0, self.num_nnz
)
self.idfs = precompute_idfs(self.wglobal, self.dfs, self.num_docs)
def __getitem__(self, bow, eps=1e-12):
"""Get the tf-idf representation of an input vector and/or corpus.
        Parameters
        ----------
        bow : {list of (int, int), iterable of iterable of (int, int)}
Input document in the `sparse Gensim bag-of-words format
<https://radimrehurek.com/gensim/intro.html#core-concepts>`_,
or a streamed corpus of such documents.
eps : float
            Threshold value; all positions with a tf-idf value less than `eps` will be removed.
Returns
-------
vector : list of (int, float)
TfIdf vector, if `bow` is a single document
:class:`~gensim.interfaces.TransformedCorpus`
TfIdf corpus, if `bow` is a corpus.
"""
self.eps = eps
# if the input vector is in fact a corpus, return a transformed corpus as a result
is_corpus, bow = utils.is_corpus(bow)
if is_corpus:
return self._apply(bow)
# unknown (new) terms will be given zero weight (NOT infinity/huge weight,
# as strict application of the IDF formula would dictate)
termid_array, tf_array = [], []
for termid, tf in bow:
termid_array.append(termid)
tf_array.append(tf)
tf_array = self.wlocal(np.array(tf_array))
vector = [
(termid, tf * self.idfs.get(termid))
for termid, tf in zip(termid_array, tf_array) if abs(self.idfs.get(termid, 0.0)) > self.eps
]
# and finally, normalize the vector either to unit length, or use a
# user-defined normalization function
if self.smartirs:
n_n = self.smartirs[2]
if n_n == "n" or (n_n in 'ub' and self.pivot is None):
if self.pivot is not None:
_, old_norm = matutils.unitvec(vector, return_norm=True)
norm_vector = vector
elif n_n == "c":
if self.pivot is not None:
_, old_norm = matutils.unitvec(vector, return_norm=True)
else:
norm_vector = matutils.unitvec(vector)
elif n_n == "u":
_, old_norm = matutils.unitvec(vector, return_norm=True, norm='unique')
elif n_n == "b":
old_norm = sum(freq * (self.term_lens[termid] + 1.0) for termid, freq in bow)
else:
if self.normalize is True:
self.normalize = matutils.unitvec
elif self.normalize is False:
self.normalize = utils.identity
if self.pivot is not None:
_, old_norm = self.normalize(vector, return_norm=True)
else:
norm_vector = self.normalize(vector)
if self.pivot is None:
norm_vector = [(termid, weight) for termid, weight in norm_vector if abs(weight) > self.eps]
else:
pivoted_norm = (1 - self.slope) * self.pivot + self.slope * old_norm
norm_vector = [
(termid, weight / float(pivoted_norm))
for termid, weight in vector
if abs(weight / float(pivoted_norm)) > self.eps
]
return norm_vector
|
import logging
import pymsteams
import voluptuous as vol
from homeassistant.components.notify import (
ATTR_DATA,
ATTR_TITLE,
ATTR_TITLE_DEFAULT,
PLATFORM_SCHEMA,
BaseNotificationService,
)
from homeassistant.const import CONF_URL
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
ATTR_FILE_URL = "image_url"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({vol.Required(CONF_URL): cv.url})
def get_service(hass, config, discovery_info=None):
"""Get the Microsoft Teams notification service."""
webhook_url = config.get(CONF_URL)
try:
return MSTeamsNotificationService(webhook_url)
except RuntimeError as err:
_LOGGER.exception("Error in creating a new Microsoft Teams message: %s", err)
return None
class MSTeamsNotificationService(BaseNotificationService):
"""Implement the notification service for Microsoft Teams."""
def __init__(self, webhook_url):
"""Initialize the service."""
self._webhook_url = webhook_url
def send_message(self, message=None, **kwargs):
"""Send a message to the webhook."""
teams_message = pymsteams.connectorcard(self._webhook_url)
title = kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT)
data = kwargs.get(ATTR_DATA)
teams_message.title(title)
teams_message.text(message)
if data is not None:
file_url = data.get(ATTR_FILE_URL)
if file_url is not None:
if not file_url.startswith("http"):
_LOGGER.error("URL should start with http or https")
return
message_section = pymsteams.cardsection()
message_section.addImage(file_url)
teams_message.addSection(message_section)
try:
teams_message.send()
except RuntimeError as err:
_LOGGER.error("Could not send notification. Error: %s", err)
|
import os
import pytest
import sh
from molecule import util
from molecule.command.init import base
from molecule.model import schema_v2
class CommandBase(base.Base):
pass
@pytest.fixture
def _base_class():
return CommandBase
@pytest.fixture
def _instance(_base_class):
return _base_class()
@pytest.fixture
def _command_args():
return {
"dependency_name": "galaxy",
"driver_name": "docker",
"lint_name": "yamllint",
"provisioner_name": "ansible",
"scenario_name": "default",
"role_name": "test-role",
"verifier_name": "testinfra",
}
@pytest.fixture
def _role_directory():
return '.'
@pytest.fixture
def _molecule_file(_role_directory):
return os.path.join(_role_directory, 'test-role', 'molecule', 'default',
'molecule.yml')
def test_valid(temp_dir, _molecule_file, _role_directory, _command_args,
_instance):
_instance._process_templates('molecule', _command_args, _role_directory)
data = util.safe_load_file(_molecule_file)
assert {} == schema_v2.validate(data)
cmd = sh.yamllint.bake('-s', _molecule_file)
pytest.helpers.run_command(cmd)
def test_vagrant_driver(temp_dir, _molecule_file, _role_directory,
_command_args, _instance):
_command_args['driver_name'] = 'vagrant'
_instance._process_templates('molecule', _command_args, _role_directory)
data = util.safe_load_file(_molecule_file)
assert {} == schema_v2.validate(data)
cmd = sh.yamllint.bake('-s', _molecule_file)
pytest.helpers.run_command(cmd)
@pytest.mark.parametrize('driver', [
('azure'),
('digitalocean'),
('docker'),
('ec2'),
('gce'),
('linode'),
('lxc'),
('lxd'),
('openstack'),
('vagrant'),
])
def test_drivers(driver, temp_dir, _molecule_file, _role_directory,
_command_args, _instance):
_command_args['driver_name'] = driver
_instance._process_templates('molecule', _command_args, _role_directory)
data = util.safe_load_file(_molecule_file)
assert {} == schema_v2.validate(data)
cmd = sh.yamllint.bake('-s', _molecule_file)
pytest.helpers.run_command(cmd)
def test_verifier_lint_when_verifier_inspec(
temp_dir, _molecule_file, _role_directory, _command_args, _instance):
_command_args['verifier_name'] = 'inspec'
_command_args['verifier_lint_name'] = 'rubocop'
_instance._process_templates('molecule', _command_args, _role_directory)
data = util.safe_load_file(_molecule_file)
assert {} == schema_v2.validate(data)
cmd = sh.yamllint.bake('-s', _molecule_file)
pytest.helpers.run_command(cmd)
def test_verifier_lint_when_verifier_goss(
temp_dir, _molecule_file, _role_directory, _command_args, _instance):
_command_args['verifier_name'] = 'goss'
_command_args['verifier_lint_name'] = 'yamllint'
_instance._process_templates('molecule', _command_args, _role_directory)
data = util.safe_load_file(_molecule_file)
assert {} == schema_v2.validate(data)
cmd = sh.yamllint.bake('-s', _molecule_file)
pytest.helpers.run_command(cmd)
|
import asyncio
from datetime import timedelta
from distutils.version import StrictVersion
import logging
import async_timeout
from distro import linux_distribution # pylint: disable=import-error
import voluptuous as vol
from homeassistant.const import __version__ as current_version
from homeassistant.helpers import discovery, update_coordinator
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
ATTR_RELEASE_NOTES = "release_notes"
ATTR_NEWEST_VERSION = "newest_version"
CONF_REPORTING = "reporting"
CONF_COMPONENT_REPORTING = "include_used_components"
DOMAIN = "updater"
UPDATER_URL = "https://updater.home-assistant.io/"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: {
vol.Optional(CONF_REPORTING, default=True): cv.boolean,
vol.Optional(CONF_COMPONENT_REPORTING, default=False): cv.boolean,
}
},
extra=vol.ALLOW_EXTRA,
)
RESPONSE_SCHEMA = vol.Schema(
{vol.Required("version"): cv.string, vol.Required("release-notes"): cv.url}
)
class Updater:
"""Updater class for data exchange."""
def __init__(self, update_available: bool, newest_version: str, release_notes: str):
"""Initialize attributes."""
self.update_available = update_available
self.release_notes = release_notes
self.newest_version = newest_version
async def async_setup(hass, config):
"""Set up the updater component."""
if "dev" in current_version:
# This component only makes sense in release versions
_LOGGER.info("Running on 'dev', only analytics will be submitted")
conf = config.get(DOMAIN, {})
if conf.get(CONF_REPORTING):
huuid = await hass.helpers.instance_id.async_get()
else:
huuid = None
include_components = conf.get(CONF_COMPONENT_REPORTING)
async def check_new_version() -> Updater:
"""Check if a new version is available and report if one is."""
newest, release_notes = await get_newest_version(
hass, huuid, include_components
)
_LOGGER.debug("Fetched version %s: %s", newest, release_notes)
# Skip on dev
if "dev" in current_version:
return Updater(False, "", "")
# Load data from Supervisor
if hass.components.hassio.is_hassio():
core_info = hass.components.hassio.get_core_info()
newest = core_info["version_latest"]
# Validate version
update_available = False
if StrictVersion(newest) > StrictVersion(current_version):
_LOGGER.debug(
"The latest available version of Home Assistant is %s", newest
)
update_available = True
elif StrictVersion(newest) == StrictVersion(current_version):
_LOGGER.debug(
"You are on the latest version (%s) of Home Assistant", newest
)
elif StrictVersion(newest) < StrictVersion(current_version):
_LOGGER.debug("Local version is newer than the latest version (%s)", newest)
_LOGGER.debug("Update available: %s", update_available)
return Updater(update_available, newest, release_notes)
coordinator = hass.data[DOMAIN] = update_coordinator.DataUpdateCoordinator[Updater](
hass,
_LOGGER,
name="Home Assistant update",
update_method=check_new_version,
update_interval=timedelta(days=1),
)
# This can take up to 15s which can delay startup
asyncio.create_task(coordinator.async_refresh())
hass.async_create_task(
discovery.async_load_platform(hass, "binary_sensor", DOMAIN, {}, config)
)
return True
async def get_newest_version(hass, huuid, include_components):
"""Get the newest Home Assistant version."""
if huuid:
info_object = await hass.helpers.system_info.async_get_system_info()
if include_components:
info_object["components"] = list(hass.config.components)
linux_dist = await hass.async_add_executor_job(linux_distribution, False)
info_object["distribution"] = linux_dist[0]
info_object["os_version"] = linux_dist[1]
info_object["huuid"] = huuid
else:
info_object = {}
session = async_get_clientsession(hass)
with async_timeout.timeout(30):
req = await session.post(UPDATER_URL, json=info_object)
_LOGGER.info(
(
"Submitted analytics to Home Assistant servers. "
"Information submitted includes %s"
),
info_object,
)
try:
res = await req.json()
except ValueError as err:
raise update_coordinator.UpdateFailed(
"Received invalid JSON from Home Assistant Update"
) from err
try:
res = RESPONSE_SCHEMA(res)
return res["version"], res["release-notes"]
except vol.Invalid as err:
raise update_coordinator.UpdateFailed(
f"Got unexpected response: {err}"
) from err
|
import unittest
import numpy as np
import chainer
from chainer.backends import cuda
from chainer import testing
from chainercv.links.model.mobilenet import ExpandedConv2D
from chainercv.utils.testing import attr
@testing.parameterize(*testing.product({
'expansion_size': [1, 2, 3],
}))
class TestExpandedConv2D(unittest.TestCase):
in_channels = 1
out_channels = 1
expand_pad = 'SAME'
depthwise_ksize = 3
depthwise_pad = 'SAME'
depthwise_stride = 1
project_pad = 'SAME'
def setUp(self):
self.x = np.random.uniform(
-1, 1, (5, self.in_channels, 5, 5)).astype(np.float32)
self.gy = np.random.uniform(
-1, 1, (5, self.out_channels, 5, 5)).astype(np.float32)
# Convolution is the identity function.
expand_initialW = np.ones((
self.expansion_size, self.in_channels),
dtype=np.float32).reshape(
(self.expansion_size, self.in_channels, 1, 1))
depthwise_initialW = np.array([[0, 0, 0], [0, 1, 0], [0, 0, 0]]*self.expansion_size,
dtype=np.float32).reshape((self.expansion_size, 1, 3, 3))
project_initialW = np.ones(
(self.out_channels, self.expansion_size),
dtype=np.float32).reshape(
(self.out_channels, self.expansion_size, 1, 1))
bn_kwargs = {'decay': 0.8}
self.l = ExpandedConv2D(
self.in_channels, self.out_channels, expansion_size=self.expansion_size,
expand_pad=self.expand_pad, depthwise_stride=self.depthwise_stride,
depthwise_ksize=self.depthwise_ksize, depthwise_pad=self.depthwise_pad,
project_pad=self.project_pad, bn_kwargs=bn_kwargs)
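        # The expand layer is assumed to exist only when expansion_size > in_channels, hence the guard below.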
if self.expansion_size > self.in_channels:
self.l.expand.conv.W.array = expand_initialW
self.l.depthwise.conv.W.array = depthwise_initialW
self.l.project.conv.W.array = project_initialW
def check_forward(self, x_data):
x = chainer.Variable(x_data)
        # Make batch normalization act as the identity function.
if self.expansion_size != 1:
self.l.expand.bn.avg_var[:] = 1
self.l.expand.bn.avg_mean[:] = 0
self.l.depthwise.bn.avg_var[:] = 1
self.l.depthwise.bn.avg_mean[:] = 0
self.l.project.bn.avg_var[:] = 1
self.l.project.bn.avg_mean[:] = 0
with chainer.using_config('train', False):
y = self.l(x)
self.assertIsInstance(y, chainer.Variable)
self.assertIsInstance(y.array, self.l.xp.ndarray)
_x_data = x_data
if self.expansion_size > self.in_channels:
np.testing.assert_almost_equal(
cuda.to_cpu(y.array), cuda.to_cpu(_x_data)+self.expansion_size *
np.maximum(np.minimum(cuda.to_cpu(_x_data), 6), 0),
decimal=4
)
else:
np.testing.assert_almost_equal(
cuda.to_cpu(y.array), cuda.to_cpu(_x_data) +
np.maximum(np.minimum(cuda.to_cpu(_x_data), 6), 0),
decimal=4
)
def test_forward_cpu(self):
self.check_forward(self.x)
@attr.gpu
def test_forward_gpu(self):
self.l.to_gpu()
self.check_forward(cuda.to_gpu(self.x))
def check_backward(self, x_data, y_grad):
x = chainer.Variable(x_data)
y = self.l(x)
y.grad = y_grad
y.backward()
def test_backward_cpu(self):
self.check_backward(self.x, self.gy)
@attr.gpu
def test_backward_gpu(self):
self.l.to_gpu()
self.check_backward(cuda.to_gpu(self.x), cuda.to_gpu(self.gy))
testing.run_module(__name__, __file__)
|
import asyncio
import logging
from typing import Any, Dict, Iterable, Optional
from homeassistant.const import (
ATTR_ENTITY_ID,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_OFF,
STATE_ON,
)
from homeassistant.core import Context, State
from homeassistant.helpers.typing import HomeAssistantType
from . import (
ATTR_AWAY_MODE,
ATTR_OPERATION_MODE,
ATTR_TEMPERATURE,
DOMAIN,
SERVICE_SET_AWAY_MODE,
SERVICE_SET_OPERATION_MODE,
SERVICE_SET_TEMPERATURE,
STATE_ECO,
STATE_ELECTRIC,
STATE_GAS,
STATE_HEAT_PUMP,
STATE_HIGH_DEMAND,
STATE_PERFORMANCE,
)
_LOGGER = logging.getLogger(__name__)
VALID_STATES = {
STATE_ECO,
STATE_ELECTRIC,
STATE_GAS,
STATE_HEAT_PUMP,
STATE_HIGH_DEMAND,
STATE_OFF,
STATE_ON,
STATE_PERFORMANCE,
}
async def _async_reproduce_state(
hass: HomeAssistantType,
state: State,
*,
context: Optional[Context] = None,
reproduce_options: Optional[Dict[str, Any]] = None,
) -> None:
"""Reproduce a single state."""
cur_state = hass.states.get(state.entity_id)
if cur_state is None:
_LOGGER.warning("Unable to find entity %s", state.entity_id)
return
if state.state not in VALID_STATES:
_LOGGER.warning(
"Invalid state specified for %s: %s", state.entity_id, state.state
)
return
# Return if we are already at the right state.
if (
cur_state.state == state.state
and cur_state.attributes.get(ATTR_TEMPERATURE)
== state.attributes.get(ATTR_TEMPERATURE)
and cur_state.attributes.get(ATTR_AWAY_MODE)
== state.attributes.get(ATTR_AWAY_MODE)
):
return
service_data = {ATTR_ENTITY_ID: state.entity_id}
if state.state != cur_state.state:
if state.state == STATE_ON:
service = SERVICE_TURN_ON
elif state.state == STATE_OFF:
service = SERVICE_TURN_OFF
else:
service = SERVICE_SET_OPERATION_MODE
service_data[ATTR_OPERATION_MODE] = state.state
await hass.services.async_call(
DOMAIN, service, service_data, context=context, blocking=True
)
if (
state.attributes.get(ATTR_TEMPERATURE)
!= cur_state.attributes.get(ATTR_TEMPERATURE)
and state.attributes.get(ATTR_TEMPERATURE) is not None
):
await hass.services.async_call(
DOMAIN,
SERVICE_SET_TEMPERATURE,
{
ATTR_ENTITY_ID: state.entity_id,
ATTR_TEMPERATURE: state.attributes.get(ATTR_TEMPERATURE),
},
context=context,
blocking=True,
)
if (
state.attributes.get(ATTR_AWAY_MODE) != cur_state.attributes.get(ATTR_AWAY_MODE)
and state.attributes.get(ATTR_AWAY_MODE) is not None
):
await hass.services.async_call(
DOMAIN,
SERVICE_SET_AWAY_MODE,
{
ATTR_ENTITY_ID: state.entity_id,
ATTR_AWAY_MODE: state.attributes.get(ATTR_AWAY_MODE),
},
context=context,
blocking=True,
)
async def async_reproduce_states(
hass: HomeAssistantType,
states: Iterable[State],
*,
context: Optional[Context] = None,
reproduce_options: Optional[Dict[str, Any]] = None,
) -> None:
"""Reproduce Water heater states."""
await asyncio.gather(
*(
_async_reproduce_state(
hass, state, context=context, reproduce_options=reproduce_options
)
for state in states
)
)
|
import sys
import textwrap
from PyQt5.Qt import * # noqa
from PyQt5.Qsci import QsciScintilla
from PyQt5.Qsci import QsciLexerCustom
from lark import Lark
class LexerJson(QsciLexerCustom):
def __init__(self, parent=None):
super().__init__(parent)
self.create_parser()
self.create_styles()
def create_styles(self):
deeppink = QColor(249, 38, 114)
khaki = QColor(230, 219, 116)
mediumpurple = QColor(174, 129, 255)
mediumturquoise = QColor(81, 217, 205)
yellowgreen = QColor(166, 226, 46)
lightcyan = QColor(213, 248, 232)
darkslategrey = QColor(39, 40, 34)
styles = {
0: mediumturquoise,
1: mediumpurple,
2: yellowgreen,
3: deeppink,
4: khaki,
5: lightcyan
}
for style, color in styles.items():
self.setColor(color, style)
self.setPaper(darkslategrey, style)
self.setFont(self.parent().font(), style)
self.token_styles = {
"COLON": 5,
"COMMA": 5,
"LBRACE": 5,
"LSQB": 5,
"RBRACE": 5,
"RSQB": 5,
"FALSE": 0,
"NULL": 0,
"TRUE": 0,
"STRING": 4,
"NUMBER": 1,
}
def create_parser(self):
grammar = '''
anons: ":" "{" "}" "," "[" "]"
TRUE: "true"
FALSE: "false"
NULL: "NULL"
%import common.ESCAPED_STRING -> STRING
%import common.SIGNED_NUMBER -> NUMBER
%import common.WS
%ignore WS
'''
self.lark = Lark(grammar, parser=None, lexer='standard')
# All tokens: print([t.name for t in self.lark.parser.lexer.tokens])
def defaultPaper(self, style):
return QColor(39, 40, 34)
def language(self):
return "Json"
def description(self, style):
return {v: k for k, v in self.token_styles.items()}.get(style, "")
def styleText(self, start, end):
self.startStyling(start)
text = self.parent().text()[start:end]
last_pos = 0
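        # Walk the token stream: style inter-token gaps with the default style (0), then style each token by type.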
try:
for token in self.lark.lex(text):
ws_len = token.pos_in_stream - last_pos
if ws_len:
self.setStyling(ws_len, 0) # whitespace
token_len = len(bytearray(token, "utf-8"))
self.setStyling(
token_len, self.token_styles.get(token.type, 0))
last_pos = token.pos_in_stream + token_len
except Exception as e:
print(e)
class EditorAll(QsciScintilla):
def __init__(self, parent=None):
super().__init__(parent)
# Set font defaults
font = QFont()
font.setFamily('Consolas')
font.setFixedPitch(True)
font.setPointSize(8)
font.setBold(True)
self.setFont(font)
# Set margin defaults
fontmetrics = QFontMetrics(font)
self.setMarginsFont(font)
self.setMarginWidth(0, fontmetrics.width("000") + 6)
self.setMarginLineNumbers(0, True)
self.setMarginsForegroundColor(QColor(128, 128, 128))
self.setMarginsBackgroundColor(QColor(39, 40, 34))
self.setMarginType(1, self.SymbolMargin)
self.setMarginWidth(1, 12)
# Set indentation defaults
self.setIndentationsUseTabs(False)
self.setIndentationWidth(4)
self.setBackspaceUnindents(True)
self.setIndentationGuides(True)
# self.setFolding(QsciScintilla.CircledFoldStyle)
# Set caret defaults
self.setCaretForegroundColor(QColor(247, 247, 241))
self.setCaretWidth(2)
# Set selection color defaults
self.setSelectionBackgroundColor(QColor(61, 61, 52))
self.resetSelectionForegroundColor()
# Set multiselection defaults
self.SendScintilla(QsciScintilla.SCI_SETMULTIPLESELECTION, True)
self.SendScintilla(QsciScintilla.SCI_SETMULTIPASTE, 1)
self.SendScintilla(
QsciScintilla.SCI_SETADDITIONALSELECTIONTYPING, True)
lexer = LexerJson(self)
self.setLexer(lexer)
EXAMPLE_TEXT = textwrap.dedent("""\
{
"_id": "5b05ffcbcf8e597939b3f5ca",
"about": "Excepteur consequat commodo esse voluptate aute aliquip ad sint deserunt commodo eiusmod irure. Sint aliquip sit magna duis eu est culpa aliqua excepteur ut tempor nulla. Aliqua ex pariatur id labore sit. Quis sit ex aliqua veniam exercitation laboris anim adipisicing. Lorem nisi reprehenderit ullamco labore qui sit ut aliqua tempor consequat pariatur proident.",
"address": "665 Malbone Street, Thornport, Louisiana, 243",
"age": 23,
"balance": "$3,216.91",
"company": "BULLJUICE",
"email": "[email protected]",
"eyeColor": "brown",
"gender": "female",
"guid": "d3a6d865-0f64-4042-8a78-4f53de9b0707",
"index": 0,
"isActive": false,
"isActive2": true,
"latitude": -18.660714,
"longitude": -85.378048,
"name": "Elise Kelley",
"phone": "+1 (808) 543-3966",
"picture": "http://placehold.it/32x32",
"registered": "2017-09-30T03:47:40 -02:00",
"tags": [
"et",
"nostrud",
"in",
"fugiat",
"incididunt",
"labore",
"nostrud"
]
}\
""")
def main():
app = QApplication(sys.argv)
ex = EditorAll()
ex.setWindowTitle(__file__)
ex.setText(EXAMPLE_TEXT)
ex.resize(800, 600)
ex.show()
sys.exit(app.exec_())
if __name__ == "__main__":
main()
|
from smhi.smhi_lib import Smhi, SmhiForecastException
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import aiohttp_client
import homeassistant.helpers.config_validation as cv
from homeassistant.util import slugify
from .const import DOMAIN, HOME_LOCATION_NAME
@callback
def smhi_locations(hass: HomeAssistant):
"""Return configurations of SMHI component."""
return {
(slugify(entry.data[CONF_NAME]))
for entry in hass.config_entries.async_entries(DOMAIN)
}
@config_entries.HANDLERS.register(DOMAIN)
class SmhiFlowHandler(config_entries.ConfigFlow):
"""Config flow for SMHI component."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
def __init__(self) -> None:
"""Initialize SMHI forecast configuration flow."""
self._errors = {}
async def async_step_user(self, user_input=None):
"""Handle a flow initialized by the user."""
self._errors = {}
if user_input is not None:
is_ok = await self._check_location(
user_input[CONF_LONGITUDE], user_input[CONF_LATITUDE]
)
if is_ok:
name = slugify(user_input[CONF_NAME])
if not self._name_in_configuration_exists(name):
return self.async_create_entry(
title=user_input[CONF_NAME], data=user_input
)
self._errors[CONF_NAME] = "name_exists"
else:
self._errors["base"] = "wrong_location"
        # If the Home Assistant configuration has a valid location set,
        # use it as the default values in the form
if not smhi_locations(self.hass):
if await self._homeassistant_location_exists():
return await self._show_config_form(
name=HOME_LOCATION_NAME,
latitude=self.hass.config.latitude,
longitude=self.hass.config.longitude,
)
return await self._show_config_form()
async def _homeassistant_location_exists(self) -> bool:
"""Return true if default location is set and is valid."""
if self.hass.config.latitude != 0.0 and self.hass.config.longitude != 0.0:
# Return true if valid location
if await self._check_location(
self.hass.config.longitude, self.hass.config.latitude
):
return True
return False
def _name_in_configuration_exists(self, name: str) -> bool:
"""Return True if name exists in configuration."""
if name in smhi_locations(self.hass):
return True
return False
async def _show_config_form(
self, name: str = None, latitude: str = None, longitude: str = None
):
"""Show the configuration form to edit location data."""
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(CONF_NAME, default=name): str,
vol.Required(CONF_LATITUDE, default=latitude): cv.latitude,
vol.Required(CONF_LONGITUDE, default=longitude): cv.longitude,
}
),
errors=self._errors,
)
async def _check_location(self, longitude: str, latitude: str) -> bool:
"""Return true if location is ok."""
try:
session = aiohttp_client.async_get_clientsession(self.hass)
smhi_api = Smhi(longitude, latitude, session=session)
await smhi_api.async_get_forecast()
return True
except SmhiForecastException:
            # The API throws an exception if the location is invalid
pass
return False
|
import datetime
import mock
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import patch
from test import run_only
from mock import Mock
from diamond.collector import Collector
from elb import ElbCollector
def run_only_if_boto_is_available(func):
try:
import boto
except ImportError:
boto = None
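    # run_only skips the wrapped test unless boto was imported successfully.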
pred = lambda: boto is not None
return run_only(func, pred)
class TestElbCollector(CollectorTestCase):
@run_only_if_boto_is_available
def test_throws_exception_when_interval_not_multiple_of_60(self):
config = get_collector_config('ElbCollector',
{'enabled': True,
'interval': 10})
assertRaisesAndContains(Exception, 'multiple of',
ElbCollector, *[config, None])
@run_only_if_boto_is_available
@patch('elb.cloudwatch')
@patch('boto.ec2.connect_to_region')
@patch('boto.ec2.elb.connect_to_region')
@patch.object(Collector, 'publish_metric')
def test_ignore(self, publish_metric, elb_connect_to_region,
ec2_connect_to_region, cloudwatch):
config = get_collector_config(
'ElbCollector',
{
'enabled': True,
'interval': 60,
'regions': {
'us-west-1': {}
},
'elbs_ignored': ['^to_ignore', ],
})
az = Mock()
az.name = 'us-west-1a'
ec2_conn = Mock()
ec2_conn.get_all_zones = Mock()
ec2_conn.get_all_zones.return_value = [az]
ec2_connect_to_region.return_value = ec2_conn
elb1 = Mock()
elb1.name = 'elb1'
elb2 = Mock()
elb2.name = 'to_ignore'
elb_conn = Mock()
elb_conn.get_all_load_balancers = Mock()
elb_conn.get_all_load_balancers.return_value = [elb1, elb2]
elb_connect_to_region.return_value = elb_conn
cw_conn = Mock()
cw_conn.region = Mock()
cw_conn.region.name = 'us-west-1'
cw_conn.get_metric_statistics = Mock()
ts = datetime.datetime.utcnow().replace(second=0, microsecond=0)
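        # One fake CloudWatch response per metric, consumed in the order the collector requests them.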
cw_conn.get_metric_statistics.side_effect = [
[{u'Timestamp': ts, u'Average': 1.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Average': 2.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 3.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Average': 4.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 6.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 7.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 8.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 9.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 10.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 11.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 12.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Maximum': 13.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 14.0, u'Unit': u'Count'}],
]
cloudwatch.connect_to_region = Mock()
cloudwatch.connect_to_region.return_value = cw_conn
collector = ElbCollector(config, handlers=[])
target = ts + datetime.timedelta(minutes=1)
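        # Advance the mocked clock one minute past ts so the collection window covers the faked data points.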
with mock.patch.object(datetime, 'datetime',
mock.Mock(wraps=datetime.datetime)) as patched:
patched.utcnow.return_value = target
collector.collect()
self.assertPublishedMetricMany(
publish_metric,
{
'us-west-1a.elb1.HealthyHostCount': 1,
'us-west-1a.elb1.UnHealthyHostCount': 2,
'us-west-1a.elb1.RequestCount': 3,
'us-west-1a.elb1.Latency': 4,
'us-west-1a.elb1.HTTPCode_ELB_4XX': 6,
'us-west-1a.elb1.HTTPCode_ELB_5XX': 7,
'us-west-1a.elb1.HTTPCode_Backend_2XX': 8,
'us-west-1a.elb1.HTTPCode_Backend_3XX': 9,
'us-west-1a.elb1.HTTPCode_Backend_4XX': 10,
'us-west-1a.elb1.HTTPCode_Backend_5XX': 11,
'us-west-1a.elb1.BackendConnectionErrors': 12,
'us-west-1a.elb1.SurgeQueueLength': 13,
'us-west-1a.elb1.SpilloverCount': 14,
})
@run_only_if_boto_is_available
@patch('elb.cloudwatch')
@patch('boto.ec2.connect_to_region')
@patch.object(Collector, 'publish_metric')
def test_collect(self, publish_metric, connect_to_region, cloudwatch):
config = get_collector_config(
'ElbCollector',
{
'enabled': True,
'interval': 60,
'regions': {
'us-west-1': {
'elb_names': ['elb1'],
}
}
})
az = Mock()
az.name = 'us-west-1a'
ec2_conn = Mock()
ec2_conn.get_all_zones = Mock()
ec2_conn.get_all_zones.return_value = [az]
connect_to_region.return_value = ec2_conn
cw_conn = Mock()
cw_conn.region = Mock()
cw_conn.region.name = 'us-west-1'
cw_conn.get_metric_statistics = Mock()
ts = datetime.datetime.utcnow().replace(second=0, microsecond=0)
cw_conn.get_metric_statistics.side_effect = [
[{u'Timestamp': ts, u'Average': 1.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Average': 2.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 3.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Average': 4.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 6.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 7.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 8.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 9.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 10.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 11.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 12.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Maximum': 13.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 14.0, u'Unit': u'Count'}],
]
cloudwatch.connect_to_region = Mock()
cloudwatch.connect_to_region.return_value = cw_conn
collector = ElbCollector(config, handlers=[])
target = ts + datetime.timedelta(minutes=1)
with mock.patch.object(datetime, 'datetime',
mock.Mock(wraps=datetime.datetime)) as patched:
patched.utcnow.return_value = target
collector.collect()
self.assertPublishedMetricMany(
publish_metric,
{
'us-west-1a.elb1.HealthyHostCount': 1,
'us-west-1a.elb1.UnHealthyHostCount': 2,
'us-west-1a.elb1.RequestCount': 3,
'us-west-1a.elb1.Latency': 4,
'us-west-1a.elb1.HTTPCode_ELB_4XX': 6,
'us-west-1a.elb1.HTTPCode_ELB_5XX': 7,
'us-west-1a.elb1.HTTPCode_Backend_2XX': 8,
'us-west-1a.elb1.HTTPCode_Backend_3XX': 9,
'us-west-1a.elb1.HTTPCode_Backend_4XX': 10,
'us-west-1a.elb1.HTTPCode_Backend_5XX': 11,
'us-west-1a.elb1.BackendConnectionErrors': 12,
'us-west-1a.elb1.SurgeQueueLength': 13,
'us-west-1a.elb1.SpilloverCount': 14,
})
def assertRaisesAndContains(excClass, contains_str, callableObj, *args,
**kwargs):
try:
callableObj(*args, **kwargs)
except excClass as e:
msg = str(e)
if contains_str in msg:
return
else:
raise AssertionError(
"Exception message does not contain '%s': '%s'" % (
contains_str, msg))
else:
if hasattr(excClass, '__name__'):
excName = excClass.__name__
else:
excName = str(excClass)
raise AssertionError("%s not raised" % excName)
if __name__ == "__main__":
unittest.main()
|
import voluptuous as vol
from homeassistant.components import ads
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_NAME, CONF_UNIT_OF_MEASUREMENT
import homeassistant.helpers.config_validation as cv
from . import CONF_ADS_FACTOR, CONF_ADS_TYPE, CONF_ADS_VAR, STATE_KEY_STATE, AdsEntity
DEFAULT_NAME = "ADS sensor"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_ADS_VAR): cv.string,
vol.Optional(CONF_ADS_FACTOR): cv.positive_int,
vol.Optional(CONF_ADS_TYPE, default=ads.ADSTYPE_INT): vol.In(
[
ads.ADSTYPE_INT,
ads.ADSTYPE_UINT,
ads.ADSTYPE_BYTE,
ads.ADSTYPE_DINT,
ads.ADSTYPE_UDINT,
]
),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_UNIT_OF_MEASUREMENT, default=""): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up an ADS sensor device."""
ads_hub = hass.data.get(ads.DATA_ADS)
ads_var = config[CONF_ADS_VAR]
ads_type = config[CONF_ADS_TYPE]
name = config[CONF_NAME]
unit_of_measurement = config.get(CONF_UNIT_OF_MEASUREMENT)
factor = config.get(CONF_ADS_FACTOR)
entity = AdsSensor(ads_hub, ads_var, ads_type, name, unit_of_measurement, factor)
add_entities([entity])
class AdsSensor(AdsEntity):
"""Representation of an ADS sensor entity."""
def __init__(self, ads_hub, ads_var, ads_type, name, unit_of_measurement, factor):
"""Initialize AdsSensor entity."""
super().__init__(ads_hub, name, ads_var)
self._unit_of_measurement = unit_of_measurement
self._ads_type = ads_type
self._factor = factor
async def async_added_to_hass(self):
"""Register device notification."""
await self.async_initialize_device(
self._ads_var,
self._ads_hub.ADS_TYPEMAP[self._ads_type],
STATE_KEY_STATE,
self._factor,
)
@property
def state(self):
"""Return the state of the device."""
return self._state_dict[STATE_KEY_STATE]
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return self._unit_of_measurement
|
import unittest
from chainer.testing.attr import check_available
from chainer.testing.attr import gpu # NOQA
from chainer.testing.attr import slow # NOQA
try:
import pytest
pfnci_skip = pytest.mark.pfnci_skip
except ImportError:
from chainer.testing.attr import _dummy_callable
pfnci_skip = _dummy_callable
def mpi(f):
check_available()
import pytest
try:
import mpi4py.MPI # NOQA
available = True
except ImportError:
available = False
return unittest.skipUnless(
available, 'mpi4py is not installed')(pytest.mark.mpi(f))
|
import urwid
def exit_on_q(key):
if key in ('q', 'Q'):
raise urwid.ExitMainLoop()
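# Each palette entry is (name, fg16, bg16, mono, fg256, bg256); the 16-color slots are left empty here.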
palette = [
('banner', '', '', '', '#ffa', '#60d'),
('streak', '', '', '', 'g50', '#60a'),
('inside', '', '', '', 'g38', '#808'),
('outside', '', '', '', 'g27', '#a06'),
('bg', '', '', '', 'g7', '#d06'),]
placeholder = urwid.SolidFill()
loop = urwid.MainLoop(placeholder, palette, unhandled_input=exit_on_q)
loop.screen.set_terminal_properties(colors=256)
loop.widget = urwid.AttrMap(placeholder, 'bg')
loop.widget.original_widget = urwid.Filler(urwid.Pile([]))
div = urwid.Divider()
outside = urwid.AttrMap(div, 'outside')
inside = urwid.AttrMap(div, 'inside')
txt = urwid.Text(('banner', u" Hello World "), align='center')
streak = urwid.AttrMap(txt, 'streak')
pile = loop.widget.base_widget # .base_widget skips the decorations
for item in [outside, inside, streak, inside, outside]:
pile.contents.append((item, pile.options()))
loop.run()
|
import pytest
import http.client
from cherrypy.lib import httputil
@pytest.mark.parametrize(
'script_name,path_info,expected_url',
[
('/sn/', '/pi/', '/sn/pi/'),
('/sn/', '/pi', '/sn/pi'),
('/sn/', '/', '/sn/'),
('/sn/', '', '/sn/'),
('/sn', '/pi/', '/sn/pi/'),
('/sn', '/pi', '/sn/pi'),
('/sn', '/', '/sn/'),
('/sn', '', '/sn'),
('/', '/pi/', '/pi/'),
('/', '/pi', '/pi'),
('/', '/', '/'),
('/', '', '/'),
('', '/pi/', '/pi/'),
('', '/pi', '/pi'),
('', '/', '/'),
('', '', '/'),
]
)
def test_urljoin(script_name, path_info, expected_url):
"""Test all slash+atom combinations for SCRIPT_NAME and PATH_INFO."""
actual_url = httputil.urljoin(script_name, path_info)
assert actual_url == expected_url
EXPECTED_200 = (200, 'OK', 'Request fulfilled, document follows')
EXPECTED_500 = (
500,
'Internal Server Error',
'The server encountered an unexpected condition which '
'prevented it from fulfilling the request.',
)
EXPECTED_404 = (404, 'Not Found', 'Nothing matches the given URI')
EXPECTED_444 = (444, 'Non-existent reason', '')
@pytest.mark.parametrize(
'status,expected_status',
[
(None, EXPECTED_200),
(200, EXPECTED_200),
('500', EXPECTED_500),
(http.client.NOT_FOUND, EXPECTED_404),
('444 Non-existent reason', EXPECTED_444),
]
)
def test_valid_status(status, expected_status):
"""Check valid int, string and http.client-constants
statuses processing."""
assert httputil.valid_status(status) == expected_status
@pytest.mark.parametrize(
'status_code,error_msg',
[
(
'hey',
r"Illegal response status from server \('hey' is non-numeric\)."
),
(
{'hey': 'hi'},
r'Illegal response status from server '
r"\(\{'hey': 'hi'\} is non-numeric\).",
),
(1, r'Illegal response status from server \(1 is out of range\).'),
(600, r'Illegal response status from server \(600 is out of range\).'),
]
)
def test_invalid_status(status_code, error_msg):
"""Check that invalid status cause certain errors."""
with pytest.raises(ValueError, match=error_msg):
httputil.valid_status(status_code)
|
import asyncio
import logging
import os
import re
from typing import Dict, Union
import aiohttp
from aiohttp import web
from aiohttp.hdrs import CONTENT_LENGTH, CONTENT_TYPE
from aiohttp.web_exceptions import HTTPBadGateway
import async_timeout
from homeassistant.components.http import KEY_AUTHENTICATED, HomeAssistantView
from homeassistant.components.onboarding import async_is_onboarded
from homeassistant.const import HTTP_UNAUTHORIZED
from .const import X_HASS_IS_ADMIN, X_HASS_USER_ID, X_HASSIO
_LOGGER = logging.getLogger(__name__)
MAX_UPLOAD_SIZE = 1024 * 1024 * 1024
NO_TIMEOUT = re.compile(
r"^(?:"
r"|homeassistant/update"
r"|hassos/update"
r"|hassos/update/cli"
r"|supervisor/update"
r"|addons/[^/]+/(?:update|install|rebuild)"
r"|snapshots/.+/full"
r"|snapshots/.+/partial"
r"|snapshots/[^/]+/(?:upload|download)"
r")$"
)
NO_AUTH_ONBOARDING = re.compile(
r"^(?:" r"|supervisor/logs" r"|snapshots/[^/]+/.+" r")$"
)
NO_AUTH = re.compile(
r"^(?:" r"|app/.*" r"|addons/[^/]+/logo" r"|addons/[^/]+/icon" r")$"
)
class HassIOView(HomeAssistantView):
"""Hass.io view to handle base part."""
name = "api:hassio"
url = "/api/hassio/{path:.+}"
requires_auth = False
def __init__(self, host: str, websession: aiohttp.ClientSession):
"""Initialize a Hass.io base view."""
self._host = host
self._websession = websession
async def _handle(
self, request: web.Request, path: str
) -> Union[web.Response, web.StreamResponse]:
"""Route data to Hass.io."""
hass = request.app["hass"]
if _need_auth(hass, path) and not request[KEY_AUTHENTICATED]:
return web.Response(status=HTTP_UNAUTHORIZED)
return await self._command_proxy(path, request)
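    # The same handler serves DELETE, GET and POST requests.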
delete = _handle
get = _handle
post = _handle
async def _command_proxy(
self, path: str, request: web.Request
) -> Union[web.Response, web.StreamResponse]:
"""Return a client request with proxy origin for Hass.io supervisor.
This method is a coroutine.
"""
read_timeout = _get_timeout(path)
data = None
headers = _init_header(request)
if path == "snapshots/new/upload":
# We need to reuse the full content type that includes the boundary
headers[
"Content-Type"
] = request._stored_content_type # pylint: disable=protected-access
# Snapshots are big, so we need to adjust the allowed size
request._client_max_size = ( # pylint: disable=protected-access
MAX_UPLOAD_SIZE
)
try:
with async_timeout.timeout(10):
data = await request.read()
method = getattr(self._websession, request.method.lower())
client = await method(
f"http://{self._host}/{path}",
data=data,
headers=headers,
timeout=read_timeout,
)
# Simple request
if int(client.headers.get(CONTENT_LENGTH, 0)) < 4194000:
# Return Response
body = await client.read()
return web.Response(
content_type=client.content_type, status=client.status, body=body
)
# Stream response
response = web.StreamResponse(status=client.status, headers=client.headers)
response.content_type = client.content_type
await response.prepare(request)
async for data in client.content.iter_chunked(4096):
await response.write(data)
return response
except aiohttp.ClientError as err:
_LOGGER.error("Client error on api %s request %s", path, err)
except asyncio.TimeoutError:
_LOGGER.error("Client timeout error on API request %s", path)
raise HTTPBadGateway()
def _init_header(request: web.Request) -> Dict[str, str]:
"""Create initial header."""
headers = {
X_HASSIO: os.environ.get("HASSIO_TOKEN", ""),
CONTENT_TYPE: request.content_type,
}
# Add user data
user = request.get("hass_user")
if user is not None:
headers[X_HASS_USER_ID] = request["hass_user"].id
headers[X_HASS_IS_ADMIN] = str(int(request["hass_user"].is_admin))
return headers
def _get_timeout(path: str) -> int:
"""Return timeout for a URL path."""
if NO_TIMEOUT.match(path):
return 0
return 300
def _need_auth(hass, path: str) -> bool:
"""Return if a path need authentication."""
if not async_is_onboarded(hass) and NO_AUTH_ONBOARDING.match(path):
return False
if NO_AUTH.match(path):
return False
return True
|
import io
import os
import threading
import pytest
from yandextank.stepper.main import LoadProfile
from yandextank.common.util import get_test_path
from yandextank.common.util import read_resource
from yandextank.common.interfaces import TankInfo
from yandextank.core import TankCore
from yandextank.stepper import Stepper
from yandextank.stepper.load_plan import create, Const, Line, Composite, Stairway, StepFactory
from yandextank.stepper.util import take
class TestLine(object):
def test_get_rps_list(self):
lp = create(["line(1, 100, 10s)"])
rps_list = lp.get_rps_list()
assert len(rps_list) == 11
assert rps_list[-1][0] == 100
@pytest.mark.parametrize(
"rps, duration, rps_list",
[(100, 3000, [(100, 3)]), (0, 3000, [(0, 3)]), (100, 0, [(100, 0)])])
class TestConst(object):
@pytest.mark.parametrize(
"check_point, expected",
[(lambda duration: 0, lambda rps: rps),
(lambda duration: duration / 2, lambda rps: rps),
(lambda duration: duration + 1, lambda rps: 0),
(lambda duration: -1, lambda rps: 0)])
def test_rps_at(self, rps, duration, rps_list, check_point, expected):
assert Const(rps,
duration).rps_at(check_point(duration)) == expected(rps)
def test_get_rps_list(self, rps, duration, rps_list):
assert Const(rps, duration).get_rps_list() == rps_list
assert isinstance(rps_list[0][1], int)
class TestLineNew(object):
@pytest.mark.parametrize(
"min_rps, max_rps, duration, check_point, expected",
[(0, 10, 30 * 1000, 0, 0), (0, 10, 30 * 1000, 10, 3),
(0, 10, 30 * 1000, 29, 10), (9, 10, 30 * 1000, 1, 9),
(9, 10, 30 * 1000, 20, 10)])
def test_rps_at(self, min_rps, max_rps, duration, check_point, expected):
assert round(Line(min_rps, max_rps, duration).rps_at(
check_point)) == expected
@pytest.mark.parametrize(
"min_rps, max_rps, duration, check_point, expected",
[
(0, 10, 20 * 1000, 9, (9, 2)),
(0, 10, 30 * 1000, 0, (0, 2)),
(0, 10, 30 * 1000, 5, (5, 3)),
(0, 10, 30 * 1000, 10, (10, 2)),
(0, 10, 3 * 1000, 0, (0, 1)),
(0, 10, 3 * 1000, 1, (3, 1)),
(0, 10, 3 * 1000, 2, (7, 1)),
(0, 10, 3 * 1000, 3, (10, 1)),
(9, 10, 30 * 1000, 0, (9, 15)),
(9, 10, 30 * 1000, 1, (10, 16)),
(10, 10, 30 * 1000, 0, (10, 31)), # strange
(10, 0, 30 * 1000, 0, (10, 2)),
(10, 0, 30 * 1000, 1, (9, 3)),
(10, 0, 30 * 1000, 9, (1, 3)),
(10, 0, 30 * 1000, 10, (0, 2)),
])
def test_get_rps_list(self, min_rps, max_rps, duration, check_point, expected):
assert Line(min_rps, max_rps, duration).get_rps_list()[check_point] == expected
@pytest.mark.parametrize(
"min_rps, max_rps, duration, expected_len, threshold, len_above_threshold",
[
(2, 12, 25000, 175, 5000, 160),
(2, 12, 25000, 175, 10000, 135),
(2, 12, 25000, 175, 15000, 100),
(2, 12, 25000, 175, 20000, 55),
(0, 10, 25000, 125, 15000, 80),
(10, 12, 20000, 220, 10000, 115),
(10, 10, 20000, 200, 10000, 100),
(10, 0, 25000, 125, 10000, 45),
(10, 0, 25000, 125, 15000, 20),
])
def test_iter(self, min_rps, max_rps, duration, expected_len, threshold, len_above_threshold):
load_plan = Line(min_rps, max_rps, duration)
assert len(load_plan) == expected_len
assert len(
[ts for ts in load_plan if ts >= threshold]) == len_above_threshold
class TestComposite(object):
@pytest.mark.parametrize(
"steps, expected_len", [([Line(0, 10, 20000), Const(10, 10000)], 200),
([Line(0, 10, 20000), Line(10, 0, 20000)], 200),
([Const(5, 10000), Const(10, 5000)], 100)])
def test_iter(self, steps, expected_len):
assert len(Composite(steps)) == expected_len
@pytest.mark.parametrize(
"steps, check_point, expected", [
([Line(0, 10, 20000), Const(10, 10000)], 9, (9, 2)),
([Line(0, 10, 20000), Const(10, 10000)], 10, (10, 2)),
([Line(0, 10, 20000), Const(10, 10000)], 11, (10, 10)),
])
def test_rps_list(self, steps, check_point, expected):
assert Composite(steps).get_rps_list()[check_point] == expected
class TestStairway(object):
@pytest.mark.parametrize(
"min_rps, max_rps, increment, step_duration, expected_len, threshold, len_above_threshold",
[(0, 1000, 50, 3000, 31500, 9000, 31050),
(0, 1000, 50, 3000, 31500, 15000, 30000),
(0, 1000, 50, 3000, 31500, 45000, 15750)])
def test_iter(self, min_rps, max_rps, increment, step_duration, expected_len, threshold, len_above_threshold):
load_plan = Stairway(min_rps, max_rps, increment, step_duration)
assert len(load_plan) == expected_len
assert len(
[ts for ts in load_plan if ts >= threshold]) == len_above_threshold
class TestCreate(object):
@pytest.mark.parametrize(
'rps_schedule, check_point, expected', [
(['line(1, 5, 2s)'], 100, [0, 618, 1000, 1302, 1561, 1791]),
(['line(1.1, 5.8, 2s)'], 100, [0, 566, 917, 1196, 1435, 1647]),
(['line(5, 1, 2s)'], 100, [0, 208, 438, 697, 1000, 1381]),
(['const(1, 10s)'], 100,
[0, 1000, 2000, 3000, 4000, 5000, 6000, 7000, 8000, 9000]),
(['const(200, 0.1s)'], 100, [
0, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60, 65, 70, 75,
80, 85, 90, 95
]),
(['const(1, 2s)', 'const(2, 2s)'], 100,
[0, 1000, 2000, 2500, 3000, 3500]),
(['const(1.5, 10s)'], 100, [
0, 666, 1333, 2000, 2666, 3333, 4000, 4666, 5333, 6000, 6666,
7333, 8000, 8666, 9333
]),
(['step(1, 5, 1, 5s)'], 10,
[0, 1000, 2000, 3000, 4000, 5000, 5500, 6000, 6500, 7000]),
(['step(1.2, 5.7, 1.1, 5s)'], 10,
[0, 833, 1666, 2500, 3333, 4166, 5000, 5434, 5869, 6304]),
(['const(1, 1)'], 10, [0]),
])
def test_create(self, rps_schedule, check_point, expected):
# pytest.set_trace()
assert take(check_point, (create(rps_schedule))) == expected
# ([0-9.]+d)?([0-9.]+h)?([0-9.]+m)?([0-9.]+s)?
@pytest.mark.parametrize('step_config, expected_duration', [
('line(1,500,1m30s)', 90),
('const(50,1h30s)', 3630 * 1000),
('step(10,200,10,1h20m)', 4800 * 1000)
])
def test_step_factory(step_config, expected_duration):
steps = StepFactory.produce(step_config)
assert steps.duration == expected_duration
@pytest.mark.parametrize('stepper_kwargs, expected_stpd', [
({'ammo_file': os.path.join(get_test_path(), 'yandextank/stepper/tests/test-ammo.txt')},
'yandextank/stepper/tests/expected.stpd'),
({'ammo_type': 'uripost',
'ammo_file': os.path.join(get_test_path(), 'yandextank/stepper/tests/test-uripost.txt')},
'yandextank/stepper/tests/uripost-expected.stpd'),
({'uris': ['/case1?sleep=1000', '/case2?sleep=100', '/case3?sleep=10'],
'headers': ['[Connection: close]']},
'yandextank/stepper/tests/uris-expected.stpd'),
({'ammo_file': os.path.join(get_test_path(), 'yandextank/stepper/tests/test-unicode.txt'),
'headers': ['Connection: close', 'Host: web-load03f.yandex.ru']},
'yandextank/stepper/tests/unicode-expected.stpd'),
({'ammo_type': 'caseline',
'ammo_file': os.path.join(get_test_path(), 'yandextank/stepper/tests/test-caseline.txt')},
'yandextank/stepper/tests/caseline-expected.stpd'),
({'ammo_file': os.path.join(get_test_path(), 'yandextank/stepper/tests/test-protobuf-autocases.txt'),
'autocases': 2},
'yandextank/stepper/tests/protobuf-expected.stpd')
])
def test_ammo(stepper_kwargs, expected_stpd):
stepper = Stepper(
TankCore([{}], threading.Event(), TankInfo({})),
rps_schedule=["const(10,10s)"],
http_ver="1.1",
instances_schedule=None,
instances=10,
loop_limit=1000,
ammo_limit=1000,
enum_ammo=False,
**stepper_kwargs
)
stepper_output = io.BytesIO()
stepper.write(stepper_output)
stepper_output.seek(0)
expected_lines = read_resource(os.path.join(get_test_path(), expected_stpd), 'rb').split(b'\n')
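    # Compare the generated stpd output against the expected fixture line by line.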
for i, (result, expected) in enumerate(zip(stepper_output, expected_lines)):
assert result.strip() == expected.strip(), 'Line {} mismatch'.format(i)
@pytest.mark.parametrize('load_type, schedule, expected', [
('rps', 'const(10,10s)', ['const(10,10s)']),
('rps', 'line(1,12,30s)const(12,15m)line(12,10,15m)', ['line(1,12,30s)', 'const(12,15m)', 'line(12,10,15m)'])
])
def test_load_profile(load_type, schedule, expected):
schedule = LoadProfile(load_type, schedule).schedule
assert schedule == expected
|
from homeassistant import config_entries, data_entry_flow, setup
from homeassistant.components.devolo_home_control.const import DOMAIN
from homeassistant.config_entries import SOURCE_USER
from tests.async_mock import patch
from tests.common import MockConfigEntry
async def test_form(hass):
"""Test we get the form."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
with patch(
"homeassistant.components.devolo_home_control.async_setup",
return_value=True,
) as mock_setup, patch(
"homeassistant.components.devolo_home_control.async_setup_entry",
return_value=True,
) as mock_setup_entry, patch(
"homeassistant.components.devolo_home_control.config_flow.Mydevolo.credentials_valid",
return_value=True,
), patch(
"homeassistant.components.devolo_home_control.config_flow.Mydevolo.get_gateway_ids",
return_value=["123456"],
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"username": "test-username", "password": "test-password"},
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == "devolo Home Control"
assert result2["data"] == {
"username": "test-username",
"password": "test-password",
"home_control_url": "https://homecontrol.mydevolo.com",
"mydevolo_url": "https://www.mydevolo.com",
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_invalid_credentials(hass):
"""Test if we get the error message on invalid credentials."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
with patch(
"homeassistant.components.devolo_home_control.config_flow.Mydevolo.credentials_valid",
return_value=False,
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"username": "test-username", "password": "test-password"},
)
assert result["errors"] == {"base": "invalid_auth"}
async def test_form_already_configured(hass):
"""Test if we get the error message on already configured."""
with patch(
"homeassistant.components.devolo_home_control.config_flow.Mydevolo.get_gateway_ids",
return_value=["1234567"],
), patch(
"homeassistant.components.devolo_home_control.config_flow.Mydevolo.credentials_valid",
return_value=True,
):
MockConfigEntry(domain=DOMAIN, unique_id="1234567", data={}).add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={"username": "test-username", "password": "test-password"},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_form_advanced_options(hass):
"""Test if we get the advanced options if user has enabled it."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user", "show_advanced_options": True}
)
assert result["type"] == "form"
assert result["errors"] == {}
with patch(
"homeassistant.components.devolo_home_control.async_setup",
return_value=True,
) as mock_setup, patch(
"homeassistant.components.devolo_home_control.async_setup_entry",
return_value=True,
) as mock_setup_entry, patch(
"homeassistant.components.devolo_home_control.config_flow.Mydevolo.credentials_valid",
return_value=True,
), patch(
"homeassistant.components.devolo_home_control.config_flow.Mydevolo.get_gateway_ids",
return_value=["123456"],
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"username": "test-username",
"password": "test-password",
"home_control_url": "https://test_url.test",
"mydevolo_url": "https://test_mydevolo_url.test",
},
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == "devolo Home Control"
assert result2["data"] == {
"username": "test-username",
"password": "test-password",
"home_control_url": "https://test_url.test",
"mydevolo_url": "https://test_mydevolo_url.test",
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
|
import mock
from paasta_tools import check_services_replication_tools
def test_main_kubernetes():
with mock.patch(
"paasta_tools.check_services_replication_tools.check_services_replication",
autospec=True,
) as mock_check_services_replication, mock.patch(
"paasta_tools.check_services_replication_tools.parse_args", autospec=True
) as mock_parse_args, mock.patch(
"paasta_tools.check_services_replication_tools.get_kubernetes_pods_and_nodes",
autospec=True,
return_value=([mock.Mock()], [mock.Mock()]),
), mock.patch(
"paasta_tools.check_services_replication_tools.load_system_paasta_config",
autospec=True,
) as mock_load_system_paasta_config, mock.patch(
"paasta_tools.check_services_replication_tools.yelp_meteorite", autospec=True,
) as mock_yelp_meteorite, mock.patch(
"paasta_tools.check_services_replication_tools.sys.exit", autospec=True,
) as mock_sys_exit:
mock_parse_args.return_value.under_replicated_crit_pct = 5
mock_parse_args.return_value.min_count_critical = 1
mock_check_services_replication.return_value = (6, 100)
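        # 6 of 100 under-replicated is above the 5% critical threshold, so the script should exit with status 2.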
check_services_replication_tools.main(
instance_type_class=None, check_service_replication=None, namespace="baz",
)
assert mock_check_services_replication.called
mock_yelp_meteorite.create_gauge.assert_called_once_with(
"paasta.pct_services_under_replicated",
{
"paasta_cluster": mock_load_system_paasta_config.return_value.get_cluster.return_value,
"scheduler": "kubernetes",
},
)
mock_gauge = mock_yelp_meteorite.create_gauge.return_value
mock_gauge.set.assert_called_once_with(6)
mock_sys_exit.assert_called_once_with(2)
def test_main_mesos():
with mock.patch(
"paasta_tools.check_services_replication_tools.check_services_replication",
autospec=True,
) as mock_check_services_replication, mock.patch(
"paasta_tools.check_services_replication_tools.parse_args", autospec=True
) as mock_parse_args, mock.patch(
"paasta_tools.check_services_replication_tools.get_mesos_tasks_and_slaves",
autospec=True,
return_value=([mock.Mock()], [mock.Mock()]),
), mock.patch(
"paasta_tools.check_services_replication_tools.load_system_paasta_config",
autospec=True,
) as mock_load_system_paasta_config, mock.patch(
"paasta_tools.check_services_replication_tools.yelp_meteorite", autospec=True,
) as mock_yelp_meteorite, mock.patch(
"paasta_tools.check_services_replication_tools.sys.exit", autospec=True,
) as mock_sys_exit:
mock_parse_args.return_value.under_replicated_crit_pct = 5
mock_parse_args.return_value.min_count_critical = 1
mock_check_services_replication.return_value = (6, 100)
check_services_replication_tools.main(
instance_type_class=None,
check_service_replication=None,
namespace=None,
mesos=True,
)
assert mock_check_services_replication.called
mock_yelp_meteorite.create_gauge.assert_called_once_with(
"paasta.pct_services_under_replicated",
{
"paasta_cluster": mock_load_system_paasta_config.return_value.get_cluster.return_value,
"scheduler": "mesos",
},
)
mock_gauge = mock_yelp_meteorite.create_gauge.return_value
mock_gauge.set.assert_called_once_with(6)
mock_sys_exit.assert_called_once_with(2)
def test_check_services_replication():
soa_dir = "anw"
instance_config = mock.Mock()
instance_config.get_docker_image.return_value = True
with mock.patch(
"paasta_tools.check_services_replication_tools.list_services",
autospec=True,
return_value=["a"],
), mock.patch(
"paasta_tools.check_kubernetes_services_replication.check_kubernetes_pod_replication",
autospec=True,
) as mock_check_service_replication, mock.patch(
"paasta_tools.check_services_replication_tools.PaastaServiceConfigLoader",
autospec=True,
) as mock_paasta_service_config_loader, mock.patch(
"paasta_tools.check_services_replication_tools.KubeClient", autospec=True
) as mock_kube_client:
mock_kube_client.return_value = mock.Mock()
mock_paasta_service_config_loader.return_value.instance_configs.return_value = [
instance_config
]
mock_client = mock.Mock()
mock_client.list_tasks.return_value = []
mock_replication_checker = mock.Mock()
mock_pods = [mock.Mock(), mock.Mock()]
mock_check_service_replication.return_value = True
(
count_under_replicated,
total,
) = check_services_replication_tools.check_services_replication(
soa_dir=soa_dir,
cluster="westeros-prod",
service_instances=[],
instance_type_class=None,
check_service_replication=mock_check_service_replication,
replication_checker=mock_replication_checker,
all_tasks_or_pods=mock_pods,
)
mock_paasta_service_config_loader.assert_called_once_with(
service="a", soa_dir=soa_dir
)
instance_config.get_docker_image.assert_called_once_with()
mock_check_service_replication.assert_called_once_with(
instance_config=instance_config,
all_tasks_or_pods=mock_pods,
replication_checker=mock_replication_checker,
)
assert count_under_replicated == 0
assert total == 1
|
class ThrottlingMiddleware:
def __init__(self, get_response=None):
self.get_response = get_response
def __call__(self, request):
response = self.get_response(request)
if "throttling_state" in request.META:
throttling = request.META["throttling_state"]
response["X-RateLimit-Limit"] = throttling.num_requests
response["X-RateLimit-Remaining"] = throttling.num_requests - len(
throttling.history
)
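            # Reset after the oldest tracked request leaves the throttle window.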
if throttling.history:
remaining_duration = throttling.duration - (
throttling.now - throttling.history[-1]
)
else:
remaining_duration = throttling.duration
response["X-RateLimit-Reset"] = int(remaining_duration)
return response
|
import logging
import gammu # pylint: disable=import-error, no-member
from homeassistant.const import DEVICE_CLASS_SIGNAL_STRENGTH, SIGNAL_STRENGTH_DECIBELS
from homeassistant.helpers.entity import Entity
from .const import DOMAIN, SMS_GATEWAY
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the GSM Signal Sensor sensor."""
gateway = hass.data[DOMAIN][SMS_GATEWAY]
entities = []
imei = await gateway.get_imei_async()
name = f"gsm_signal_imei_{imei}"
entities.append(
GSMSignalSensor(
hass,
gateway,
name,
)
)
async_add_entities(entities, True)
class GSMSignalSensor(Entity):
"""Implementation of a GSM Signal sensor."""
def __init__(
self,
hass,
gateway,
name,
):
"""Initialize the GSM Signal sensor."""
self._hass = hass
self._gateway = gateway
self._name = name
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return SIGNAL_STRENGTH_DECIBELS
@property
def device_class(self):
"""Return the class of this sensor."""
return DEVICE_CLASS_SIGNAL_STRENGTH
@property
def available(self):
"""Return if the sensor data are available."""
return self._state is not None
@property
def state(self):
"""Return the state of the device."""
return self._state["SignalStrength"]
async def async_update(self):
"""Get the latest data from the modem."""
try:
self._state = await self._gateway.get_signal_quality_async()
except gammu.GSMError as exc: # pylint: disable=no-member
_LOGGER.error("Failed to read signal quality: %s", exc)
@property
def device_state_attributes(self):
"""Return the sensor attributes."""
return self._state
@property
def entity_registry_enabled_default(self) -> bool:
"""Return if the entity should be enabled when first added to the entity registry."""
return False
|
import pytest
from homeassistant.components.camera import SUPPORT_STREAM as CAMERA_SUPPORT_STREAM
from homeassistant.components.mobile_app.const import CONF_SECRET
from homeassistant.components.zone import DOMAIN as ZONE_DOMAIN
from homeassistant.const import CONF_WEBHOOK_ID
from homeassistant.core import callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.setup import async_setup_component
from .const import CALL_SERVICE, FIRE_EVENT, REGISTER_CLEARTEXT, RENDER_TEMPLATE, UPDATE
from tests.async_mock import patch
from tests.common import async_mock_service
def encrypt_payload(secret_key, payload):
"""Return a encrypted payload given a key and dictionary of data."""
try:
from nacl.encoding import Base64Encoder
from nacl.secret import SecretBox
except (ImportError, OSError):
pytest.skip("libnacl/libsodium is not installed")
return
import json
keylen = SecretBox.KEY_SIZE
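    # SecretBox requires a key of exactly SecretBox.KEY_SIZE (32) bytes, so truncate or zero-pad the secret.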
prepped_key = secret_key.encode("utf-8")
prepped_key = prepped_key[:keylen]
prepped_key = prepped_key.ljust(keylen, b"\0")
payload = json.dumps(payload).encode("utf-8")
return (
SecretBox(prepped_key).encrypt(payload, encoder=Base64Encoder).decode("utf-8")
)
def decrypt_payload(secret_key, encrypted_data):
"""Return a decrypted payload given a key and a string of encrypted data."""
try:
from nacl.encoding import Base64Encoder
from nacl.secret import SecretBox
except (ImportError, OSError):
pytest.skip("libnacl/libsodium is not installed")
return
import json
keylen = SecretBox.KEY_SIZE
prepped_key = secret_key.encode("utf-8")
prepped_key = prepped_key[:keylen]
prepped_key = prepped_key.ljust(keylen, b"\0")
decrypted_data = SecretBox(prepped_key).decrypt(
encrypted_data, encoder=Base64Encoder
)
decrypted_data = decrypted_data.decode("utf-8")
return json.loads(decrypted_data)
async def test_webhook_handle_render_template(create_registrations, webhook_client):
"""Test that we render templates properly."""
resp = await webhook_client.post(
"/api/webhook/{}".format(create_registrations[1]["webhook_id"]),
json={
"type": "render_template",
"data": {
"one": {"template": "Hello world"},
"two": {"template": "{{ now() | random }}"},
"three": {"template": "{{ now() 3 }}"},
},
},
)
assert resp.status == 200
json = await resp.json()
assert json == {
"one": "Hello world",
"two": {"error": "TypeError: object of type 'datetime.datetime' has no len()"},
"three": {
"error": "TemplateSyntaxError: expected token 'end of print statement', got 'integer'"
},
}
async def test_webhook_handle_call_services(hass, create_registrations, webhook_client):
"""Test that we call services properly."""
calls = async_mock_service(hass, "test", "mobile_app")
resp = await webhook_client.post(
"/api/webhook/{}".format(create_registrations[1]["webhook_id"]),
json=CALL_SERVICE,
)
assert resp.status == 200
assert len(calls) == 1
async def test_webhook_handle_fire_event(hass, create_registrations, webhook_client):
"""Test that we can fire events."""
events = []
@callback
def store_event(event):
"""Helepr to store events."""
events.append(event)
hass.bus.async_listen("test_event", store_event)
resp = await webhook_client.post(
"/api/webhook/{}".format(create_registrations[1]["webhook_id"]), json=FIRE_EVENT
)
assert resp.status == 200
json = await resp.json()
assert json == {}
assert len(events) == 1
assert events[0].data["hello"] == "yo world"
async def test_webhook_update_registration(webhook_client, authed_api_client):
"""Test that a we can update an existing registration via webhook."""
register_resp = await authed_api_client.post(
"/api/mobile_app/registrations", json=REGISTER_CLEARTEXT
)
assert register_resp.status == 201
register_json = await register_resp.json()
webhook_id = register_json[CONF_WEBHOOK_ID]
update_container = {"type": "update_registration", "data": UPDATE}
update_resp = await webhook_client.post(
f"/api/webhook/{webhook_id}", json=update_container
)
assert update_resp.status == 200
update_json = await update_resp.json()
assert update_json["app_version"] == "2.0.0"
assert CONF_WEBHOOK_ID not in update_json
assert CONF_SECRET not in update_json
async def test_webhook_handle_get_zones(hass, create_registrations, webhook_client):
"""Test that we can get zones properly."""
await async_setup_component(
hass,
ZONE_DOMAIN,
{ZONE_DOMAIN: {}},
)
resp = await webhook_client.post(
"/api/webhook/{}".format(create_registrations[1]["webhook_id"]),
json={"type": "get_zones"},
)
assert resp.status == 200
json = await resp.json()
assert len(json) == 1
zones = sorted(json, key=lambda entry: entry["entity_id"])
assert zones[0]["entity_id"] == "zone.home"
async def test_webhook_handle_get_config(hass, create_registrations, webhook_client):
"""Test that we can get config properly."""
resp = await webhook_client.post(
"/api/webhook/{}".format(create_registrations[1]["webhook_id"]),
json={"type": "get_config"},
)
assert resp.status == 200
json = await resp.json()
if "components" in json:
json["components"] = set(json["components"])
if "allowlist_external_dirs" in json:
json["allowlist_external_dirs"] = set(json["allowlist_external_dirs"])
hass_config = hass.config.as_dict()
expected_dict = {
"latitude": hass_config["latitude"],
"longitude": hass_config["longitude"],
"elevation": hass_config["elevation"],
"unit_system": hass_config["unit_system"],
"location_name": hass_config["location_name"],
"time_zone": hass_config["time_zone"],
"components": hass_config["components"],
"version": hass_config["version"],
"theme_color": "#03A9F4", # Default frontend theme color
}
assert expected_dict == json
async def test_webhook_returns_error_incorrect_json(
webhook_client, create_registrations, caplog
):
"""Test that an error is returned when JSON is invalid."""
resp = await webhook_client.post(
"/api/webhook/{}".format(create_registrations[1]["webhook_id"]), data="not json"
)
assert resp.status == 400
json = await resp.json()
assert json == {}
assert "invalid JSON" in caplog.text
async def test_webhook_handle_decryption(webhook_client, create_registrations):
"""Test that we can encrypt/decrypt properly."""
key = create_registrations[0]["secret"]
data = encrypt_payload(key, RENDER_TEMPLATE["data"])
container = {"type": "render_template", "encrypted": True, "encrypted_data": data}
resp = await webhook_client.post(
"/api/webhook/{}".format(create_registrations[0]["webhook_id"]), json=container
)
assert resp.status == 200
webhook_json = await resp.json()
assert "encrypted_data" in webhook_json
decrypted_data = decrypt_payload(key, webhook_json["encrypted_data"])
assert decrypted_data == {"one": "Hello world"}
async def test_webhook_requires_encryption(webhook_client, create_registrations):
"""Test that encrypted registrations only accept encrypted data."""
resp = await webhook_client.post(
"/api/webhook/{}".format(create_registrations[0]["webhook_id"]),
json=RENDER_TEMPLATE,
)
assert resp.status == 400
webhook_json = await resp.json()
assert "error" in webhook_json
assert webhook_json["success"] is False
assert webhook_json["error"]["code"] == "encryption_required"
async def test_webhook_update_location(hass, webhook_client, create_registrations):
"""Test that location can be updated."""
resp = await webhook_client.post(
"/api/webhook/{}".format(create_registrations[1]["webhook_id"]),
json={
"type": "update_location",
"data": {"gps": [1, 2], "gps_accuracy": 10, "altitude": -10},
},
)
assert resp.status == 200
state = hass.states.get("device_tracker.test_1_2")
assert state is not None
assert state.attributes["latitude"] == 1.0
assert state.attributes["longitude"] == 2.0
assert state.attributes["gps_accuracy"] == 10
assert state.attributes["altitude"] == -10
async def test_webhook_enable_encryption(hass, webhook_client, create_registrations):
"""Test that encryption can be added to a reg initially created without."""
webhook_id = create_registrations[1]["webhook_id"]
enable_enc_resp = await webhook_client.post(
f"/api/webhook/{webhook_id}",
json={"type": "enable_encryption"},
)
assert enable_enc_resp.status == 200
enable_enc_json = await enable_enc_resp.json()
assert len(enable_enc_json) == 1
assert CONF_SECRET in enable_enc_json
key = enable_enc_json["secret"]
enc_required_resp = await webhook_client.post(
f"/api/webhook/{webhook_id}",
json=RENDER_TEMPLATE,
)
assert enc_required_resp.status == 400
enc_required_json = await enc_required_resp.json()
assert "error" in enc_required_json
assert enc_required_json["success"] is False
assert enc_required_json["error"]["code"] == "encryption_required"
enc_data = encrypt_payload(key, RENDER_TEMPLATE["data"])
container = {
"type": "render_template",
"encrypted": True,
"encrypted_data": enc_data,
}
enc_resp = await webhook_client.post(f"/api/webhook/{webhook_id}", json=container)
assert enc_resp.status == 200
enc_json = await enc_resp.json()
assert "encrypted_data" in enc_json
decrypted_data = decrypt_payload(key, enc_json["encrypted_data"])
assert decrypted_data == {"one": "Hello world"}
async def test_webhook_camera_stream_non_existent(
hass, create_registrations, webhook_client
):
"""Test fetching camera stream URLs for a non-existent camera."""
webhook_id = create_registrations[1]["webhook_id"]
resp = await webhook_client.post(
f"/api/webhook/{webhook_id}",
json={
"type": "stream_camera",
"data": {"camera_entity_id": "camera.doesnt_exist"},
},
)
assert resp.status == 400
webhook_json = await resp.json()
assert webhook_json["success"] is False
async def test_webhook_camera_stream_non_hls(
hass, create_registrations, webhook_client
):
"""Test fetching camera stream URLs for a non-HLS/stream-supporting camera."""
hass.states.async_set("camera.non_stream_camera", "idle", {"supported_features": 0})
webhook_id = create_registrations[1]["webhook_id"]
resp = await webhook_client.post(
f"/api/webhook/{webhook_id}",
json={
"type": "stream_camera",
"data": {"camera_entity_id": "camera.non_stream_camera"},
},
)
assert resp.status == 200
webhook_json = await resp.json()
assert webhook_json["hls_path"] is None
assert (
webhook_json["mjpeg_path"]
== "/api/camera_proxy_stream/camera.non_stream_camera"
)
async def test_webhook_camera_stream_stream_available(
hass, create_registrations, webhook_client
):
"""Test fetching camera stream URLs for an HLS/stream-supporting camera."""
hass.states.async_set(
"camera.stream_camera", "idle", {"supported_features": CAMERA_SUPPORT_STREAM}
)
webhook_id = create_registrations[1]["webhook_id"]
with patch(
"homeassistant.components.camera.async_request_stream",
return_value="/api/streams/some_hls_stream",
):
resp = await webhook_client.post(
f"/api/webhook/{webhook_id}",
json={
"type": "stream_camera",
"data": {"camera_entity_id": "camera.stream_camera"},
},
)
assert resp.status == 200
webhook_json = await resp.json()
assert webhook_json["hls_path"] == "/api/streams/some_hls_stream"
assert webhook_json["mjpeg_path"] == "/api/camera_proxy_stream/camera.stream_camera"
async def test_webhook_camera_stream_stream_available_but_errors(
hass, create_registrations, webhook_client
):
"""Test fetching camera stream URLs for an HLS/stream-supporting camera but that streaming errors."""
hass.states.async_set(
"camera.stream_camera", "idle", {"supported_features": CAMERA_SUPPORT_STREAM}
)
webhook_id = create_registrations[1]["webhook_id"]
with patch(
"homeassistant.components.camera.async_request_stream",
side_effect=HomeAssistantError(),
):
resp = await webhook_client.post(
f"/api/webhook/{webhook_id}",
json={
"type": "stream_camera",
"data": {"camera_entity_id": "camera.stream_camera"},
},
)
assert resp.status == 200
webhook_json = await resp.json()
assert webhook_json["hls_path"] is None
assert webhook_json["mjpeg_path"] == "/api/camera_proxy_stream/camera.stream_camera"
async def test_webhook_handle_scan_tag(hass, create_registrations, webhook_client):
"""Test that we can scan tags."""
events = []
@callback
def store_event(event):
"""Helepr to store events."""
events.append(event)
hass.bus.async_listen("tag_scanned", store_event)
resp = await webhook_client.post(
"/api/webhook/{}".format(create_registrations[1]["webhook_id"]),
json={"type": "scan_tag", "data": {"tag_id": "mock-tag-id"}},
)
assert resp.status == 200
json = await resp.json()
assert json == {}
assert len(events) == 1
assert events[0].data["tag_id"] == "mock-tag-id"
assert events[0].data["device_id"] == "mock-device-id"
|
import json
import re
import sys
class TextChunkBuffer:
"""Hold onto text chunks until needed."""
def __init__(self):
self.buffer = []
def append(self, text):
"""Add `text` to the buffer."""
self.buffer.append(text)
def clear(self):
"""Clear the buffer."""
self.buffer = []
def flush(self):
"""Produce a ("text", text) tuple if there's anything here."""
buffered = "".join(self.buffer).strip()
if buffered:
yield ("text", buffered)
self.clear()
def parse_md(lines):
"""Parse markdown lines, producing (type, text) chunks."""
buffer = TextChunkBuffer()
for line in lines:
header_match = re.search(r"^(#+) (.+)$", line)
is_header = bool(header_match)
if is_header:
yield from buffer.flush()
hashes, text = header_match.groups()
yield (f"h{len(hashes)}", text)
else:
buffer.append(line)
yield from buffer.flush()
def sections(parsed_data):
"""Convert a stream of parsed tokens into sections with text and notes.
Yields a stream of:
('h-level', 'header text', 'text')
"""
header = None
text = []
for ttype, ttext in parsed_data:
if ttype.startswith('h'):
if header:
yield (*header, "\n".join(text))
text = []
header = (ttype, ttext)
elif ttype == "text":
text.append(ttext)
else:
raise Exception(f"Don't know ttype {ttype!r}")
yield (*header, "\n".join(text))
def refind(regex, text):
"""Find a regex in some text, and return the matched text, or None."""
m = re.search(regex, text)
if m:
return m.group()
else:
return None
def relnotes(mdlines):
r"""Yield (version, text) pairs from markdown lines.
Each tuple is a separate version mentioned in the release notes.
A version is any section with \d\.\d in the header text.
"""
for _, htext, text in sections(parse_md(mdlines)):
version = refind(r"\d+\.\d[^ ]*", htext)
if version:
prerelease = any(c in version for c in "abc")
when = refind(r"\d+-\d+-\d+", htext)
yield {
"version": version,
"text": text,
"prerelease": prerelease,
"when": when,
}
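# Illustrative example (hypothetical header line): a section headed
# "# 5.0.1 (2020-01-01)" yields
#   {"version": "5.0.1", "text": "...", "prerelease": False, "when": "2020-01-01"}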
def parse(md_filename, json_filename):
"""Main function: parse markdown and write JSON."""
with open(md_filename) as mf:
markdown = mf.read()
with open(json_filename, "w") as jf:
json.dump(list(relnotes(markdown.splitlines(True))), jf, indent=4)
if __name__ == "__main__":
parse(*sys.argv[1:]) # pylint: disable=no-value-for-parameter
|
from __future__ import absolute_import, print_function, division
import os
try:
from .logging import logger
warning, info = logger.warning, logger.info
except Exception:
warning = info = print
try:
from urllib.request import urlopen
except ImportError:
try:
from urllib2 import urlopen # Legacy Python
except ImportError:
raise RuntimeError('Could not import urlopen.')
# Definition of remote resources, optionally versioned ('{}' in url becomes tag)
phosphor_url = 'https://raw.githubusercontent.com/flexxui/phosphor-all/{}/dist/'
RESOURCES = {
#'bsdf.js': ('https://gitlab.com/almarklein/bsdf/raw/{}/javascript/bsdf.js', ''),
'phosphor-all.js': (phosphor_url + 'phosphor-all.js', '94d59b003849f'),
'phosphor-all.css': (phosphor_url + 'phosphor-all.css', '94d59b003849f'),
}
def get_resoure_path(filename):
""" Get the full path to a resource, corresponding to the given
filename. Will use cached version if available. Otherwise will
download and cache.
"""
# Get location of resource dir
dest = os.path.abspath(os.path.join(__file__, '..', '..', 'resources'))
if not os.path.isdir(dest):
raise ValueError('Resource dest dir %r is not a directory.' % dest)
# Get full filename for downloaded file
path = os.path.join(dest, filename)
url = ''
if filename in RESOURCES:
# Get url
url, tag = RESOURCES[filename]
# Modify url and path based on tag
if tag:
url = url.replace('{}', tag)
basename, ext = path.rsplit('.', 1)
path = basename + '.' + tag + '.' + ext
# Download if needed
if not os.path.isfile(path):
data = _fetch_file(url)
with open(path, 'wb') as f:
f.write(data)
else:
# Resource is supposed to just be in the dir
if not os.path.isfile(path):
raise ValueError('Unknown/unavailable resource %r' % filename)
return path
def get_resource(filename):
""" Get the bytes of the resource corresponding to the given filename.
"""
return open(get_resoure_path(filename), 'rb').read()
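# Illustrative call (downloads on first use, then serves the cached copy; the
# cached file name includes the tag, e.g. phosphor-all.94d59b003849f.js):
#   js_bytes = get_resource('phosphor-all.js')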
def _fetch_file(url):
""" Fetches a file from the internet. Retry a few times before
giving up on failure.
"""
info('Downloading %s' % url)
for tries in range(4):
try:
return urlopen(url, timeout=5.0).read()
except Exception as e:
warning('Error while fetching file: %s' % str(e))
    raise IOError('Unable to download %r. Perhaps there is no internet '
                  'connection? If there is, please report this problem.' % url)
# Running this file as a script will download all downloadable resources
if __name__ == '__main__':
for key in RESOURCES:
get_resource(key)
|
import requests
from nikola.plugin_categories import ShortcodePlugin
class Plugin(ShortcodePlugin):
"""Plugin for gist directive."""
name = "gist"
def get_raw_gist_with_filename(self, gistID, filename):
"""Get raw gist text for a filename."""
url = '/'.join(("https://gist.github.com/raw", gistID, filename))
return requests.get(url).text
def get_raw_gist(self, gistID):
"""Get raw gist text."""
url = "https://gist.github.com/raw/{0}".format(gistID)
try:
return requests.get(url).text
except requests.exceptions.RequestException:
raise self.error('Cannot get gist for url={0}'.format(url))
def handler(self, gistID, filename=None, site=None, data=None, lang=None, post=None):
"""Create HTML for gist."""
if 'https://' in gistID:
gistID = gistID.split('/')[-1].strip()
else:
gistID = gistID.strip()
embedHTML = ""
rawGist = ""
if filename is not None:
rawGist = (self.get_raw_gist_with_filename(gistID, filename))
embedHTML = ('<script src="https://gist.github.com/{0}.js'
'?file={1}"></script>').format(gistID, filename)
else:
rawGist = (self.get_raw_gist(gistID))
embedHTML = ('<script src="https://gist.github.com/{0}.js">'
'</script>').format(gistID)
output = '''{}
<noscript><pre>{}</pre></noscript>'''.format(embedHTML, rawGist)
return output, []
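# Illustrative shortcode usage in a post (gist id and filename are hypothetical):
#   {{% gist 123456 %}}
#   {{% gist 123456 example.py %}}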
|
from __future__ import print_function
# TODO (hartsocks): Introduce logging to remove the need for print function.
"""
This module is for ISO 8601 parsing
"""
__author__ = 'VMware, Inc.'
from six import iteritems
import time
from datetime import datetime, timedelta, tzinfo
import re
""" Regular expression to parse a subset of ISO 8601 format """
_dtExpr = re.compile(
# XMLSchema datetime. Mandatory to have - and :
# See: http://www.w3.org/TR/xmlschema-2/#isoformats
# Note: python datetime cannot handle the following:
# - leap second, ie. 0-60 seconds (not 0-59)
# - BC (negative years)
# year [-]0000..9999
r'(?P<year>-?\d{4})' \
# month 01..12
r'(-(?P<month>(0[1-9]|1[0-2]))' \
# day 01..31
r'(-(?P<day>(0[1-9]|[1-2]\d|3[01])))?)?' \
# time separator 'T'
r'(T' \
# hour 00..24
r'(?P<hour>([01]\d|2[0-4]))' \
# minute 00..59
r'((:(?P<minute>[0-5]\d))' \
# seconds 00..60 (leap second ok)
r'(:(?P<second>([0-5]\d|60))' \
# microsecond. max 16 digits
    # - Should not allow trailing zeros. But python isoformat() puts zeros
    # after microseconds. Oh well, allow trailing zeros, quite harmless
r'(\.(?P<microsecond>\d{1,16}))?)?)?' \
# UTC 'Z', or...
r'((?P<tzutc>Z)' \
# tz [+-]00..13:0..59|14:00
r'|((?P<tzhr>[+-](([0]\d)|(1[0-3])|(?P<tzlimit>)14))' \
r'(:(?P<tzmin>(?(tzlimit)00|([0-5]\d))))?))?' \
r')?$')
""" Default date time val. Key should match the tags in _dtExpr """
_dtExprKeyDefValMap = {'year' : None, 'month' : 1, 'day' : 1,
'hour' : 0, 'minute' : 0, 'second' : 0,
'microsecond' : 0}
class TZInfo(tzinfo):
""" Timezone info class """
timedelta0 = timedelta(hours=0)
timedelta1 = timedelta(hours=1)
def __init__(self, tzname='UTC', utcOffset=None, dst=None):
self._tzname = tzname
if not utcOffset:
utcOffset = self.timedelta0
self._utcOffset = utcOffset
if not dst:
dst = None
self._dst = dst
def utcoffset(self, dt):
return self._utcOffset + self.dst(dt)
def tzname(self, dt):
return self._tzname
def dst(self, dt):
ret = self.timedelta0
if self._dst:
if self._dst[0] <= dt.replace(tzinfo=None) < self._dst[1]:
ret = self.timedelta1
return ret
class TZManager:
""" Time zone manager """
_tzInfos = {}
@staticmethod
def GetTZInfo(tzname='UTC', utcOffset=None, dst=None):
""" Get / Add timezone info """
key = (tzname, utcOffset, dst)
tzInfo = TZManager._tzInfos.get(key)
if not tzInfo:
tzInfo = TZInfo(tzname, utcOffset, dst)
TZManager._tzInfos[key] = tzInfo
return tzInfo
def ParseISO8601(datetimeStr):
"""
Parse ISO 8601 date time from string.
Returns datetime if ok, None otherwise
Note: Allows YYYY / YYYY-MM, but truncate YYYY -> YYYY-01-01,
YYYY-MM -> YYYY-MM-01
Truncate microsecond to most significant 6 digits
"""
datetimeVal = None
match = _dtExpr.match(datetimeStr)
if match:
try:
dt = {}
for key, defaultVal in iteritems(_dtExprKeyDefValMap):
val = match.group(key)
if val:
if key == 'microsecond':
val = val[:6] + '0' * (6 - len(val))
dt[key] = int(val)
elif defaultVal:
dt[key] = defaultVal
            # Orig. XMLSchema doesn't allow an all-zeros year. But newer draft is ok
#if dt['year'] == 0:
# # Year cannot be all zeros
# raise Exception('Year cannot be all zeros')
# 24 is a special case. It is actually represented as next day 00:00
delta = None
if dt.get('hour', 0) == 24:
# Must be 24:00:00.0
if dt.get('minute', 0) == 0 and dt.get('second', 0) == 0 and \
dt.get('microsecond', 0) == 0:
dt['hour'] = 23
delta = timedelta(hours=1)
else:
return None
# Set tzinfo
# TODO: dst
tzInfo = None
val = match.group('tzutc')
if val:
tzInfo = TZManager.GetTZInfo()
else:
val = match.group('tzhr')
if val:
# tz hours offset
tzhr = int(val)
utcsign = val[0]
# tz minutes offset
tzmin = 0
val = match.group('tzmin')
if val:
tzmin = tzhr >= 0 and int(val) or -int(val)
# Better tzname (map UTC +-00:00 to UTC)
tzname = 'UTC'
if tzhr != 0 or tzmin != 0:
tzname += ' %s%02d:%02d' % (utcsign, abs(tzhr), abs(tzmin))
tzInfo = TZManager.GetTZInfo(tzname=tzname,
utcOffset=timedelta(hours=tzhr,
minutes=tzmin))
if tzInfo:
dt['tzinfo'] = tzInfo
datetimeVal = datetime(**dt)
if delta:
datetimeVal += delta
except Exception as e:
pass
return datetimeVal
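# Illustrative example (same string as in the self-test below):
#   ParseISO8601('1971-11-02T23:04:15Z')
#   -> datetime(1971, 11, 2, 23, 4, 15, tzinfo=TZManager.GetTZInfo())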
def ISO8601Format(dt):
"""
Python datetime isoformat() has the following problems:
    - leaves trailing zeros at the end of microseconds (violates XMLSchema rule)
    - prints the timezone as +00:00 instead of Z
    - omits the timezone offset for a datetime without tzinfo
"""
isoStr = dt.strftime('%Y-%m-%dT%H:%M:%S')
if dt.microsecond:
isoStr += ('.%06d' % dt.microsecond).rstrip('0')
if dt.tzinfo:
tz = dt.strftime('%z')
else:
if time.daylight and time.localtime().tm_isdst:
utcOffset_minutes = -time.altzone / 60
else:
utcOffset_minutes = -time.timezone / 60
tz = "%+.2d%.2d" % (utcOffset_minutes / 60, (abs(utcOffset_minutes) % 60))
if tz == '+0000':
return isoStr + 'Z'
elif tz:
return isoStr + tz[:3] + ':' + tz[3:]
else:
# Local offset is unknown
return isoStr + '-00:00'
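# Illustrative example (round-trips with ParseISO8601, as the self-test below checks):
#   ISO8601Format(ParseISO8601('1971-11-02T23:04:15Z')) -> '1971-11-02T23:04:15Z'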
# Testing
if __name__ == '__main__':
# Valid entries
for testStr in [
'1971', # 1971-01-01
'1971-11', # 1971-11-01
'1971-11-02',
'1971-11-02T23',
'1971-11-02T23Z',
'1971-11-02T23:04',
'1971-11-02T23:04Z',
'1971-11-02T23:04:15',
'1971-11-02T23:04:15Z',
'1971-11-02T23:04:15.1',
'1971-11-02T23:04:15.01',
'1971-11-02T23:04:15.023456',
'1971-11-02T23:04:15.103456Z',
'1971-11-02T23:04:15.123456+11',
'1971-11-02T23:04:15.123456-11',
'1971-11-02T23:04:15.123456+11:30',
'1971-11-02T23:04:15.123456-11:30',
'1971-11-02T23:04:15.123456+00:00', # Same as Z
'1971-11-02T23:04:15.123456-00:00', # Same as Z
'1971-01-02T23:04:15+14',
'1971-01-02T23:04:15+14:00',
'1971-01-02T23:04:15-14',
'1971-01-02T23:04:15-14:00',
# Valid: Truncate microsec to 6 digits
'1971-01-02T23:04:15.123456891+11',
        '1971-01-02T24', # 24 is valid. It represents 00:00 of the
                         # next day
'1971-01-02T24:00',
'1971-01-02T24:00:00',
'1971-01-02T24:00:00.0',
        # Should NOT be valid, but python isoformat() adds trailing zeros
'1971-01-02T23:04:15.123430', # Microseconds ends in zero
'1971-01-02T23:04:15.0', # Microseconds ends in zero
        # Should be valid, but python datetime doesn't support it
#'2005-12-31T23:59:60Z', # Leap second
#'-0001', # BC 1
]:
dt = ParseISO8601(testStr)
if dt == None:
print('Failed to parse ({0})'.format(testStr))
assert(False)
# Make sure we can translate back
isoformat = ISO8601Format(dt)
dt1 = ParseISO8601(isoformat)
if dt.tzinfo is None:
dt = dt.replace(tzinfo=dt1.tzinfo)
if dt1 != dt:
print('ParseISO8601 -> ISO8601Format -> ParseISO8601 failed ({0})'.format(testStr))
assert(False)
# Make sure we can parse python isoformat()
dt2 = ParseISO8601(dt.isoformat())
if dt2 == None:
print('ParseISO8601("{0}".isoformat()) failed'.format(testStr))
assert(False)
print(testStr, '->', dt, isoformat)
# Basic form
for testStr in [
'197111', # 1971-11-01
'19711102',
'19711102T23',
'19711102T23Z',
'19711102T2304',
'19711102T2304Z',
'19711102T230415',
'19711102T230415Z',
'19711102T230415.123456',
'19711102T230415.123456Z',
'19711102T230415.123456+11',
'19711102T230415.123456-11',
'19711102T230415.123456+1130',
'19711102T230415.123456-1130',
]:
# Reject for now
dt = ParseISO8601(testStr)
if dt != None:
print('ParseISO8601 ({0}) should fail, but it did not'.format(testStr))
assert(False)
#print testStr, '->', dt
#assert(dt != None)
# Invalid entries
for testStr in [
# Xml schema reject year 0
'0000', # 0 years are not allowed
'+0001', # Leading + is not allowed
'', # Empty datetime str
'09', # Years must be at least 4 digits
        '1971-01-02T', # T not followed by time
        '1971-01-02TZ', # T not followed by time
        '1971-01-02T+10', # T not followed by time
        '1971-01-02T-10', # T not followed by time
'1971-01-02T23:', # extra :
'1971-01-02T23:04:', # extra :
'1971-01-02T23:0d', # 0d
        '1971-01-02T23:04:15.', # Dot not followed by microsec
'1971-01-02+12', # time without T
'1971Z', # Z without T
'1971-01-02T23:04:15.123456Z+11', # Z follows by +
'1971-01-02T23:04:15.123456Z-11', # Z follows by -
'1971-01-02T23:04:15.123456+:30', # extra :
'1971-01-02T23:04:15.123456+30:', # extra :
'1971-01-02T23:04:15.01234567890123456789', # Too many microseconds digits
# Python isoformat leave trailing zeros in microseconds
# Relax regular expression to accept it
#'1971-01-02T23:04:15.123430', # Microseconds ends in zero
#'1971-01-02T23:04:15.0', # Microseconds ends in zero
# Timezone must be between +14 / -14
'1971-01-02T23:04:15+15',
'1971-01-02T23:04:15-15',
'1971-01-02T23:04:15+14:01',
'1971-01-02T23:04:15-14:01',
# Mix basic form with extended format
'197101-02T23:04:15.123456',
'19710102T23:04:15.123456',
'19710102T230415.123456+11:30',
'1971-01-02T230415.123456',
'1971-01-02T23:04:15.123456+1130',
# Error captured by datetime class
'1971-00-02', # Less than 1 month
'1971-13-02', # Larger than 12 months
'1971-01-00', # Less than 1 day
'1971-11-32', # Larger than 30 days for Nov
'1971-12-32', # Larger than 31 days
'1971-01-02T24:01', # Larger than 23 hr
'1971-01-02T23:61', # Larger than 60 min
        '1971-01-02T23:60:61', # Larger than 60 sec
]:
dt = ParseISO8601(testStr)
if dt != None:
print('ParseISO8601 ({0}) should fail, but it did not'.format(testStr))
assert(False)
|
import voluptuous as vol
from homeassistant.const import (
ATTR_FRIENDLY_NAME,
CONF_ENTITY_ID,
CONF_EVENT,
CONF_PLATFORM,
CONF_ZONE,
)
from homeassistant.core import HassJob, callback
from homeassistant.helpers import condition, config_validation as cv, location
from homeassistant.helpers.event import async_track_state_change_event
# mypy: allow-untyped-defs, no-check-untyped-defs
EVENT_ENTER = "enter"
EVENT_LEAVE = "leave"
DEFAULT_EVENT = EVENT_ENTER
_EVENT_DESCRIPTION = {EVENT_ENTER: "entering", EVENT_LEAVE: "leaving"}
TRIGGER_SCHEMA = vol.Schema(
{
vol.Required(CONF_PLATFORM): "zone",
vol.Required(CONF_ENTITY_ID): cv.entity_ids,
vol.Required(CONF_ZONE): cv.entity_id,
vol.Required(CONF_EVENT, default=DEFAULT_EVENT): vol.Any(
EVENT_ENTER, EVENT_LEAVE
),
}
)
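# Illustrative trigger configuration accepted by TRIGGER_SCHEMA
# (entity ids are hypothetical):
#   platform: zone
#   entity_id: device_tracker.phone
#   zone: zone.home
#   event: enter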
async def async_attach_trigger(hass, config, action, automation_info):
"""Listen for state changes based on configuration."""
entity_id = config.get(CONF_ENTITY_ID)
zone_entity_id = config.get(CONF_ZONE)
event = config.get(CONF_EVENT)
job = HassJob(action)
@callback
def zone_automation_listener(zone_event):
"""Listen for state changes and calls action."""
entity = zone_event.data.get("entity_id")
from_s = zone_event.data.get("old_state")
to_s = zone_event.data.get("new_state")
if (
from_s
and not location.has_location(from_s)
or not location.has_location(to_s)
):
return
zone_state = hass.states.get(zone_entity_id)
from_match = condition.zone(hass, zone_state, from_s) if from_s else False
to_match = condition.zone(hass, zone_state, to_s)
if (
event == EVENT_ENTER
and not from_match
and to_match
or event == EVENT_LEAVE
and from_match
and not to_match
):
description = f"{entity} {_EVENT_DESCRIPTION[event]} {zone_state.attributes[ATTR_FRIENDLY_NAME]}"
hass.async_run_hass_job(
job,
{
"trigger": {
"platform": "zone",
"entity_id": entity,
"from_state": from_s,
"to_state": to_s,
"zone": zone_state,
"event": event,
"description": description,
}
},
to_s.context,
)
return async_track_state_change_event(hass, entity_id, zone_automation_listener)
|
import pytest
import voluptuous as vol
from homeassistant import config_entries, data_entry_flow
from homeassistant.components.risco.config_flow import (
CannotConnectError,
UnauthorizedError,
)
from homeassistant.components.risco.const import DOMAIN
from tests.async_mock import PropertyMock, patch
from tests.common import MockConfigEntry
TEST_SITE_NAME = "test-site-name"
TEST_DATA = {
"username": "test-username",
"password": "test-password",
"pin": "1234",
}
TEST_RISCO_TO_HA = {
"arm": "armed_away",
"partial_arm": "armed_home",
"A": "armed_home",
"B": "armed_home",
"C": "armed_night",
"D": "armed_night",
}
TEST_HA_TO_RISCO = {
"armed_away": "arm",
"armed_home": "partial_arm",
"armed_night": "C",
}
TEST_OPTIONS = {
"scan_interval": 10,
"code_arm_required": True,
"code_disarm_required": True,
}
async def test_form(hass):
"""Test we get the form."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
with patch(
"homeassistant.components.risco.config_flow.RiscoAPI.login",
return_value=True,
), patch(
"homeassistant.components.risco.config_flow.RiscoAPI.site_name",
new_callable=PropertyMock(return_value=TEST_SITE_NAME),
), patch(
"homeassistant.components.risco.config_flow.RiscoAPI.close"
) as mock_close, patch(
"homeassistant.components.risco.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.risco.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"], TEST_DATA
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == TEST_SITE_NAME
assert result2["data"] == TEST_DATA
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
mock_close.assert_awaited_once()
async def test_form_invalid_auth(hass):
"""Test we handle invalid auth."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"homeassistant.components.risco.config_flow.RiscoAPI.login",
side_effect=UnauthorizedError,
), patch("homeassistant.components.risco.config_flow.RiscoAPI.close") as mock_close:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"], TEST_DATA
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "invalid_auth"}
mock_close.assert_awaited_once()
async def test_form_cannot_connect(hass):
"""Test we handle cannot connect error."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"homeassistant.components.risco.config_flow.RiscoAPI.login",
side_effect=CannotConnectError,
), patch("homeassistant.components.risco.config_flow.RiscoAPI.close") as mock_close:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"], TEST_DATA
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "cannot_connect"}
mock_close.assert_awaited_once()
async def test_form_exception(hass):
"""Test we handle unknown exception."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"homeassistant.components.risco.config_flow.RiscoAPI.login",
side_effect=Exception,
), patch("homeassistant.components.risco.config_flow.RiscoAPI.close") as mock_close:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"], TEST_DATA
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "unknown"}
mock_close.assert_awaited_once()
async def test_form_already_exists(hass):
"""Test that a flow with an existing username aborts."""
entry = MockConfigEntry(
domain=DOMAIN,
unique_id=TEST_DATA["username"],
data=TEST_DATA,
)
entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"], TEST_DATA
)
assert result2["type"] == "abort"
assert result2["reason"] == "already_configured"
async def test_options_flow(hass):
"""Test options flow."""
entry = MockConfigEntry(
domain=DOMAIN,
unique_id=TEST_DATA["username"],
data=TEST_DATA,
)
entry.add_to_hass(hass)
result = await hass.config_entries.options.async_init(entry.entry_id)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "init"
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input=TEST_OPTIONS,
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "risco_to_ha"
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input=TEST_RISCO_TO_HA,
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "ha_to_risco"
with patch("homeassistant.components.risco.async_setup_entry", return_value=True):
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input=TEST_HA_TO_RISCO,
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert entry.options == {
**TEST_OPTIONS,
"risco_states_to_ha": TEST_RISCO_TO_HA,
"ha_states_to_risco": TEST_HA_TO_RISCO,
}
async def test_ha_to_risco_schema(hass):
"""Test that the schema for the ha-to-risco mapping step is generated properly."""
entry = MockConfigEntry(
domain=DOMAIN,
unique_id=TEST_DATA["username"],
data=TEST_DATA,
)
entry.add_to_hass(hass)
result = await hass.config_entries.options.async_init(entry.entry_id)
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input=TEST_OPTIONS,
)
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input=TEST_RISCO_TO_HA,
)
# Test an HA state that isn't used
with pytest.raises(vol.error.MultipleInvalid):
await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={**TEST_HA_TO_RISCO, "armed_custom_bypass": "D"},
)
# Test a combo that can't be selected
with pytest.raises(vol.error.MultipleInvalid):
await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={**TEST_HA_TO_RISCO, "armed_night": "A"},
)
|
from datetime import datetime as dt
import pytest
from bson import ObjectId
from mock import patch
from pandas.util.testing import assert_frame_equal
from pymongo.errors import OperationFailure
from arctic._util import mongo_count
from arctic.exceptions import ConcurrentModificationException
from arctic.store.audit import ArcticTransaction
from ...util import read_str_as_pandas
ts1 = read_str_as_pandas(""" times | near
2012-09-08 17:06:11.040 | 1.0
2012-10-08 17:06:11.040 | 2.0
2012-10-09 17:06:11.040 | 2.5
2012-11-08 17:06:11.040 | 3.0""")
ts2 = read_str_as_pandas(""" times | near
2012-09-08 17:06:11.040 | 1.0
2012-10-08 17:06:11.040 | 4.0
2012-10-09 17:06:11.040 | 4.5
2012-10-10 17:06:11.040 | 5.0
2012-11-08 17:06:11.040 | 3.0""")
ts3 = read_str_as_pandas(""" times | near
2012-09-08 17:06:11.040 | 1.0
2012-10-08 17:06:11.040 | 4.0
2012-10-09 17:06:11.040 | 4.5
2012-10-10 17:06:11.040 | 5.0
2012-11-08 17:06:11.040 | 3.0
2012-11-09 17:06:11.040 | 44.0""")
ts1_append = read_str_as_pandas(""" times | near
2012-09-08 17:06:11.040 | 1.0
2012-10-08 17:06:11.040 | 2.0
2012-10-09 17:06:11.040 | 2.5
2012-11-08 17:06:11.040 | 3.0
2012-11-09 17:06:11.040 | 3.0""")
symbol = 'TS1'
symbol2 = 'TS2'
symbol3 = 'TS3'
def test_ArcticTransaction_can_do_first_writes(library):
with ArcticTransaction(library, 'SYMBOL_NOT_HERE', 'user', 'log') as cwb:
cwb.write('SYMBOL_NOT_HERE', ts1)
wrote_vi = library.read('SYMBOL_NOT_HERE')
assert_frame_equal(wrote_vi.data, ts1)
def test_ArcticTransaction_detects_concurrent_writes(library):
library.write('FOO', ts1)
from threading import Event, Thread
e1 = Event()
e2 = Event()
def losing_writer():
# will attempt to write version 2, should find that version 2 is there and it ends up writing version 3
with pytest.raises(ConcurrentModificationException):
with ArcticTransaction(library, 'FOO', 'user', 'log') as cwb:
cwb.write('FOO', ts1_append, metadata={'foo': 'bar'})
e1.wait()
def winning_writer():
# will attempt to write version 2 as well
with ArcticTransaction(library, 'FOO', 'user', 'log') as cwb:
cwb.write('FOO', ts2, metadata={'foo': 'bar'})
e2.wait()
t1 = Thread(target=losing_writer)
t2 = Thread(target=winning_writer)
t1.start()
t2.start()
# both read the same timeseries and are locked doing some 'work'
e2.set()
# t2 should now be able to finish
t2.join()
e1.set()
t1.join()
# we're expecting the losing_writer to undo its write once it realises that it wrote v3 instead of v2
wrote_vi = library.read('FOO')
assert_frame_equal(wrote_vi.data, ts2)
assert {'foo': 'bar'} == wrote_vi.metadata
def test_audit_writes(library):
with ArcticTransaction(library, symbol, 'u1', 'l1') as mt:
mt.write(symbol, ts1)
with ArcticTransaction(library, symbol, 'u2', 'l2') as mt:
mt.write(symbol, ts2)
audit_log = library.read_audit_log(symbol)
assert audit_log == [{u'new_v': 2, u'symbol': u'TS1', u'message': u'l2', u'user': u'u2', u'orig_v': 1},
{u'new_v': 1, u'symbol': u'TS1', u'message': u'l1', u'user': u'u1', u'orig_v': 0}]
assert_frame_equal(ts1, library.read(symbol, audit_log[0]['orig_v']).data)
assert_frame_equal(ts2, library.read(symbol, audit_log[0]['new_v']).data)
def test_metadata_changes_writes(library):
with ArcticTransaction(library, symbol, 'u1', 'l1') as mt:
mt.write(symbol, ts1, metadata={'original': 'data'})
with ArcticTransaction(library, symbol, 'u2', 'l2') as mt:
mt.write(symbol, ts1, metadata={'some': 'data', 'original': 'data'})
audit_log = library.read_audit_log(symbol)
assert audit_log == [{u'new_v': 2, u'symbol': u'TS1', u'message': u'l2', u'user': u'u2', u'orig_v': 1},
{u'new_v': 1, u'symbol': u'TS1', u'message': u'l1', u'user': u'u1', u'orig_v': 0}]
assert_frame_equal(ts1, library.read(symbol, audit_log[0]['orig_v']).data)
assert_frame_equal(ts1, library.read(symbol, audit_log[0]['new_v']).data)
assert library.read(symbol, audit_log[0]['orig_v']).metadata == {'original': 'data'}
assert library.read(symbol, audit_log[0]['new_v']).metadata == {'some': 'data', 'original': 'data'}
def test_audit_read(library):
with ArcticTransaction(library, symbol3, 'u3', 'foo') as mt:
mt.write(symbol3, ts1)
with ArcticTransaction(library, symbol, 'u1', 'l1') as mt:
mt.write(symbol, ts1)
with ArcticTransaction(library, symbol, 'u2', 'l2') as mt:
mt.write(symbol, ts2)
with ArcticTransaction(library, symbol2, 'u2', 'l2') as mt:
mt.write(symbol2, ts2)
audit_log = library.read_audit_log()
assert audit_log == [{u'new_v': 1, u'symbol': u'TS2', u'message': u'l2', u'user': u'u2', u'orig_v': 0},
{u'new_v': 2, u'symbol': u'TS1', u'message': u'l2', u'user': u'u2', u'orig_v': 1},
{u'new_v': 1, u'symbol': u'TS1', u'message': u'l1', u'user': u'u1', u'orig_v': 0},
{u'new_v': 1, u'symbol': u'TS3', u'message': u'foo', u'user': u'u3', u'orig_v': 0},
]
l2_audit_log = library.read_audit_log(message='l2')
assert l2_audit_log == [{u'new_v': 1, u'symbol': u'TS2', u'message': u'l2', u'user': u'u2', u'orig_v': 0},
{u'new_v': 2, u'symbol': u'TS1', u'message': u'l2', u'user': u'u2', u'orig_v': 1},
]
symbol_audit_log = library.read_audit_log(symbol=symbol)
assert symbol_audit_log == [{u'new_v': 2, u'symbol': u'TS1', u'message': u'l2', u'user': u'u2', u'orig_v': 1},
{u'new_v': 1, u'symbol': u'TS1', u'message': u'l1', u'user': u'u1', u'orig_v': 0}]
symbols_audit_log = library.read_audit_log(symbol=[symbol, symbol2])
assert symbols_audit_log == [{u'new_v': 1, u'symbol': u'TS2', u'message': u'l2', u'user': u'u2', u'orig_v': 0},
{u'new_v': 2, u'symbol': u'TS1', u'message': u'l2', u'user': u'u2', u'orig_v': 1},
{u'new_v': 1, u'symbol': u'TS1', u'message': u'l1', u'user': u'u1', u'orig_v': 0}]
symbol_message_audit_log = library.read_audit_log(symbol=symbol, message='l2')
assert symbol_message_audit_log == [{u'new_v': 2, u'symbol': u'TS1', u'message': u'l2', u'user': u'u2', u'orig_v': 1}, ]
def test_cleanup_orphaned_versions_integration(library):
_id = ObjectId.from_datetime(dt(2013, 1, 1))
with patch('bson.ObjectId', return_value=_id):
with ArcticTransaction(library, symbol, 'u1', 'l1') as mt:
mt.write(symbol, ts1)
assert mongo_count(library._versions, filter={'parent': {'$size': 1}}) == 1
library._cleanup_orphaned_versions(False)
assert mongo_count(library._versions, filter={'parent': {'$size': 1}}) == 1
def test_corrupted_read_writes_new(library):
with ArcticTransaction(library, symbol, 'u1', 'l1') as mt:
mt.write(symbol, ts1)
res = library.read(symbol)
assert res.version == 1
with ArcticTransaction(library, symbol, 'u1', 'l2') as mt:
mt.write(symbol, ts2)
res = library.read(symbol)
assert res.version == 2
with patch.object(library, 'read') as l:
l.side_effect = OperationFailure('some failure')
with ArcticTransaction(library, symbol, 'u1', 'l2') as mt:
mt.write(symbol, ts3, metadata={'a': 1, 'b': 2})
res = library.read(symbol)
# Corrupted data still increments on write to next version correctly with new data
assert res.version == 3
assert_frame_equal(ts3, library.read(symbol, 3).data)
assert res.metadata == {'a': 1, 'b': 2}
with patch.object(library, 'read') as l:
l.side_effect = OperationFailure('some failure')
with ArcticTransaction(library, symbol, 'u1', 'l2') as mt:
mt.write(symbol, ts3, metadata={'a': 1, 'b': 2})
res = library.read(symbol)
# Corrupted data still increments to next version correctly with ts & metadata unchanged
assert res.version == 4
assert_frame_equal(ts3, library.read(symbol, 4).data)
assert res.metadata == {'a': 1, 'b': 2}
def test_write_after_delete(library):
with ArcticTransaction(library, symbol, 'u1', 'l') as mt:
mt.write(symbol, ts1)
library.delete(symbol)
with ArcticTransaction(library, symbol, 'u1', 'l') as mt:
mt.write(symbol, ts1_append)
assert_frame_equal(library.read(symbol).data, ts1_append)
def test_ArcticTransaction_write_skips_for_exact_match(library):
ts = read_str_as_pandas("""times | PX_LAST
2014-10-31 21:30:00.000 | 204324.674
2014-11-13 21:30:00.000 | 193964.45
2014-11-14 21:30:00.000 | 193650.403""")
with ArcticTransaction(library, symbol, 'u1', 'l1') as mt:
mt.write(symbol, ts)
version = library.read(symbol).version
# try and store same TimeSeries again
with ArcticTransaction(library, symbol, 'u1', 'l2') as mt:
mt.write(symbol, ts)
assert library.read(symbol).version == version
def test_ArcticTransaction_write_doesnt_skip_for_close_ts(library):
orig_ts = read_str_as_pandas("""times | PX_LAST
2014-10-31 21:30:00.000 | 204324.674
2014-11-13 21:30:00.000 | 193964.45
2014-11-14 21:30:00.000 | 193650.403""")
with ArcticTransaction(library, symbol, 'u1', 'l1') as mt:
mt.write(symbol, orig_ts)
assert_frame_equal(library.read(symbol).data, orig_ts)
    # try and store a slightly different TimeSeries
new_ts = read_str_as_pandas("""times | PX_LAST
2014-10-31 21:30:00.000 | 204324.672
2014-11-13 21:30:00.000 | 193964.453
2014-11-14 21:30:00.000 | 193650.406""")
with ArcticTransaction(library, symbol, 'u1', 'l2') as mt:
mt.write(symbol, new_ts)
assert_frame_equal(library.read(symbol).data, new_ts)
|
from homeassistant.components.homeassistant import SERVICE_RELOAD_CORE_CONFIG
from homeassistant.config import DATA_CUSTOMIZE
from homeassistant.core import DOMAIN
import homeassistant.helpers.config_validation as cv
from . import EditKeyBasedConfigView
CONFIG_PATH = "customize.yaml"
async def async_setup(hass):
"""Set up the Customize config API."""
async def hook(action, config_key):
"""post_write_hook for Config View that reloads groups."""
await hass.services.async_call(DOMAIN, SERVICE_RELOAD_CORE_CONFIG)
hass.http.register_view(
CustomizeConfigView(
"customize", "config", CONFIG_PATH, cv.entity_id, dict, post_write_hook=hook
)
)
return True
class CustomizeConfigView(EditKeyBasedConfigView):
"""Configure a list of entries."""
def _get_value(self, hass, data, config_key):
"""Get value."""
customize = hass.data.get(DATA_CUSTOMIZE, {}).get(config_key) or {}
return {"global": customize, "local": data.get(config_key, {})}
def _write_value(self, hass, data, config_key, new_value):
"""Set value."""
data[config_key] = new_value
state = hass.states.get(config_key)
state_attributes = dict(state.attributes)
state_attributes.update(new_value)
hass.states.async_set(config_key, state.state, state_attributes)
|
import logging
import numbers
import os
import sys
from logging.handlers import WatchedFileHandler
from .utils.encoding import safe_repr, safe_str
from .utils.functional import maybe_evaluate
from .utils.objects import cached_property
__all__ = ('LogMixin', 'LOG_LEVELS', 'get_loglevel', 'setup_logging')
try:
LOG_LEVELS = dict(logging._nameToLevel)
LOG_LEVELS.update(logging._levelToName)
except AttributeError:
LOG_LEVELS = dict(logging._levelNames)
LOG_LEVELS.setdefault('FATAL', logging.FATAL)
LOG_LEVELS.setdefault(logging.FATAL, 'FATAL')
DISABLE_TRACEBACKS = os.environ.get('DISABLE_TRACEBACKS')
def get_logger(logger):
"""Get logger by name."""
if isinstance(logger, str):
logger = logging.getLogger(logger)
if not logger.handlers:
logger.addHandler(logging.NullHandler())
return logger
def get_loglevel(level):
"""Get loglevel by name."""
if isinstance(level, str):
return LOG_LEVELS[level]
return level
def naive_format_parts(fmt):
    """Naively yield the conversion character after each '%' in fmt (or None)."""
    parts = fmt.split('%')
    for i, e in enumerate(parts[1:]):
        yield None if not e or not parts[i - 1] else e[0]
def safeify_format(fmt, args, filters=None):
    """Yield args with safe_str/safe_repr applied per the %s/%r specs in fmt."""
    filters = {'s': safe_str, 'r': safe_repr} if not filters else filters
for index, type in enumerate(naive_format_parts(fmt)):
filt = filters.get(type)
yield filt(args[index]) if filt else args[index]
class LogMixin:
"""Mixin that adds severity methods to any class."""
def debug(self, *args, **kwargs):
return self.log(logging.DEBUG, *args, **kwargs)
def info(self, *args, **kwargs):
return self.log(logging.INFO, *args, **kwargs)
def warn(self, *args, **kwargs):
return self.log(logging.WARN, *args, **kwargs)
def error(self, *args, **kwargs):
kwargs.setdefault('exc_info', True)
return self.log(logging.ERROR, *args, **kwargs)
def critical(self, *args, **kwargs):
kwargs.setdefault('exc_info', True)
return self.log(logging.CRITICAL, *args, **kwargs)
def annotate(self, text):
return f'{self.logger_name} - {text}'
def log(self, severity, *args, **kwargs):
if DISABLE_TRACEBACKS:
kwargs.pop('exc_info', None)
if self.logger.isEnabledFor(severity):
log = self.logger.log
if len(args) > 1 and isinstance(args[0], str):
expand = [maybe_evaluate(arg) for arg in args[1:]]
return log(severity,
self.annotate(args[0].replace('%r', '%s')),
*list(safeify_format(args[0], expand)), **kwargs)
else:
return self.logger.log(
severity, self.annotate(' '.join(map(safe_str, args))),
**kwargs)
def get_logger(self):
return get_logger(self.logger_name)
def is_enabled_for(self, level):
return self.logger.isEnabledFor(self.get_loglevel(level))
def get_loglevel(self, level):
if not isinstance(level, numbers.Integral):
return LOG_LEVELS[level]
return level
@cached_property
def logger(self):
return self.get_logger()
@property
def logger_name(self):
return self.__class__.__name__
class Log(LogMixin):
def __init__(self, name, logger=None):
self._logger_name = name
self._logger = logger
def get_logger(self):
if self._logger:
return self._logger
return LogMixin.get_logger(self)
@property
def logger_name(self):
return self._logger_name
def setup_logging(loglevel=None, logfile=None):
"""Setup logging."""
logger = logging.getLogger()
loglevel = get_loglevel(loglevel or 'ERROR')
logfile = logfile if logfile else sys.__stderr__
if not logger.handlers:
if hasattr(logfile, 'write'):
handler = logging.StreamHandler(logfile)
else:
handler = WatchedFileHandler(logfile)
logger.addHandler(handler)
logger.setLevel(loglevel)
return logger
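# Illustrative use of LogMixin (class name is hypothetical):
#   class Consumer(LogMixin):
#       pass
#   Consumer().info('connected to %s', 'amqp://localhost')
# logs "Consumer - connected to amqp://localhost" at INFO level once logging
# is configured (e.g. via setup_logging).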
|
import logging
from homeassistant.components.cover import (
ATTR_POSITION,
DOMAIN,
SUPPORT_CLOSE,
SUPPORT_OPEN,
CoverEntity,
)
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from . import (
CONF_INVERT_OPENCLOSE_BUTTONS,
CONF_INVERT_PERCENT,
ZWaveDeviceEntity,
workaround,
)
from .const import (
COMMAND_CLASS_BARRIER_OPERATOR,
COMMAND_CLASS_SWITCH_BINARY,
COMMAND_CLASS_SWITCH_MULTILEVEL,
DATA_NETWORK,
)
_LOGGER = logging.getLogger(__name__)
SUPPORT_GARAGE = SUPPORT_OPEN | SUPPORT_CLOSE
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Z-Wave Cover from Config Entry."""
@callback
def async_add_cover(cover):
"""Add Z-Wave Cover."""
async_add_entities([cover])
async_dispatcher_connect(hass, "zwave_new_cover", async_add_cover)
def get_device(hass, values, node_config, **kwargs):
"""Create Z-Wave entity device."""
invert_buttons = node_config.get(CONF_INVERT_OPENCLOSE_BUTTONS)
invert_percent = node_config.get(CONF_INVERT_PERCENT)
if (
values.primary.command_class == COMMAND_CLASS_SWITCH_MULTILEVEL
and values.primary.index == 0
):
return ZwaveRollershutter(hass, values, invert_buttons, invert_percent)
if values.primary.command_class == COMMAND_CLASS_SWITCH_BINARY:
return ZwaveGarageDoorSwitch(values)
if values.primary.command_class == COMMAND_CLASS_BARRIER_OPERATOR:
return ZwaveGarageDoorBarrier(values)
return None
class ZwaveRollershutter(ZWaveDeviceEntity, CoverEntity):
"""Representation of an Z-Wave cover."""
def __init__(self, hass, values, invert_buttons, invert_percent):
"""Initialize the Z-Wave rollershutter."""
ZWaveDeviceEntity.__init__(self, values, DOMAIN)
self._network = hass.data[DATA_NETWORK]
self._open_id = None
self._close_id = None
self._current_position = None
self._invert_buttons = invert_buttons
self._invert_percent = invert_percent
self._workaround = workaround.get_device_mapping(values.primary)
if self._workaround:
_LOGGER.debug("Using workaround %s", self._workaround)
self.update_properties()
def update_properties(self):
"""Handle data changes for node values."""
# Position value
self._current_position = self.values.primary.data
if (
self.values.open
and self.values.close
and self._open_id is None
and self._close_id is None
):
if self._invert_buttons:
self._open_id = self.values.close.value_id
self._close_id = self.values.open.value_id
else:
self._open_id = self.values.open.value_id
self._close_id = self.values.close.value_id
@property
def is_closed(self):
"""Return if the cover is closed."""
if self.current_cover_position is None:
return None
if self.current_cover_position > 0:
return False
return True
@property
def current_cover_position(self):
"""Return the current position of Zwave roller shutter."""
if self._workaround == workaround.WORKAROUND_NO_POSITION:
return None
        if self._current_position is not None:
            # Report near-closed (<=5) and near-open (>=95) readings as the
            # endpoints, swapping them when percentages are inverted.
            if self._current_position <= 5:
                return 100 if self._invert_percent else 0
            if self._current_position >= 95:
                return 0 if self._invert_percent else 100
return (
100 - self._current_position
if self._invert_percent
else self._current_position
)
def open_cover(self, **kwargs):
"""Move the roller shutter up."""
self._network.manager.pressButton(self._open_id)
def close_cover(self, **kwargs):
"""Move the roller shutter down."""
self._network.manager.pressButton(self._close_id)
def set_cover_position(self, **kwargs):
"""Move the roller shutter to a specific position."""
self.node.set_dimmer(
self.values.primary.value_id,
(100 - kwargs.get(ATTR_POSITION))
if self._invert_percent
else kwargs.get(ATTR_POSITION),
)
def stop_cover(self, **kwargs):
"""Stop the roller shutter."""
self._network.manager.releaseButton(self._open_id)
class ZwaveGarageDoorBase(ZWaveDeviceEntity, CoverEntity):
"""Base class for a Zwave garage door device."""
def __init__(self, values):
"""Initialize the zwave garage door."""
ZWaveDeviceEntity.__init__(self, values, DOMAIN)
self._state = None
self.update_properties()
def update_properties(self):
"""Handle data changes for node values."""
self._state = self.values.primary.data
_LOGGER.debug("self._state=%s", self._state)
@property
def device_class(self):
"""Return the class of this device, from component DEVICE_CLASSES."""
return "garage"
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_GARAGE
class ZwaveGarageDoorSwitch(ZwaveGarageDoorBase):
"""Representation of a switch based Zwave garage door device."""
@property
def is_closed(self):
"""Return the current position of Zwave garage door."""
return not self._state
def close_cover(self, **kwargs):
"""Close the garage door."""
self.values.primary.data = False
def open_cover(self, **kwargs):
"""Open the garage door."""
self.values.primary.data = True
class ZwaveGarageDoorBarrier(ZwaveGarageDoorBase):
"""Representation of a barrier operator Zwave garage door device."""
@property
def is_opening(self):
"""Return true if cover is in an opening state."""
return self._state == "Opening"
@property
def is_closing(self):
"""Return true if cover is in a closing state."""
return self._state == "Closing"
@property
def is_closed(self):
"""Return the current position of Zwave garage door."""
return self._state == "Closed"
def close_cover(self, **kwargs):
"""Close the garage door."""
self.values.primary.data = "Closed"
def open_cover(self, **kwargs):
"""Open the garage door."""
self.values.primary.data = "Opened"
|
from homeassistant.helpers.entity import Entity
from . import BleBoxEntity, create_blebox_entities
from .const import BLEBOX_TO_HASS_DEVICE_CLASSES, BLEBOX_TO_UNIT_MAP
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up a BleBox entry."""
create_blebox_entities(
hass, config_entry, async_add_entities, BleBoxSensorEntity, "sensors"
)
class BleBoxSensorEntity(BleBoxEntity, Entity):
"""Representation of a BleBox sensor feature."""
@property
def state(self):
"""Return the state."""
return self._feature.current
@property
def unit_of_measurement(self):
"""Return the unit."""
return BLEBOX_TO_UNIT_MAP[self._feature.unit]
@property
def device_class(self):
"""Return the device class."""
return BLEBOX_TO_HASS_DEVICE_CLASSES[self._feature.device_class]
|
from datetime import timedelta
import logging
import neurio
import requests.exceptions
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_API_KEY, ENERGY_KILO_WATT_HOUR, POWER_WATT
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
import homeassistant.util.dt as dt_util
_LOGGER = logging.getLogger(__name__)
CONF_API_SECRET = "api_secret"
CONF_SENSOR_ID = "sensor_id"
ACTIVE_NAME = "Energy Usage"
DAILY_NAME = "Daily Energy Usage"
ACTIVE_TYPE = "active"
DAILY_TYPE = "daily"
ICON = "mdi:flash"
MIN_TIME_BETWEEN_DAILY_UPDATES = timedelta(seconds=150)
MIN_TIME_BETWEEN_ACTIVE_UPDATES = timedelta(seconds=10)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_API_SECRET): cv.string,
vol.Optional(CONF_SENSOR_ID): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Neurio sensor."""
api_key = config.get(CONF_API_KEY)
api_secret = config.get(CONF_API_SECRET)
sensor_id = config.get(CONF_SENSOR_ID)
data = NeurioData(api_key, api_secret, sensor_id)
@Throttle(MIN_TIME_BETWEEN_DAILY_UPDATES)
def update_daily():
"""Update the daily power usage."""
data.get_daily_usage()
@Throttle(MIN_TIME_BETWEEN_ACTIVE_UPDATES)
def update_active():
"""Update the active power usage."""
data.get_active_power()
update_daily()
update_active()
# Active power sensor
add_entities([NeurioEnergy(data, ACTIVE_NAME, ACTIVE_TYPE, update_active)])
# Daily power sensor
add_entities([NeurioEnergy(data, DAILY_NAME, DAILY_TYPE, update_daily)])
class NeurioData:
"""Stores data retrieved from Neurio sensor."""
def __init__(self, api_key, api_secret, sensor_id):
"""Initialize the data."""
self.api_key = api_key
self.api_secret = api_secret
self.sensor_id = sensor_id
self._daily_usage = None
self._active_power = None
self._state = None
neurio_tp = neurio.TokenProvider(key=api_key, secret=api_secret)
self.neurio_client = neurio.Client(token_provider=neurio_tp)
if not self.sensor_id:
user_info = self.neurio_client.get_user_information()
_LOGGER.warning(
"Sensor ID auto-detected: %s",
user_info["locations"][0]["sensors"][0]["sensorId"],
)
self.sensor_id = user_info["locations"][0]["sensors"][0]["sensorId"]
@property
def daily_usage(self):
"""Return latest daily usage value."""
return self._daily_usage
@property
def active_power(self):
"""Return latest active power value."""
return self._active_power
def get_active_power(self):
"""Return current power value."""
try:
sample = self.neurio_client.get_samples_live_last(self.sensor_id)
self._active_power = sample["consumptionPower"]
except (requests.exceptions.RequestException, ValueError, KeyError):
_LOGGER.warning("Could not update current power usage")
return None
def get_daily_usage(self):
"""Return current daily power usage."""
kwh = 0
start_time = dt_util.start_of_local_day().astimezone(dt_util.UTC).isoformat()
end_time = dt_util.utcnow().isoformat()
_LOGGER.debug("Start: %s, End: %s", start_time, end_time)
try:
history = self.neurio_client.get_samples_stats(
self.sensor_id, start_time, "days", end_time
)
except (requests.exceptions.RequestException, ValueError, KeyError):
_LOGGER.warning("Could not update daily power usage")
return None
        for result in history:
            # Convert to kWh: 1 kWh = 3,600,000 watt-seconds.
            kwh += result["consumptionEnergy"] / 3600000
self._daily_usage = round(kwh, 2)
class NeurioEnergy(Entity):
"""Implementation of a Neurio energy sensor."""
def __init__(self, data, name, sensor_type, update_call):
"""Initialize the sensor."""
self._name = name
self._data = data
self._sensor_type = sensor_type
self.update_sensor = update_call
self._state = None
if sensor_type == ACTIVE_TYPE:
self._unit_of_measurement = POWER_WATT
elif sensor_type == DAILY_TYPE:
self._unit_of_measurement = ENERGY_KILO_WATT_HOUR
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return ICON
def update(self):
"""Get the latest data, update state."""
self.update_sensor()
if self._sensor_type == ACTIVE_TYPE:
self._state = self._data.active_power
elif self._sensor_type == DAILY_TYPE:
self._state = self._data.daily_usage
|
from .const import DOMAIN
async def async_setup(hass, config):
"""Set up devices."""
hass.data[DOMAIN] = {}
return True
async def async_setup_entry(hass, entry):
"""Set up flood monitoring sensors for this config entry."""
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, "sensor")
)
return True
async def async_unload_entry(hass, config_entry):
"""Unload flood monitoring sensors."""
return await hass.config_entries.async_forward_entry_unload(config_entry, "sensor")
|
import datetime
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
VALID_CONFIG_MINIMAL = {
"sensor": {"platform": "rmvtransport", "next_departure": [{"station": "3000010"}]}
}
VALID_CONFIG_NAME = {
"sensor": {
"platform": "rmvtransport",
"next_departure": [{"station": "3000010", "name": "My Station"}],
}
}
VALID_CONFIG_MISC = {
"sensor": {
"platform": "rmvtransport",
"next_departure": [
{
"station": "3000010",
"lines": [21, "S8"],
"max_journeys": 2,
"time_offset": 10,
}
],
}
}
VALID_CONFIG_DEST = {
"sensor": {
"platform": "rmvtransport",
"next_departure": [
{
"station": "3000010",
"destinations": [
"Frankfurt (Main) Flughafen Regionalbahnhof",
"Frankfurt (Main) Stadion",
],
}
],
}
}
def get_departures_mock():
"""Mock rmvtransport departures loading."""
return {
"station": "Frankfurt (Main) Hauptbahnhof",
"stationId": "3000010",
"filter": "11111111111",
"journeys": [
{
"product": "Tram",
"number": 12,
"trainId": "1123456",
"direction": "Frankfurt (Main) Hugo-Junkers-Straße/Schleife",
"departure_time": datetime.datetime(2018, 8, 6, 14, 21),
"minutes": 7,
"delay": 3,
"stops": [
"Frankfurt (Main) Willy-Brandt-Platz",
"Frankfurt (Main) Römer/Paulskirche",
"Frankfurt (Main) Börneplatz",
"Frankfurt (Main) Konstablerwache",
"Frankfurt (Main) Bornheim Mitte",
"Frankfurt (Main) Saalburg-/Wittelsbacherallee",
"Frankfurt (Main) Eissporthalle/Festplatz",
"Frankfurt (Main) Hugo-Junkers-Straße/Schleife",
],
"info": None,
"info_long": None,
"icon": "https://products/32_pic.png",
},
{
"product": "Bus",
"number": 21,
"trainId": "1234567",
"direction": "Frankfurt (Main) Hugo-Junkers-Straße/Schleife",
"departure_time": datetime.datetime(2018, 8, 6, 14, 22),
"minutes": 8,
"delay": 1,
"stops": [
"Frankfurt (Main) Weser-/Münchener Straße",
"Frankfurt (Main) Hugo-Junkers-Straße/Schleife",
],
"info": None,
"info_long": None,
"icon": "https://products/32_pic.png",
},
{
"product": "Bus",
"number": 12,
"trainId": "1234568",
"direction": "Frankfurt (Main) Hugo-Junkers-Straße/Schleife",
"departure_time": datetime.datetime(2018, 8, 6, 14, 25),
"minutes": 11,
"delay": 1,
"stops": ["Frankfurt (Main) Stadion"],
"info": None,
"info_long": None,
"icon": "https://products/32_pic.png",
},
{
"product": "Bus",
"number": 21,
"trainId": "1234569",
"direction": "Frankfurt (Main) Hugo-Junkers-Straße/Schleife",
"departure_time": datetime.datetime(2018, 8, 6, 14, 25),
"minutes": 11,
"delay": 1,
"stops": [],
"info": None,
"info_long": None,
"icon": "https://products/32_pic.png",
},
{
"product": "Bus",
"number": 12,
"trainId": "1234570",
"direction": "Frankfurt (Main) Hugo-Junkers-Straße/Schleife",
"departure_time": datetime.datetime(2018, 8, 6, 14, 25),
"minutes": 11,
"delay": 1,
"stops": [],
"info": None,
"info_long": None,
"icon": "https://products/32_pic.png",
},
{
"product": "Bus",
"number": 21,
"trainId": "1234571",
"direction": "Frankfurt (Main) Hugo-Junkers-Straße/Schleife",
"departure_time": datetime.datetime(2018, 8, 6, 14, 25),
"minutes": 11,
"delay": 1,
"stops": [],
"info": None,
"info_long": None,
"icon": "https://products/32_pic.png",
},
],
}
def get_no_departures_mock():
"""Mock no departures in results."""
return {
"station": "Frankfurt (Main) Hauptbahnhof",
"stationId": "3000010",
"filter": "11111111111",
"journeys": [],
}
async def test_rmvtransport_min_config(hass):
"""Test minimal rmvtransport configuration."""
with patch(
"RMVtransport.RMVtransport.get_departures",
return_value=get_departures_mock(),
):
assert await async_setup_component(hass, "sensor", VALID_CONFIG_MINIMAL) is True
await hass.async_block_till_done()
state = hass.states.get("sensor.frankfurt_main_hauptbahnhof")
assert state.state == "7"
assert state.attributes["departure_time"] == datetime.datetime(2018, 8, 6, 14, 21)
assert (
state.attributes["direction"] == "Frankfurt (Main) Hugo-Junkers-Straße/Schleife"
)
assert state.attributes["product"] == "Tram"
assert state.attributes["line"] == 12
assert state.attributes["icon"] == "mdi:tram"
assert state.attributes["friendly_name"] == "Frankfurt (Main) Hauptbahnhof"
async def test_rmvtransport_name_config(hass):
"""Test custom name configuration."""
with patch(
"RMVtransport.RMVtransport.get_departures",
return_value=get_departures_mock(),
):
assert await async_setup_component(hass, "sensor", VALID_CONFIG_NAME)
await hass.async_block_till_done()
state = hass.states.get("sensor.my_station")
assert state.attributes["friendly_name"] == "My Station"
async def test_rmvtransport_misc_config(hass):
"""Test misc configuration."""
with patch(
"RMVtransport.RMVtransport.get_departures",
return_value=get_departures_mock(),
):
assert await async_setup_component(hass, "sensor", VALID_CONFIG_MISC)
await hass.async_block_till_done()
state = hass.states.get("sensor.frankfurt_main_hauptbahnhof")
assert state.attributes["friendly_name"] == "Frankfurt (Main) Hauptbahnhof"
assert state.attributes["line"] == 21
async def test_rmvtransport_dest_config(hass):
"""Test destination configuration."""
with patch(
"RMVtransport.RMVtransport.get_departures",
return_value=get_departures_mock(),
):
assert await async_setup_component(hass, "sensor", VALID_CONFIG_DEST)
await hass.async_block_till_done()
state = hass.states.get("sensor.frankfurt_main_hauptbahnhof")
assert state.state == "11"
assert (
state.attributes["direction"] == "Frankfurt (Main) Hugo-Junkers-Straße/Schleife"
)
assert state.attributes["line"] == 12
assert state.attributes["minutes"] == 11
assert state.attributes["departure_time"] == datetime.datetime(2018, 8, 6, 14, 25)
async def test_rmvtransport_no_departures(hass):
"""Test for no departures."""
with patch(
"RMVtransport.RMVtransport.get_departures",
return_value=get_no_departures_mock(),
):
assert await async_setup_component(hass, "sensor", VALID_CONFIG_MINIMAL)
await hass.async_block_till_done()
state = hass.states.get("sensor.frankfurt_main_hauptbahnhof")
assert state.state == "unavailable"
|
import unittest
import argparse
from credstash import key_value_pair
class TestKeyValuePairExtraction(unittest.TestCase):
def test_key_value_pair_has_two_equals_test(self):
self.assertRaises(argparse.ArgumentTypeError, key_value_pair, "==")
def test_key_value_pair_has_zero_equals(self):
self.assertRaises(argparse.ArgumentTypeError, key_value_pair, "")
    def test_key_value_pair_has_multiple_equals(self):
self.assertRaises(argparse.ArgumentTypeError, key_value_pair, "key1=key2=key3")
def test_key_value_pair_has_both_key_and_value(self):
self.assertRaises(argparse.ArgumentTypeError, key_value_pair, "key=")
self.assertRaises(argparse.ArgumentTypeError, key_value_pair, "=value")
def test_key_value_pair_has_one_equals_with_values(self):
self.assertEqual(key_value_pair("key1=value1"), ["key1", "value1"])
|
import asyncio
from pathlib import Path
from typing import Any, Dict
import aiohttp
from hass_nabucasa.client import CloudClient as Interface
from homeassistant.components.alexa import (
errors as alexa_errors,
smart_home as alexa_sh,
)
from homeassistant.components.google_assistant import const as gc, smart_home as ga
from homeassistant.const import HTTP_OK
from homeassistant.core import Context, callback
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.util.aiohttp import MockRequest
from . import alexa_config, google_config, utils
from .const import DISPATCHER_REMOTE_UPDATE, DOMAIN
from .prefs import CloudPreferences
class CloudClient(Interface):
"""Interface class for Home Assistant Cloud."""
def __init__(
self,
hass: HomeAssistantType,
prefs: CloudPreferences,
websession: aiohttp.ClientSession,
alexa_user_config: Dict[str, Any],
google_user_config: Dict[str, Any],
):
"""Initialize client interface to Cloud."""
self._hass = hass
self._prefs = prefs
self._websession = websession
self.google_user_config = google_user_config
self.alexa_user_config = alexa_user_config
self._alexa_config = None
self._google_config = None
@property
def base_path(self) -> Path:
"""Return path to base dir."""
return Path(self._hass.config.config_dir)
@property
def prefs(self) -> CloudPreferences:
"""Return Cloud preferences."""
return self._prefs
@property
def loop(self) -> asyncio.BaseEventLoop:
"""Return client loop."""
return self._hass.loop
@property
def websession(self) -> aiohttp.ClientSession:
"""Return client session for aiohttp."""
return self._websession
@property
def aiohttp_runner(self) -> aiohttp.web.AppRunner:
"""Return client webinterface aiohttp application."""
return self._hass.http.runner
@property
def cloudhooks(self) -> Dict[str, Dict[str, str]]:
"""Return list of cloudhooks."""
return self._prefs.cloudhooks
@property
def remote_autostart(self) -> bool:
"""Return true if we want start a remote connection."""
return self._prefs.remote_enabled
@property
def alexa_config(self) -> alexa_config.AlexaConfig:
"""Return Alexa config."""
if self._alexa_config is None:
assert self.cloud is not None
self._alexa_config = alexa_config.AlexaConfig(
self._hass, self.alexa_user_config, self._prefs, self.cloud
)
return self._alexa_config
async def get_google_config(self) -> google_config.CloudGoogleConfig:
"""Return Google config."""
if not self._google_config:
assert self.cloud is not None
cloud_user = await self._prefs.get_cloud_user()
self._google_config = google_config.CloudGoogleConfig(
self._hass, self.google_user_config, cloud_user, self._prefs, self.cloud
)
await self._google_config.async_initialize()
return self._google_config
async def logged_in(self) -> None:
"""When user logs in."""
await self.prefs.async_set_username(self.cloud.username)
if self.alexa_config.enabled and self.alexa_config.should_report_state:
try:
await self.alexa_config.async_enable_proactive_mode()
except alexa_errors.NoTokenAvailable:
pass
if self._prefs.google_enabled:
gconf = await self.get_google_config()
gconf.async_enable_local_sdk()
if gconf.should_report_state:
gconf.async_enable_report_state()
async def cleanups(self) -> None:
"""Cleanup some stuff after logout."""
await self.prefs.async_set_username(None)
self._google_config = None
@callback
def user_message(self, identifier: str, title: str, message: str) -> None:
"""Create a message for user to UI."""
self._hass.components.persistent_notification.async_create(
message, title, identifier
)
@callback
def dispatcher_message(self, identifier: str, data: Any = None) -> None:
"""Match cloud notification to dispatcher."""
if identifier.startswith("remote_"):
async_dispatcher_send(self._hass, DISPATCHER_REMOTE_UPDATE, data)
async def async_alexa_message(self, payload: Dict[Any, Any]) -> Dict[Any, Any]:
"""Process cloud alexa message to client."""
cloud_user = await self._prefs.get_cloud_user()
return await alexa_sh.async_handle_message(
self._hass,
self.alexa_config,
payload,
context=Context(user_id=cloud_user),
enabled=self._prefs.alexa_enabled,
)
async def async_google_message(self, payload: Dict[Any, Any]) -> Dict[Any, Any]:
"""Process cloud google message to client."""
if not self._prefs.google_enabled:
return ga.turned_off_response(payload)
gconf = await self.get_google_config()
return await ga.async_handle_message(
self._hass, gconf, gconf.cloud_user, payload, gc.SOURCE_CLOUD
)
async def async_webhook_message(self, payload: Dict[Any, Any]) -> Dict[Any, Any]:
"""Process cloud webhook message to client."""
cloudhook_id = payload["cloudhook_id"]
found = None
for cloudhook in self._prefs.cloudhooks.values():
if cloudhook["cloudhook_id"] == cloudhook_id:
found = cloudhook
break
if found is None:
return {"status": HTTP_OK}
request = MockRequest(
content=payload["body"].encode("utf-8"),
headers=payload["headers"],
method=payload["method"],
query_string=payload["query"],
mock_source=DOMAIN,
)
response = await self._hass.components.webhook.async_handle_webhook(
found["webhook_id"], request
)
response_dict = utils.aiohttp_serialize_response(response)
body = response_dict.get("body")
return {
"body": body,
"status": response_dict["status"],
"headers": {"Content-Type": response.content_type},
}
async def async_cloudhooks_update(self, data: Dict[str, Dict[str, str]]) -> None:
"""Update local list of cloudhooks."""
await self._prefs.async_update(cloudhooks=data)
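# Illustrative payload shape (not part of the original module): async_webhook_message
# above receives dicts like {"cloudhook_id": ..., "body": "...", "headers": {...},
# "method": "POST", "query": ""} and replays them against the locally registered
# webhook whose cloudhook entry matches the given "cloudhook_id".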
|
from __future__ import division
from builtins import zip
from builtins import range
import numpy as np
from .format_data import format_data as formatter
def procrustes(source, target, scaling=True, reflection=True, reduction=False,
oblique=False, oblique_rcond=-1, format_data=True):
"""
Function to project from one space to another using Procrustean
transformation (shift + scaling + rotation + reflection).
The implementation of this function was based on the ProcrusteanMapper in
pyMVPA: https://github.com/PyMVPA/PyMVPA
See also: http://en.wikipedia.org/wiki/Procrustes_transformation
Parameters
----------
source : Numpy array
Array to be aligned to target's coordinate system.
    target : Numpy array
        Target space that `source` is aligned to.
    scaling : bool
        Estimate a global scaling factor for the transformation
        (no longer a rigid-body transformation).
    reflection : bool
        Allow the data to be reflected (so the result might not be a pure
        rotation). Effective only for non-oblique transformations.
    reduction : bool
        If True, mapping into a lower-dimensional space is allowed. The
        forward transformation might then be suboptimal and the reverse
        transformation might not recover all of the original variance.
    oblique : bool
        Whether to allow a non-orthogonal (oblique) transformation -- this
        might heavily overfit the data if there are fewer samples than
        dimensions. Use `oblique_rcond`.
    oblique_rcond : float
        Cutoff for 'small' singular values to regularize the
        inverse. See :func:`numpy.linalg.lstsq` for more
        information.
    Returns
    -------
    aligned_source : Numpy array
        The source array aligned to the target space.
    """
def fit(source, target):
datas = (source, target)
sn, sm = source.shape
tn, tm = target.shape
# Check the sizes
if sn != tn:
raise ValueError("Data for both spaces should have the same " \
"number of samples. Got %d in template and %d in target space" \
% (sn, tn))
# Sums of squares
ssqs = [np.sum(d**2, axis=0) for d in datas]
# XXX check for being invariant?
# needs to be tuned up properly and not raise but handle
for i in range(2):
if np.all(ssqs[i] <= np.abs((np.finfo(datas[i].dtype).eps
* sn )**2)):
raise ValueError("For now do not handle invariant in time datasets")
norms = [ np.sqrt(np.sum(ssq)) for ssq in ssqs ]
normed = [ data/norm for (data, norm) in zip(datas, norms) ]
# add new blank dimensions to template space if needed
if sm < tm:
normed[0] = np.hstack( (normed[0], np.zeros((sn, tm-sm))) )
if sm > tm:
if reduction:
normed[1] = np.hstack( (normed[1], np.zeros((sn, sm-tm))) )
else:
raise ValueError("reduction=False, so mapping from " \
"higher dimensionality " \
"template space is not supported. template space had %d " \
"while target %d dimensions (features)" % (sm, tm))
source, target = normed
if oblique:
# Just do silly linear system of equations ;) or naive
# inverse problem
if sn == sm and tm == 1:
T = np.linalg.solve(source, target)
else:
T = np.linalg.lstsq(source, target, rcond=oblique_rcond)[0]
ss = 1.0
else:
# Orthogonal transformation
# figure out optimal rotation
U, s, Vh = np.linalg.svd(np.dot(target.T, source),
full_matrices=False)
T = np.dot(Vh.T, U.T)
if not reflection:
# then we need to assure that it is only rotation
# "recipe" from
# http://en.wikipedia.org/wiki/Orthogonal_Procrustes_problem
# for more and info and original references, see
# http://dx.doi.org/10.1007%2FBF02289451
nsv = len(s)
s[:-1] = 1
s[-1] = np.linalg.det(T)
T = np.dot(U[:, :nsv] * s, Vh)
# figure out scale and final translation
# XXX with reflection False -- not sure if here or there or anywhere...
ss = sum(s)
# if we were to collect standardized distance
# std_d = 1 - sD**2
# select out only relevant dimensions
if sm != tm:
T = T[:sm, :tm]
# Assign projection
if scaling:
scale = ss * norms[1] / norms[0]
proj = scale * T
else:
proj = T
return proj
def transform(data, proj):
if proj is None:
raise RuntimeError("Mapper needs to be trained before use.")
d = np.asmatrix(data)
# Do projection
res = (d * proj).A
return res
if format_data:
source, target = formatter([source, target])
# fit and transform
proj = fit(source, target)
return transform(source, proj)
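# Usage sketch (illustrative, not part of the original module). Assuming this
# module is importable and `procrustes` is in scope, aligning a rotated copy
# of some data back onto the original might look like:
#
#     import numpy as np
#     rng = np.random.RandomState(0)
#     target = rng.randn(100, 3)
#     theta = np.pi / 6
#     rot = np.array([[np.cos(theta), -np.sin(theta), 0.],
#                     [np.sin(theta),  np.cos(theta), 0.],
#                     [0., 0., 1.]])
#     source = target.dot(rot)           # rotated copy of the target
#     aligned = procrustes(source, target)
#     # `aligned` should closely match `target` (the rotation is undone)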
|
from xknx.devices import Cover as XknxCover
from homeassistant.components.cover import (
ATTR_POSITION,
ATTR_TILT_POSITION,
DEVICE_CLASS_BLIND,
SUPPORT_CLOSE,
SUPPORT_OPEN,
SUPPORT_SET_POSITION,
SUPPORT_SET_TILT_POSITION,
SUPPORT_STOP,
CoverEntity,
)
from homeassistant.core import callback
from homeassistant.helpers.event import async_track_utc_time_change
from .const import DOMAIN
from .knx_entity import KnxEntity
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up cover(s) for KNX platform."""
entities = []
for device in hass.data[DOMAIN].xknx.devices:
if isinstance(device, XknxCover):
entities.append(KNXCover(device))
async_add_entities(entities)
class KNXCover(KnxEntity, CoverEntity):
"""Representation of a KNX cover."""
def __init__(self, device: XknxCover):
"""Initialize the cover."""
super().__init__(device)
self._unsubscribe_auto_updater = None
@callback
async def after_update_callback(self, device):
"""Call after device was updated."""
self.async_write_ha_state()
if self._device.is_traveling():
self.start_auto_updater()
@property
def device_class(self):
"""Return the class of this device, from component DEVICE_CLASSES."""
if self._device.supports_angle:
return DEVICE_CLASS_BLIND
return None
@property
def supported_features(self):
"""Flag supported features."""
supported_features = SUPPORT_OPEN | SUPPORT_CLOSE | SUPPORT_SET_POSITION
if self._device.supports_stop:
supported_features |= SUPPORT_STOP
if self._device.supports_angle:
supported_features |= SUPPORT_SET_TILT_POSITION
return supported_features
@property
def current_cover_position(self):
"""Return the current position of the cover.
None is unknown, 0 is closed, 100 is fully open.
"""
# In KNX 0 is open, 100 is closed.
try:
return 100 - self._device.current_position()
except TypeError:
return None
@property
def is_closed(self):
"""Return if the cover is closed."""
return self._device.is_closed()
@property
def is_opening(self):
"""Return if the cover is opening or not."""
return self._device.is_opening()
@property
def is_closing(self):
"""Return if the cover is closing or not."""
return self._device.is_closing()
async def async_close_cover(self, **kwargs):
"""Close the cover."""
await self._device.set_down()
async def async_open_cover(self, **kwargs):
"""Open the cover."""
await self._device.set_up()
async def async_set_cover_position(self, **kwargs):
"""Move the cover to a specific position."""
knx_position = 100 - kwargs[ATTR_POSITION]
await self._device.set_position(knx_position)
async def async_stop_cover(self, **kwargs):
"""Stop the cover."""
await self._device.stop()
self.stop_auto_updater()
@property
def current_cover_tilt_position(self):
"""Return current tilt position of cover."""
if not self._device.supports_angle:
return None
try:
return 100 - self._device.current_angle()
except TypeError:
return None
async def async_set_cover_tilt_position(self, **kwargs):
"""Move the cover tilt to a specific position."""
knx_tilt_position = 100 - kwargs[ATTR_TILT_POSITION]
await self._device.set_angle(knx_tilt_position)
def start_auto_updater(self):
"""Start the autoupdater to update Home Assistant while cover is moving."""
if self._unsubscribe_auto_updater is None:
self._unsubscribe_auto_updater = async_track_utc_time_change(
self.hass, self.auto_updater_hook
)
def stop_auto_updater(self):
"""Stop the autoupdater."""
if self._unsubscribe_auto_updater is not None:
self._unsubscribe_auto_updater()
self._unsubscribe_auto_updater = None
@callback
def auto_updater_hook(self, now):
"""Call for the autoupdater."""
self.async_write_ha_state()
if self._device.position_reached():
self.stop_auto_updater()
self.hass.add_job(self._device.auto_stop_if_necessary())
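# Worked example of the inversion used above (not part of the original module):
# Home Assistant treats 100 as fully open while KNX treats 0 as open, so asking
# HA for position 70 sends 100 - 70 = 30 to the device, and a reported KNX
# position of 30 comes back to HA as 100 - 30 = 70.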
|
from collections import namedtuple
from contextlib import closing
import json
import gpsoauth
import httplib2 # included with oauth2client
import mechanicalsoup
import oauth2client
from oauth2client.client import OAuth2Credentials
import requests
from gmusicapi.exceptions import (
AlreadyLoggedIn, NotLoggedIn, CallFailure
)
from gmusicapi.protocol import webclient
from gmusicapi.utils import utils
log = utils.DynamicClientLogger(__name__)
OAuthInfo = namedtuple('OAuthInfo', 'client_id client_secret scope redirect_uri')
def credentials_from_refresh_token(token, oauth_info):
# why doesn't Google provide this!?
cred_json = {"_module": "oauth2client.client",
"token_expiry": "2000-01-01T00:13:37Z", # to refresh now
"access_token": 'bogus',
"token_uri": "https://accounts.google.com/o/oauth2/token",
"invalid": False,
"token_response": {
"access_token": 'bogus',
"token_type": "Bearer",
"expires_in": 3600,
"refresh_token": token},
"client_id": oauth_info.client_id,
"id_token": None,
"client_secret": oauth_info.client_secret,
"revoke_uri": "https://accounts.google.com/o/oauth2/revoke",
"_class": "OAuth2Credentials",
"refresh_token": token,
"user_agent": None}
return OAuth2Credentials.new_from_json(json.dumps(cred_json))
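# Illustrative use (not part of the original module): given a refresh token that
# was stored after a prior OAuth flow, credentials can be rebuilt with e.g.
# ``credentials_from_refresh_token(stored_token, Musicmanager.oauth)``; the bogus
# access token and year-2000 expiry above force oauth2client to refresh on first use.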
class _Base:
def __init__(self, rsession_setup=None):
"""
        :param rsession_setup: a callable that will be called with the
          backing requests.Session, allowing callers to configure the session.
"""
self._rsession = requests.Session()
if rsession_setup is None:
rsession_setup = lambda x: x # noqa
self._rsession_setup = rsession_setup
self._rsession_setup(self._rsession)
self.is_authenticated = False
def _send_with_auth(self, req_kwargs, desired_auth, rsession):
raise NotImplementedError
def _send_without_auth(self, req_kwargs, rsession):
return rsession.request(**req_kwargs)
def login(self, *args, **kwargs):
# subclasses extend / use super()
if self.is_authenticated:
raise AlreadyLoggedIn
def logout(self):
"""
Reset the session to an unauthenticated, default state.
"""
self._rsession.close()
self._rsession = requests.Session()
self._rsession_setup(self._rsession)
self.is_authenticated = False
def send(self, req_kwargs, desired_auth, rsession=None):
"""Send a request from a Call using this session's auth.
:param req_kwargs: kwargs for requests.Session.request
:param desired_auth: protocol.shared.AuthTypes to attach
:param rsession: (optional) a requests.Session to use
(default ``self._rsession`` - this is exposed for test purposes)
"""
res = None
if not any(desired_auth):
if rsession is None:
# use a throwaway session to ensure it's clean
with closing(requests.Session()) as new_session:
self._rsession_setup(new_session)
res = self._send_without_auth(req_kwargs, new_session)
else:
res = self._send_without_auth(req_kwargs, rsession)
else:
if not self.is_authenticated:
raise NotLoggedIn
if rsession is None:
rsession = self._rsession
res = self._send_with_auth(req_kwargs, desired_auth, rsession)
return res
class Webclient(_Base):
def login(self, email, password, *args, **kwargs):
"""
Perform serviceloginauth then retrieve webclient cookies.
:param email:
:param password:
"""
super().login()
# Google's login form has a bunch of hidden fields I'd rather not deal with manually.
browser = mechanicalsoup.Browser(soup_config={"features": "html.parser"})
login_page = browser.get('https://accounts.google.com/ServiceLoginAuth',
params={'service': 'sj',
'continue': 'https://play.google.com/music/listen'})
form_candidates = login_page.soup.select("form")
if len(form_candidates) > 1:
log.error("Google login form dom has changed; there are %s candidate forms:\n%s",
len(form_candidates), form_candidates)
return False
form = form_candidates[0]
form.select("#Email")[0]['value'] = email
response = browser.submit(form, 'https://accounts.google.com/AccountLoginInfo')
try:
response.raise_for_status()
except requests.HTTPError:
log.exception("submitting login form failed")
return False
form_candidates = response.soup.select("form")
if len(form_candidates) > 1:
log.error("Google login form dom has changed; there are %s candidate forms:\n%s",
len(form_candidates), form_candidates)
return False
form = form_candidates[0]
form.select("#Passwd")[0]['value'] = password
response = browser.submit(form, 'https://accounts.google.com/ServiceLoginAuth')
try:
response.raise_for_status()
except requests.HTTPError:
log.exception("submitting login form failed")
return False
# We can't use in without .keys(), since international users will see a
# CookieConflictError.
if 'SID' not in list(browser.session.cookies.keys()):
# Invalid auth.
return False
self._rsession.cookies.update(browser.session.cookies)
self.is_authenticated = True
# Get webclient cookies.
# They're stored automatically by requests on the webclient session.
try:
webclient.Init.perform(self, True)
except CallFailure:
log.exception("unable to initialize webclient cookies")
self.logout()
return self.is_authenticated
def _send_with_auth(self, req_kwargs, desired_auth, rsession):
if desired_auth.xt:
req_kwargs.setdefault('params', {})
req_kwargs['params'].update({'u': 0, 'xt': rsession.cookies['xt']})
return rsession.request(**req_kwargs)
class Musicmanager(_Base):
oauth = OAuthInfo(
'652850857958.apps.googleusercontent.com',
'ji1rklciNp2bfsFJnEH_i6al',
'https://www.googleapis.com/auth/musicmanager',
'urn:ietf:wg:oauth:2.0:oob'
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._oauth_creds = None
def login(self, oauth_credentials, *args, **kwargs):
"""Store an already-acquired oauth2client.Credentials."""
super().login()
try:
# refresh the token right away to check auth validity
oauth_credentials.refresh(httplib2.Http())
except oauth2client.client.Error:
log.exception("error when refreshing oauth credentials")
if oauth_credentials.access_token_expired:
log.info("could not refresh oauth credentials")
return False
self._oauth_creds = oauth_credentials
self.is_authenticated = True
return self.is_authenticated
def _send_with_auth(self, req_kwargs, desired_auth, rsession):
if desired_auth.oauth:
if self._oauth_creds.access_token_expired:
self._oauth_creds.refresh(httplib2.Http())
req_kwargs['headers'] = req_kwargs.get('headers', {})
req_kwargs['headers']['Authorization'] = \
'Bearer ' + self._oauth_creds.access_token
return rsession.request(**req_kwargs)
class Mobileclient(Musicmanager):
oauth = OAuthInfo(
'228293309116.apps.googleusercontent.com',
'GL1YV0XMp0RlL7ylCV3ilFz-',
'https://www.googleapis.com/auth/skyjam',
'urn:ietf:wg:oauth:2.0:oob'
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._master_token = None
self._authtoken = None
self._locale = None
self._is_subscribed = None
def gpsoauth_login(self, email, password, android_id, *args, **kwargs):
"""
Get a master token, then use it to get a skyjam OAuth token.
:param email:
:param password:
:param android_id:
"""
# TODO calling directly into base is weird
_Base.login(self, email, password, android_id, *args, **kwargs)
res = gpsoauth.perform_master_login(email, password, android_id)
if 'Token' not in res:
return False
self._master_token = res['Token']
res = gpsoauth.perform_oauth(
email, self._master_token, android_id,
service='sj', app='com.google.android.music',
client_sig='38918a453d07199354f8b19af05ec6562ced5788')
if 'Auth' not in res:
return False
self._authtoken = res['Auth']
self.is_authenticated = True
return True
def _send_with_auth(self, req_kwargs, desired_auth, rsession):
# Default to English (United States) if no locale given.
if not self._locale:
self._locale = 'en_US'
# Set locale for all Mobileclient calls.
req_kwargs.setdefault('params', {})
req_kwargs['params'].update({'hl': self._locale})
# As of API v2.5, dv is a required parameter for all calls.
# The dv value is part of the Android app version number,
# but setting this to 0 works fine.
req_kwargs['params'].update({'dv': 87701})
if self._is_subscribed:
req_kwargs['params'].update({'tier': 'aa'})
else:
req_kwargs['params'].update({'tier': 'fr'})
req_kwargs.setdefault('headers', {})
if desired_auth.gpsoauth:
# does this expire?
req_kwargs['headers']['Authorization'] = \
'GoogleLogin auth=' + self._authtoken
return rsession.request(**req_kwargs)
if desired_auth.oauth:
return super()._send_with_auth(req_kwargs, desired_auth, rsession)
raise ValueError("_send_with_auth got invalid desired_auth: {}".format(desired_auth))
|
import os.path as op
import numpy as np
import pytest
from mne import create_info
from mne.datasets import testing
from mne.io import RawArray, read_raw_fif
from mne.preprocessing import annotate_flat
data_path = testing.data_path(download=False)
skip_fname = op.join(data_path, 'misc', 'intervalrecording_raw.fif')
@pytest.mark.parametrize('first_samp', (0, 10000))
def test_annotate_flat(first_samp):
"""Test marking flat segments."""
    # Build a random dataset that has no flat segments to begin with
n_ch, n_times = 11, 1000
data = np.random.RandomState(0).randn(n_ch, n_times)
assert not (np.diff(data, axis=-1) == 0).any() # nothing flat at first
info = create_info(n_ch, 1000., 'eeg')
info['meas_date'] = (1, 2)
    # test first_samp != 0 for gh-6295
raw = RawArray(data, info, first_samp=first_samp)
raw.info['bads'] = [raw.ch_names[-1]]
#
# First make a channel flat the whole time
#
raw_0 = raw.copy()
raw_0._data[0] = 0.
for kwargs, bads, want_times in [
# Anything < 1 will mark spatially
(dict(bad_percent=100.), [], 0),
(dict(bad_percent=99.9), [raw.ch_names[0]], n_times),
(dict(), [raw.ch_names[0]], n_times)]: # default (1)
raw_time = raw_0.copy()
annot, got_bads = annotate_flat(raw_0, verbose='debug', **kwargs)
assert got_bads == bads
raw_time.set_annotations(raw_time.annotations + annot)
raw_time.info['bads'] += got_bads
n_good_times = raw_time.get_data(reject_by_annotation='omit').shape[1]
assert n_good_times == want_times
#
# Now make a channel flat for 20% of the time points
#
raw_0 = raw.copy()
n_good_times = int(round(0.8 * n_times))
raw_0._data[0, n_good_times:] = 0.
threshold = 100 * (n_times - n_good_times) / n_times
for kwargs, bads, want_times in [
# Should change behavior at bad_percent=20
(dict(bad_percent=100), [], n_good_times),
(dict(bad_percent=threshold), [], n_good_times),
(dict(bad_percent=threshold - 1e-5), [raw.ch_names[0]], n_times),
(dict(), [raw.ch_names[0]], n_times)]:
annot, got_bads = annotate_flat(raw_0, verbose='debug', **kwargs)
assert got_bads == bads
raw_time = raw_0.copy()
raw_time.set_annotations(raw_time.annotations + annot)
raw_time.info['bads'] += got_bads
n_good_times = raw_time.get_data(reject_by_annotation='omit').shape[1]
assert n_good_times == want_times
with pytest.raises(TypeError, match='must be an instance of BaseRaw'):
annotate_flat(0.)
with pytest.raises(ValueError, match='not convert string to float'):
annotate_flat(raw, 'x')
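# Note (not part of the original tests): in the second block above, 200 of the
# 1000 samples are flat, i.e. exactly 20%. With bad_percent at or above that
# threshold the flat stretch is annotated in time (800 good samples remain);
# below it the whole channel is marked bad instead and no time annotation is added.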
@testing.requires_testing_data
def test_flat_acq_skip():
"""Test that acquisition skips are handled properly."""
raw = read_raw_fif(skip_fname).load_data()
annot, bads = annotate_flat(raw)
assert len(annot) == 0
assert bads == [ # MaxFilter finds the same 21 channels
'MEG%04d' % (int(num),) for num in
'141 331 421 431 611 641 1011 1021 1031 1241 1421 '
'1741 1841 2011 2131 2141 2241 2531 2541 2611 2621'.split()]
|
from __future__ import division
from pygal.colors import (
darken, desaturate, hsl_to_rgb, lighten, parse_color, rgb_to_hsl, rotate,
saturate, unparse_color
)
def test_parse_color():
"""Test color parse function"""
assert parse_color('#123') == (17, 34, 51, 1., '#rgb')
assert parse_color('#cdf') == (204, 221, 255, 1., '#rgb')
assert parse_color('#a3d7') == (170, 51, 221, 119 / 255, '#rgba')
assert parse_color('#584b4f') == (88, 75, 79, 1., '#rrggbb')
assert parse_color('#8cbe22') == (140, 190, 34, 1., '#rrggbb')
assert parse_color('#16cbf055') == (22, 203, 240, 1 / 3, '#rrggbbaa')
assert parse_color('rgb(134, 67, 216)') == (134, 67, 216, 1., 'rgb')
assert parse_color('rgb(0, 111, 222)') == (0, 111, 222, 1., 'rgb')
assert parse_color('rgba(237, 83, 48, .8)') == (237, 83, 48, .8, 'rgba')
assert parse_color('rgba(0, 1, 0, 0.1223)') == (0, 1, 0, .1223, 'rgba')
def test_unparse_color():
"""Test color unparse function"""
assert unparse_color(17, 34, 51, 1., '#rgb') == '#123'
assert unparse_color(204, 221, 255, 1., '#rgb') == '#cdf'
assert unparse_color(170, 51, 221, 119 / 255, '#rgba') == '#a3d7'
assert unparse_color(88, 75, 79, 1., '#rrggbb') == '#584b4f'
assert unparse_color(140, 190, 34, 1., '#rrggbb') == '#8cbe22'
assert unparse_color(22, 203, 240, 1 / 3, '#rrggbbaa') == '#16cbf055'
assert unparse_color(134, 67, 216, 1., 'rgb') == 'rgb(134, 67, 216)'
assert unparse_color(0, 111, 222, 1., 'rgb') == 'rgb(0, 111, 222)'
assert unparse_color(237, 83, 48, .8, 'rgba') == 'rgba(237, 83, 48, 0.8)'
assert unparse_color(0, 1, 0, .1223, 'rgba') == 'rgba(0, 1, 0, 0.1223)'
def test_darken():
"""Test darken color function"""
assert darken('#800', 20) == '#200'
assert darken('#800e', 20) == '#200e'
assert darken('#800', 0) == '#800'
assert darken('#ffffff', 10) == '#e6e6e6'
assert darken('#000000', 10) == '#000000'
assert darken('#f3148a', 25) == '#810747'
assert darken('#f3148aab', 25) == '#810747ab'
assert darken('#121212', 1) == '#0f0f0f'
assert darken('#999999', 100) == '#000000'
assert darken('#99999999', 100) == '#00000099'
assert darken('#1479ac', 8) == '#105f87'
assert darken('rgb(136, 0, 0)', 20) == 'rgb(34, 0, 0)'
assert darken('rgba(20, 121, 172, .13)', 8) == 'rgba(16, 95, 135, 0.13)'
def test_lighten():
"""Test lighten color function"""
assert lighten('#800', 20) == '#e00'
assert lighten('#800', 0) == '#800'
assert lighten('#ffffff', 10) == '#ffffff'
assert lighten('#000000', 10) == '#1a1a1a'
assert lighten('#f3148a', 25) == '#f98dc6'
assert lighten('#121212', 1) == '#151515'
assert lighten('#999999', 100) == '#ffffff'
assert lighten('#1479ac', 8) == '#1893d1'
def test_saturate():
"""Test color saturation function"""
assert saturate('#000', 20) == '#000'
assert saturate('#fff', 20) == '#fff'
assert saturate('#8a8', 100) == '#3f3'
assert saturate('#855', 20) == '#9e3f3f'
def test_desaturate():
"""Test color desaturation function"""
assert desaturate('#000', 20) == '#000'
assert desaturate('#fff', 20) == '#fff'
assert desaturate('#8a8', 100) == '#999'
assert desaturate('#855', 20) == '#726b6b'
def test_rotate():
"""Test color rotation function"""
assert rotate('#000', 45) == '#000'
assert rotate('#fff', 45) == '#fff'
assert rotate('#811', 45) == '#886a11'
assert rotate('#8a8', 360) == '#8a8'
assert rotate('#8a8', 0) == '#8a8'
assert rotate('#8a8', -360) == '#8a8'
def test_hsl_to_rgb_part_0():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(0, 100, 50) == (255, 0, 0)
assert hsl_to_rgb(60, 100, 50) == (255, 255, 0)
assert hsl_to_rgb(120, 100, 50) == (0, 255, 0)
assert hsl_to_rgb(180, 100, 50) == (0, 255, 255)
assert hsl_to_rgb(240, 100, 50) == (0, 0, 255)
assert hsl_to_rgb(300, 100, 50) == (255, 0, 255)
def test_rgb_to_hsl_part_0():
"""Test rgb to hsl color function"""
assert rgb_to_hsl(255, 0, 0) == (0, 100, 50)
assert rgb_to_hsl(255, 255, 0) == (60, 100, 50)
assert rgb_to_hsl(0, 255, 0) == (120, 100, 50)
assert rgb_to_hsl(0, 255, 255) == (180, 100, 50)
assert rgb_to_hsl(0, 0, 255) == (240, 100, 50)
assert rgb_to_hsl(255, 0, 255) == (300, 100, 50)
def test_hsl_to_rgb_part_1():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(-360, 100, 50) == (255, 0, 0)
assert hsl_to_rgb(-300, 100, 50) == (255, 255, 0)
assert hsl_to_rgb(-240, 100, 50) == (0, 255, 0)
assert hsl_to_rgb(-180, 100, 50) == (0, 255, 255)
assert hsl_to_rgb(-120, 100, 50) == (0, 0, 255)
assert hsl_to_rgb(-60, 100, 50) == (255, 0, 255)
def test_rgb_to_hsl_part_1():
"""Test rgb to hsl color function"""
# assert rgb_to_hsl(255, 0, 0) == (-360, 100, 50)
# assert rgb_to_hsl(255, 255, 0) == (-300, 100, 50)
# assert rgb_to_hsl(0, 255, 0) == (-240, 100, 50)
# assert rgb_to_hsl(0, 255, 255) == (-180, 100, 50)
# assert rgb_to_hsl(0, 0, 255) == (-120, 100, 50)
# assert rgb_to_hsl(255, 0, 255) == (-60, 100, 50)
pass
def test_hsl_to_rgb_part_2():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(360, 100, 50) == (255, 0, 0)
assert hsl_to_rgb(420, 100, 50) == (255, 255, 0)
assert hsl_to_rgb(480, 100, 50) == (0, 255, 0)
assert hsl_to_rgb(540, 100, 50) == (0, 255, 255)
assert hsl_to_rgb(600, 100, 50) == (0, 0, 255)
assert hsl_to_rgb(660, 100, 50) == (255, 0, 255)
def test_rgb_to_hsl_part_2():
"""Test rgb to hsl color function"""
# assert rgb_to_hsl(255, 0, 0) == (360, 100, 50)
# assert rgb_to_hsl(255, 255, 0) == (420, 100, 50)
# assert rgb_to_hsl(0, 255, 0) == (480, 100, 50)
# assert rgb_to_hsl(0, 255, 255) == (540, 100, 50)
# assert rgb_to_hsl(0, 0, 255) == (600, 100, 50)
# assert rgb_to_hsl(255, 0, 255) == (660, 100, 50)
pass
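# Note (not part of the original tests): the part_3 cases below exercise hue
# wrap-around -- the hue is reduced modulo 360 before conversion, so
# 6120 % 360 == 0 (red), -9660 % 360 == 60 (yellow), 99840 % 360 == 120 (green),
# -900 % 360 == 180 (cyan), -104880 % 360 == 240 (blue) and 2820 % 360 == 300 (magenta).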
def test_hsl_to_rgb_part_3():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(6120, 100, 50) == (255, 0, 0)
assert hsl_to_rgb(-9660, 100, 50) == (255, 255, 0)
assert hsl_to_rgb(99840, 100, 50) == (0, 255, 0)
assert hsl_to_rgb(-900, 100, 50) == (0, 255, 255)
assert hsl_to_rgb(-104880, 100, 50) == (0, 0, 255)
assert hsl_to_rgb(2820, 100, 50) == (255, 0, 255)
def test_rgb_to_hsl_part_3():
"""Test rgb to hsl color function"""
# assert rgb_to_hsl(255, 0, 0) == (6120, 100, 50)
# assert rgb_to_hsl(255, 255, 0) == (-9660, 100, 50)
# assert rgb_to_hsl(0, 255, 0) == (99840, 100, 50)
# assert rgb_to_hsl(0, 255, 255) == (-900, 100, 50)
# assert rgb_to_hsl(0, 0, 255) == (-104880, 100, 50)
# assert rgb_to_hsl(255, 0, 255) == (2820, 100, 50)
pass
def test_hsl_to_rgb_part_4():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(0, 100, 50) == (255, 0, 0)
assert hsl_to_rgb(12, 100, 50) == (255, 51, 0)
assert hsl_to_rgb(24, 100, 50) == (255, 102, 0)
assert hsl_to_rgb(36, 100, 50) == (255, 153, 0)
assert hsl_to_rgb(48, 100, 50) == (255, 204, 0)
assert hsl_to_rgb(60, 100, 50) == (255, 255, 0)
assert hsl_to_rgb(72, 100, 50) == (204, 255, 0)
assert hsl_to_rgb(84, 100, 50) == (153, 255, 0)
assert hsl_to_rgb(96, 100, 50) == (102, 255, 0)
assert hsl_to_rgb(108, 100, 50) == (51, 255, 0)
assert hsl_to_rgb(120, 100, 50) == (0, 255, 0)
def test_rgb_to_hsl_part_4():
"""Test rgb to hsl color function"""
assert rgb_to_hsl(255, 0, 0) == (0, 100, 50)
assert rgb_to_hsl(255, 51, 0) == (12, 100, 50)
assert rgb_to_hsl(255, 102, 0) == (24, 100, 50)
assert rgb_to_hsl(255, 153, 0) == (36, 100, 50)
assert rgb_to_hsl(255, 204, 0) == (48, 100, 50)
assert rgb_to_hsl(255, 255, 0) == (60, 100, 50)
assert rgb_to_hsl(204, 255, 0) == (72, 100, 50)
assert rgb_to_hsl(153, 255, 0) == (84, 100, 50)
assert rgb_to_hsl(102, 255, 0) == (96, 100, 50)
assert rgb_to_hsl(51, 255, 0) == (108, 100, 50)
assert rgb_to_hsl(0, 255, 0) == (120, 100, 50)
def test_hsl_to_rgb_part_5():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(120, 100, 50) == (0, 255, 0)
assert hsl_to_rgb(132, 100, 50) == (0, 255, 51)
assert hsl_to_rgb(144, 100, 50) == (0, 255, 102)
assert hsl_to_rgb(156, 100, 50) == (0, 255, 153)
assert hsl_to_rgb(168, 100, 50) == (0, 255, 204)
assert hsl_to_rgb(180, 100, 50) == (0, 255, 255)
assert hsl_to_rgb(192, 100, 50) == (0, 204, 255)
assert hsl_to_rgb(204, 100, 50) == (0, 153, 255)
assert hsl_to_rgb(216, 100, 50) == (0, 102, 255)
assert hsl_to_rgb(228, 100, 50) == (0, 51, 255)
assert hsl_to_rgb(240, 100, 50) == (0, 0, 255)
def test_rgb_to_hsl_part_5():
"""Test rgb to hsl color function"""
assert rgb_to_hsl(0, 255, 0) == (120, 100, 50)
assert rgb_to_hsl(0, 255, 51) == (132, 100, 50)
assert rgb_to_hsl(0, 255, 102) == (144, 100, 50)
assert rgb_to_hsl(0, 255, 153) == (156, 100, 50)
assert rgb_to_hsl(0, 255, 204) == (168, 100, 50)
assert rgb_to_hsl(0, 255, 255) == (180, 100, 50)
assert rgb_to_hsl(0, 204, 255) == (192, 100, 50)
assert rgb_to_hsl(0, 153, 255) == (204, 100, 50)
assert rgb_to_hsl(0, 102, 255) == (216, 100, 50)
assert rgb_to_hsl(0, 51, 255) == (228, 100, 50)
assert rgb_to_hsl(0, 0, 255) == (240, 100, 50)
def test_hsl_to_rgb_part_6():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(240, 100, 50) == (0, 0, 255)
assert hsl_to_rgb(252, 100, 50) == (51, 0, 255)
assert hsl_to_rgb(264, 100, 50) == (102, 0, 255)
assert hsl_to_rgb(276, 100, 50) == (153, 0, 255)
assert hsl_to_rgb(288, 100, 50) == (204, 0, 255)
assert hsl_to_rgb(300, 100, 50) == (255, 0, 255)
assert hsl_to_rgb(312, 100, 50) == (255, 0, 204)
assert hsl_to_rgb(324, 100, 50) == (255, 0, 153)
assert hsl_to_rgb(336, 100, 50) == (255, 0, 102)
assert hsl_to_rgb(348, 100, 50) == (255, 0, 51)
assert hsl_to_rgb(360, 100, 50) == (255, 0, 0)
def test_rgb_to_hsl_part_6():
"""Test rgb to hsl color function"""
assert rgb_to_hsl(0, 0, 255) == (240, 100, 50)
assert rgb_to_hsl(51, 0, 255) == (252, 100, 50)
assert rgb_to_hsl(102, 0, 255) == (264, 100, 50)
assert rgb_to_hsl(153, 0, 255) == (276, 100, 50)
assert rgb_to_hsl(204, 0, 255) == (288, 100, 50)
assert rgb_to_hsl(255, 0, 255) == (300, 100, 50)
assert rgb_to_hsl(255, 0, 204) == (312, 100, 50)
assert rgb_to_hsl(255, 0, 153) == (324, 100, 50)
assert rgb_to_hsl(255, 0, 102) == (336, 100, 50)
assert rgb_to_hsl(255, 0, 51) == (348, 100, 50)
# assert rgb_to_hsl(255, 0, 0) == (360, 100, 50)
def test_hsl_to_rgb_part_7():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(0, 20, 50) == (153, 102, 102)
assert hsl_to_rgb(0, 60, 50) == (204, 51, 51)
assert hsl_to_rgb(0, 100, 50) == (255, 0, 0)
def test_rgb_to_hsl_part_7():
"""Test rgb to hsl color function"""
assert rgb_to_hsl(153, 102, 102) == (0, 20, 50)
assert rgb_to_hsl(204, 51, 51) == (0, 60, 50)
assert rgb_to_hsl(255, 0, 0) == (0, 100, 50)
def test_hsl_to_rgb_part_8():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(60, 20, 50) == (153, 153, 102)
assert hsl_to_rgb(60, 60, 50) == (204, 204, 51)
assert hsl_to_rgb(60, 100, 50) == (255, 255, 0)
def test_rgb_to_hsl_part_8():
"""Test rgb to hsl color function"""
assert rgb_to_hsl(153, 153, 102) == (60, 20, 50)
assert rgb_to_hsl(204, 204, 51) == (60, 60, 50)
assert rgb_to_hsl(255, 255, 0) == (60, 100, 50)
def test_hsl_to_rgb_part_9():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(120, 20, 50) == (102, 153, 102)
assert hsl_to_rgb(120, 60, 50) == (51, 204, 51)
assert hsl_to_rgb(120, 100, 50) == (0, 255, 0)
def test_rgb_to_hsl_part_9():
"""Test rgb to hsl color function"""
assert rgb_to_hsl(102, 153, 102) == (120, 20, 50)
assert rgb_to_hsl(51, 204, 51) == (120, 60, 50)
assert rgb_to_hsl(0, 255, 0) == (120, 100, 50)
def test_hsl_to_rgb_part_10():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(180, 20, 50) == (102, 153, 153)
assert hsl_to_rgb(180, 60, 50) == (51, 204, 204)
assert hsl_to_rgb(180, 100, 50) == (0, 255, 255)
def test_rgb_to_hsl_part_10():
"""Test rgb to hsl color function"""
assert rgb_to_hsl(102, 153, 153) == (180, 20, 50)
assert rgb_to_hsl(51, 204, 204) == (180, 60, 50)
assert rgb_to_hsl(0, 255, 255) == (180, 100, 50)
def test_hsl_to_rgb_part_11():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(240, 20, 50) == (102, 102, 153)
assert hsl_to_rgb(240, 60, 50) == (51, 51, 204)
assert hsl_to_rgb(240, 100, 50) == (0, 0, 255)
def test_rgb_to_hsl_part_11():
"""Test rgb to hsl color function"""
assert rgb_to_hsl(102, 102, 153) == (240, 20, 50)
assert rgb_to_hsl(51, 51, 204) == (240, 60, 50)
assert rgb_to_hsl(0, 0, 255) == (240, 100, 50)
def test_hsl_to_rgb_part_12():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(300, 20, 50) == (153, 102, 153)
assert hsl_to_rgb(300, 60, 50) == (204, 51, 204)
assert hsl_to_rgb(300, 100, 50) == (255, 0, 255)
def test_rgb_to_hsl_part_12():
"""Test rgb to hsl color function"""
assert rgb_to_hsl(153, 102, 153) == (300, 20, 50)
assert rgb_to_hsl(204, 51, 204) == (300, 60, 50)
assert rgb_to_hsl(255, 0, 255) == (300, 100, 50)
def test_hsl_to_rgb_part_13():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(0, 100, 0) == (0, 0, 0)
assert hsl_to_rgb(0, 100, 10) == (51, 0, 0)
assert hsl_to_rgb(0, 100, 20) == (102, 0, 0)
assert hsl_to_rgb(0, 100, 30) == (153, 0, 0)
assert hsl_to_rgb(0, 100, 40) == (204, 0, 0)
assert hsl_to_rgb(0, 100, 50) == (255, 0, 0)
assert hsl_to_rgb(0, 100, 60) == (255, 51, 51)
assert hsl_to_rgb(0, 100, 70) == (255, 102, 102)
assert hsl_to_rgb(0, 100, 80) == (255, 153, 153)
assert hsl_to_rgb(0, 100, 90) == (255, 204, 204)
assert hsl_to_rgb(0, 100, 100) == (255, 255, 255)
def test_rgb_to_hsl_part_13():
"""Test rgb to hsl color function"""
assert rgb_to_hsl(0, 0, 0) == (0, 0, 0)
assert rgb_to_hsl(51, 0, 0) == (0, 100, 10)
assert rgb_to_hsl(102, 0, 0) == (0, 100, 20)
assert rgb_to_hsl(153, 0, 0) == (0, 100, 30)
assert rgb_to_hsl(204, 0, 0) == (0, 100, 40)
assert rgb_to_hsl(255, 0, 0) == (0, 100, 50)
assert rgb_to_hsl(255, 51, 51) == (0, 100, 60)
assert rgb_to_hsl(255, 102, 102) == (0, 100, 70)
assert rgb_to_hsl(255, 153, 153) == (0, 100, 80)
assert rgb_to_hsl(255, 204, 204) == (0, 100, 90)
assert rgb_to_hsl(255, 255, 255) == (0, 0, 100)
def test_hsl_to_rgb_part_14():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(60, 100, 0) == (0, 0, 0)
assert hsl_to_rgb(60, 100, 10) == (51, 51, 0)
assert hsl_to_rgb(60, 100, 20) == (102, 102, 0)
assert hsl_to_rgb(60, 100, 30) == (153, 153, 0)
assert hsl_to_rgb(60, 100, 40) == (204, 204, 0)
assert hsl_to_rgb(60, 100, 50) == (255, 255, 0)
assert hsl_to_rgb(60, 100, 60) == (255, 255, 51)
assert hsl_to_rgb(60, 100, 70) == (255, 255, 102)
assert hsl_to_rgb(60, 100, 80) == (255, 255, 153)
assert hsl_to_rgb(60, 100, 90) == (255, 255, 204)
assert hsl_to_rgb(60, 100, 100) == (255, 255, 255)
def test_rgb_to_hsl_part_14():
"""Test rgb to hsl color function"""
# assert rgb_to_hsl(0, 0, 0) == (60, 100, 0)
assert rgb_to_hsl(51, 51, 0) == (60, 100, 10)
assert rgb_to_hsl(102, 102, 0) == (60, 100, 20)
assert rgb_to_hsl(153, 153, 0) == (60, 100, 30)
assert rgb_to_hsl(204, 204, 0) == (60, 100, 40)
assert rgb_to_hsl(255, 255, 0) == (60, 100, 50)
assert rgb_to_hsl(255, 255, 51) == (60, 100, 60)
assert rgb_to_hsl(255, 255, 102) == (60, 100, 70)
assert rgb_to_hsl(255, 255, 153) == (60, 100, 80)
assert rgb_to_hsl(255, 255, 204) == (60, 100, 90)
# assert rgb_to_hsl(255, 255, 255) == (60, 100, 100)
def test_hsl_to_rgb_part_15():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(120, 100, 0) == (0, 0, 0)
assert hsl_to_rgb(120, 100, 10) == (0, 51, 0)
assert hsl_to_rgb(120, 100, 20) == (0, 102, 0)
assert hsl_to_rgb(120, 100, 30) == (0, 153, 0)
assert hsl_to_rgb(120, 100, 40) == (0, 204, 0)
assert hsl_to_rgb(120, 100, 50) == (0, 255, 0)
assert hsl_to_rgb(120, 100, 60) == (51, 255, 51)
assert hsl_to_rgb(120, 100, 70) == (102, 255, 102)
assert hsl_to_rgb(120, 100, 80) == (153, 255, 153)
assert hsl_to_rgb(120, 100, 90) == (204, 255, 204)
assert hsl_to_rgb(120, 100, 100) == (255, 255, 255)
def test_rgb_to_hsl_part_15():
"""Test rgb to hsl color function"""
# assert rgb_to_hsl(0, 0, 0) == (120, 100, 0)
assert rgb_to_hsl(0, 51, 0) == (120, 100, 10)
assert rgb_to_hsl(0, 102, 0) == (120, 100, 20)
assert rgb_to_hsl(0, 153, 0) == (120, 100, 30)
assert rgb_to_hsl(0, 204, 0) == (120, 100, 40)
assert rgb_to_hsl(0, 255, 0) == (120, 100, 50)
assert rgb_to_hsl(51, 255, 51) == (120, 100, 60)
assert rgb_to_hsl(102, 255, 102) == (120, 100, 70)
assert rgb_to_hsl(153, 255, 153) == (120, 100, 80)
assert rgb_to_hsl(204, 255, 204) == (120, 100, 90)
# assert rgb_to_hsl(255, 255, 255) == (120, 100, 100)
def test_hsl_to_rgb_part_16():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(180, 100, 0) == (0, 0, 0)
assert hsl_to_rgb(180, 100, 10) == (0, 51, 51)
assert hsl_to_rgb(180, 100, 20) == (0, 102, 102)
assert hsl_to_rgb(180, 100, 30) == (0, 153, 153)
assert hsl_to_rgb(180, 100, 40) == (0, 204, 204)
assert hsl_to_rgb(180, 100, 50) == (0, 255, 255)
assert hsl_to_rgb(180, 100, 60) == (51, 255, 255)
assert hsl_to_rgb(180, 100, 70) == (102, 255, 255)
assert hsl_to_rgb(180, 100, 80) == (153, 255, 255)
assert hsl_to_rgb(180, 100, 90) == (204, 255, 255)
assert hsl_to_rgb(180, 100, 100) == (255, 255, 255)
def test_rgb_to_hsl_part_16():
"""Test rgb to hsl color function"""
# assert rgb_to_hsl(0, 0, 0) == (180, 100, 0)
assert rgb_to_hsl(0, 51, 51) == (180, 100, 10)
assert rgb_to_hsl(0, 102, 102) == (180, 100, 20)
assert rgb_to_hsl(0, 153, 153) == (180, 100, 30)
assert rgb_to_hsl(0, 204, 204) == (180, 100, 40)
assert rgb_to_hsl(0, 255, 255) == (180, 100, 50)
assert rgb_to_hsl(51, 255, 255) == (180, 100, 60)
assert rgb_to_hsl(102, 255, 255) == (180, 100, 70)
assert rgb_to_hsl(153, 255, 255) == (180, 100, 80)
assert rgb_to_hsl(204, 255, 255) == (180, 100, 90)
# assert rgb_to_hsl(255, 255, 255) == (180, 100, 100)
def test_hsl_to_rgb_part_17():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(240, 100, 0) == (0, 0, 0)
assert hsl_to_rgb(240, 100, 10) == (0, 0, 51)
assert hsl_to_rgb(240, 100, 20) == (0, 0, 102)
assert hsl_to_rgb(240, 100, 30) == (0, 0, 153)
assert hsl_to_rgb(240, 100, 40) == (0, 0, 204)
assert hsl_to_rgb(240, 100, 50) == (0, 0, 255)
assert hsl_to_rgb(240, 100, 60) == (51, 51, 255)
assert hsl_to_rgb(240, 100, 70) == (102, 102, 255)
assert hsl_to_rgb(240, 100, 80) == (153, 153, 255)
assert hsl_to_rgb(240, 100, 90) == (204, 204, 255)
assert hsl_to_rgb(240, 100, 100) == (255, 255, 255)
def test_rgb_to_hsl_part_17():
"""Test rgb to hsl color function"""
# assert rgb_to_hsl(0, 0, 0) == (240, 100, 0)
assert rgb_to_hsl(0, 0, 51) == (240, 100, 10)
assert rgb_to_hsl(0, 0, 102) == (240, 100, 20)
assert rgb_to_hsl(0, 0, 153) == (240, 100, 30)
assert rgb_to_hsl(0, 0, 204) == (240, 100, 40)
assert rgb_to_hsl(0, 0, 255) == (240, 100, 50)
assert rgb_to_hsl(51, 51, 255) == (240, 100, 60)
assert rgb_to_hsl(102, 102, 255) == (240, 100, 70)
assert rgb_to_hsl(153, 153, 255) == (240, 100, 80)
assert rgb_to_hsl(204, 204, 255) == (240, 100, 90)
# assert rgb_to_hsl(255, 255, 255) == (240, 100, 100)
def test_hsl_to_rgb_part_18():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(300, 100, 0) == (0, 0, 0)
assert hsl_to_rgb(300, 100, 10) == (51, 0, 51)
assert hsl_to_rgb(300, 100, 20) == (102, 0, 102)
assert hsl_to_rgb(300, 100, 30) == (153, 0, 153)
assert hsl_to_rgb(300, 100, 40) == (204, 0, 204)
assert hsl_to_rgb(300, 100, 50) == (255, 0, 255)
assert hsl_to_rgb(300, 100, 60) == (255, 51, 255)
assert hsl_to_rgb(300, 100, 70) == (255, 102, 255)
assert hsl_to_rgb(300, 100, 80) == (255, 153, 255)
assert hsl_to_rgb(300, 100, 90) == (255, 204, 255)
assert hsl_to_rgb(300, 100, 100) == (255, 255, 255)
def test_rgb_to_hsl_part_18():
"""Test rgb to hsl color function"""
# assert rgb_to_hsl(0, 0, 0) == (300, 100, 0)
assert rgb_to_hsl(51, 0, 51) == (300, 100, 10)
assert rgb_to_hsl(102, 0, 102) == (300, 100, 20)
assert rgb_to_hsl(153, 0, 153) == (300, 100, 30)
assert rgb_to_hsl(204, 0, 204) == (300, 100, 40)
assert rgb_to_hsl(255, 0, 255) == (300, 100, 50)
assert rgb_to_hsl(255, 51, 255) == (300, 100, 60)
assert rgb_to_hsl(255, 102, 255) == (300, 100, 70)
assert rgb_to_hsl(255, 153, 255) == (300, 100, 80)
assert rgb_to_hsl(255, 204, 255) == (300, 100, 90)
# assert rgb_to_hsl(255, 255, 255) == (300, 100, 100)
|
from django.utils.translation import gettext_lazy as _
from weblate.wladmin.models import WeblateModelAdmin
class PlanAdmin(WeblateModelAdmin):
list_display = (
"name",
"price",
"limit_strings",
"limit_languages",
"limit_projects",
"display_limit_strings",
"display_limit_languages",
"display_limit_projects",
)
ordering = ["price"]
prepopulated_fields = {"slug": ("name",)}
def format_user(obj):
return f"{obj.username}: {obj.full_name} <{obj.email}>"
class BillingAdmin(WeblateModelAdmin):
list_display = (
"list_projects",
"list_owners",
"plan",
"state",
"removal",
"expiry",
"monthly_changes",
"total_changes",
"unit_count",
"display_projects",
"display_strings",
"display_words",
"display_languages",
"in_limits",
"in_display_limits",
"paid",
"last_invoice",
)
list_filter = ("plan", "state", "paid", "in_limits")
search_fields = ("projects__name", "owners__email")
filter_horizontal = ("projects", "owners")
def get_queryset(self, request):
return super().get_queryset(request).prefetch_related("projects", "owners")
def list_projects(self, obj):
if not obj.all_projects:
return "none projects associated"
return ",".join([project.name for project in obj.all_projects])
list_projects.short_description = _("Projects")
def list_owners(self, obj):
return ",".join([owner.full_name for owner in obj.owners.all()])
list_owners.short_description = _("Owners")
def get_form(self, request, obj=None, **kwargs):
form = super().get_form(request, obj, **kwargs)
form.base_fields["owners"].label_from_instance = format_user
return form
def save_related(self, request, form, formsets, change):
super().save_related(request, form, formsets, change)
obj = form.instance
# Add owners as admin if there is none
for project in obj.projects.all():
group = project.get_group("@Administration")
if not group.user_set.exists():
group.user_set.add(*obj.owners.all())
class InvoiceAdmin(WeblateModelAdmin):
list_display = ("billing", "start", "end", "amount", "currency", "ref")
list_filter = ("currency", "billing")
search_fields = ("billing__projects__name", "ref", "note")
date_hierarchy = "end"
ordering = ["billing", "-start"]
|
import logging
from homeassistant.components.media_player import BrowseError, BrowseMedia
from homeassistant.components.media_player.const import (
MEDIA_CLASS_ALBUM,
MEDIA_CLASS_ARTIST,
MEDIA_CLASS_CHANNEL,
MEDIA_CLASS_DIRECTORY,
MEDIA_CLASS_EPISODE,
MEDIA_CLASS_MOVIE,
MEDIA_CLASS_MUSIC,
MEDIA_CLASS_PLAYLIST,
MEDIA_CLASS_SEASON,
MEDIA_CLASS_TRACK,
MEDIA_CLASS_TV_SHOW,
MEDIA_TYPE_ALBUM,
MEDIA_TYPE_ARTIST,
MEDIA_TYPE_CHANNEL,
MEDIA_TYPE_EPISODE,
MEDIA_TYPE_MOVIE,
MEDIA_TYPE_PLAYLIST,
MEDIA_TYPE_SEASON,
MEDIA_TYPE_TRACK,
MEDIA_TYPE_TVSHOW,
)
PLAYABLE_MEDIA_TYPES = [
MEDIA_TYPE_ALBUM,
MEDIA_TYPE_ARTIST,
MEDIA_TYPE_TRACK,
]
CONTAINER_TYPES_SPECIFIC_MEDIA_CLASS = {
MEDIA_TYPE_ALBUM: MEDIA_CLASS_ALBUM,
MEDIA_TYPE_ARTIST: MEDIA_CLASS_ARTIST,
MEDIA_TYPE_PLAYLIST: MEDIA_CLASS_PLAYLIST,
MEDIA_TYPE_SEASON: MEDIA_CLASS_SEASON,
MEDIA_TYPE_TVSHOW: MEDIA_CLASS_TV_SHOW,
}
CHILD_TYPE_MEDIA_CLASS = {
MEDIA_TYPE_SEASON: MEDIA_CLASS_SEASON,
MEDIA_TYPE_ALBUM: MEDIA_CLASS_ALBUM,
MEDIA_TYPE_ARTIST: MEDIA_CLASS_ARTIST,
MEDIA_TYPE_MOVIE: MEDIA_CLASS_MOVIE,
MEDIA_TYPE_PLAYLIST: MEDIA_CLASS_PLAYLIST,
MEDIA_TYPE_TRACK: MEDIA_CLASS_TRACK,
MEDIA_TYPE_TVSHOW: MEDIA_CLASS_TV_SHOW,
MEDIA_TYPE_CHANNEL: MEDIA_CLASS_CHANNEL,
MEDIA_TYPE_EPISODE: MEDIA_CLASS_EPISODE,
}
_LOGGER = logging.getLogger(__name__)
class UnknownMediaType(BrowseError):
"""Unknown media type."""
async def build_item_response(media_library, payload):
"""Create response payload for the provided media query."""
search_id = payload["search_id"]
search_type = payload["search_type"]
thumbnail = None
title = None
media = None
properties = ["thumbnail"]
if search_type == MEDIA_TYPE_ALBUM:
if search_id:
album = await media_library.get_album_details(
album_id=int(search_id), properties=properties
)
thumbnail = media_library.thumbnail_url(
album["albumdetails"].get("thumbnail")
)
title = album["albumdetails"]["label"]
media = await media_library.get_songs(
album_id=int(search_id),
properties=[
"albumid",
"artist",
"duration",
"album",
"thumbnail",
"track",
],
)
media = media.get("songs")
else:
media = await media_library.get_albums(properties=properties)
media = media.get("albums")
title = "Albums"
elif search_type == MEDIA_TYPE_ARTIST:
if search_id:
media = await media_library.get_albums(
artist_id=int(search_id), properties=properties
)
media = media.get("albums")
artist = await media_library.get_artist_details(
artist_id=int(search_id), properties=properties
)
thumbnail = media_library.thumbnail_url(
artist["artistdetails"].get("thumbnail")
)
title = artist["artistdetails"]["label"]
else:
media = await media_library.get_artists(properties)
media = media.get("artists")
title = "Artists"
elif search_type == "library_music":
library = {MEDIA_TYPE_ALBUM: "Albums", MEDIA_TYPE_ARTIST: "Artists"}
media = [{"label": name, "type": type_} for type_, name in library.items()]
title = "Music Library"
elif search_type == MEDIA_TYPE_MOVIE:
media = await media_library.get_movies(properties)
media = media.get("movies")
title = "Movies"
elif search_type == MEDIA_TYPE_TVSHOW:
if search_id:
media = await media_library.get_seasons(
tv_show_id=int(search_id),
properties=["thumbnail", "season", "tvshowid"],
)
media = media.get("seasons")
tvshow = await media_library.get_tv_show_details(
tv_show_id=int(search_id), properties=properties
)
thumbnail = media_library.thumbnail_url(
tvshow["tvshowdetails"].get("thumbnail")
)
title = tvshow["tvshowdetails"]["label"]
else:
media = await media_library.get_tv_shows(properties)
media = media.get("tvshows")
title = "TV Shows"
elif search_type == MEDIA_TYPE_SEASON:
tv_show_id, season_id = search_id.split("/", 1)
media = await media_library.get_episodes(
tv_show_id=int(tv_show_id),
season_id=int(season_id),
properties=["thumbnail", "tvshowid", "seasonid"],
)
media = media.get("episodes")
if media:
season = await media_library.get_season_details(
season_id=int(media[0]["seasonid"]), properties=properties
)
thumbnail = media_library.thumbnail_url(
season["seasondetails"].get("thumbnail")
)
title = season["seasondetails"]["label"]
elif search_type == MEDIA_TYPE_CHANNEL:
media = await media_library.get_channels(
channel_group_id="alltv",
properties=["thumbnail", "channeltype", "channel", "broadcastnow"],
)
media = media.get("channels")
title = "Channels"
if media is None:
return None
children = []
for item in media:
try:
children.append(item_payload(item, media_library))
except UnknownMediaType:
pass
if search_type in (MEDIA_TYPE_TVSHOW, MEDIA_TYPE_MOVIE) and search_id == "":
children.sort(key=lambda x: x.title.replace("The ", "", 1), reverse=False)
response = BrowseMedia(
media_class=CONTAINER_TYPES_SPECIFIC_MEDIA_CLASS.get(
search_type, MEDIA_CLASS_DIRECTORY
),
media_content_id=search_id,
media_content_type=search_type,
title=title,
can_play=search_type in PLAYABLE_MEDIA_TYPES and search_id,
can_expand=True,
children=children,
thumbnail=thumbnail,
)
if search_type == "library_music":
response.children_media_class = MEDIA_CLASS_MUSIC
else:
response.calculate_children_class()
return response
def item_payload(item, media_library):
"""
Create response payload for a single media item.
Used by async_browse_media.
"""
title = item["label"]
thumbnail = item.get("thumbnail")
if thumbnail:
thumbnail = media_library.thumbnail_url(thumbnail)
media_class = None
if "songid" in item:
media_content_type = MEDIA_TYPE_TRACK
media_content_id = f"{item['songid']}"
can_play = True
can_expand = False
elif "albumid" in item:
media_content_type = MEDIA_TYPE_ALBUM
media_content_id = f"{item['albumid']}"
can_play = True
can_expand = True
elif "artistid" in item:
media_content_type = MEDIA_TYPE_ARTIST
media_content_id = f"{item['artistid']}"
can_play = True
can_expand = True
elif "movieid" in item:
media_content_type = MEDIA_TYPE_MOVIE
media_content_id = f"{item['movieid']}"
can_play = True
can_expand = False
elif "episodeid" in item:
media_content_type = MEDIA_TYPE_EPISODE
media_content_id = f"{item['episodeid']}"
can_play = True
can_expand = False
elif "seasonid" in item:
media_content_type = MEDIA_TYPE_SEASON
media_content_id = f"{item['tvshowid']}/{item['season']}"
can_play = False
can_expand = True
elif "tvshowid" in item:
media_content_type = MEDIA_TYPE_TVSHOW
media_content_id = f"{item['tvshowid']}"
can_play = False
can_expand = True
elif "channelid" in item:
media_content_type = MEDIA_TYPE_CHANNEL
media_content_id = f"{item['channelid']}"
broadcasting = item.get("broadcastnow")
if broadcasting:
show = broadcasting.get("title")
title = f"{title} - {show}"
can_play = True
can_expand = False
else:
# this case is for the top folder of each type
# possible content types: album, artist, movie, library_music, tvshow, channel
media_class = MEDIA_CLASS_DIRECTORY
media_content_type = item["type"]
media_content_id = ""
can_play = False
can_expand = True
if media_class is None:
try:
media_class = CHILD_TYPE_MEDIA_CLASS[media_content_type]
except KeyError as err:
_LOGGER.debug("Unknown media type received: %s", media_content_type)
raise UnknownMediaType from err
return BrowseMedia(
title=title,
media_class=media_class,
media_content_type=media_content_type,
media_content_id=media_content_id,
can_play=can_play,
can_expand=can_expand,
thumbnail=thumbnail,
)
def library_payload(media_library):
"""
Create response payload to describe contents of a specific library.
Used by async_browse_media.
"""
library_info = BrowseMedia(
media_class=MEDIA_CLASS_DIRECTORY,
media_content_id="library",
media_content_type="library",
title="Media Library",
can_play=False,
can_expand=True,
children=[],
)
library = {
"library_music": "Music",
MEDIA_TYPE_MOVIE: "Movies",
MEDIA_TYPE_TVSHOW: "TV shows",
MEDIA_TYPE_CHANNEL: "Channels",
}
for item in [{"label": name, "type": type_} for type_, name in library.items()]:
library_info.children.append(
item_payload(
{"label": item["label"], "type": item["type"], "uri": item["type"]},
media_library,
)
)
return library_info
|
from __future__ import division
import unittest
import numpy as np
from chainer import testing
from chainercv.links.model.faster_rcnn import generate_anchor_base
class TestGenerateAnchorBase(unittest.TestCase):
    def test_generate_anchor_base(self):
gt = np.array(
[[-24., -120., 40., 136.],
[-56., -248., 72., 264.],
[-120., -504., 136., 520.],
[-56., -56., 72., 72.],
[-120., -120., 136., 136.],
[-248., -248., 264., 264.],
[-120., -24., 136., 40.],
[-248., -56., 264., 72.],
[-504., -120., 520., 136.]])
base_size = 16
anchor_scales = [8, 16, 32]
ratios = [0.25, 1, 4]
out = generate_anchor_base(base_size=base_size,
anchor_scales=anchor_scales,
ratios=ratios)
np.testing.assert_equal(gt, out)
testing.run_module(__name__, __file__)
|
import subprocess
import sys
from flask_script import Manager, Command, Server as _Server, Option
from app import SQLAlchemyDB as db, socketio, app, __version__
import os
# import shutil
manager = Manager(app)
class Server(_Server):
help = description = 'Runs the Git-WebHook web server'
def get_options(self):
options = (
Option('-h', '--host',
dest='host',
default='0.0.0.0'),
Option('-p', '--port',
dest='port',
type=int,
default=18340),
Option('-d', '--debug',
action='store_true',
dest='use_debugger',
help=('enable the Werkzeug debugger (DO NOT use in '
'production code)'),
default=self.use_debugger),
Option('-D', '--no-debug',
action='store_false',
dest='use_debugger',
help='disable the Werkzeug debugger',
default=self.use_debugger),
Option('-r', '--reload',
action='store_true',
dest='use_reloader',
help=('monitor Python files for changes (not 100%% safe '
'for production use)'),
default=self.use_reloader),
Option('-R', '--no-reload',
action='store_false',
dest='use_reloader',
help='do not monitor Python files for changes',
default=self.use_reloader),
)
return options
def __call__(self, app, host, port, use_debugger, use_reloader):
# override the default runserver command to start a Socket.IO server
if use_debugger is None:
use_debugger = app.debug
if use_debugger is None:
use_debugger = True
if use_reloader is None:
use_reloader = app.debug
import eventlet
# monkey_patch
eventlet.monkey_patch()
socketio.run(app,
host=host,
port=port,
debug=use_debugger,
use_reloader=use_reloader,
**self.server_options)
manager.add_command("runserver", Server())
class CeleryWorker(Command):
"""Starts the celery worker."""
name = 'celery'
capture_all_args = True
def run(self, argv):
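        # Forward any extra CLI arguments straight to the celery worker command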
cmd = ['celery', '-A', 'app.celeryInstance', 'worker'] + argv
ret = subprocess.call(cmd)
sys.exit(ret)
manager.add_command("celery", CeleryWorker())
CONFIG_TEMP = """# -*- coding: utf-8 -*-
'''
Created on 2016-10-20
@author: hustcc
'''
# for sqlite
DATABASE_URI = 'sqlite:///git_webhook.db'
# for mysql
# DATABASE_URI = 'mysql+pymysql://dev:[email protected]/git_webhook'
CELERY_BROKER_URL = 'redis://:[email protected]:6379/0'
CELERY_RESULT_BACKEND = 'redis://:[email protected]:6379/0'
SOCKET_MESSAGE_QUEUE = 'redis://:[email protected]:6379/0'
GITHUB_CLIENT_ID = 'b6e751cc48d664240467'
GITHUB_CLIENT_SECRET = '6a9e0cbeee1bf89a1e1a25958f35b9dc6b36c996'
"""
class Config(Command):
"""Generates new configuration file into user Home dir."""
name = 'config'
capture_all_args = True
def run(self, argv):
dir = os.path.join(os.path.expanduser('~'), '.git-webhook')
if not os.path.exists(dir):
os.makedirs(dir)
if os.path.isdir(dir):
path = os.path.join(dir, 'git_webhook_config.py')
if os.path.exists(path):
                print('Fail: the configuration file already exists in `%s`.' % path)
else:
# shutil.copy('app/config_example.py', path)
with open(path, 'w') as f:
f.write(CONFIG_TEMP)
                print('OK: initialized configuration file at `%s`.' % path)
else:
            print('Fail: %s should be a directory.' % dir)
manager.add_command("config", Config())
@manager.command
def createdb(drop_first=False):
"""Creates the database."""
if drop_first:
db.drop_all()
db.create_all()
    print('OK: database is initialized.')
@manager.command
def lint():
"""Runs code linter."""
    passed = subprocess.call(['flake8']) == 0
    if passed:
        print('OK')
    sys.exit(0 if passed else 1)
@manager.command
def version():
"Shows the version"
print __version__
# script entry
def run():
manager.run()
if __name__ == '__main__':
manager.run()
|
import numpy as np
import unittest
from chainer.backends import cuda
from chainer import testing
from chainer.testing import attr
from chainercv.links.model.light_head_rcnn import LightHeadRCNNTrainChain
from chainercv.utils import generate_random_bbox
from tests.links_tests.model_tests.light_head_rcnn_tests.test_light_head_rcnn \
import DummyLightHeadRCNN
def _random_array(shape):
return np.array(
np.random.uniform(-1, 1, size=shape), dtype=np.float32)
class TestLightHeadRCNNTrainChain(unittest.TestCase):
def setUp(self):
self.n_anchor_base = 6
self.feat_stride = 4
self.n_fg_class = 3
self.n_roi = 24
self.n_bbox = 3
self.model = LightHeadRCNNTrainChain(
DummyLightHeadRCNN(
n_anchor_base=self.n_anchor_base,
feat_stride=self.feat_stride,
n_fg_class=self.n_fg_class,
n_roi=self.n_roi,
min_size=600,
max_size=800,
loc_normalize_mean=(0., 0., 0., 0.),
loc_normalize_std=(0.1, 0.1, 0.2, 0.2),))
self.bboxes = generate_random_bbox(
self.n_bbox, (600, 800), 16, 350)[np.newaxis]
self.labels = np.random.randint(
0, self.n_fg_class, size=(1, self.n_bbox)).astype(np.int32)
self.imgs = _random_array((1, 3, 600, 800))
self.scales = np.array([1.])
def check_call(self, model, imgs, bboxes, labels, scales):
loss = self.model(imgs, bboxes, labels, scales)
self.assertEqual(loss.shape, ())
def test_call_cpu(self):
self.check_call(
self.model, self.imgs, self.bboxes, self.labels, self.scales)
@attr.gpu
def test_call_gpu(self):
self.model.to_gpu()
self.check_call(
self.model, cuda.to_gpu(self.imgs),
self.bboxes, self.labels, self.scales)
testing.run_module(__name__, __file__)
|
import asyncio
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.core import HomeAssistant
from .const import DOMAIN
PLATFORMS = ["light"]
async def async_setup(hass, config):
"""Set up the Zerproc platform."""
hass.async_create_task(
hass.config_entries.flow.async_init(DOMAIN, context={"source": SOURCE_IMPORT})
)
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Set up Zerproc from a config entry."""
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
return all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
|
import shutil
import pandas as pd
import pytest
from matchzoo import DataPack, load_data_pack
@pytest.fixture
def data_pack():
relation = [['qid0', 'did0', 1], ['qid1', 'did1', 0]]
left = [['qid0', [1, 2]], ['qid1', [2, 3]]]
right = [['did0', [2, 3, 4]], ['did1', [3, 4, 5]]]
relation = pd.DataFrame(relation, columns=['id_left', 'id_right', 'label'])
left = pd.DataFrame(left, columns=['id_left', 'text_left'])
left.set_index('id_left', inplace=True)
right = pd.DataFrame(right, columns=['id_right', 'text_right'])
right.set_index('id_right', inplace=True)
return DataPack(relation=relation,
left=left,
right=right)
def test_length(data_pack):
num_examples = 2
assert len(data_pack) == num_examples
def test_getter(data_pack):
assert data_pack.relation.iloc[0].values.tolist() == ['qid0', 'did0', 1]
assert data_pack.relation.iloc[1].values.tolist() == ['qid1', 'did1', 0]
assert data_pack.left.loc['qid0', 'text_left'] == [1, 2]
assert data_pack.right.loc['did1', 'text_right'] == [3, 4, 5]
def test_save_load(data_pack):
dirpath = '.tmpdir'
data_pack.save(dirpath)
dp = load_data_pack(dirpath)
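    # Saving to the same directory a second time should fail because it already exists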
with pytest.raises(FileExistsError):
data_pack.save(dirpath)
assert len(data_pack) == 2
assert len(dp) == 2
shutil.rmtree(dirpath)
|
import pytest
import arctic.serialization.numpy_records as anr
from tests.unit.serialization.serialization_test_data import _mixed_test_data as input_test_data
df_serializer = anr.DataFrameSerializer()
@pytest.mark.parametrize("input_df", input_test_data().keys())
def test_dataframe_confirm_fast_check_compatibility(input_df):
orig_config = anr.FAST_CHECK_DF_SERIALIZABLE
try:
input_df = input_test_data()[input_df][0]
anr.set_fast_check_df_serializable(True)
with_fast_check = df_serializer.can_convert_to_records_without_objects(input_df, 'symA')
anr.set_fast_check_df_serializable(False)
without_fast_check = df_serializer.can_convert_to_records_without_objects(input_df, 'symA')
assert with_fast_check == without_fast_check
finally:
anr.FAST_CHECK_DF_SERIALIZABLE = orig_config
|
from homeassistant.components.advantage_air.const import DOMAIN
from homeassistant.const import CONF_IP_ADDRESS, CONF_PORT
from tests.common import MockConfigEntry, load_fixture
TEST_SYSTEM_DATA = load_fixture("advantage_air/getSystemData.json")
TEST_SET_RESPONSE = load_fixture("advantage_air/setAircon.json")
USER_INPUT = {
CONF_IP_ADDRESS: "1.2.3.4",
CONF_PORT: 2025,
}
TEST_SYSTEM_URL = (
f"http://{USER_INPUT[CONF_IP_ADDRESS]}:{USER_INPUT[CONF_PORT]}/getSystemData"
)
TEST_SET_URL = f"http://{USER_INPUT[CONF_IP_ADDRESS]}:{USER_INPUT[CONF_PORT]}/setAircon"
async def add_mock_config(hass):
"""Create a fake Advantage Air Config Entry."""
entry = MockConfigEntry(
domain=DOMAIN,
title="test entry",
unique_id="0123456",
data=USER_INPUT,
)
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
return entry
|
import logging
import attr
import tp_connected
from homeassistant.components.notify import ATTR_TARGET, BaseNotificationService
from homeassistant.const import CONF_RECIPIENT
from . import DATA_KEY
_LOGGER = logging.getLogger(__name__)
async def async_get_service(hass, config, discovery_info=None):
"""Get the notification service."""
if discovery_info is None:
return
return TplinkNotifyService(hass, discovery_info)
@attr.s
class TplinkNotifyService(BaseNotificationService):
"""Implementation of a notification service."""
hass = attr.ib()
config = attr.ib()
async def async_send_message(self, message="", **kwargs):
"""Send a message to a user."""
modem_data = self.hass.data[DATA_KEY].get_modem_data(self.config)
if not modem_data:
_LOGGER.error("No modem available")
return
phone = self.config[CONF_RECIPIENT]
targets = kwargs.get(ATTR_TARGET, phone)
if targets and message:
for target in targets:
try:
await modem_data.modem.sms(target, message)
except tp_connected.Error:
_LOGGER.error("Unable to send to %s", target)
|
import voluptuous as vol
from homeassistant.const import CONF_HOST
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
DOMAIN = "mycroft"
CONFIG_SCHEMA = vol.Schema(
{DOMAIN: vol.Schema({vol.Required(CONF_HOST): cv.string})}, extra=vol.ALLOW_EXTRA
)
def setup(hass, config):
"""Set up the Mycroft component."""
hass.data[DOMAIN] = config[DOMAIN][CONF_HOST]
discovery.load_platform(hass, "notify", DOMAIN, {}, config)
return True
|
import asyncio
import functools
from functools import partial
import logging
import plexapi.exceptions
from plexapi.gdm import GDM
from plexwebsocket import (
SIGNAL_CONNECTION_STATE,
SIGNAL_DATA,
STATE_CONNECTED,
STATE_DISCONNECTED,
STATE_STOPPED,
PlexWebsocket,
)
import requests.exceptions
import voluptuous as vol
from homeassistant.components.media_player import DOMAIN as MP_DOMAIN
from homeassistant.components.media_player.const import (
ATTR_MEDIA_CONTENT_ID,
ATTR_MEDIA_CONTENT_TYPE,
)
from homeassistant.config_entries import ENTRY_STATE_SETUP_RETRY, SOURCE_REAUTH
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_SOURCE,
CONF_URL,
CONF_VERIFY_SSL,
EVENT_HOMEASSISTANT_STOP,
)
from homeassistant.core import callback
from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.debounce import Debouncer
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from .const import (
CONF_SERVER,
CONF_SERVER_IDENTIFIER,
DISPATCHERS,
DOMAIN as PLEX_DOMAIN,
GDM_DEBOUNCER,
GDM_SCANNER,
PLATFORMS,
PLATFORMS_COMPLETED,
PLEX_SERVER_CONFIG,
PLEX_UPDATE_PLATFORMS_SIGNAL,
SERVERS,
SERVICE_PLAY_ON_SONOS,
WEBSOCKETS,
)
from .errors import ShouldUpdateConfigEntry
from .server import PlexServer
from .services import async_setup_services, lookup_plex_media
_LOGGER = logging.getLogger(__package__)
async def async_setup(hass, config):
"""Set up the Plex component."""
hass.data.setdefault(
PLEX_DOMAIN,
{SERVERS: {}, DISPATCHERS: {}, WEBSOCKETS: {}, PLATFORMS_COMPLETED: {}},
)
await async_setup_services(hass)
gdm = hass.data[PLEX_DOMAIN][GDM_SCANNER] = GDM()
hass.data[PLEX_DOMAIN][GDM_DEBOUNCER] = Debouncer(
hass,
_LOGGER,
cooldown=10,
immediate=True,
function=partial(gdm.scan, scan_for_clients=True),
).async_call
return True
async def async_setup_entry(hass, entry):
"""Set up Plex from a config entry."""
server_config = entry.data[PLEX_SERVER_CONFIG]
if entry.unique_id is None:
hass.config_entries.async_update_entry(
entry, unique_id=entry.data[CONF_SERVER_IDENTIFIER]
)
if MP_DOMAIN not in entry.options:
options = dict(entry.options)
options.setdefault(MP_DOMAIN, {})
hass.config_entries.async_update_entry(entry, options=options)
plex_server = PlexServer(
hass,
server_config,
entry.data[CONF_SERVER_IDENTIFIER],
entry.options,
entry.entry_id,
)
try:
await hass.async_add_executor_job(plex_server.connect)
except ShouldUpdateConfigEntry:
new_server_data = {
**entry.data[PLEX_SERVER_CONFIG],
CONF_URL: plex_server.url_in_use,
CONF_SERVER: plex_server.friendly_name,
}
hass.config_entries.async_update_entry(
entry, data={**entry.data, PLEX_SERVER_CONFIG: new_server_data}
)
except requests.exceptions.ConnectionError as error:
if entry.state != ENTRY_STATE_SETUP_RETRY:
_LOGGER.error(
"Plex server (%s) could not be reached: [%s]",
server_config[CONF_URL],
error,
)
raise ConfigEntryNotReady from error
except plexapi.exceptions.Unauthorized:
hass.async_create_task(
hass.config_entries.flow.async_init(
PLEX_DOMAIN,
context={CONF_SOURCE: SOURCE_REAUTH},
data=entry.data,
)
)
_LOGGER.error(
"Token not accepted, please reauthenticate Plex server '%s'",
entry.data[CONF_SERVER],
)
return False
except (
plexapi.exceptions.BadRequest,
plexapi.exceptions.NotFound,
) as error:
_LOGGER.error(
"Login to %s failed, verify token and SSL settings: [%s]",
entry.data[CONF_SERVER],
error,
)
return False
_LOGGER.debug(
"Connected to: %s (%s)", plex_server.friendly_name, plex_server.url_in_use
)
server_id = plex_server.machine_identifier
hass.data[PLEX_DOMAIN][SERVERS][server_id] = plex_server
hass.data[PLEX_DOMAIN][PLATFORMS_COMPLETED][server_id] = set()
entry.add_update_listener(async_options_updated)
async def async_update_plex():
await hass.data[PLEX_DOMAIN][GDM_DEBOUNCER]()
await plex_server.async_update_platforms()
unsub = async_dispatcher_connect(
hass,
PLEX_UPDATE_PLATFORMS_SIGNAL.format(server_id),
async_update_plex,
)
hass.data[PLEX_DOMAIN][DISPATCHERS].setdefault(server_id, [])
hass.data[PLEX_DOMAIN][DISPATCHERS][server_id].append(unsub)
@callback
def plex_websocket_callback(signal, data, error):
"""Handle callbacks from plexwebsocket library."""
if signal == SIGNAL_CONNECTION_STATE:
if data == STATE_CONNECTED:
_LOGGER.debug("Websocket to %s successful", entry.data[CONF_SERVER])
elif data == STATE_DISCONNECTED:
_LOGGER.debug(
"Websocket to %s disconnected, retrying", entry.data[CONF_SERVER]
)
# Stopped websockets without errors are expected during shutdown and ignored
elif data == STATE_STOPPED and error:
_LOGGER.error(
"Websocket to %s failed, aborting [Error: %s]",
entry.data[CONF_SERVER],
error,
)
hass.async_create_task(hass.config_entries.async_reload(entry.entry_id))
elif signal == SIGNAL_DATA:
async_dispatcher_send(hass, PLEX_UPDATE_PLATFORMS_SIGNAL.format(server_id))
session = async_get_clientsession(hass)
verify_ssl = server_config.get(CONF_VERIFY_SSL)
websocket = PlexWebsocket(
plex_server.plex_server,
plex_websocket_callback,
session=session,
verify_ssl=verify_ssl,
)
hass.data[PLEX_DOMAIN][WEBSOCKETS][server_id] = websocket
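    # Start the websocket only once every platform has completed its setup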
def start_websocket_session(platform, _):
hass.data[PLEX_DOMAIN][PLATFORMS_COMPLETED][server_id].add(platform)
if hass.data[PLEX_DOMAIN][PLATFORMS_COMPLETED][server_id] == PLATFORMS:
hass.loop.create_task(websocket.listen())
def close_websocket_session(_):
websocket.close()
unsub = hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_STOP, close_websocket_session
)
hass.data[PLEX_DOMAIN][DISPATCHERS][server_id].append(unsub)
for platform in PLATFORMS:
task = hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, platform)
)
task.add_done_callback(functools.partial(start_websocket_session, platform))
async def async_play_on_sonos_service(service_call):
await hass.async_add_executor_job(play_on_sonos, hass, service_call)
play_on_sonos_schema = vol.Schema(
{
vol.Required(ATTR_ENTITY_ID): cv.entity_id,
vol.Required(ATTR_MEDIA_CONTENT_ID): str,
vol.Optional(ATTR_MEDIA_CONTENT_TYPE): vol.In("music"),
}
)
def get_plex_account(plex_server):
try:
return plex_server.account
except (plexapi.exceptions.BadRequest, plexapi.exceptions.Unauthorized):
return None
plex_account = await hass.async_add_executor_job(get_plex_account, plex_server)
if plex_account:
hass.services.async_register(
PLEX_DOMAIN,
SERVICE_PLAY_ON_SONOS,
async_play_on_sonos_service,
schema=play_on_sonos_schema,
)
return True
async def async_unload_entry(hass, entry):
"""Unload a config entry."""
server_id = entry.data[CONF_SERVER_IDENTIFIER]
websocket = hass.data[PLEX_DOMAIN][WEBSOCKETS].pop(server_id)
websocket.close()
dispatchers = hass.data[PLEX_DOMAIN][DISPATCHERS].pop(server_id)
for unsub in dispatchers:
unsub()
tasks = [
hass.config_entries.async_forward_entry_unload(entry, platform)
for platform in PLATFORMS
]
await asyncio.gather(*tasks)
hass.data[PLEX_DOMAIN][SERVERS].pop(server_id)
return True
async def async_options_updated(hass, entry):
"""Triggered by config entry options updates."""
server_id = entry.data[CONF_SERVER_IDENTIFIER]
# Guard incomplete setup during reauth flows
if server_id in hass.data[PLEX_DOMAIN][SERVERS]:
hass.data[PLEX_DOMAIN][SERVERS][server_id].options = entry.options
def play_on_sonos(hass, service_call):
"""Play Plex media on a linked Sonos device."""
entity_id = service_call.data[ATTR_ENTITY_ID]
content_id = service_call.data[ATTR_MEDIA_CONTENT_ID]
content_type = service_call.data.get(ATTR_MEDIA_CONTENT_TYPE)
sonos = hass.components.sonos
try:
sonos_name = sonos.get_coordinator_name(entity_id)
except HomeAssistantError as err:
_LOGGER.error("Cannot get Sonos device: %s", err)
return
media, plex_server = lookup_plex_media(hass, content_type, content_id)
if media is None:
return
sonos_speaker = plex_server.account.sonos_speaker(sonos_name)
if sonos_speaker is None:
_LOGGER.error(
"Sonos speaker '%s' could not be found on this Plex account", sonos_name
)
return
sonos_speaker.playMedia(media)
|
from datetime import datetime
import logging
import time
from nuheat.config import SCHEDULE_HOLD, SCHEDULE_RUN, SCHEDULE_TEMPORARY_HOLD
from nuheat.util import (
celsius_to_nuheat,
fahrenheit_to_nuheat,
nuheat_to_celsius,
nuheat_to_fahrenheit,
)
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
ATTR_HVAC_MODE,
CURRENT_HVAC_HEAT,
CURRENT_HVAC_IDLE,
HVAC_MODE_AUTO,
HVAC_MODE_HEAT,
SUPPORT_PRESET_MODE,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS, TEMP_FAHRENHEIT
from homeassistant.core import callback
from homeassistant.helpers import event as event_helper
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import (
DOMAIN,
MANUFACTURER,
NUHEAT_API_STATE_SHIFT_DELAY,
NUHEAT_DATETIME_FORMAT,
NUHEAT_KEY_HOLD_SET_POINT_DATE_TIME,
NUHEAT_KEY_SCHEDULE_MODE,
NUHEAT_KEY_SET_POINT_TEMP,
TEMP_HOLD_TIME_SEC,
)
_LOGGER = logging.getLogger(__name__)
# The device does not have an off function.
# To turn it off set to min_temp and PRESET_PERMANENT_HOLD
OPERATION_LIST = [HVAC_MODE_AUTO, HVAC_MODE_HEAT]
PRESET_RUN = "Run Schedule"
PRESET_TEMPORARY_HOLD = "Temporary Hold"
PRESET_PERMANENT_HOLD = "Permanent Hold"
PRESET_MODES = [PRESET_RUN, PRESET_TEMPORARY_HOLD, PRESET_PERMANENT_HOLD]
PRESET_MODE_TO_SCHEDULE_MODE_MAP = {
PRESET_RUN: SCHEDULE_RUN,
PRESET_TEMPORARY_HOLD: SCHEDULE_TEMPORARY_HOLD,
PRESET_PERMANENT_HOLD: SCHEDULE_HOLD,
}
SCHEDULE_MODE_TO_PRESET_MODE_MAP = {
value: key for key, value in PRESET_MODE_TO_SCHEDULE_MODE_MAP.items()
}
SUPPORT_FLAGS = SUPPORT_TARGET_TEMPERATURE | SUPPORT_PRESET_MODE
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the NuHeat thermostat(s)."""
thermostat, coordinator = hass.data[DOMAIN][config_entry.entry_id]
temperature_unit = hass.config.units.temperature_unit
entity = NuHeatThermostat(coordinator, thermostat, temperature_unit)
# No longer need a service as set_hvac_mode to auto does this
# since climate 1.0 has been implemented
async_add_entities([entity], True)
class NuHeatThermostat(CoordinatorEntity, ClimateEntity):
"""Representation of a NuHeat Thermostat."""
def __init__(self, coordinator, thermostat, temperature_unit):
"""Initialize the thermostat."""
super().__init__(coordinator)
self._thermostat = thermostat
self._temperature_unit = temperature_unit
self._schedule_mode = None
self._target_temperature = None
@property
def name(self):
"""Return the name of the thermostat."""
return self._thermostat.room
@property
def supported_features(self):
"""Return the list of supported features."""
return SUPPORT_FLAGS
@property
def temperature_unit(self):
"""Return the unit of measurement."""
if self._temperature_unit == "C":
return TEMP_CELSIUS
return TEMP_FAHRENHEIT
@property
def current_temperature(self):
"""Return the current temperature."""
if self._temperature_unit == "C":
return self._thermostat.celsius
return self._thermostat.fahrenheit
@property
def unique_id(self):
"""Return the unique id."""
return self._thermostat.serial_number
@property
def available(self):
"""Return the unique id."""
return self.coordinator.last_update_success and self._thermostat.online
def set_hvac_mode(self, hvac_mode):
"""Set the system mode."""
if hvac_mode == HVAC_MODE_AUTO:
self._set_schedule_mode(SCHEDULE_RUN)
elif hvac_mode == HVAC_MODE_HEAT:
self._set_schedule_mode(SCHEDULE_HOLD)
@property
def hvac_mode(self):
"""Return current setting heat or auto."""
if self._schedule_mode in (SCHEDULE_TEMPORARY_HOLD, SCHEDULE_HOLD):
return HVAC_MODE_HEAT
return HVAC_MODE_AUTO
@property
def hvac_action(self):
"""Return current operation heat or idle."""
return CURRENT_HVAC_HEAT if self._thermostat.heating else CURRENT_HVAC_IDLE
@property
def min_temp(self):
"""Return the minimum supported temperature for the thermostat."""
if self._temperature_unit == "C":
return self._thermostat.min_celsius
return self._thermostat.min_fahrenheit
@property
def max_temp(self):
"""Return the maximum supported temperature for the thermostat."""
if self._temperature_unit == "C":
return self._thermostat.max_celsius
return self._thermostat.max_fahrenheit
@property
def target_temperature(self):
"""Return the currently programmed temperature."""
if self._temperature_unit == "C":
return nuheat_to_celsius(self._target_temperature)
return nuheat_to_fahrenheit(self._target_temperature)
@property
def preset_mode(self):
"""Return current preset mode."""
return SCHEDULE_MODE_TO_PRESET_MODE_MAP.get(self._schedule_mode, PRESET_RUN)
@property
def preset_modes(self):
"""Return available preset modes."""
return PRESET_MODES
@property
def hvac_modes(self):
"""Return list of possible operation modes."""
return OPERATION_LIST
def set_preset_mode(self, preset_mode):
"""Update the hold mode of the thermostat."""
self._set_schedule_mode(
PRESET_MODE_TO_SCHEDULE_MODE_MAP.get(preset_mode, SCHEDULE_RUN)
)
def _set_schedule_mode(self, schedule_mode):
"""Set a schedule mode."""
self._schedule_mode = schedule_mode
# Changing the property here does the actual set
self._thermostat.schedule_mode = schedule_mode
self._schedule_update()
def set_temperature(self, **kwargs):
"""Set a new target temperature."""
self._set_temperature_and_mode(
kwargs.get(ATTR_TEMPERATURE), hvac_mode=kwargs.get(ATTR_HVAC_MODE)
)
def _set_temperature_and_mode(self, temperature, hvac_mode=None, preset_mode=None):
"""Set temperature and hvac mode at the same time."""
if self._temperature_unit == "C":
target_temperature = celsius_to_nuheat(temperature)
else:
target_temperature = fahrenheit_to_nuheat(temperature)
# If they set a temperature without changing the mode
# to heat, we behave like the device does locally
# and set a temp hold.
target_schedule_mode = SCHEDULE_TEMPORARY_HOLD
if preset_mode:
target_schedule_mode = PRESET_MODE_TO_SCHEDULE_MODE_MAP.get(
preset_mode, SCHEDULE_RUN
)
elif self._schedule_mode == SCHEDULE_HOLD or (
hvac_mode and hvac_mode == HVAC_MODE_HEAT
):
target_schedule_mode = SCHEDULE_HOLD
_LOGGER.debug(
"Setting NuHeat thermostat temperature to %s %s and schedule mode: %s",
temperature,
self.temperature_unit,
target_schedule_mode,
)
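        # Clamp the requested set point to the thermostat's supported range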
target_temperature = max(
min(self._thermostat.max_temperature, target_temperature),
self._thermostat.min_temperature,
)
request = {
NUHEAT_KEY_SET_POINT_TEMP: target_temperature,
NUHEAT_KEY_SCHEDULE_MODE: target_schedule_mode,
}
if target_schedule_mode == SCHEDULE_TEMPORARY_HOLD:
request[NUHEAT_KEY_HOLD_SET_POINT_DATE_TIME] = datetime.fromtimestamp(
time.time() + TEMP_HOLD_TIME_SEC
).strftime(NUHEAT_DATETIME_FORMAT)
self._thermostat.set_data(request)
self._schedule_mode = target_schedule_mode
self._target_temperature = target_temperature
self._schedule_update()
def _schedule_update(self):
if not self.hass:
return
# Update the new state
self.schedule_update_ha_state(False)
# nuheat has a delay switching state
# so we schedule a poll of the api
# in the future to make sure the change actually
# took effect
event_helper.call_later(
self.hass, NUHEAT_API_STATE_SHIFT_DELAY, self._forced_refresh
)
async def _forced_refresh(self, *_) -> None:
"""Force a refresh."""
await self.coordinator.async_refresh()
async def async_added_to_hass(self) -> None:
"""When entity is added to hass."""
await super().async_added_to_hass()
self._update_internal_state()
@callback
def _update_internal_state(self):
"""Update our internal state from the last api response."""
self._schedule_mode = self._thermostat.schedule_mode
self._target_temperature = self._thermostat.target_temperature
@callback
def _handle_coordinator_update(self):
"""Get the latest state from the thermostat."""
self._update_internal_state()
self.async_write_ha_state()
@property
def device_info(self):
"""Return the device_info of the device."""
return {
"identifiers": {(DOMAIN, self._thermostat.serial_number)},
"name": self._thermostat.room,
"model": "nVent Signature",
"manufacturer": MANUFACTURER,
}
|
import pytest
from homeassistant.components.alarm_control_panel import DOMAIN as ALARM_DOMAIN
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_FRIENDLY_NAME,
SERVICE_ALARM_ARM_AWAY,
SERVICE_ALARM_ARM_HOME,
SERVICE_ALARM_DISARM,
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_DISARMED,
)
from .common import (
RESPONSE_ARM_FAILURE,
RESPONSE_ARM_SUCCESS,
RESPONSE_ARMED_AWAY,
RESPONSE_ARMED_STAY,
RESPONSE_DISARM_FAILURE,
RESPONSE_DISARM_SUCCESS,
RESPONSE_DISARMED,
setup_platform,
)
from tests.async_mock import patch
ENTITY_ID = "alarm_control_panel.test"
CODE = "-1"
DATA = {ATTR_ENTITY_ID: ENTITY_ID}
async def test_attributes(hass):
"""Test the alarm control panel attributes are correct."""
with patch(
"homeassistant.components.totalconnect.TotalConnectClient.TotalConnectClient.request",
return_value=RESPONSE_DISARMED,
) as mock_request:
await setup_platform(hass, ALARM_DOMAIN)
state = hass.states.get(ENTITY_ID)
assert state.state == STATE_ALARM_DISARMED
mock_request.assert_called_once()
assert state.attributes.get(ATTR_FRIENDLY_NAME) == "test"
async def test_arm_home_success(hass):
"""Test arm home method success."""
responses = [RESPONSE_DISARMED, RESPONSE_ARM_SUCCESS, RESPONSE_ARMED_STAY]
with patch(
"homeassistant.components.totalconnect.TotalConnectClient.TotalConnectClient.request",
side_effect=responses,
):
await setup_platform(hass, ALARM_DOMAIN)
assert STATE_ALARM_DISARMED == hass.states.get(ENTITY_ID).state
await hass.services.async_call(
ALARM_DOMAIN, SERVICE_ALARM_ARM_HOME, DATA, blocking=True
)
await hass.async_block_till_done()
assert STATE_ALARM_ARMED_HOME == hass.states.get(ENTITY_ID).state
async def test_arm_home_failure(hass):
"""Test arm home method failure."""
responses = [RESPONSE_DISARMED, RESPONSE_ARM_FAILURE, RESPONSE_DISARMED]
with patch(
"homeassistant.components.totalconnect.TotalConnectClient.TotalConnectClient.request",
side_effect=responses,
):
await setup_platform(hass, ALARM_DOMAIN)
assert STATE_ALARM_DISARMED == hass.states.get(ENTITY_ID).state
with pytest.raises(Exception) as e:
await hass.services.async_call(
ALARM_DOMAIN, SERVICE_ALARM_ARM_HOME, DATA, blocking=True
)
await hass.async_block_till_done()
assert f"{e.value}" == "TotalConnect failed to arm home test."
assert STATE_ALARM_DISARMED == hass.states.get(ENTITY_ID).state
async def test_arm_away_success(hass):
"""Test arm away method success."""
responses = [RESPONSE_DISARMED, RESPONSE_ARM_SUCCESS, RESPONSE_ARMED_AWAY]
with patch(
"homeassistant.components.totalconnect.TotalConnectClient.TotalConnectClient.request",
side_effect=responses,
):
await setup_platform(hass, ALARM_DOMAIN)
assert STATE_ALARM_DISARMED == hass.states.get(ENTITY_ID).state
await hass.services.async_call(
ALARM_DOMAIN, SERVICE_ALARM_ARM_AWAY, DATA, blocking=True
)
await hass.async_block_till_done()
assert STATE_ALARM_ARMED_AWAY == hass.states.get(ENTITY_ID).state
async def test_arm_away_failure(hass):
"""Test arm away method failure."""
responses = [RESPONSE_DISARMED, RESPONSE_ARM_FAILURE, RESPONSE_DISARMED]
with patch(
"homeassistant.components.totalconnect.TotalConnectClient.TotalConnectClient.request",
side_effect=responses,
):
await setup_platform(hass, ALARM_DOMAIN)
assert STATE_ALARM_DISARMED == hass.states.get(ENTITY_ID).state
with pytest.raises(Exception) as e:
await hass.services.async_call(
ALARM_DOMAIN, SERVICE_ALARM_ARM_AWAY, DATA, blocking=True
)
await hass.async_block_till_done()
assert f"{e.value}" == "TotalConnect failed to arm away test."
assert STATE_ALARM_DISARMED == hass.states.get(ENTITY_ID).state
async def test_disarm_success(hass):
"""Test disarm method success."""
responses = [RESPONSE_ARMED_AWAY, RESPONSE_DISARM_SUCCESS, RESPONSE_DISARMED]
with patch(
"homeassistant.components.totalconnect.TotalConnectClient.TotalConnectClient.request",
side_effect=responses,
):
await setup_platform(hass, ALARM_DOMAIN)
assert STATE_ALARM_ARMED_AWAY == hass.states.get(ENTITY_ID).state
await hass.services.async_call(
ALARM_DOMAIN, SERVICE_ALARM_DISARM, DATA, blocking=True
)
await hass.async_block_till_done()
assert STATE_ALARM_DISARMED == hass.states.get(ENTITY_ID).state
async def test_disarm_failure(hass):
"""Test disarm method failure."""
responses = [RESPONSE_ARMED_AWAY, RESPONSE_DISARM_FAILURE, RESPONSE_ARMED_AWAY]
with patch(
"homeassistant.components.totalconnect.TotalConnectClient.TotalConnectClient.request",
side_effect=responses,
):
await setup_platform(hass, ALARM_DOMAIN)
assert STATE_ALARM_ARMED_AWAY == hass.states.get(ENTITY_ID).state
with pytest.raises(Exception) as e:
await hass.services.async_call(
ALARM_DOMAIN, SERVICE_ALARM_DISARM, DATA, blocking=True
)
await hass.async_block_till_done()
assert f"{e.value}" == "TotalConnect failed to disarm test."
assert STATE_ALARM_ARMED_AWAY == hass.states.get(ENTITY_ID).state
|
import pytest
import voluptuous_serialize
import homeassistant.components.automation as automation
from homeassistant.components.climate import DOMAIN, const, device_action
from homeassistant.helpers import config_validation as cv, device_registry
from homeassistant.setup import async_setup_component
from tests.common import (
MockConfigEntry,
assert_lists_same,
async_get_device_automations,
async_mock_service,
mock_device_registry,
mock_registry,
)
@pytest.fixture
def device_reg(hass):
"""Return an empty, loaded, registry."""
return mock_device_registry(hass)
@pytest.fixture
def entity_reg(hass):
"""Return an empty, loaded, registry."""
return mock_registry(hass)
async def test_get_actions(hass, device_reg, entity_reg):
"""Test we get the expected actions from a climate."""
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device_entry.id)
hass.states.async_set("climate.test_5678", const.HVAC_MODE_COOL, {})
hass.states.async_set("climate.test_5678", "attributes", {"supported_features": 17})
expected_actions = [
{
"domain": DOMAIN,
"type": "set_hvac_mode",
"device_id": device_entry.id,
"entity_id": "climate.test_5678",
},
{
"domain": DOMAIN,
"type": "set_preset_mode",
"device_id": device_entry.id,
"entity_id": "climate.test_5678",
},
]
actions = await async_get_device_automations(hass, "action", device_entry.id)
assert_lists_same(actions, expected_actions)
async def test_get_action_hvac_only(hass, device_reg, entity_reg):
"""Test we get the expected actions from a climate."""
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device_entry.id)
hass.states.async_set("climate.test_5678", const.HVAC_MODE_COOL, {})
hass.states.async_set("climate.test_5678", "attributes", {"supported_features": 1})
expected_actions = [
{
"domain": DOMAIN,
"type": "set_hvac_mode",
"device_id": device_entry.id,
"entity_id": "climate.test_5678",
},
]
actions = await async_get_device_automations(hass, "action", device_entry.id)
assert_lists_same(actions, expected_actions)
async def test_action(hass):
"""Test for actions."""
hass.states.async_set(
"climate.entity",
const.HVAC_MODE_COOL,
{
const.ATTR_HVAC_MODES: [const.HVAC_MODE_COOL, const.HVAC_MODE_OFF],
const.ATTR_PRESET_MODES: [const.PRESET_HOME, const.PRESET_AWAY],
},
)
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {
"platform": "event",
"event_type": "test_event_set_hvac_mode",
},
"action": {
"domain": DOMAIN,
"device_id": "abcdefgh",
"entity_id": "climate.entity",
"type": "set_hvac_mode",
"hvac_mode": const.HVAC_MODE_OFF,
},
},
{
"trigger": {
"platform": "event",
"event_type": "test_event_set_preset_mode",
},
"action": {
"domain": DOMAIN,
"device_id": "abcdefgh",
"entity_id": "climate.entity",
"type": "set_preset_mode",
"preset_mode": const.PRESET_AWAY,
},
},
]
},
)
set_hvac_mode_calls = async_mock_service(hass, "climate", "set_hvac_mode")
set_preset_mode_calls = async_mock_service(hass, "climate", "set_preset_mode")
hass.bus.async_fire("test_event_set_hvac_mode")
await hass.async_block_till_done()
assert len(set_hvac_mode_calls) == 1
assert len(set_preset_mode_calls) == 0
hass.bus.async_fire("test_event_set_preset_mode")
await hass.async_block_till_done()
assert len(set_hvac_mode_calls) == 1
assert len(set_preset_mode_calls) == 1
async def test_capabilities(hass):
"""Test getting capabilities."""
hass.states.async_set(
"climate.entity",
const.HVAC_MODE_COOL,
{
const.ATTR_HVAC_MODES: [const.HVAC_MODE_COOL, const.HVAC_MODE_OFF],
const.ATTR_PRESET_MODES: [const.PRESET_HOME, const.PRESET_AWAY],
},
)
# Set HVAC mode
capabilities = await device_action.async_get_action_capabilities(
hass,
{
"domain": DOMAIN,
"device_id": "abcdefgh",
"entity_id": "climate.entity",
"type": "set_hvac_mode",
},
)
assert capabilities and "extra_fields" in capabilities
assert voluptuous_serialize.convert(
capabilities["extra_fields"], custom_serializer=cv.custom_serializer
) == [
{
"name": "hvac_mode",
"options": [("cool", "cool"), ("off", "off")],
"required": True,
"type": "select",
}
]
# Set preset mode
capabilities = await device_action.async_get_action_capabilities(
hass,
{
"domain": DOMAIN,
"device_id": "abcdefgh",
"entity_id": "climate.entity",
"type": "set_preset_mode",
},
)
assert capabilities and "extra_fields" in capabilities
assert voluptuous_serialize.convert(
capabilities["extra_fields"], custom_serializer=cv.custom_serializer
) == [
{
"name": "preset_mode",
"options": [("home", "home"), ("away", "away")],
"required": True,
"type": "select",
}
]
|
import numpy as np
import chainer
from chainer.functions import average_pooling_2d
from chainer.functions import clipped_relu
from chainer.functions import softmax
from chainer.functions import squeeze
from chainercv.links.model.mobilenet.expanded_conv_2d import ExpandedConv2D
from chainercv.links.model.mobilenet.tf_conv_2d_bn_activ import TFConv2DBNActiv
from chainercv.links.model.mobilenet.tf_convolution_2d import TFConvolution2D
from chainercv.links.model.mobilenet.util import _make_divisible
from chainercv.links.model.mobilenet.util import expand_input_by_factor
from chainercv.links.model.pickable_sequential_chain import \
PickableSequentialChain
from chainercv import utils
"""
Implementation of Mobilenet V2, converting the weights from the pretrained
Tensorflow model from
https://github.com/tensorflow/models/tree/master/research/slim/nets/mobilenet
This MobileNetV2 implementation is based on @alexisVallet's one.
@okdshin modified it for ChainerCV.
"""
def _depth_multiplied_output_channels(base_out_channels,
multiplier,
divisable_by=8,
min_depth=8):
return _make_divisible(base_out_channels * multiplier, divisable_by,
min_depth)
_tf_mobilenetv2_mean = np.asarray(
[128] * 3, dtype=np.float)[:, np.newaxis, np.newaxis]
# RGB order
_imagenet_mean = np.array(
[123.68, 116.779, 103.939], dtype=np.float32)[:, np.newaxis, np.newaxis]
class MobileNetV2(PickableSequentialChain):
"""MobileNetV2 Network.
This is a pickable sequential link.
The network can choose output layers from set of all
intermediate layers.
The attribute :obj:`pick` is the names of the layers that are going
to be picked by :meth:`__call__`.
The attribute :obj:`layer_names` is the names of all layers
that can be picked.
Examples:
>>> model = MobileNetV2()
# By default, __call__ returns a probability score (after Softmax).
>>> prob = model(imgs)
>>> model.pick = 'expanded_conv_5'
# This is layer expanded_conv_5.
>>> expanded_conv_5 = model(imgs)
>>> model.pick = ['expanded_conv_5', 'conv1']
>>> # These are layers expanded_conv_5 and conv1 (before Pool).
>>> expanded_conv_5, conv1 = model(imgs)
.. seealso::
:class:`chainercv.links.model.PickableSequentialChain`
When :obj:`pretrained_model` is the path of a pre-trained chainer model
serialized as a :obj:`.npz` file in the constructor, this chain model
automatically initializes all the parameters with it.
When a string in the prespecified set is provided, a pretrained model is
loaded from weights distributed on the Internet.
The list of pretrained models supported are as follows:
* :obj:`imagenet`: Loads weights trained with ImageNet. \
When :obj:`arch=='tf'`, the weights distributed \
at tensorflow/models
`<https://github.com/tensorflow/models/tree/master/research/slim/nets/mobilenet>`_ \ # NOQA
are used.
Args:
n_class (int): The number of classes. If :obj:`None`,
the default values are used.
If a supported pretrained model is used,
the number of classes used to train the pretrained model
is used. Otherwise, the number of classes in ILSVRC 2012 dataset
is used.
pretrained_model (string): The destination of the pre-trained
chainer model serialized as a :obj:`.npz` file.
If this is one of the strings described
above, it automatically loads weights stored under a directory
:obj:`$CHAINER_DATASET_ROOT/pfnet/chainercv/models/`,
where :obj:`$CHAINER_DATASET_ROOT` is set as
:obj:`$HOME/.chainer/dataset` unless you specify another value
by modifying the environment variable.
mean (numpy.ndarray): A mean value. If :obj:`None`,
the default values are used.
If a supported pretrained model is used,
the mean value used to train the pretrained model is used.
Otherwise, the mean value used by TF's implementation is used.
initialW (callable): Initializer for the weights.
initial_bias (callable): Initializer for the biases.
"""
# Batch normalization replicating default tensorflow slim parameters
# as used in the original tensorflow implementation.
_bn_tf_default_params = {
"decay": 0.999,
"eps": 0.001,
"dtype": chainer.config.dtype
}
_models = {
'tf': {
1.0: {
'imagenet': {
'param': {
'n_class': 1001, # first element is background
'mean': _tf_mobilenetv2_mean,
},
'overwritable': ('mean',),
'url':
'https://chainercv-models.preferred.jp/mobilenet_v2_depth_multiplier_1.0_imagenet_converted_2019_05_13.npz', # NOQA
}
},
1.4: {
'imagenet': {
'param': {
'n_class': 1001, # first element is background
'mean': _tf_mobilenetv2_mean,
},
'overwritable': ('mean',),
'url':
'https://chainercv-models.preferred.jp/mobilenet_v2_depth_multiplier_1.4_imagenet_converted_2019_05_13.npz', # NOQA
}
}
}
}
def __init__(self,
n_class=None,
pretrained_model=None,
mean=None,
initialW=None,
initial_bias=None,
arch='tf',
depth_multiplier=1.,
bn_kwargs=_bn_tf_default_params,
thousand_categories_mode=False):
if depth_multiplier <= 0:
raise ValueError('depth_multiplier must be greater than 0')
param, path = utils.prepare_pretrained_model({
'n_class': n_class,
'mean': mean,
}, pretrained_model, self._models[arch][depth_multiplier], {
'n_class': 1000,
'mean': _imagenet_mean,
})
self.mean = param['mean']
self.n_class = param['n_class']
super(MobileNetV2, self).__init__()
def relu6(x):
return clipped_relu(x, 6.)
with self.init_scope():
conv_out_channels = _depth_multiplied_output_channels(
32, depth_multiplier)
self.conv = TFConv2DBNActiv(
in_channels=3,
out_channels=conv_out_channels,
stride=2,
ksize=3,
nobias=True,
activ=relu6,
initialW=initialW,
bn_kwargs=bn_kwargs)
expanded_out_channels = _depth_multiplied_output_channels(
16, depth_multiplier)
self.expanded_conv = ExpandedConv2D(
expansion_size=expand_input_by_factor(1, divisible_by=1),
in_channels=conv_out_channels,
out_channels=expanded_out_channels,
initialW=initialW,
bn_kwargs=bn_kwargs)
in_channels = expanded_out_channels
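            # Output widths for expanded_conv_1 .. expanded_conv_16; stride-2 at blocks 1, 3, 6 and 13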
out_channels_list = (24, ) * 2 + (32, ) * 3 + (64, ) * 4 + (
96, ) * 3 + (160, ) * 3 + (320, )
for i, out_channels in enumerate(out_channels_list):
layer_id = i + 1
if layer_id in (1, 3, 6, 13):
stride = 2
else:
stride = 1
multiplied_out_channels = _depth_multiplied_output_channels(
out_channels, depth_multiplier)
setattr(self, "expanded_conv_{}".format(layer_id),
ExpandedConv2D(
in_channels=in_channels,
out_channels=multiplied_out_channels,
depthwise_stride=stride,
initialW=initialW,
bn_kwargs=bn_kwargs))
in_channels = multiplied_out_channels
if depth_multiplier < 1:
conv1_out_channels = 1280
else:
conv1_out_channels = _depth_multiplied_output_channels(
1280, depth_multiplier)
self.conv1 = TFConv2DBNActiv(
in_channels=in_channels,
out_channels=conv1_out_channels,
ksize=1,
nobias=True,
initialW=initialW,
activ=relu6,
bn_kwargs=bn_kwargs)
self.global_average_pool = \
lambda x: average_pooling_2d(x, ksize=x.shape[2:4], stride=1)
self.logits_conv = TFConvolution2D(
in_channels=conv1_out_channels,
out_channels=self.n_class,
ksize=1,
nobias=False, # bias is needed
initialW=initialW,
initial_bias=initial_bias,
)
self.squeeze = lambda x: squeeze(x, axis=(2, 3))
self.softmax = softmax
if path:
chainer.serializers.load_npz(path, self)
if thousand_categories_mode and 1000 < n_class:
self.logits_conv.W.data = np.delete(self.logits_conv.W.data, 0, 0)
self.logits_conv.b.data = np.delete(self.logits_conv.b.data, 0)
|
import logging
from kiwiki import KiwiClient, KiwiException
import voluptuous as vol
from homeassistant.components.lock import PLATFORM_SCHEMA, LockEntity
from homeassistant.const import (
ATTR_ID,
ATTR_LATITUDE,
ATTR_LONGITUDE,
CONF_PASSWORD,
CONF_USERNAME,
STATE_LOCKED,
STATE_UNLOCKED,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import async_call_later
_LOGGER = logging.getLogger(__name__)
ATTR_TYPE = "hardware_type"
ATTR_PERMISSION = "permission"
ATTR_CAN_INVITE = "can_invite_others"
UNLOCK_MAINTAIN_TIME = 5
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the KIWI lock platform."""
try:
kiwi = KiwiClient(config[CONF_USERNAME], config[CONF_PASSWORD])
except KiwiException as exc:
_LOGGER.error(exc)
return
available_locks = kiwi.get_locks()
if not available_locks:
# No locks found; abort setup routine.
_LOGGER.info("No KIWI locks found in your account")
return
add_entities([KiwiLock(lock, kiwi) for lock in available_locks], True)
class KiwiLock(LockEntity):
"""Representation of a Kiwi lock."""
def __init__(self, kiwi_lock, client):
"""Initialize the lock."""
self._sensor = kiwi_lock
self._client = client
self.lock_id = kiwi_lock["sensor_id"]
self._state = STATE_LOCKED
address = kiwi_lock.get("address")
address.update(
{
ATTR_LATITUDE: address.pop("lat", None),
ATTR_LONGITUDE: address.pop("lng", None),
}
)
self._device_attrs = {
ATTR_ID: self.lock_id,
ATTR_TYPE: kiwi_lock.get("hardware_type"),
ATTR_PERMISSION: kiwi_lock.get("highest_permission"),
ATTR_CAN_INVITE: kiwi_lock.get("can_invite"),
**address,
}
@property
def name(self):
"""Return the name of the lock."""
name = self._sensor.get("name")
specifier = self._sensor["address"].get("specifier")
return name or specifier
@property
def is_locked(self):
"""Return true if lock is locked."""
return self._state == STATE_LOCKED
@property
def device_state_attributes(self):
"""Return the device specific state attributes."""
return self._device_attrs
@callback
def clear_unlock_state(self, _):
"""Clear unlock state automatically."""
self._state = STATE_LOCKED
self.async_write_ha_state()
def unlock(self, **kwargs):
"""Unlock the device."""
try:
self._client.open_door(self.lock_id)
except KiwiException:
_LOGGER.error("failed to open door")
else:
self._state = STATE_UNLOCKED
self.hass.add_job(
async_call_later,
self.hass,
UNLOCK_MAINTAIN_TIME,
self.clear_unlock_state,
)
|