from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from nets import inception
slim = tf.contrib.slim
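# NOTE: these tests are written against the TF 1.x API (tf.contrib.slim,
# tf.placeholder, tf.random_uniform, graph/session semantics) and are not
# expected to run unmodified on TF 2.x.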
class InceptionV1Test(tf.test.TestCase):
def testBuildClassificationNetwork(self):
batch_size = 5
height, width = 224, 224
num_classes = 1000
inputs = tf.random_uniform((batch_size, height, width, 3))
logits, end_points = inception.inception_v1(inputs, num_classes)
self.assertTrue(logits.op.name.startswith('InceptionV1/Logits'))
self.assertListEqual(logits.get_shape().as_list(),
[batch_size, num_classes])
self.assertTrue('Predictions' in end_points)
self.assertListEqual(end_points['Predictions'].get_shape().as_list(),
[batch_size, num_classes])
def testBuildBaseNetwork(self):
batch_size = 5
height, width = 224, 224
inputs = tf.random_uniform((batch_size, height, width, 3))
mixed_5c, end_points = inception.inception_v1_base(inputs)
self.assertTrue(mixed_5c.op.name.startswith('InceptionV1/Mixed_5c'))
self.assertListEqual(mixed_5c.get_shape().as_list(),
[batch_size, 7, 7, 1024])
expected_endpoints = ['Conv2d_1a_7x7', 'MaxPool_2a_3x3', 'Conv2d_2b_1x1',
'Conv2d_2c_3x3', 'MaxPool_3a_3x3', 'Mixed_3b',
'Mixed_3c', 'MaxPool_4a_3x3', 'Mixed_4b', 'Mixed_4c',
'Mixed_4d', 'Mixed_4e', 'Mixed_4f', 'MaxPool_5a_2x2',
'Mixed_5b', 'Mixed_5c']
self.assertItemsEqual(end_points.keys(), expected_endpoints)
def testBuildOnlyUptoFinalEndpoint(self):
batch_size = 5
height, width = 224, 224
endpoints = ['Conv2d_1a_7x7', 'MaxPool_2a_3x3', 'Conv2d_2b_1x1',
'Conv2d_2c_3x3', 'MaxPool_3a_3x3', 'Mixed_3b', 'Mixed_3c',
'MaxPool_4a_3x3', 'Mixed_4b', 'Mixed_4c', 'Mixed_4d',
'Mixed_4e', 'Mixed_4f', 'MaxPool_5a_2x2', 'Mixed_5b',
'Mixed_5c']
for index, endpoint in enumerate(endpoints):
with tf.Graph().as_default():
inputs = tf.random_uniform((batch_size, height, width, 3))
out_tensor, end_points = inception.inception_v1_base(
inputs, final_endpoint=endpoint)
self.assertTrue(out_tensor.op.name.startswith(
'InceptionV1/' + endpoint))
self.assertItemsEqual(endpoints[:index+1], end_points)
def testBuildAndCheckAllEndPointsUptoMixed5c(self):
batch_size = 5
height, width = 224, 224
inputs = tf.random_uniform((batch_size, height, width, 3))
_, end_points = inception.inception_v1_base(inputs,
final_endpoint='Mixed_5c')
endpoints_shapes = {'Conv2d_1a_7x7': [5, 112, 112, 64],
'MaxPool_2a_3x3': [5, 56, 56, 64],
'Conv2d_2b_1x1': [5, 56, 56, 64],
'Conv2d_2c_3x3': [5, 56, 56, 192],
'MaxPool_3a_3x3': [5, 28, 28, 192],
'Mixed_3b': [5, 28, 28, 256],
'Mixed_3c': [5, 28, 28, 480],
'MaxPool_4a_3x3': [5, 14, 14, 480],
'Mixed_4b': [5, 14, 14, 512],
'Mixed_4c': [5, 14, 14, 512],
'Mixed_4d': [5, 14, 14, 512],
'Mixed_4e': [5, 14, 14, 528],
'Mixed_4f': [5, 14, 14, 832],
'MaxPool_5a_2x2': [5, 7, 7, 832],
'Mixed_5b': [5, 7, 7, 832],
'Mixed_5c': [5, 7, 7, 1024]}
self.assertItemsEqual(endpoints_shapes.keys(), end_points.keys())
for endpoint_name in endpoints_shapes:
expected_shape = endpoints_shapes[endpoint_name]
self.assertTrue(endpoint_name in end_points)
self.assertListEqual(end_points[endpoint_name].get_shape().as_list(),
expected_shape)
def testModelHasExpectedNumberOfParameters(self):
batch_size = 5
height, width = 224, 224
inputs = tf.random_uniform((batch_size, height, width, 3))
with slim.arg_scope(inception.inception_v1_arg_scope()):
inception.inception_v1_base(inputs)
total_params, _ = slim.model_analyzer.analyze_vars(
slim.get_model_variables())
self.assertAlmostEqual(5607184, total_params)
def testHalfSizeImages(self):
batch_size = 5
height, width = 112, 112
inputs = tf.random_uniform((batch_size, height, width, 3))
mixed_5c, _ = inception.inception_v1_base(inputs)
self.assertTrue(mixed_5c.op.name.startswith('InceptionV1/Mixed_5c'))
self.assertListEqual(mixed_5c.get_shape().as_list(),
[batch_size, 4, 4, 1024])
def testUnknownImageShape(self):
tf.reset_default_graph()
batch_size = 2
height, width = 224, 224
num_classes = 1000
input_np = np.random.uniform(0, 1, (batch_size, height, width, 3))
with self.test_session() as sess:
inputs = tf.placeholder(tf.float32, shape=(batch_size, None, None, 3))
logits, end_points = inception.inception_v1(inputs, num_classes)
self.assertTrue(logits.op.name.startswith('InceptionV1/Logits'))
self.assertListEqual(logits.get_shape().as_list(),
[batch_size, num_classes])
pre_pool = end_points['Mixed_5c']
feed_dict = {inputs: input_np}
tf.global_variables_initializer().run()
pre_pool_out = sess.run(pre_pool, feed_dict=feed_dict)
self.assertListEqual(list(pre_pool_out.shape), [batch_size, 7, 7, 1024])
def testUnknownBatchSize(self):
batch_size = 1
height, width = 224, 224
num_classes = 1000
inputs = tf.placeholder(tf.float32, (None, height, width, 3))
logits, _ = inception.inception_v1(inputs, num_classes)
self.assertTrue(logits.op.name.startswith('InceptionV1/Logits'))
self.assertListEqual(logits.get_shape().as_list(),
[None, num_classes])
images = tf.random_uniform((batch_size, height, width, 3))
with self.test_session() as sess:
sess.run(tf.global_variables_initializer())
output = sess.run(logits, {inputs: images.eval()})
self.assertEqual(output.shape, (batch_size, num_classes))
def testEvaluation(self):
batch_size = 2
height, width = 224, 224
num_classes = 1000
eval_inputs = tf.random_uniform((batch_size, height, width, 3))
logits, _ = inception.inception_v1(eval_inputs, num_classes,
is_training=False)
predictions = tf.argmax(logits, 1)
with self.test_session() as sess:
sess.run(tf.global_variables_initializer())
output = sess.run(predictions)
self.assertEqual(output.shape, (batch_size,))
def testTrainEvalWithReuse(self):
train_batch_size = 5
eval_batch_size = 2
height, width = 224, 224
num_classes = 1000
train_inputs = tf.random_uniform((train_batch_size, height, width, 3))
inception.inception_v1(train_inputs, num_classes)
eval_inputs = tf.random_uniform((eval_batch_size, height, width, 3))
logits, _ = inception.inception_v1(eval_inputs, num_classes, reuse=True)
predictions = tf.argmax(logits, 1)
with self.test_session() as sess:
sess.run(tf.global_variables_initializer())
output = sess.run(predictions)
self.assertEqual(output.shape, (eval_batch_size,))
def testLogitsNotSqueezed(self):
num_classes = 25
images = tf.random_uniform([1, 224, 224, 3])
logits, _ = inception.inception_v1(images,
num_classes=num_classes,
spatial_squeeze=False)
with self.test_session() as sess:
tf.global_variables_initializer().run()
logits_out = sess.run(logits)
self.assertListEqual(list(logits_out.shape), [1, 1, 1, num_classes])
if __name__ == '__main__':
tf.test.main()
|
from tqdm import tqdm
import pandas as pd
from matchzoo.engine.base_preprocessor import BasePreprocessor
from matchzoo import DataPack
from .build_vocab_unit import build_vocab_unit
from .chain_transform import chain_transform
from . import units
tqdm.pandas()
class DIINPreprocessor(BasePreprocessor):
"""DIIN Model preprocessor."""
def __init__(self,
fixed_length_left: int = 10,
fixed_length_right: int = 10,
fixed_length_word: int = 5):
"""
DIIN Model preprocessor.
:param fixed_length_left: Integer, maximum length of :attr:`left` in
the data_pack.
:param fixed_length_right: Integer, maximum length of :attr:`right` in
the data_pack.
:param fixed_length_word: Integer, maximum length of each word.
Example:
>>> import matchzoo as mz
>>> train_data = mz.datasets.toy.load_data()
>>> test_data = mz.datasets.toy.load_data(stage='test')
>>> diin_preprocessor = mz.preprocessors.DIINPreprocessor(
... fixed_length_left=5,
... fixed_length_right=5,
... fixed_length_word=3,
... )
>>> diin_preprocessor = diin_preprocessor.fit(
... train_data, verbose=0)
>>> diin_preprocessor.context['input_shapes']
[(5,), (5,), (5, 3), (5, 3), (5,), (5,)]
>>> diin_preprocessor.context['vocab_size']
893
>>> train_data_processed = diin_preprocessor.transform(
... train_data, verbose=0)
>>> type(train_data_processed)
<class 'matchzoo.data_pack.data_pack.DataPack'>
>>> test_data_processed = diin_preprocessor.transform(
... test_data, verbose=0)
>>> type(test_data_processed)
<class 'matchzoo.data_pack.data_pack.DataPack'>
"""
super().__init__()
self._fixed_length_left = fixed_length_left
self._fixed_length_right = fixed_length_right
self._fixed_length_word = fixed_length_word
self._left_fixedlength_unit = units.FixedLength(
self._fixed_length_left,
pad_value='0',
pad_mode='post'
)
self._right_fixedlength_unit = units.FixedLength(
self._fixed_length_right,
pad_value='0',
pad_mode='post'
)
self._units = self._default_units()
def fit(self, data_pack: DataPack, verbose: int = 1):
"""
Fit pre-processing context for transformation.
:param data_pack: data_pack to be preprocessed.
:param verbose: Verbosity.
:return: :class:`DIINPreprocessor` instance.
"""
func = chain_transform(self._units)
data_pack = data_pack.apply_on_text(func, mode='both', verbose=verbose)
vocab_unit = build_vocab_unit(data_pack, verbose=verbose)
vocab_size = len(vocab_unit.state['term_index'])
self._context['vocab_unit'] = vocab_unit
self._context['vocab_size'] = vocab_size
self._context['embedding_input_dim'] = vocab_size
data_pack = data_pack.apply_on_text(
units.NgramLetter(ngram=1, reduce_dim=True).transform,
mode='both', verbose=verbose)
char_unit = build_vocab_unit(data_pack, verbose=verbose)
self._context['char_unit'] = char_unit
self._context['input_shapes'] = [
(self._fixed_length_left,),
(self._fixed_length_right,),
(self._fixed_length_left, self._fixed_length_word,),
(self._fixed_length_right, self._fixed_length_word,),
(self._fixed_length_left,),
(self._fixed_length_right,)
]
return self
def transform(self, data_pack: DataPack, verbose: int = 1) -> DataPack:
"""
Apply transformation on data.
:param data_pack: Inputs to be preprocessed.
:param verbose: Verbosity.
:return: Transformed data as :class:`DataPack` object.
"""
data_pack = data_pack.copy()
data_pack.apply_on_text(
chain_transform(self._units),
mode='both', inplace=True, verbose=verbose)
# Process character representation
data_pack.apply_on_text(
units.NgramLetter(ngram=1, reduce_dim=False).transform,
rename=('char_left', 'char_right'),
mode='both', inplace=True, verbose=verbose)
char_index_dict = self._context['char_unit'].state['term_index']
left_charindex_unit = units.CharacterIndex(
char_index_dict, self._fixed_length_left, self._fixed_length_word)
right_charindex_unit = units.CharacterIndex(
char_index_dict, self._fixed_length_right, self._fixed_length_word)
data_pack.left['char_left'] = data_pack.left['char_left'].apply(
left_charindex_unit.transform)
data_pack.right['char_right'] = data_pack.right['char_right'].apply(
right_charindex_unit.transform)
# Process word representation
data_pack.apply_on_text(
self._context['vocab_unit'].transform,
mode='both', inplace=True, verbose=verbose)
# Process exact match representation
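# The join below builds one row per (text_left, text_right) relation pair so
# that WordExactMatch can flag, token by token, whether a word also occurs on
# the other side of the pair.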
frame = data_pack.relation.join(
data_pack.left, on='id_left', how='left'
).join(data_pack.right, on='id_right', how='left')
left_exactmatch_unit = units.WordExactMatch(
self._fixed_length_left, match='text_left', to_match='text_right')
right_exactmatch_unit = units.WordExactMatch(
self._fixed_length_right, match='text_right', to_match='text_left')
data_pack.relation['match_left'] = frame.apply(
left_exactmatch_unit.transform, axis=1)
data_pack.relation['match_right'] = frame.apply(
right_exactmatch_unit.transform, axis=1)
data_pack.apply_on_text(
self._left_fixedlength_unit.transform,
mode='left', inplace=True, verbose=verbose)
data_pack.apply_on_text(
self._right_fixedlength_unit.transform,
mode='right', inplace=True, verbose=verbose)
return data_pack
|
import asyncio
from datetime import timedelta
import logging
import time
from typing import Any, Dict, NamedTuple, Tuple, cast
from pyHS100 import SmartBulb, SmartDeviceException
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_COLOR_TEMP,
ATTR_HS_COLOR,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_COLOR_TEMP,
LightEntity,
)
from homeassistant.exceptions import HomeAssistantError, PlatformNotReady
import homeassistant.helpers.device_registry as dr
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.util.color import (
color_temperature_kelvin_to_mired as kelvin_to_mired,
color_temperature_mired_to_kelvin as mired_to_kelvin,
)
import homeassistant.util.dt as dt_util
from . import CONF_LIGHT, DOMAIN as TPLINK_DOMAIN
from .common import add_available_devices
PARALLEL_UPDATES = 0
SCAN_INTERVAL = timedelta(seconds=5)
CURRENT_POWER_UPDATE_INTERVAL = timedelta(seconds=60)
HISTORICAL_POWER_UPDATE_INTERVAL = timedelta(minutes=60)
_LOGGER = logging.getLogger(__name__)
ATTR_CURRENT_POWER_W = "current_power_w"
ATTR_DAILY_ENERGY_KWH = "daily_energy_kwh"
ATTR_MONTHLY_ENERGY_KWH = "monthly_energy_kwh"
LIGHT_STATE_DFT_ON = "dft_on_state"
LIGHT_STATE_ON_OFF = "on_off"
LIGHT_STATE_RELAY_STATE = "relay_state"
LIGHT_STATE_BRIGHTNESS = "brightness"
LIGHT_STATE_COLOR_TEMP = "color_temp"
LIGHT_STATE_HUE = "hue"
LIGHT_STATE_SATURATION = "saturation"
LIGHT_STATE_ERROR_MSG = "err_msg"
LIGHT_SYSINFO_MAC = "mac"
LIGHT_SYSINFO_ALIAS = "alias"
LIGHT_SYSINFO_MODEL = "model"
LIGHT_SYSINFO_IS_DIMMABLE = "is_dimmable"
LIGHT_SYSINFO_IS_VARIABLE_COLOR_TEMP = "is_variable_color_temp"
LIGHT_SYSINFO_IS_COLOR = "is_color"
MAX_ATTEMPTS = 300
SLEEP_TIME = 2
async def async_setup_entry(hass: HomeAssistantType, config_entry, async_add_entities):
"""Set up lights."""
entities = await hass.async_add_executor_job(
add_available_devices, hass, CONF_LIGHT, TPLinkSmartBulb
)
if entities:
async_add_entities(entities, update_before_add=True)
if hass.data[TPLINK_DOMAIN][f"{CONF_LIGHT}_remaining"]:
raise PlatformNotReady
def brightness_to_percentage(byt):
"""Convert brightness from absolute 0..255 to percentage."""
return round((byt * 100.0) / 255.0)
def brightness_from_percentage(percent):
"""Convert percentage to absolute value 0..255."""
return round((percent * 255.0) / 100.0)
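# Example round trip (approximate because of rounding):
#   brightness_to_percentage(128) -> 50
#   brightness_from_percentage(50) -> 128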
class LightState(NamedTuple):
"""Light state."""
state: bool
brightness: int
color_temp: float
hs: Tuple[int, int]
def to_param(self):
"""Return a version that we can send to the bulb."""
if self.color_temp:
color_temp = mired_to_kelvin(self.color_temp)
else:
color_temp = None
return {
LIGHT_STATE_ON_OFF: 1 if self.state else 0,
LIGHT_STATE_BRIGHTNESS: brightness_to_percentage(self.brightness),
LIGHT_STATE_COLOR_TEMP: color_temp,
LIGHT_STATE_HUE: self.hs[0] if self.hs else 0,
LIGHT_STATE_SATURATION: self.hs[1] if self.hs else 0,
}
class LightFeatures(NamedTuple):
"""Light features."""
sysinfo: Dict[str, Any]
mac: str
alias: str
model: str
supported_features: int
min_mireds: float
max_mireds: float
has_emeter: bool
class TPLinkSmartBulb(LightEntity):
"""Representation of a TPLink Smart Bulb."""
def __init__(self, smartbulb: SmartBulb) -> None:
"""Initialize the bulb."""
self.smartbulb = smartbulb
self._light_features = cast(LightFeatures, None)
self._light_state = cast(LightState, None)
self._is_available = True
self._is_setting_light_state = False
self._last_current_power_update = None
self._last_historical_power_update = None
self._emeter_params = {}
self._host = None
self._alias = None
@property
def unique_id(self):
"""Return a unique ID."""
return self._light_features.mac
@property
def name(self):
"""Return the name of the Smart Bulb."""
return self._light_features.alias
@property
def device_info(self):
"""Return information about the device."""
return {
"name": self._light_features.alias,
"model": self._light_features.model,
"manufacturer": "TP-Link",
"connections": {(dr.CONNECTION_NETWORK_MAC, self._light_features.mac)},
"sw_version": self._light_features.sysinfo["sw_ver"],
}
@property
def available(self) -> bool:
"""Return if bulb is available."""
return self._is_available
@property
def device_state_attributes(self):
"""Return the state attributes of the device."""
return self._emeter_params
async def async_turn_on(self, **kwargs):
"""Turn the light on."""
if ATTR_BRIGHTNESS in kwargs:
brightness = int(kwargs[ATTR_BRIGHTNESS])
elif self._light_state.brightness is not None:
brightness = self._light_state.brightness
else:
brightness = 255
if ATTR_COLOR_TEMP in kwargs:
color_tmp = int(kwargs[ATTR_COLOR_TEMP])
else:
color_tmp = self._light_state.color_temp
if ATTR_HS_COLOR in kwargs:
# TP-Link requires integers.
hue_sat = tuple(int(val) for val in kwargs[ATTR_HS_COLOR])
# TP-Link cannot have both color temp and hue_sat
color_tmp = 0
else:
hue_sat = self._light_state.hs
await self._async_set_light_state_retry(
self._light_state,
self._light_state._replace(
state=True,
brightness=brightness,
color_temp=color_tmp,
hs=hue_sat,
),
)
async def async_turn_off(self, **kwargs):
"""Turn the light off."""
await self._async_set_light_state_retry(
self._light_state,
self._light_state._replace(state=False),
)
@property
def min_mireds(self):
"""Return minimum supported color temperature."""
return self._light_features.min_mireds
@property
def max_mireds(self):
"""Return maximum supported color temperature."""
return self._light_features.max_mireds
@property
def color_temp(self):
"""Return the color temperature of this light in mireds for HA."""
return self._light_state.color_temp
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
return self._light_state.brightness
@property
def hs_color(self):
"""Return the color."""
return self._light_state.hs
@property
def is_on(self):
"""Return True if device is on."""
return self._light_state.state
def attempt_update(self, update_attempt):
"""Attempt to get details the TP-Link bulb."""
# State is currently being set, ignore.
if self._is_setting_light_state:
return False
try:
if not self._light_features:
self._light_features = self._get_light_features()
self._alias = self._light_features.alias
self._host = self.smartbulb.host
self._light_state = self._get_light_state()
return True
except (SmartDeviceException, OSError) as ex:
if update_attempt == 0:
_LOGGER.debug(
"Retrying in %s seconds for %s|%s due to: %s",
SLEEP_TIME,
self._host,
self._alias,
ex,
)
return False
@property
def supported_features(self):
"""Flag supported features."""
return self._light_features.supported_features
def _get_light_features(self):
"""Determine all supported features in one go."""
sysinfo = self.smartbulb.sys_info
supported_features = 0
# Call the mac property here (rather than reading sysinfo) as it reformats the address
mac = self.smartbulb.mac
alias = sysinfo[LIGHT_SYSINFO_ALIAS]
model = sysinfo[LIGHT_SYSINFO_MODEL]
min_mireds = None
max_mireds = None
has_emeter = self.smartbulb.has_emeter
if sysinfo.get(LIGHT_SYSINFO_IS_DIMMABLE) or LIGHT_STATE_BRIGHTNESS in sysinfo:
supported_features += SUPPORT_BRIGHTNESS
if sysinfo.get(LIGHT_SYSINFO_IS_VARIABLE_COLOR_TEMP):
supported_features += SUPPORT_COLOR_TEMP
# Have to make another api request here in
# order to not re-implement pyHS100 here
max_range, min_range = self.smartbulb.valid_temperature_range
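# mireds = 1,000,000 / kelvin, so the Kelvin bounds map to the mired bounds
# in reverse order: the low (warm) end of the Kelvin range gives max_mireds.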
min_mireds = kelvin_to_mired(min_range)
max_mireds = kelvin_to_mired(max_range)
if sysinfo.get(LIGHT_SYSINFO_IS_COLOR):
supported_features += SUPPORT_COLOR
return LightFeatures(
sysinfo=sysinfo,
mac=mac,
alias=alias,
model=model,
supported_features=supported_features,
min_mireds=min_mireds,
max_mireds=max_mireds,
has_emeter=has_emeter,
)
def _light_state_from_params(self, light_state_params) -> LightState:
brightness = None
color_temp = None
hue_saturation = None
light_features = self._light_features
state = bool(light_state_params[LIGHT_STATE_ON_OFF])
if not state and LIGHT_STATE_DFT_ON in light_state_params:
light_state_params = light_state_params[LIGHT_STATE_DFT_ON]
if light_features.supported_features & SUPPORT_BRIGHTNESS:
brightness = brightness_from_percentage(
light_state_params[LIGHT_STATE_BRIGHTNESS]
)
if light_features.supported_features & SUPPORT_COLOR_TEMP:
if (
light_state_params.get(LIGHT_STATE_COLOR_TEMP) is not None
and light_state_params[LIGHT_STATE_COLOR_TEMP] != 0
):
color_temp = kelvin_to_mired(light_state_params[LIGHT_STATE_COLOR_TEMP])
if light_features.supported_features & SUPPORT_COLOR:
hue_saturation = (
light_state_params[LIGHT_STATE_HUE],
light_state_params[LIGHT_STATE_SATURATION],
)
return LightState(
state=state,
brightness=brightness,
color_temp=color_temp,
hs=hue_saturation,
)
def _get_light_state(self) -> LightState:
"""Get the light state."""
self._update_emeter()
return self._light_state_from_params(self._get_device_state())
def _update_emeter(self):
if not self._light_features.has_emeter:
return
now = dt_util.utcnow()
if (
not self._last_current_power_update
or self._last_current_power_update + CURRENT_POWER_UPDATE_INTERVAL < now
):
self._last_current_power_update = now
self._emeter_params[ATTR_CURRENT_POWER_W] = "{:.1f}".format(
self.smartbulb.current_consumption()
)
if (
not self._last_historical_power_update
or self._last_historical_power_update + HISTORICAL_POWER_UPDATE_INTERVAL
< now
):
self._last_historical_power_update = now
daily_statistics = self.smartbulb.get_emeter_daily()
monthly_statistics = self.smartbulb.get_emeter_monthly()
try:
self._emeter_params[ATTR_DAILY_ENERGY_KWH] = "{:.3f}".format(
daily_statistics[int(time.strftime("%d"))]
)
self._emeter_params[ATTR_MONTHLY_ENERGY_KWH] = "{:.3f}".format(
monthly_statistics[int(time.strftime("%m"))]
)
except KeyError:
# device returned no daily/monthly history
pass
async def _async_set_light_state_retry(
self, old_light_state: LightState, new_light_state: LightState
) -> None:
"""Set the light state with retry."""
# Tell the device to set the states.
if not _light_state_diff(old_light_state, new_light_state):
# Nothing to do, avoid the executor
return
self._is_setting_light_state = True
try:
light_state_params = await self.hass.async_add_executor_job(
self._set_light_state, old_light_state, new_light_state
)
self._is_available = True
self._is_setting_light_state = False
if LIGHT_STATE_ERROR_MSG in light_state_params:
raise HomeAssistantError(light_state_params[LIGHT_STATE_ERROR_MSG])
self._light_state = self._light_state_from_params(light_state_params)
return
except (SmartDeviceException, OSError):
pass
try:
_LOGGER.debug("Retrying setting light state")
light_state_params = await self.hass.async_add_executor_job(
self._set_light_state, old_light_state, new_light_state
)
self._is_available = True
if LIGHT_STATE_ERROR_MSG in light_state_params:
raise HomeAssistantError(light_state_params[LIGHT_STATE_ERROR_MSG])
self._light_state = self._light_state_from_params(light_state_params)
except (SmartDeviceException, OSError) as ex:
self._is_available = False
_LOGGER.warning("Could not set data for %s: %s", self.smartbulb.host, ex)
self._is_setting_light_state = False
def _set_light_state(
self, old_light_state: LightState, new_light_state: LightState
) -> None:
"""Set the light state."""
diff = _light_state_diff(old_light_state, new_light_state)
if not diff:
return
return self._set_device_state(diff)
def _get_device_state(self):
"""State of the bulb or smart dimmer switch."""
if isinstance(self.smartbulb, SmartBulb):
return self.smartbulb.get_light_state()
sysinfo = self.smartbulb.sys_info
# It's not really a bulb; it's a dimmable SmartPlug (aka wall switch)
return {
LIGHT_STATE_ON_OFF: sysinfo[LIGHT_STATE_RELAY_STATE],
LIGHT_STATE_BRIGHTNESS: sysinfo.get(LIGHT_STATE_BRIGHTNESS, 0),
LIGHT_STATE_COLOR_TEMP: 0,
LIGHT_STATE_HUE: 0,
LIGHT_STATE_SATURATION: 0,
}
def _set_device_state(self, state):
"""Set state of the bulb or smart dimmer switch."""
if isinstance(self.smartbulb, SmartBulb):
return self.smartbulb.set_light_state(state)
# It's not really a bulb; it's a dimmable SmartPlug (aka wall switch)
if LIGHT_STATE_BRIGHTNESS in state:
# Brightness of 0 is accepted by the
# device but the underlying library rejects it
# so we turn off instead.
if state[LIGHT_STATE_BRIGHTNESS]:
self.smartbulb.brightness = state[LIGHT_STATE_BRIGHTNESS]
else:
self.smartbulb.state = self.smartbulb.SWITCH_STATE_OFF
elif LIGHT_STATE_ON_OFF in state:
if state[LIGHT_STATE_ON_OFF]:
self.smartbulb.state = self.smartbulb.SWITCH_STATE_ON
else:
self.smartbulb.state = self.smartbulb.SWITCH_STATE_OFF
return self._get_device_state()
async def async_update(self):
"""Update the TP-Link bulb's state."""
for update_attempt in range(MAX_ATTEMPTS):
is_ready = await self.hass.async_add_executor_job(
self.attempt_update, update_attempt
)
if is_ready:
self._is_available = True
if update_attempt > 0:
_LOGGER.debug(
"Device %s|%s responded after %s attempts",
self._host,
self._alias,
update_attempt,
)
break
await asyncio.sleep(SLEEP_TIME)
else:
if self._is_available:
_LOGGER.warning(
"Could not read state for %s|%s",
self._host,
self._alias,
)
self._is_available = False
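# Helper used by the retry logic above: compute only the parameters that
# actually changed between two LightState values (after conversion to device
# units via to_param), e.g. a brightness-only change yields a dict with just
# the brightness key, so only that field is sent to the bulb.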
def _light_state_diff(old_light_state: LightState, new_light_state: LightState):
old_state_param = old_light_state.to_param()
new_state_param = new_light_state.to_param()
return {
key: value
for key, value in new_state_param.items()
if new_state_param.get(key) != old_state_param.get(key)
}
|
from datetime import timedelta
import os
import sqlite3
import pytest
from homeassistant.components.recorder import util
from homeassistant.components.recorder.const import DATA_INSTANCE, SQLITE_URL_PREFIX
from homeassistant.util import dt as dt_util
from .common import wait_recording_done
from tests.async_mock import MagicMock, patch
from tests.common import get_test_home_assistant, init_recorder_component
@pytest.fixture
def hass_recorder():
"""Home Assistant fixture with in-memory recorder."""
hass = get_test_home_assistant()
def setup_recorder(config=None):
"""Set up with params."""
init_recorder_component(hass, config)
hass.start()
hass.block_till_done()
hass.data[DATA_INSTANCE].block_till_done()
return hass
yield setup_recorder
hass.stop()
def test_recorder_bad_commit(hass_recorder):
"""Bad _commit should retry 3 times."""
hass = hass_recorder()
def work(session):
"""Bad work."""
session.execute("select * from notthere")
with patch(
"homeassistant.components.recorder.time.sleep"
) as e_mock, util.session_scope(hass=hass) as session:
res = util.commit(session, work)
assert res is False
assert e_mock.call_count == 3
def test_recorder_bad_execute(hass_recorder):
"""Bad execute, retry 3 times."""
from sqlalchemy.exc import SQLAlchemyError
hass_recorder()
def to_native(validate_entity_id=True):
"""Raise exception."""
raise SQLAlchemyError()
mck1 = MagicMock()
mck1.to_native = to_native
with pytest.raises(SQLAlchemyError), patch(
"homeassistant.components.recorder.time.sleep"
) as e_mock:
util.execute((mck1,), to_native=True)
assert e_mock.call_count == 2
def test_validate_or_move_away_sqlite_database_with_integrity_check(
hass, tmpdir, caplog
):
"""Ensure a malformed sqlite database is moved away.
A quick_check is run here
"""
db_integrity_check = True
test_dir = tmpdir.mkdir("test_validate_or_move_away_sqlite_database")
test_db_file = f"{test_dir}/broken.db"
dburl = f"{SQLITE_URL_PREFIX}{test_db_file}"
assert util.validate_sqlite_database(test_db_file, db_integrity_check) is True
assert os.path.exists(test_db_file) is True
assert (
util.validate_or_move_away_sqlite_database(dburl, db_integrity_check) is False
)
_corrupt_db_file(test_db_file)
assert util.validate_sqlite_database(dburl, db_integrity_check) is False
assert (
util.validate_or_move_away_sqlite_database(dburl, db_integrity_check) is False
)
assert "corrupt or malformed" in caplog.text
assert util.validate_sqlite_database(dburl, db_integrity_check) is False
assert util.validate_or_move_away_sqlite_database(dburl, db_integrity_check) is True
def test_validate_or_move_away_sqlite_database_without_integrity_check(
hass, tmpdir, caplog
):
"""Ensure a malformed sqlite database is moved away.
The quick_check is skipped, but we can still find
corruption if the whole database is unreadable
"""
db_integrity_check = False
test_dir = tmpdir.mkdir("test_validate_or_move_away_sqlite_database")
test_db_file = f"{test_dir}/broken.db"
dburl = f"{SQLITE_URL_PREFIX}{test_db_file}"
assert util.validate_sqlite_database(test_db_file, db_integrity_check) is True
assert os.path.exists(test_db_file) is True
assert (
util.validate_or_move_away_sqlite_database(dburl, db_integrity_check) is False
)
_corrupt_db_file(test_db_file)
assert util.validate_sqlite_database(dburl, db_integrity_check) is False
assert (
util.validate_or_move_away_sqlite_database(dburl, db_integrity_check) is False
)
assert "corrupt or malformed" in caplog.text
assert util.validate_sqlite_database(dburl, db_integrity_check) is False
assert util.validate_or_move_away_sqlite_database(dburl, db_integrity_check) is True
def test_last_run_was_recently_clean(hass_recorder):
"""Test we can check if the last recorder run was recently clean."""
hass = hass_recorder()
cursor = hass.data[DATA_INSTANCE].engine.raw_connection().cursor()
assert util.last_run_was_recently_clean(cursor) is False
hass.data[DATA_INSTANCE]._close_run()
wait_recording_done(hass)
assert util.last_run_was_recently_clean(cursor) is True
thirty_min_future_time = dt_util.utcnow() + timedelta(minutes=30)
with patch(
"homeassistant.components.recorder.dt_util.utcnow",
return_value=thirty_min_future_time,
):
assert util.last_run_was_recently_clean(cursor) is False
def test_basic_sanity_check(hass_recorder):
"""Test the basic sanity checks with a missing table."""
hass = hass_recorder()
cursor = hass.data[DATA_INSTANCE].engine.raw_connection().cursor()
assert util.basic_sanity_check(cursor) is True
cursor.execute("DROP TABLE states;")
with pytest.raises(sqlite3.DatabaseError):
util.basic_sanity_check(cursor)
def test_combined_checks(hass_recorder):
"""Run Checks on the open database."""
hass = hass_recorder()
db_integrity_check = False
cursor = hass.data[DATA_INSTANCE].engine.raw_connection().cursor()
assert (
util.run_checks_on_open_db("fake_db_path", cursor, db_integrity_check) is None
)
# We are patching recorder.util here in order
# to avoid creating the full database on disk
with patch("homeassistant.components.recorder.util.last_run_was_recently_clean"):
assert (
util.run_checks_on_open_db("fake_db_path", cursor, db_integrity_check)
is None
)
with patch(
"homeassistant.components.recorder.util.last_run_was_recently_clean",
side_effect=sqlite3.DatabaseError,
), pytest.raises(sqlite3.DatabaseError):
util.run_checks_on_open_db("fake_db_path", cursor, db_integrity_check)
cursor.execute("DROP TABLE events;")
with pytest.raises(sqlite3.DatabaseError):
util.run_checks_on_open_db("fake_db_path", cursor, db_integrity_check)
def _corrupt_db_file(test_db_file):
"""Corrupt an sqlite3 database file."""
f = open(test_db_file, "a")
f.write("I am a corrupt db")
f.close()
|
import logging
from libpurecool.const import (
FanPower,
FanSpeed,
FanState,
FocusMode,
HeatMode,
HeatState,
HeatTarget,
)
from libpurecool.dyson_pure_hotcool import DysonPureHotCool
from libpurecool.dyson_pure_hotcool_link import DysonPureHotCoolLink
from libpurecool.dyson_pure_state import DysonPureHotCoolState
from libpurecool.dyson_pure_state_v2 import DysonPureHotCoolV2State
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
CURRENT_HVAC_COOL,
CURRENT_HVAC_HEAT,
CURRENT_HVAC_IDLE,
CURRENT_HVAC_OFF,
FAN_AUTO,
FAN_DIFFUSE,
FAN_FOCUS,
FAN_HIGH,
FAN_LOW,
FAN_MEDIUM,
FAN_OFF,
HVAC_MODE_COOL,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
SUPPORT_FAN_MODE,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS
from . import DYSON_DEVICES
_LOGGER = logging.getLogger(__name__)
SUPPORT_FAN = [FAN_FOCUS, FAN_DIFFUSE]
SUPPORT_FAN_PCOOL = [FAN_OFF, FAN_AUTO, FAN_LOW, FAN_MEDIUM, FAN_HIGH]
SUPPORT_HVAC = [HVAC_MODE_COOL, HVAC_MODE_HEAT]
SUPPORT_HVAC_PCOOL = [HVAC_MODE_COOL, HVAC_MODE_HEAT, HVAC_MODE_OFF]
SUPPORT_FLAGS = SUPPORT_TARGET_TEMPERATURE | SUPPORT_FAN_MODE
DYSON_KNOWN_CLIMATE_DEVICES = "dyson_known_climate_devices"
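# Map the ten discrete Dyson fan speeds (plus AUTO) onto Home Assistant's
# coarser FAN_LOW / FAN_MEDIUM / FAN_HIGH / FAN_AUTO modes.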
SPEED_MAP = {
FanSpeed.FAN_SPEED_1.value: FAN_LOW,
FanSpeed.FAN_SPEED_2.value: FAN_LOW,
FanSpeed.FAN_SPEED_3.value: FAN_LOW,
FanSpeed.FAN_SPEED_4.value: FAN_LOW,
FanSpeed.FAN_SPEED_AUTO.value: FAN_AUTO,
FanSpeed.FAN_SPEED_5.value: FAN_MEDIUM,
FanSpeed.FAN_SPEED_6.value: FAN_MEDIUM,
FanSpeed.FAN_SPEED_7.value: FAN_MEDIUM,
FanSpeed.FAN_SPEED_8.value: FAN_HIGH,
FanSpeed.FAN_SPEED_9.value: FAN_HIGH,
FanSpeed.FAN_SPEED_10.value: FAN_HIGH,
}
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Dyson fan components."""
if discovery_info is None:
return
known_devices = hass.data.setdefault(DYSON_KNOWN_CLIMATE_DEVICES, set())
# Get Dyson Devices from parent component
new_entities = []
for device in hass.data[DYSON_DEVICES]:
if device.serial not in known_devices:
if isinstance(device, DysonPureHotCool):
dyson_entity = DysonPureHotCoolEntity(device)
new_entities.append(dyson_entity)
known_devices.add(device.serial)
elif isinstance(device, DysonPureHotCoolLink):
dyson_entity = DysonPureHotCoolLinkEntity(device)
new_entities.append(dyson_entity)
known_devices.add(device.serial)
add_entities(new_entities)
class DysonPureHotCoolLinkEntity(ClimateEntity):
"""Representation of a Dyson climate fan."""
def __init__(self, device):
"""Initialize the fan."""
self._device = device
self._current_temp = None
async def async_added_to_hass(self):
"""Call when entity is added to hass."""
self._device.add_message_listener(self.on_message)
def on_message(self, message):
"""Call when new messages received from the climate."""
if isinstance(message, DysonPureHotCoolState):
_LOGGER.debug(
"Message received for climate device %s : %s", self.name, message
)
self.schedule_update_ha_state()
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def supported_features(self):
"""Return the list of supported features."""
return SUPPORT_FLAGS
@property
def name(self):
"""Return the display name of this climate."""
return self._device.name
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return TEMP_CELSIUS
@property
def current_temperature(self):
"""Return the current temperature."""
if self._device.environmental_state:
temperature_kelvin = self._device.environmental_state.temperature
if temperature_kelvin != 0:
self._current_temp = float(f"{(temperature_kelvin - 273):.1f}")
return self._current_temp
@property
def target_temperature(self):
"""Return the target temperature."""
heat_target = int(self._device.state.heat_target) / 10
return int(heat_target - 273)
@property
def current_humidity(self):
"""Return the current humidity."""
if self._device.environmental_state:
if self._device.environmental_state.humidity == 0:
return None
return self._device.environmental_state.humidity
return None
@property
def hvac_mode(self):
"""Return hvac operation ie. heat, cool mode.
Need to be one of HVAC_MODE_*.
"""
if self._device.state.heat_mode == HeatMode.HEAT_ON.value:
return HVAC_MODE_HEAT
return HVAC_MODE_COOL
@property
def hvac_modes(self):
"""Return the list of available hvac operation modes.
Need to be a subset of HVAC_MODES.
"""
return SUPPORT_HVAC
@property
def hvac_action(self):
"""Return the current running hvac operation if supported.
Need to be one of CURRENT_HVAC_*.
"""
if self._device.state.heat_mode == HeatMode.HEAT_ON.value:
if self._device.state.heat_state == HeatState.HEAT_STATE_ON.value:
return CURRENT_HVAC_HEAT
return CURRENT_HVAC_IDLE
return CURRENT_HVAC_COOL
@property
def fan_mode(self):
"""Return the fan setting."""
if self._device.state.focus_mode == FocusMode.FOCUS_ON.value:
return FAN_FOCUS
return FAN_DIFFUSE
@property
def fan_modes(self):
"""Return the list of available fan modes."""
return SUPPORT_FAN
def set_temperature(self, **kwargs):
"""Set new target temperature."""
target_temp = kwargs.get(ATTR_TEMPERATURE)
if target_temp is None:
return
target_temp = int(target_temp)
_LOGGER.debug("Set %s temperature %s", self.name, target_temp)
# Limit the target temperature into acceptable range.
target_temp = min(self.max_temp, target_temp)
target_temp = max(self.min_temp, target_temp)
self._device.set_configuration(
heat_target=HeatTarget.celsius(target_temp), heat_mode=HeatMode.HEAT_ON
)
def set_fan_mode(self, fan_mode):
"""Set new fan mode."""
_LOGGER.debug("Set %s focus mode %s", self.name, fan_mode)
if fan_mode == FAN_FOCUS:
self._device.set_configuration(focus_mode=FocusMode.FOCUS_ON)
elif fan_mode == FAN_DIFFUSE:
self._device.set_configuration(focus_mode=FocusMode.FOCUS_OFF)
def set_hvac_mode(self, hvac_mode):
"""Set new target hvac mode."""
_LOGGER.debug("Set %s heat mode %s", self.name, hvac_mode)
if hvac_mode == HVAC_MODE_HEAT:
self._device.set_configuration(heat_mode=HeatMode.HEAT_ON)
elif hvac_mode == HVAC_MODE_COOL:
self._device.set_configuration(heat_mode=HeatMode.HEAT_OFF)
@property
def min_temp(self):
"""Return the minimum temperature."""
return 1
@property
def max_temp(self):
"""Return the maximum temperature."""
return 37
class DysonPureHotCoolEntity(ClimateEntity):
"""Representation of a Dyson climate hot+cool fan."""
def __init__(self, device):
"""Initialize the fan."""
self._device = device
async def async_added_to_hass(self):
"""Call when entity is added to hass."""
self._device.add_message_listener(self.on_message)
def on_message(self, message):
"""Call when new messages received from the climate device."""
if isinstance(message, DysonPureHotCoolV2State):
_LOGGER.debug(
"Message received for climate device %s : %s", self.name, message
)
self.schedule_update_ha_state()
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def supported_features(self):
"""Return the list of supported features."""
return SUPPORT_FLAGS
@property
def name(self):
"""Return the display name of this climate."""
return self._device.name
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return TEMP_CELSIUS
@property
def current_temperature(self):
"""Return the current temperature."""
if self._device.environmental_state.temperature is not None:
temperature_kelvin = self._device.environmental_state.temperature
if temperature_kelvin != 0:
return float("{:.1f}".format(temperature_kelvin - 273))
return None
@property
def target_temperature(self):
"""Return the target temperature."""
heat_target = int(self._device.state.heat_target) / 10
return int(heat_target - 273)
@property
def current_humidity(self):
"""Return the current humidity."""
if self._device.environmental_state.humidity is not None:
if self._device.environmental_state.humidity != 0:
return self._device.environmental_state.humidity
return None
@property
def hvac_mode(self):
"""Return hvac operation ie. heat, cool mode.
Need to be one of HVAC_MODE_*.
"""
if self._device.state.fan_power == FanPower.POWER_OFF.value:
return HVAC_MODE_OFF
if self._device.state.heat_mode == HeatMode.HEAT_ON.value:
return HVAC_MODE_HEAT
return HVAC_MODE_COOL
@property
def hvac_modes(self):
"""Return the list of available hvac operation modes.
Need to be a subset of HVAC_MODES.
"""
return SUPPORT_HVAC_PCOOL
@property
def hvac_action(self):
"""Return the current running hvac operation if supported.
Need to be one of CURRENT_HVAC_*.
"""
if self._device.state.fan_power == FanPower.POWER_OFF.value:
return CURRENT_HVAC_OFF
if self._device.state.heat_mode == HeatMode.HEAT_ON.value:
if self._device.state.heat_state == HeatState.HEAT_STATE_ON.value:
return CURRENT_HVAC_HEAT
return CURRENT_HVAC_IDLE
return CURRENT_HVAC_COOL
@property
def fan_mode(self):
"""Return the fan setting."""
if self._device.state.fan_state == FanState.FAN_OFF.value:
return FAN_OFF
return SPEED_MAP[self._device.state.speed]
@property
def fan_modes(self):
"""Return the list of available fan modes."""
return SUPPORT_FAN_PCOOL
def set_temperature(self, **kwargs):
"""Set new target temperature."""
target_temp = kwargs.get(ATTR_TEMPERATURE)
if target_temp is None:
_LOGGER.error("Missing target temperature %s", kwargs)
return
target_temp = int(target_temp)
_LOGGER.debug("Set %s temperature %s", self.name, target_temp)
# Limit the target temperature into acceptable range.
target_temp = min(self.max_temp, target_temp)
target_temp = max(self.min_temp, target_temp)
self._device.set_heat_target(HeatTarget.celsius(target_temp))
def set_fan_mode(self, fan_mode):
"""Set new fan mode."""
_LOGGER.debug("Set %s focus mode %s", self.name, fan_mode)
if fan_mode == FAN_OFF:
self._device.turn_off()
elif fan_mode == FAN_LOW:
self._device.set_fan_speed(FanSpeed.FAN_SPEED_4)
elif fan_mode == FAN_MEDIUM:
self._device.set_fan_speed(FanSpeed.FAN_SPEED_7)
elif fan_mode == FAN_HIGH:
self._device.set_fan_speed(FanSpeed.FAN_SPEED_10)
elif fan_mode == FAN_AUTO:
self._device.set_fan_speed(FanSpeed.FAN_SPEED_AUTO)
def set_hvac_mode(self, hvac_mode):
"""Set new target hvac mode."""
_LOGGER.debug("Set %s heat mode %s", self.name, hvac_mode)
if hvac_mode == HVAC_MODE_OFF:
self._device.turn_off()
elif self._device.state.fan_power == FanPower.POWER_OFF.value:
self._device.turn_on()
if hvac_mode == HVAC_MODE_HEAT:
self._device.enable_heat_mode()
elif hvac_mode == HVAC_MODE_COOL:
self._device.disable_heat_mode()
@property
def min_temp(self):
"""Return the minimum temperature."""
return 1
@property
def max_temp(self):
"""Return the maximum temperature."""
return 37
|
from functools import partial
import itertools as it
from . import coroutine, return_value, reactor
from .mock import FakeReactor
try:
from twisted.internet.task import Cooperator
except ImportError:
pass
else:
from twisted.internet import task as real_task
from twisted.internet.defer import gatherResults
def get_task():
if reactor.fake:
task = Cooperator(scheduler=partial(FakeReactor().callLater,
FakeReactor._DELAY))
else:
task = real_task.Cooperator()
return task
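# coop_reduce folds `iterable` with `func` one element at a time under a
# Cooperator: the inner generator yields after every step so other cooperative
# tasks can be interleaved. Rough usage sketch (assumes a running reactor):
#   d = coop_reduce(operator.add, [1, 2, 3])  # Deferred that fires with 6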
@coroutine
def coop_reduce(func, iterable, initializer=None):
task = get_task()
iterable = iter(iterable)
x = initializer or next(iterable)
result = {}
def work(func, it, x):
for y in it:
result['value'] = x = func(x, y)
yield
_task = task.cooperate(work(func, iterable, x))
yield _task.whenDone()
return_value(result['value'])
def async_reduce(async_func, iterable, initializer=None):
it = iter(iterable)
x = initializer or next(it)
@coroutine
def work(async_func, it, x):
for y in it:
x = yield async_func(x, y)
return_value(x)
return work(async_func, it, x)
@coroutine
def async_map(async_func, iterable, connections=0):
"""parallel map for deferred callables using cooperative multitasking
http://stackoverflow.com/a/20376166/408556
"""
if connections and not reactor.fake:
results = []
work = (async_func(x).addCallback(results.append) for x in iterable)
deferreds = [get_task().coiterate(work) for _ in range(connections)]
yield gatherResults(deferreds, consumeErrors=True)
else:
deferreds = map(async_func, iterable)
results = yield gatherResults(deferreds, consumeErrors=True)
return_value(results)
def async_starmap(async_func, iterable):
"""itertools.starmap for deferred callables
"""
deferreds = it.starmap(async_func, iterable)
return gatherResults(deferreds, consumeErrors=True)
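# async_dispatch applies the i-th async_func to the i-th item of `split`, while
# async_broadcast applies every async_func to one and the same item.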
def async_dispatch(split, *async_funcs, **kwargs):
return async_starmap(lambda item, f: f(item), zip(split, async_funcs))
def async_broadcast(item, *async_funcs, **kwargs):
return async_dispatch(it.repeat(item), *async_funcs, **kwargs)
|
import asyncio
import logging
from aioftp import Client, StatusCodeError
from haffmpeg.camera import CameraMjpeg
from haffmpeg.tools import IMAGE_JPEG, ImageFrame
import voluptuous as vol
from homeassistant.components.camera import PLATFORM_SCHEMA, Camera
from homeassistant.components.ffmpeg import DATA_FFMPEG
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PASSWORD,
CONF_PATH,
CONF_PORT,
CONF_USERNAME,
)
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_aiohttp_proxy_stream
_LOGGER = logging.getLogger(__name__)
DEFAULT_BRAND = "YI Home Camera"
DEFAULT_PASSWORD = ""
DEFAULT_PATH = "/tmp/sd/record" # nosec
DEFAULT_PORT = 21
DEFAULT_USERNAME = "root"
DEFAULT_ARGUMENTS = "-pred 1"
CONF_FFMPEG_ARGUMENTS = "ffmpeg_arguments"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_NAME): cv.string,
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_PATH, default=DEFAULT_PATH): cv.string,
vol.Optional(CONF_USERNAME, default=DEFAULT_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_FFMPEG_ARGUMENTS, default=DEFAULT_ARGUMENTS): cv.string,
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up a Yi Camera."""
async_add_entities([YiCamera(hass, config)], True)
class YiCamera(Camera):
"""Define an implementation of a Yi Camera."""
def __init__(self, hass, config):
"""Initialize."""
super().__init__()
self._extra_arguments = config.get(CONF_FFMPEG_ARGUMENTS)
self._last_image = None
self._last_url = None
self._manager = hass.data[DATA_FFMPEG]
self._name = config[CONF_NAME]
self._is_on = True
self.host = config[CONF_HOST]
self.port = config[CONF_PORT]
self.path = config[CONF_PATH]
self.user = config[CONF_USERNAME]
self.passwd = config[CONF_PASSWORD]
@property
def brand(self):
"""Camera brand."""
return DEFAULT_BRAND
@property
def is_on(self):
"""Determine whether the camera is on."""
return self._is_on
@property
def name(self):
"""Return the name of this camera."""
return self._name
async def _get_latest_video_url(self):
"""Retrieve the latest video file from the customized Yi FTP server."""
ftp = Client()
try:
await ftp.connect(self.host)
await ftp.login(self.user, self.passwd)
except (ConnectionRefusedError, StatusCodeError) as err:
raise PlatformNotReady(err) from err
try:
await ftp.change_directory(self.path)
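# NOTE: this assumes ftp.list() returns entries oldest-to-newest, so the last
# directory (and the last file inside it) is treated as the most recent clip.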
dirs = []
for path, attrs in await ftp.list():
if attrs["type"] == "dir" and "." not in str(path):
dirs.append(path)
latest_dir = dirs[-1]
await ftp.change_directory(latest_dir)
videos = []
for path, _ in await ftp.list():
videos.append(path)
if not videos:
_LOGGER.info('Video folder "%s" empty; delaying', latest_dir)
return None
await ftp.quit()
self._is_on = True
return (
f"ftp://{self.user}:{self.passwd}@{self.host}:"
f"{self.port}{self.path}/{latest_dir}/{videos[-1]}"
)
except (ConnectionRefusedError, StatusCodeError) as err:
_LOGGER.error("Error while fetching video: %s", err)
self._is_on = False
return None
async def async_camera_image(self):
"""Return a still image response from the camera."""
url = await self._get_latest_video_url()
if url and url != self._last_url:
ffmpeg = ImageFrame(self._manager.binary, loop=self.hass.loop)
self._last_image = await asyncio.shield(
ffmpeg.get_image(
url, output_format=IMAGE_JPEG, extra_cmd=self._extra_arguments
),
loop=self.hass.loop,
)
self._last_url = url
return self._last_image
async def handle_async_mjpeg_stream(self, request):
"""Generate an HTTP MJPEG stream from the camera."""
if not self._is_on:
return
stream = CameraMjpeg(self._manager.binary, loop=self.hass.loop)
await stream.open_camera(self._last_url, extra_cmd=self._extra_arguments)
try:
stream_reader = await stream.get_reader()
return await async_aiohttp_proxy_stream(
self.hass,
request,
stream_reader,
self._manager.ffmpeg_stream_content_type,
)
finally:
await stream.close()
|
import operator
import tree_format
from molecule import logger
from molecule import util
LOG = logger.get_logger(__name__)
class Scenarios(object):
"""
The Scenarios object consists of one or more scenario objects Molecule will
execute.
"""
def __init__(self, configs, scenario_name=None):
"""
Initialize a new Scenarios instance and return None.
:param configs: A list containing Molecule config instances.
:param scenario_name: A string containing the name of the scenario.
:return: None
"""
self._configs = configs
self._scenario_name = scenario_name
self._scenarios = self.all
def next(self):
if not self._scenarios:
raise StopIteration
return self._scenarios.pop(0)
def __iter__(self):
return self
__next__ = next # Python 3.X compatibility
@property
def all(self):
"""
Return a list containing all scenario objects.
:return: list
"""
if self._scenario_name:
scenarios = self._filter_for_scenario()
self._verify()
return scenarios
scenarios = [c.scenario for c in self._configs]
scenarios.sort(key=lambda x: x.directory)
return scenarios
def print_matrix(self):
msg = 'Test matrix'
LOG.info(msg)
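# tree_format expects (label, children) tuples: the root has an empty label,
# each scenario is a child node, and each action in its sequence is a leaf.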
tree = tuple(('', [(scenario.name, [(action, [])
for action in scenario.sequence])
for scenario in self.all]))
tf = tree_format.format_tree(
tree,
format_node=operator.itemgetter(0),
get_children=operator.itemgetter(1))
LOG.out(tf)
LOG.out('')
def _verify(self):
"""
Verify the specified scenario was found and return None.
:return: None
"""
scenario_names = [c.scenario.name for c in self._configs]
if self._scenario_name not in scenario_names:
msg = ("Scenario '{}' not found. "
'Exiting.').format(self._scenario_name)
util.sysexit_with_message(msg)
def _filter_for_scenario(self):
"""
Find the scenario matching the provided scenario name and return a
list.
:return: list
"""
return [
c.scenario for c in self._configs
if c.scenario.name == self._scenario_name
]
def _get_matrix(self):
"""
Build a matrix of scenarios with the sequences to include and return a
dict.
{
scenario_1: {
'subcommand': [
'action-1',
'action-2',
],
},
scenario_2: {
'subcommand': [
'action-1',
],
},
}
:returns: dict
"""
return dict({
scenario.name: {
'check': scenario.check_sequence,
'cleanup': scenario.cleanup_sequence,
'converge': scenario.converge_sequence,
'create': scenario.create_sequence,
'dependency': scenario.dependency_sequence,
'destroy': scenario.destroy_sequence,
'idempotence': scenario.idempotence_sequence,
'lint': scenario.lint_sequence,
'prepare': scenario.prepare_sequence,
'side_effect': scenario.side_effect_sequence,
'syntax': scenario.syntax_sequence,
'test': scenario.test_sequence,
'verify': scenario.verify_sequence,
}
for scenario in self.all
})
|
from pymfy.api.devices.camera_protect import CameraProtect
from pymfy.api.devices.category import Category
from homeassistant.components.switch import SwitchEntity
from . import SomfyEntity
from .const import API, COORDINATOR, DOMAIN
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Somfy switch platform."""
def get_shutters():
"""Retrieve switches."""
domain_data = hass.data[DOMAIN]
coordinator = domain_data[COORDINATOR]
api = domain_data[API]
return [
SomfyCameraShutter(coordinator, device_id, api)
for device_id, device in coordinator.data.items()
if Category.CAMERA.value in device.categories
]
async_add_entities(await hass.async_add_executor_job(get_shutters), True)
class SomfyCameraShutter(SomfyEntity, SwitchEntity):
"""Representation of a Somfy Camera Shutter device."""
def __init__(self, coordinator, device_id, api):
"""Initialize the Somfy device."""
super().__init__(coordinator, device_id, api)
self._create_device()
def _create_device(self):
"""Update the device with the latest data."""
self.shutter = CameraProtect(self.device, self.api)
def turn_on(self, **kwargs) -> None:
"""Turn the entity on."""
self.shutter.open_shutter()
def turn_off(self, **kwargs):
"""Turn the entity off."""
self.shutter.close_shutter()
@property
def is_on(self) -> bool:
"""Return True if entity is on."""
return self.shutter.get_shutter_position() == "opened"
|
from flask_script import Manager
from lemur.constants import SUCCESS_METRIC_STATUS, FAILURE_METRIC_STATUS
from lemur.extensions import sentry, metrics
from lemur.notifications.messaging import send_expiration_notifications
from lemur.notifications.messaging import send_authority_expiration_notifications
from lemur.notifications.messaging import send_security_expiration_summary
manager = Manager(usage="Handles notification related tasks.")
@manager.option(
"-e",
"--exclude",
dest="exclude",
action="append",
default=[],
help="Common name matching of certificates that should be excluded from notification",
)
def expirations(exclude):
"""
Run Lemur's notification engine, which looks for expiring certificates and sends
notifications out to those that have subscribed to them.
Every certificate receives notifications by default. When expiration notifications are
handled outside of Lemur, we exclude their names (or any matching names) from
expiration notifications. It performs simple subset matching and is case insensitive.
:return:
"""
status = FAILURE_METRIC_STATUS
try:
print("Starting to notify subscribers about expiring certificates!")
success, failed = send_expiration_notifications(exclude)
print(
f"Finished notifying subscribers about expiring certificates! Sent: {success} Failed: {failed}"
)
status = SUCCESS_METRIC_STATUS
except Exception as e:
sentry.captureException()
metrics.send(
"expiration_notification_job", "counter", 1, metric_tags={"status": status}
)
def authority_expirations():
"""
Run Lemur's notification engine, which looks for expiring certificate authority certificates and sends
notifications out to the security team and owner.
:return:
"""
status = FAILURE_METRIC_STATUS
try:
print("Starting to notify subscribers about expiring certificate authority certificates!")
success, failed = send_authority_expiration_notifications()
print(
"Finished notifying subscribers about expiring certificate authority certificates! "
f"Sent: {success} Failed: {failed}"
)
status = SUCCESS_METRIC_STATUS
except Exception as e:
sentry.captureException()
metrics.send(
"authority_expiration_notification_job", "counter", 1, metric_tags={"status": status}
)
def security_expiration_summary(exclude):
"""
Sends a summary email with info on all expiring certs (that match the configured expiry intervals).
:return:
"""
status = FAILURE_METRIC_STATUS
try:
print("Starting to notify security team about expiring certificates!")
success = send_security_expiration_summary(exclude)
print(
f"Finished notifying security team about expiring certificates! Success: {success}"
)
if success:
status = SUCCESS_METRIC_STATUS
except Exception:
sentry.captureException()
metrics.send(
"security_expiration_notification_job", "counter", 1, metric_tags={"status": status}
)
|
from homeassistant.components.hassio.handler import HassioAPIError
from homeassistant.const import EVENT_HOMEASSISTANT_START
from homeassistant.setup import async_setup_component
from tests.async_mock import Mock, patch
async def test_hassio_discovery_startup(hass, aioclient_mock, hassio_client):
"""Test startup and discovery after event."""
aioclient_mock.get(
"http://127.0.0.1/discovery",
json={
"result": "ok",
"data": {
"discovery": [
{
"service": "mqtt",
"uuid": "test",
"addon": "mosquitto",
"config": {
"broker": "mock-broker",
"port": 1883,
"username": "mock-user",
"password": "mock-pass",
"protocol": "3.1.1",
},
}
]
},
},
)
aioclient_mock.get(
"http://127.0.0.1/addons/mosquitto/info",
json={"result": "ok", "data": {"name": "Mosquitto Test"}},
)
assert aioclient_mock.call_count == 0
with patch(
"homeassistant.components.mqtt.config_flow.FlowHandler.async_step_hassio",
return_value={"type": "abort"},
) as mock_mqtt:
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
assert aioclient_mock.call_count == 2
assert mock_mqtt.called
mock_mqtt.assert_called_with(
{
"broker": "mock-broker",
"port": 1883,
"username": "mock-user",
"password": "mock-pass",
"protocol": "3.1.1",
"addon": "Mosquitto Test",
}
)
async def test_hassio_discovery_startup_done(hass, aioclient_mock, hassio_client):
"""Test startup and discovery with hass discovery."""
aioclient_mock.post(
"http://127.0.0.1/supervisor/options",
json={"result": "ok", "data": {}},
)
aioclient_mock.get(
"http://127.0.0.1/discovery",
json={
"result": "ok",
"data": {
"discovery": [
{
"service": "mqtt",
"uuid": "test",
"addon": "mosquitto",
"config": {
"broker": "mock-broker",
"port": 1883,
"username": "mock-user",
"password": "mock-pass",
"protocol": "3.1.1",
},
}
]
},
},
)
aioclient_mock.get(
"http://127.0.0.1/addons/mosquitto/info",
json={"result": "ok", "data": {"name": "Mosquitto Test"}},
)
with patch(
"homeassistant.components.hassio.HassIO.update_hass_api",
return_value={"result": "ok"},
), patch(
"homeassistant.components.hassio.HassIO.get_info",
Mock(side_effect=HassioAPIError()),
), patch(
"homeassistant.components.mqtt.config_flow.FlowHandler.async_step_hassio",
return_value={"type": "abort"},
) as mock_mqtt:
await hass.async_start()
await async_setup_component(hass, "hassio", {})
await hass.async_block_till_done()
assert aioclient_mock.call_count == 2
assert mock_mqtt.called
mock_mqtt.assert_called_with(
{
"broker": "mock-broker",
"port": 1883,
"username": "mock-user",
"password": "mock-pass",
"protocol": "3.1.1",
"addon": "Mosquitto Test",
}
)
async def test_hassio_discovery_webhook(hass, aioclient_mock, hassio_client):
"""Test discovery webhook."""
aioclient_mock.get(
"http://127.0.0.1/discovery/testuuid",
json={
"result": "ok",
"data": {
"service": "mqtt",
"uuid": "test",
"addon": "mosquitto",
"config": {
"broker": "mock-broker",
"port": 1883,
"username": "mock-user",
"password": "mock-pass",
"protocol": "3.1.1",
},
},
},
)
aioclient_mock.get(
"http://127.0.0.1/addons/mosquitto/info",
json={"result": "ok", "data": {"name": "Mosquitto Test"}},
)
with patch(
"homeassistant.components.mqtt.config_flow.FlowHandler.async_step_hassio",
return_value={"type": "abort"},
) as mock_mqtt:
resp = await hassio_client.post(
"/api/hassio_push/discovery/testuuid",
json={"addon": "mosquitto", "service": "mqtt", "uuid": "testuuid"},
)
await hass.async_block_till_done()
assert resp.status == 200
assert aioclient_mock.call_count == 2
assert mock_mqtt.called
mock_mqtt.assert_called_with(
{
"broker": "mock-broker",
"port": 1883,
"username": "mock-user",
"password": "mock-pass",
"protocol": "3.1.1",
"addon": "Mosquitto Test",
}
)
|
from test import CollectorTestCase
from test import get_collector_config
from mock import Mock
from mock import patch
from diamond.collector import Collector
from disktemp import DiskTemperatureCollector
###############################################################################
class TestDiskTemperatureCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('DiskTemperatureCollector', {
'interval': 10,
'bin': 'true',
})
self.collector = DiskTemperatureCollector(config, None)
def test_import(self):
self.assertTrue(DiskTemperatureCollector)
@patch.object(Collector, 'publish')
def test_smart_available(self, publish_mock):
patch_listdir = patch('os.listdir', Mock(return_value=['sda']))
patch_communicate = patch(
'subprocess.Popen.communicate',
Mock(return_value=(
self.getFixture('hddtemp').getvalue(),
'')))
patch_listdir.start()
patch_communicate.start()
self.collector.collect()
patch_listdir.stop()
patch_communicate.stop()
self.assertPublished(publish_mock, 'sda.Temperature', 50)
@patch.object(Collector, 'publish')
def test_smart_unavailable(self, publish_mock):
patch_listdir = patch('os.listdir', Mock(return_value=['sda']))
patch_communicate = patch(
'subprocess.Popen.communicate',
Mock(return_value=(
self.getFixture('smart_missing').getvalue(),
'')))
patch_listdir.start()
patch_communicate.start()
self.collector.collect()
patch_listdir.stop()
patch_communicate.stop()
self.assertUnpublished(publish_mock, 'sda.Temperature', 50)
@patch.object(Collector, 'publish')
def test_filter(self, publish_mock):
self.collector.config['devices'] = 'sda'
self.collector.process_config()
patch_listdir = patch('os.listdir', Mock(return_value=['sda', 'sdb']))
patch_communicate = patch(
'subprocess.Popen.communicate',
Mock(return_value=(
self.getFixture('hddtemp').getvalue(),
'')))
patch_listdir.start()
patch_communicate.start()
self.collector.collect()
patch_listdir.stop()
patch_communicate.stop()
self.assertPublished(publish_mock, 'sda.Temperature', 50)
self.assertUnpublished(publish_mock, 'sdb.Temperature', 50)
@patch.object(Collector, 'publish')
def test_regex(self, publish_mock):
self.collector.config['devices'] = '(s)d(a)'
self.collector.process_config()
patch_listdir = patch('os.listdir', Mock(return_value=['sda']))
patch_communicate = patch(
'subprocess.Popen.communicate',
Mock(return_value=(
self.getFixture('hddtemp').getvalue(),
'')))
patch_listdir.start()
patch_communicate.start()
self.collector.collect()
patch_listdir.stop()
patch_communicate.stop()
self.assertPublished(publish_mock, 's.a.Temperature', 50)
|
import string
import types
from typing import Mapping, MutableMapping, Optional, Sequence
import attr
from PyQt5.QtCore import pyqtSignal, QObject, Qt
from PyQt5.QtGui import QKeySequence, QKeyEvent
from qutebrowser.config import config
from qutebrowser.utils import usertypes, log, utils
from qutebrowser.keyinput import keyutils
@attr.s(frozen=True)
class MatchResult:
"""The result of matching a keybinding."""
match_type: QKeySequence.SequenceMatch = attr.ib()
command: Optional[str] = attr.ib()
sequence: keyutils.KeySequence = attr.ib()
def __attrs_post_init__(self) -> None:
if self.match_type == QKeySequence.ExactMatch:
assert self.command is not None
else:
assert self.command is None
class BindingTrie:
"""Helper class for key parser. Represents a set of bindings.
Every BindingTrie will either contain children or a command (for leaf
nodes). The only exception is the root BindingTrie, if there are no
bindings at all.
From the outside, this class works similar to a mapping of
keyutils.KeySequence to str. Doing trie[sequence] = 'command' adds a
binding, and so does calling .update() with a mapping. Additionally, a
"matches" method can be used to do partial matching.
However, some mapping methods are not (yet) implemented:
- __getitem__ (use matches() instead)
- __len__
- __iter__
- __delitem__
Attributes:
children: A mapping from KeyInfo to children BindingTries.
command: Command associated with this trie node.
"""
__slots__ = 'children', 'command'
def __init__(self) -> None:
self.children: MutableMapping[keyutils.KeyInfo, BindingTrie] = {}
self.command: Optional[str] = None
def __setitem__(self, sequence: keyutils.KeySequence,
command: str) -> None:
node = self
for key in sequence:
if key not in node.children:
node.children[key] = BindingTrie()
node = node.children[key]
node.command = command
def __contains__(self, sequence: keyutils.KeySequence) -> bool:
return self.matches(sequence).match_type == QKeySequence.ExactMatch
def __repr__(self) -> str:
return utils.get_repr(self, children=self.children,
command=self.command)
def __str__(self) -> str:
return '\n'.join(self.string_lines(blank=True))
def string_lines(self, indent: int = 0, blank: bool = False) -> Sequence[str]:
"""Get a list of strings for a pretty-printed version of this trie."""
lines = []
if self.command is not None:
lines.append('{}=> {}'.format(' ' * indent, self.command))
for key, child in sorted(self.children.items()):
lines.append('{}{}:'.format(' ' * indent, key))
lines.extend(child.string_lines(indent=indent+1))
if blank:
lines.append('')
return lines
def update(self, mapping: Mapping) -> None:
"""Add data from the given mapping to the trie."""
for key in mapping:
self[key] = mapping[key]
def matches(self, sequence: keyutils.KeySequence) -> MatchResult:
"""Try to match a given keystring with any bound keychain.
Args:
sequence: The key sequence to match.
Return:
A MatchResult object.
"""
node = self
for key in sequence:
try:
node = node.children[key]
except KeyError:
return MatchResult(match_type=QKeySequence.NoMatch,
command=None,
sequence=sequence)
if node.command is not None:
return MatchResult(match_type=QKeySequence.ExactMatch,
command=node.command,
sequence=sequence)
elif node.children:
return MatchResult(match_type=QKeySequence.PartialMatch,
command=None,
sequence=sequence)
else: # This can only happen when there are no bindings at all.
return MatchResult(match_type=QKeySequence.NoMatch,
command=None,
sequence=sequence)
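# Illustrative usage sketch: the trie behaves like a mapping from key
# sequences to command strings. Assumes a Qt application context and that
# keyutils.KeySequence.parse() is available to build the sequences; the
# bindings shown are made up for the example.
#
#     trie = BindingTrie()
#     trie[keyutils.KeySequence.parse('gg')] = 'scroll-to-top'
#     trie.update({keyutils.KeySequence.parse('gu'): 'navigate up'})
#     partial = trie.matches(keyutils.KeySequence.parse('g'))
#     assert partial.match_type == QKeySequence.PartialMatch
#     exact = trie.matches(keyutils.KeySequence.parse('gg'))
#     assert exact.command == 'scroll-to-top'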
class BaseKeyParser(QObject):
"""Parser for vim-like key sequences and shortcuts.
Not intended to be instantiated directly. Subclasses have to override
execute() to do whatever they want to.
Attributes:
mode_name: The name of the mode in the config.
bindings: Bound key bindings
_mode: The usertypes.KeyMode associated with this keyparser.
_win_id: The window ID this keyparser is associated with.
_sequence: The currently entered key sequence
_do_log: Whether to log keypresses or not.
passthrough: Whether unbound keys should be passed through with this
handler.
_supports_count: Whether count is supported.
Signals:
keystring_updated: Emitted when the keystring is updated.
arg: New keystring.
request_leave: Emitted to request leaving a mode.
arg 0: Mode to leave.
arg 1: Reason for leaving.
arg 2: Ignore the request if we're not in that mode
"""
keystring_updated = pyqtSignal(str)
request_leave = pyqtSignal(usertypes.KeyMode, str, bool)
def __init__(self, *, mode: usertypes.KeyMode,
win_id: int,
parent: QObject = None,
do_log: bool = True,
passthrough: bool = False,
supports_count: bool = True) -> None:
super().__init__(parent)
self._win_id = win_id
self._sequence = keyutils.KeySequence()
self._count = ''
self._mode = mode
self._do_log = do_log
self.passthrough = passthrough
self._supports_count = supports_count
self.bindings = BindingTrie()
self._read_config()
config.instance.changed.connect(self._on_config_changed)
def __repr__(self) -> str:
return utils.get_repr(self, mode=self._mode,
win_id=self._win_id,
do_log=self._do_log,
passthrough=self.passthrough,
supports_count=self._supports_count)
def _debug_log(self, message: str) -> None:
"""Log a message to the debug log if logging is active.
Args:
message: The message to log.
"""
if self._do_log:
prefix = '{} for mode {}: '.format(self.__class__.__name__,
self._mode.name)
log.keyboard.debug(prefix + message)
def _match_key(self, sequence: keyutils.KeySequence) -> MatchResult:
"""Try to match a given keystring with any bound keychain.
Args:
sequence: The command string to find.
Return:
A MatchResult object with the match type, the matched command (if any),
and the sequence that was matched.
"""
assert sequence
return self.bindings.matches(sequence)
def _match_without_modifiers(
self, sequence: keyutils.KeySequence) -> MatchResult:
"""Try to match a key with optional modifiers stripped."""
self._debug_log("Trying match without modifiers")
sequence = sequence.strip_modifiers()
return self._match_key(sequence)
def _match_key_mapping(
self, sequence: keyutils.KeySequence) -> MatchResult:
"""Try to match a key in bindings.key_mappings."""
self._debug_log("Trying match with key_mappings")
mapped = sequence.with_mappings(
types.MappingProxyType(config.cache['bindings.key_mappings']))
if sequence != mapped:
self._debug_log("Mapped {} -> {}".format(
sequence, mapped))
return self._match_key(mapped)
return MatchResult(match_type=QKeySequence.NoMatch,
command=None,
sequence=sequence)
def _match_count(self, sequence: keyutils.KeySequence,
dry_run: bool) -> bool:
"""Try to match a key as count."""
txt = str(sequence[-1]) # To account for sequences changed above.
if (txt in string.digits and self._supports_count and
not (not self._count and txt == '0')):
self._debug_log("Trying match as count")
assert len(txt) == 1, txt
if not dry_run:
self._count += txt
self.keystring_updated.emit(self._count + str(self._sequence))
return True
return False
def handle(self, e: QKeyEvent, *,
dry_run: bool = False) -> QKeySequence.SequenceMatch:
"""Handle a new keypress.
Separate the keypress into count/command, then check if it matches
any possible command, and either run the command, ignore it, or
display an error.
Args:
e: the KeyPressEvent from Qt.
dry_run: Don't actually execute anything, only check whether there
would be a match.
Return:
A QKeySequence match.
"""
key = Qt.Key(e.key())
txt = str(keyutils.KeyInfo.from_event(e))
self._debug_log("Got key: 0x{:x} / modifiers: 0x{:x} / text: '{}' / "
"dry_run {}".format(key, int(e.modifiers()), txt,
dry_run))
if keyutils.is_modifier_key(key):
self._debug_log("Ignoring, only modifier")
return QKeySequence.NoMatch
try:
sequence = self._sequence.append_event(e)
except keyutils.KeyParseError as ex:
self._debug_log("{} Aborting keychain.".format(ex))
self.clear_keystring()
return QKeySequence.NoMatch
result = self._match_key(sequence)
del sequence # Enforce code below to use the modified result.sequence
if result.match_type == QKeySequence.NoMatch:
result = self._match_without_modifiers(result.sequence)
if result.match_type == QKeySequence.NoMatch:
result = self._match_key_mapping(result.sequence)
if result.match_type == QKeySequence.NoMatch:
was_count = self._match_count(result.sequence, dry_run)
if was_count:
return QKeySequence.ExactMatch
if dry_run:
return result.match_type
self._sequence = result.sequence
if result.match_type == QKeySequence.ExactMatch:
assert result.command is not None
self._debug_log("Definitive match for '{}'.".format(
result.sequence))
count = int(self._count) if self._count else None
self.clear_keystring()
self.execute(result.command, count)
elif result.match_type == QKeySequence.PartialMatch:
self._debug_log("No match for '{}' (added {})".format(
result.sequence, txt))
self.keystring_updated.emit(self._count + str(result.sequence))
elif result.match_type == QKeySequence.NoMatch:
self._debug_log("Giving up with '{}', no matches".format(
result.sequence))
self.clear_keystring()
else:
raise utils.Unreachable("Invalid match value {!r}".format(
result.match_type))
return result.match_type
@config.change_filter('bindings')
def _on_config_changed(self) -> None:
self._read_config()
def _read_config(self) -> None:
"""Read the configuration."""
self.bindings = BindingTrie()
config_bindings = config.key_instance.get_bindings_for(self._mode.name)
for key, cmd in config_bindings.items():
assert cmd
self.bindings[key] = cmd
def execute(self, cmdstr: str, count: int = None) -> None:
"""Handle a completed keychain.
Args:
cmdstr: The command to execute as a string.
count: The count if given.
"""
raise NotImplementedError
def clear_keystring(self) -> None:
"""Clear the currently entered key sequence."""
if self._sequence:
self._debug_log("Clearing keystring (was: {}).".format(
self._sequence))
self._sequence = keyutils.KeySequence()
self._count = ''
self.keystring_updated.emit('')
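# Illustrative sketch of the minimal shape of a concrete subclass: only
# execute() has to be overridden. Real subclasses dispatch to qutebrowser's
# command runner; the logging-only body below is a made-up stand-in.
#
#     class LoggingKeyParser(BaseKeyParser):
#
#         def execute(self, cmdstr: str, count: int = None) -> None:
#             log.keyboard.debug("Would run {!r} with count {}".format(
#                 cmdstr, count))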
|
import asyncio
from datetime import timedelta
from homeassistant.core import callback
from homeassistant.helpers import ratelimit
from homeassistant.util import dt as dt_util
async def test_hit(hass):
"""Test hitting the rate limit."""
refresh_called = False
@callback
def _refresh():
nonlocal refresh_called
refresh_called = True
return
rate_limiter = ratelimit.KeyedRateLimit(hass)
rate_limiter.async_triggered("key1", dt_util.utcnow())
assert (
rate_limiter.async_schedule_action(
"key1", timedelta(seconds=0.001), dt_util.utcnow(), _refresh
)
is not None
)
assert not refresh_called
assert rate_limiter.async_has_timer("key1")
await asyncio.sleep(0.002)
assert refresh_called
assert (
rate_limiter.async_schedule_action(
"key2", timedelta(seconds=0.001), dt_util.utcnow(), _refresh
)
is None
)
rate_limiter.async_remove()
async def test_miss(hass):
"""Test missing the rate limit."""
refresh_called = False
@callback
def _refresh():
nonlocal refresh_called
refresh_called = True
return
rate_limiter = ratelimit.KeyedRateLimit(hass)
assert (
rate_limiter.async_schedule_action(
"key1", timedelta(seconds=0.1), dt_util.utcnow(), _refresh
)
is None
)
assert not refresh_called
assert not rate_limiter.async_has_timer("key1")
assert (
rate_limiter.async_schedule_action(
"key1", timedelta(seconds=0.1), dt_util.utcnow(), _refresh
)
is None
)
assert not refresh_called
assert not rate_limiter.async_has_timer("key1")
rate_limiter.async_remove()
async def test_no_limit(hass):
"""Test async_schedule_action always return None when there is no rate limit."""
refresh_called = False
@callback
def _refresh():
nonlocal refresh_called
refresh_called = True
return
rate_limiter = ratelimit.KeyedRateLimit(hass)
rate_limiter.async_triggered("key1", dt_util.utcnow())
assert (
rate_limiter.async_schedule_action("key1", None, dt_util.utcnow(), _refresh)
is None
)
assert not refresh_called
assert not rate_limiter.async_has_timer("key1")
rate_limiter.async_triggered("key1", dt_util.utcnow())
assert (
rate_limiter.async_schedule_action("key1", None, dt_util.utcnow(), _refresh)
is None
)
assert not refresh_called
assert not rate_limiter.async_has_timer("key1")
rate_limiter.async_remove()
|
import ftplib
import tempfile
import os
import stat
from stashutils.core import get_stash
from stashutils.fsi.base import BaseFSI, make_stat, calc_mode
from stashutils.fsi.errors import OperationFailure, IsDir, IsFile
from stashutils.fsi.errors import AlreadyExists
_stash = get_stash()
class FTPFSI(BaseFSI):
"""
An FSI for FTP servers.
Unfortunately, FTP was designed as a human-readable protocol.
Because of this, the protocol is not completely unified.
This means that this FSI may not work on all FTP servers.
"""
def __init__(self, logger=None):
self.logger = logger
self.path = "/"
self.ftp = None
self.host = None
def abspath(self, name):
"""returns the absolute path of name"""
return os.path.join(self.path, name)
def connect(self, *args):
if self.ftp is not None:
return "Interface already connected"
if len(args) < 1 or len(args) > 5:
return "Invalid argument count"
user, pswd = None, None
debug = 0
# TODO: make the following code less ugly
if len(args) == 1:
host = args[0]
port = 21
secure = False
elif len(args) == 2:
host, port = args
secure = False
elif len(args) == 5 or len(args) == 4:
user = args[2]
pswd = args[3]
secure = False
host = args[0]
port = args[1]
if len(args) not in (3, 5):
# this prevents the elifs from being executed
pass
elif args[-1] == "-s":
host, port = args[:2]
secure = True
elif args[-1] == "-n":
host, port = args[:2]
secure = False
elif args[-1] == "-d":
host, port = args[:2]
secure = True
debug = 2
else:
return "Unknown argument(s)"
self.host = host
self.port = port
self.user = user
try:
port = int(port)
except:
return "Invalid port-argument"
if secure:
self.ftp = ftplib.FTP_TLS()
else:
self.ftp = ftplib.FTP()
self.ftp.set_debuglevel(debug)
try:
self.ftp.connect(host, port)
except Exception as e:
self.close()
if isinstance(e, EOFError):
return "EOF"
return e.message
else:
if secure:
self.log(_stash.text_color("Done", "green"))
self.log(".\nSecuring Connection... ")
try:
self.ftp.prot_p()
except Exception as e:
self.close()
return e.message
self.log(_stash.text_color("Done", "green"))
self.log(".\nLogging in... ")
try:
self.ftp.login(user, pswd)
except Exception as e:
self.close()
return e.message
else:
self.path = self.ftp.pwd()
return True
def close(self):
if self.ftp is not None:
try:
self.ftp.quit()
except:
try:
self.ftp.close()
except:
pass
def repr(self):
raw = "FTP-Session for {u} on {h}:{p}"
fo = raw.format(u=self.user, h=self.host, p=self.port)
return fo
def cd(self, name):
ap = self.abspath(name)
try:
self.ftp.cwd(ap)
except Exception as e:
raise OperationFailure(str(e))
else:
self.path = ap
def mkdir(self, name):
ap = self.abspath(name)
try:
self.ftp.mkd(ap)
except Exception as e:
# test whether the dir exists
self.get_path()
try:
self.cd(ap)
except Exception:
raise e
else:
raise AlreadyExists("Already exists!")
raise OperationFailure(str(e))
def listdir(self, path="."):
ap = self.abspath(path)
try:
content = self.ftp.nlst(ap)
ret = [e.split("/")[-1] for e in content]
return ret
except Exception as e:
raise OperationFailure(str(e))
def remove(self, name):
ap = self.abspath(name)
# we don't know whether the target is a directory or a file, so try both
try:
self.ftp.delete(ap)
except Exception as e:
try:
self.ftp.rmd(ap)
except Exception as e2:
text = _stash.text_color("Error trying to delete file: {e}!\n".format(e=e.message), "red")
self.log(text)
text = _stash.text_color("Error trying to delete dir (after file-deletion failed)!\n", "red")
self.log(text)
raise OperationFailure(e2.message)
def open(self, name, mode="rb", buffering=0):
mode = mode.replace("+", "").replace("U", "")
ap = self.abspath(name)
self.log("Opening '{p}' with mode '{m}'...\n".format(p=ap, m=mode))
if mode in ("r", "rb"):
try:
tf = tempfile.TemporaryFile()
self.ftp.retrbinary("RETR " + ap, tf.write, 4096)
tf.seek(0)
except Exception as e:
self.log('Error during open("{p}","r"): {e}\n'.format(p=ap, e=e.message))
raise OperationFailure(e.message)
return tf
elif "w" in mode:
return FTP_Upload(self.ftp, ap, mode, ap)
else:
raise OperationFailure("Mode not supported!")
def get_path(self):
return self.ftp.pwd()
def isdir(self, name):
ap = self.abspath(name)
op = self.get_path()
try:
self.ftp.cwd(ap)
return True
except:
return False
finally:
self.ftp.cwd(op)
def _get_total_size_and_type(self, path):
"""
returns the file/dir size and the type. Copied from:
http://stackoverflow.com/questions/22090001/get-folder-size-using-ftplib
This is a modified version.
"""
size = 0
op = self.ftp.pwd()
try:
self.ftp.cwd(path)
self.log("stat: cwd worked (->IsDir)\n")
except:
# TODO: raise an Exception if the file does not exist
self.log("stat: cwd failed (->IsFile or NotFound)\n")
try:
size = self.ftp.size(path)
except:
size = None
if size is None:
self.log("stat: size failed (->NotFound)\n")
raise OperationFailure("NotFound!")
self.log("stat: size worked (->IsFile)\n")
return (size, stat.S_IFREG)
finally:
self.ftp.cwd(op)
return (1, stat.S_IFDIR)
def stat(self, name):
ap = self.abspath(name)
self.log("stat: {p}\n".format(p=ap))
op = self.path
try:
size, type = self._get_total_size_and_type(ap)
except Exception as e:
self.log("Error during stat: {e}\n".format(e=e.message))
raise OperationFailure(e.message)
finally:
self.ftp.cwd(op)
# todo: check permissions
m = calc_mode(type=type)
return make_stat(size=size, mode=m)
class FTP_Upload(object):
"""utility class used for FTP-uploads.
this class creates a tempfile, which is uploaded to the server when closed."""
def __init__(self, ftp, path, mode, name):
self.ftp = ftp
self.path = path
self.mode = mode
self.closed = False
self.name = name
self.tf = tempfile.TemporaryFile()
def write(self, data):
self.tf.write(data)
def flush(self):
pass
def tell(self):
return self.tf.tell()
def seek(self, offset, whence=os.SEEK_SET):
self.tf.seek(offset, whence)
def close(self):
if self.closed:
return
self.closed = True
self.tf.seek(0)
try:
self.ftp.storbinary("STOR " + self.path, self.tf, 4096)
except Exception as e:
raise OperationFailure(e.message)
finally:
self.tf.close()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, exc_tb):
self.close()
def __del__(self):
self.close()
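# Illustrative usage sketch. Host, port and credentials are placeholders;
# connect() returns True on success and an error string otherwise, and
# open(..., "rb") returns a temporary file holding the downloaded data.
#
#     fsi = FTPFSI()
#     result = fsi.connect("ftp.example.com", 21, "user", "secret", "-n")
#     if result is True:
#         print(fsi.listdir("."))
#         with fsi.open("readme.txt", "rb") as f:
#             print(f.read())
#         fsi.close()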
|
import logging
import posixpath
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker import data
from perfkitbenchmarker import errors
from perfkitbenchmarker import linux_virtual_machine
from perfkitbenchmarker import os_types
from perfkitbenchmarker import sample
from perfkitbenchmarker import vm_util
from perfkitbenchmarker import windows_virtual_machine
from perfkitbenchmarker.providers.aws import util as aws_util
from perfkitbenchmarker.providers.azure import azure_virtual_machine
from perfkitbenchmarker.providers.gcp import util as gcp_util
BENCHMARK_NAME = 'large_scale_boot'
BENCHMARK_CONFIG = """
large_scale_boot:
description: >
Create a cluster of launcher servers,
where each launcher server launches FLAGS.boots_per_launcher machines.
vm_groups:
servers:
vm_spec:
GCP:
machine_type: n1-standard-2
zone: us-central1-a
boot_disk_type: pd-ssd
AWS:
machine_type: m5.large
zone: us-east-1
Azure:
machine_type: Standard_D2_v3
zone: eastus
boot_disk_type: StandardSSD_LRS
vm_count: 1
os_type: debian9
clients:
vm_spec:
GCP:
machine_type: n1-standard-2
boot_disk_type: pd-ssd
AWS:
machine_type: m5.large
Azure:
machine_type: Standard_D2_v3
boot_disk_type: StandardSSD_LRS
os_type: debian9
vm_count: 1
"""
FLAGS = flags.FLAGS
flags.DEFINE_integer('boots_per_launcher', 1, 'Number of VMs to boot per '
'launcher server VM. Defaults to 1.')
flags.register_validator('boots_per_launcher',
lambda value: 1 <= value <= 1000,
message='The number of VMs booted by each launcher '
'should be between 1 and 1000.')
flags.DEFINE_string('boot_os_type', 'debian9', 'OS to boot on the VMs. '
'Defaults to debian9. OS on launcher server VM is set '
'using os_type flag.')
flags.DEFINE_string('boot_machine_type', 'n1-standard-2', 'Machine type to boot '
'on the VMs. Defaults to n1-standard-2. Set machine type '
'on launcher server VM with launcher_machine_type flag.')
flags.DEFINE_string('launcher_machine_type', 'n1-standard-16', 'Machine type '
'to launch the VMs. Defaults to n1-standard-16. Set '
'machine type on boot VMs with boot_machine_type flag.')
flags.DEFINE_boolean('vms_contact_launcher', True, 'Whether launched vms '
'attempt to contact the launcher before launcher attempts '
'to connect to them. Defaults to True.')
flags.DEFINE_boolean('use_public_ip', False, 'Whether launcher should contact '
'boot vms using public ip instead of internal ip. Only '
'applicable for vms_contact_launcher=False mode. '
'Defaults to False.')
# Tag for undefined hostname, should be synced with listener_server.py script.
UNDEFINED_HOSTNAME = 'UNDEFINED'
# Tag for sequential hostname, should be synced with listener_server.py script.
SEQUENTIAL_IP = 'SEQUENTIAL_IP_{}_{}'
# remote tmp directory used for this benchmark.
_REMOTE_DIR = vm_util.VM_TMP_DIR
# boot script to use on the launcher server vms.
_BOOT_SCRIPT = 'boot_script.sh'
# local boot template to build boot script.
_BOOT_TEMPLATE = 'large_scale_boot/boot_script.sh.jinja2'
# Remote boot script path
_BOOT_PATH = posixpath.join(_REMOTE_DIR, _BOOT_SCRIPT)
# status command path.
_STATUS_SCRIPT = 'vm_status.sh'
# local status template to build status command.
_STATUS_TEMPLATE = 'large_scale_boot/vm_status.sh.jinja2'
# Remote status command path
_STATUS_PATH = posixpath.join(_REMOTE_DIR, _STATUS_SCRIPT)
# python listener server to run on launcher server vms.
_LISTENER_SERVER = 'large_scale_boot/listener_server.py'
# log for python listener server.
_LISTENER_SERVER_LOG = 'http.log'
# clean up script to use on the launcher server vms.
_CLEAN_UP_SCRIPT = 'clean_up.sh'
# local clean up template to build the clean up script
_CLEAN_UP_TEMPLATE = 'large_scale_boot/clean_up_script.jinja2'
# Remote clean up script path
_CLEAN_UP_SCRIPT_PATH = posixpath.join(_REMOTE_DIR, _CLEAN_UP_SCRIPT)
# port where listener server listens for incoming booted vms.
_PORT = 8000
# file to record the start time of the boots using system time in nanoseconds.
_START_TIME_FILE = 'start_time'
# start time file path
_START_TIME_FILE_PATH = posixpath.join(_REMOTE_DIR, _START_TIME_FILE)
# file to record the end time of the boots using system time in nanoseconds.
_RESULTS_FILE = 'results'
# results file path
_RESULTS_FILE_PATH = posixpath.join(_REMOTE_DIR, _RESULTS_FILE)
# Seconds to wait for vms to boot.
_TIMEOUT_SECONDS = 60 * 10
# Seconds to delay between polls for launcher server task completion.
_POLLING_DELAY = 3
# Naming pattern for GCP booted vms.
_BOOT_VM_NAME_PREFIX = 'booter-{launcher_name}'
# Naming pattern for Azure NICs
_BOOT_NIC_NAME_PREFIX = 'booter-nic-{run_uri}-'
# Number of azure private ips that are reserved
_AZURE_RESERVED_IPS = 5
# Status for VM being reachable at an ipaddress from another VM.
STATUS_PASSING = 'Pass'
# Status for VM marked as running by the cloud provider.
STATUS_RUNNING = 'Running'
# sha256sum for preprovisioned service account credentials.
# If not using service account credentials from preprovisioned data bucket,
# use --gcp_service_account_key_file flag to specify the same credentials.
BENCHMARK_DATA = {
'large-scale-boot-381ea7fa0a7d.json':
'22cd2412f38f5b6f1615ae565cd74073deff3f30829769ec66eebb5cf9672329',
}
# default linux ssh port
_SSH_PORT = linux_virtual_machine.DEFAULT_SSH_PORT
# default windows rdp port
_RDP_PORT = windows_virtual_machine.RDP_PORT
def GetAzBootVMStartIdByLauncher(launcher_name):
"""Returns the Azure boot VM id by launcher name.
We want to keep the VM id unique across all the VMs in this resource group.
Since the ids are used later to calculate the private ip address, we have to
skip the first few ids that would collide with the reserved ips.
E.g.
Azure reserved ip: 10.0.0.0, 10.0.0.1 ... 10.0.0.4
Launcher VM pkb-{run_uri}-1 (id 5, ip 10.0.0.5): boot vm id 7, boot vm id 8
Launcher VM pkb-{run_uri}-2 (id 6, ip 10.0.0.6): boot vm id 9, boot vm id 10
Args:
launcher_name: indexed launcher name to calculate ids for the VMs it boots.
"""
launcher_index = int(launcher_name.split('-')[-1]) - 1
return (launcher_index * FLAGS.boots_per_launcher +
_AZURE_RESERVED_IPS + FLAGS.num_vms)
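# Worked example of the id calculation above, with assumed values
# _AZURE_RESERVED_IPS = 5, FLAGS.num_vms = 2 and FLAGS.boots_per_launcher = 2:
# launcher 'pkb-xxx-1' (index 0) gets start id 0 * 2 + 5 + 2 = 7 and launcher
# 'pkb-xxx-2' (index 1) gets start id 1 * 2 + 5 + 2 = 9, matching the
# docstring's layout where ids 0-4 are reserved and ids 5-6 belong to the
# launcher VMs themselves.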
def _GetServerStartCommand(client_port, launcher_vm):
"""Returns the command to start the listener server."""
cloud = FLAGS.cloud
if cloud == 'GCP' and FLAGS.use_public_ip:
vms_name_pattern = UNDEFINED_HOSTNAME
elif cloud == 'GCP':
vms_name_pattern = '{name_pattern}-VM_ID.{zone}.c.{project}.internal'.format(
name_pattern=_BOOT_VM_NAME_PREFIX.format(
launcher_name=launcher_vm.name),
zone=launcher_vm.zone,
project=FLAGS.project)
elif cloud == 'AWS':
# AWS does not have a defined VM name pattern until after the VM is launched.
vms_name_pattern = UNDEFINED_HOSTNAME
elif cloud == 'Azure':
if FLAGS.use_public_ip:
public_dns = 'booter-{}-VMID.{}.cloudapp.azure.com'.format(
FLAGS.run_uri,
launcher_vm.zone)
else:
public_dns = ''
# Azure assigns a sequential ip
vms_name_pattern = SEQUENTIAL_IP.format(
public_dns,
GetAzBootVMStartIdByLauncher(launcher_vm.name))
return (
'python3 {server_path} {server_name} {port} {results_path} {client_port} '
'{use_server} {vms_name_pattern} {vms_count} {use_public_ip} '
'> {server_log} 2>&1 &'
.format(
server_name=launcher_vm.name,
server_path=posixpath.join(
_REMOTE_DIR, _LISTENER_SERVER.split('/')[-1]),
port=_PORT,
results_path=_RESULTS_FILE_PATH,
client_port=client_port,
use_server=FLAGS.vms_contact_launcher,
vms_name_pattern=vms_name_pattern,
vms_count=FLAGS.boots_per_launcher,
server_log=_LISTENER_SERVER_LOG,
use_public_ip=FLAGS.use_public_ip))
def _IsLinux():
"""Returns whether the boot vms are Linux VMs."""
return FLAGS.boot_os_type in os_types.LINUX_OS_TYPES
class InsufficientBootsError(Exception):
"""Error thrown if there are insufficient boots during wait."""
def CheckPrerequisites(_):
"""Verifies that the required resources are present.
Raises:
perfkitbenchmarker.data.ResourceNotFound: On missing resource.
"""
data.ResourcePath(_BOOT_TEMPLATE)
data.ResourcePath(_LISTENER_SERVER)
data.ResourcePath(_CLEAN_UP_TEMPLATE)
if FLAGS.cloud == 'Azure' and FLAGS.vms_contact_launcher and not _IsLinux():
raise errors.Benchmarks.PrepareException(
'Booting Windows VMs on Azure with a start-up script is not supported. '
'See https://github.com/Azure/azure-powershell/issues/9600.')
if FLAGS.vms_contact_launcher and FLAGS.use_public_ip:
raise errors.Benchmarks.PrepareException(
'After VMs contact launcher server, launcher will check connectivity '
'of the VMs using the client address of the curl request. This option '
'is only applicable when launcher makes the initial contact.')
def GetConfig(user_config):
"""Load and updates the benchmark config with user flags.
Args:
user_config: user supplied configuration (flags and config file)
Returns:
loaded benchmark configuration
"""
config = configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
launcher_config = config['vm_groups']['servers']
launcher_config['vm_count'] = FLAGS.num_vms
launcher_config['vm_spec'][FLAGS.cloud]['machine_type'] = (
FLAGS.launcher_machine_type)
booter_template = config['vm_groups']['clients']
booter_template['os_type'] = FLAGS.boot_os_type
booter_template['vm_spec'][FLAGS.cloud]['machine_type'] = (
FLAGS.boot_machine_type)
if FLAGS.machine_type:
raise errors.Setup.InvalidConfigurationError(
'Do not set machine type flag as it will override both launcher and '
'booter machine types. Use launcher_machine_type and boot_machine_type '
'instead.')
if booter_template['vm_count'] != 1:
raise errors.Setup.InvalidConfigurationError(
'Booter_template is a configuration template VM. '
'Booter count should be set by number of launchers (FLAGS.num_vms) and '
'booters per launcher (FLAGS.boots_per_launcher).')
return config
def _BuildContext(launcher_vm, booter_template_vm):
"""Returns the context variables for Jinja2 template during rendering."""
context = {
'boot_machine_type': booter_template_vm.machine_type,
'cloud': FLAGS.cloud,
'contact_launcher': FLAGS.vms_contact_launcher,
'launcher_vm_name': launcher_vm.name,
'os_type': 'linux' if _IsLinux() else 'windows',
'server_ip': launcher_vm.internal_ip,
'server_port': _PORT,
'start_time_file': _START_TIME_FILE_PATH,
'timeout': _TIMEOUT_SECONDS,
'vm_count': FLAGS.boots_per_launcher,
'zone': launcher_vm.zone,
'use_public_ip': '' if FLAGS.use_public_ip else 'no-',
}
cloud = FLAGS.cloud
if cloud == 'GCP':
context.update({
'boot_disk_size': booter_template_vm.boot_disk_size,
'boot_vm_name_prefix': _BOOT_VM_NAME_PREFIX.format(
launcher_name=launcher_vm.name),
'image_family': booter_template_vm.image_family,
'image_project': booter_template_vm.image_project,
'gcloud_path': FLAGS.gcloud_path,
'project': FLAGS.project,
})
elif cloud == 'AWS':
context.update({
'group_name': booter_template_vm.placement_group.name,
'image': booter_template_vm.image,
'key_name': 'perfkit-key-{0}'.format(FLAGS.run_uri),
'region': aws_util.GetRegionFromZone(launcher_vm.zone),
'subnet_id': booter_template_vm.network.subnet.id,
})
elif cloud == 'Azure':
context.update({
'boot_vm_name_prefix': launcher_vm.name.split('-', 1)[1],
'location': launcher_vm.location,
'image': booter_template_vm.image,
'storage_sku': booter_template_vm.os_disk.disk_type,
'resource_group': launcher_vm.resource_group.name,
'nic': _BOOT_NIC_NAME_PREFIX.format(run_uri=FLAGS.run_uri),
'password': booter_template_vm.password,
'start_id': GetAzBootVMStartIdByLauncher(launcher_vm.name),
})
return context
def _Install(launcher_vm, booter_template_vm):
"""Installs benchmark scripts and packages on the launcher vm."""
launcher_vm.InstallCli()
# Render boot script on launcher server VM(s)
context = _BuildContext(launcher_vm, booter_template_vm)
launcher_vm.RenderTemplate(data.ResourcePath(_BOOT_TEMPLATE), _BOOT_PATH,
context)
launcher_vm.RenderTemplate(data.ResourcePath(_STATUS_TEMPLATE), _STATUS_PATH,
context)
# Installs and start listener server on launcher VM(s).
launcher_vm.InstallPackages('netcat')
launcher_vm.PushDataFile(_LISTENER_SERVER, _REMOTE_DIR)
client_port = _SSH_PORT if _IsLinux() else _RDP_PORT
launcher_vm.RemoteCommand('touch log')
launcher_vm.RemoteCommand(_GetServerStartCommand(client_port, launcher_vm))
# Render clean up script on launcher server VM(s).
launcher_vm.RenderTemplate(data.ResourcePath(_CLEAN_UP_TEMPLATE),
_CLEAN_UP_SCRIPT_PATH, context)
def Prepare(benchmark_spec):
"""Prepare the launcher server vm(s).
Prepare the launcher server vm(s) by:
1) Build the script that each launcher server will use to kick off boot.
2) Start a listening server to wait for booting vms.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
benchmark_spec.always_call_cleanup = True
launcher_vms = benchmark_spec.vm_groups['servers']
booter_template_vm = benchmark_spec.vm_groups['clients'][0]
# Setup account/IAM credentials/permissions on launcher servers.
if FLAGS.cloud == 'GCP':
for vm in launcher_vms:
gcp_util.AuthenticateServiceAccount(vm, benchmark=BENCHMARK_NAME)
# fail early if the number of launched VMs exceeds 50 per vCPU.
# High CPU usage can negatively impact measured boot times.
if FLAGS.boots_per_launcher > (launcher_vms[0].num_cpus * 50):
raise errors.Setup.InvalidConfigurationError(
'Each launcher server VM is launching too many VMs. '
'Increase launcher server VM size or decrease boots_per_launcher. '
'For a VM with {} CPUs, launch at most {} VMs.'.format(
launcher_vms[0].num_cpus, launcher_vms[0].num_cpus * 50))
if FLAGS.cloud == 'Azure':
used_private_ips = _AZURE_RESERVED_IPS + FLAGS.num_vms
for i in range(used_private_ips, used_private_ips + _GetExpectedBoots()):
nic_name_prefix = _BOOT_NIC_NAME_PREFIX.format(run_uri=FLAGS.run_uri)
private_ip = '10.0.{octet3}.{octet4}'.format(
octet3=i // 256,
octet4=i % 256)
public_ip_name = ''
if FLAGS.use_public_ip:
public_ip = azure_virtual_machine.AzurePublicIPAddress(
launcher_vms[0].location, launcher_vms[0].availability_zone,
'{}-public-ip'.format(i), 'booter-{}-{}'.format(FLAGS.run_uri, i))
public_ip.Create()
public_ip_name = public_ip.name
nic = azure_virtual_machine.AzureNIC(
launcher_vms[0].network.subnet, nic_name_prefix + str(i),
public_ip_name, False, private_ip)
nic.Create()
vm_util.RunThreaded(
lambda vm: _Install(vm, booter_template_vm), launcher_vms)
def _GetExpectedBoots():
"""Return the number of expected boots."""
return FLAGS.num_vms * FLAGS.boots_per_launcher
def _ReportRunningStatus():
"""Returns whether benchmark will report time till 'Running' status."""
return FLAGS.boots_per_launcher == 1 and not FLAGS.vms_contact_launcher
@vm_util.Retry(poll_interval=_POLLING_DELAY, timeout=_TIMEOUT_SECONDS,
retryable_exceptions=(InsufficientBootsError,))
def _WaitForResponses(launcher_vms):
"""Wait for all results or server shutdown or TIMEOUT_SECONDS."""
# if any listener server exited, stop waiting.
def _LauncherError(vm):
error, _ = vm.RemoteCommand('grep ERROR ' + _LISTENER_SERVER_LOG,
ignore_failure=True)
return error
error_str = vm_util.RunThreaded(_LauncherError, launcher_vms)
if any(error_str):
raise errors.Benchmarks.RunError(
'Some listening server errored out: %s' % error_str)
def _CountState(vm, state):
stdout, _ = vm.RemoteCommand(f'grep -c {state} {_RESULTS_FILE_PATH}',
ignore_failure=True)
try:
return int(stdout)
except ValueError:
return -1
boots = vm_util.RunThreaded(
lambda vm: _CountState(vm, STATUS_PASSING), launcher_vms)
for vm, boot_count in zip(launcher_vms, boots):
logging.info('Launcher %s reported %d/%d booted VMs',
vm.internal_ip, boot_count, FLAGS.boots_per_launcher)
total_running_count = 0
if _ReportRunningStatus():
running = vm_util.RunThreaded(
lambda vm: _CountState(vm, STATUS_RUNNING), launcher_vms)
for vm, running_count in zip(launcher_vms, running):
logging.info('Launcher %s reported %d/%d running VMs',
vm.internal_ip, running_count, FLAGS.boots_per_launcher)
total_running_count = sum(running)
reporting_vms_count = sum(boots)
if (reporting_vms_count != _GetExpectedBoots() or
(_ReportRunningStatus() and total_running_count != _GetExpectedBoots())):
raise InsufficientBootsError(
'Launcher vms reported %d total boots. Expecting %d.' %
(reporting_vms_count, _GetExpectedBoots()))
def _ParseResult(launcher_vms):
"""Parse the results on the launcher VMs and send it back.
Boot time is the boot duration of the slowest machine.
Args:
launcher_vms: Launcher server VMs.
Returns:
A list of benchmark samples.
"""
vm_count = 0
slowest_time = -1
get_starttime_cmd = 'cat {startime}'.format(startime=_START_TIME_FILE_PATH)
get_results_cmd = 'cat {results}'.format(results=_RESULTS_FILE_PATH)
samples = []
common_metadata = {
'cloud': FLAGS.cloud,
'num_launchers': FLAGS.num_vms,
'expected_boots_per_launcher': FLAGS.boots_per_launcher,
'boot_os_type': FLAGS.boot_os_type,
'boot_machine_type': FLAGS.boot_machine_type,
'launcher_machine_type': FLAGS.launcher_machine_type,
'vms_contact_launcher': FLAGS.vms_contact_launcher,
'use_public_ip': FLAGS.use_public_ip,
}
for vm in launcher_vms:
start_time_str, _ = vm.RemoteCommand(get_starttime_cmd)
start_time = int(start_time_str)
results, _ = vm.RemoteCommand(get_results_cmd)
cur_launcher_success = 0
cur_launcher_closed_incoming = 0
durations = []
time_to_running = -1
for line in results.splitlines():
state, _, duration = line.split(':')
end_time = int(duration)
if state == STATUS_PASSING:
duration_in_ns = end_time - start_time
durations.append(duration_in_ns)
slowest_time = max(slowest_time, duration_in_ns)
cur_launcher_success += 1
elif state == STATUS_RUNNING:
t = end_time - start_time
time_to_running = max(time_to_running, t)
elif state == 'Fail':
# outgoing port was open but incoming port was closed.
cur_launcher_closed_incoming += 1
vm_count += cur_launcher_success
current_metadata = {
'zone': vm.zone,
'launcher_successes': cur_launcher_success,
'launcher_boot_durations_ns': durations,
'launcher_closed_incoming': cur_launcher_closed_incoming,
}
current_metadata.update(common_metadata)
samples.append(sample.Sample('Launcher Boot Details', -1,
'', current_metadata))
samples.append(sample.Sample('Cluster Max Boot Time', slowest_time,
'nanoseconds', common_metadata))
samples.append(sample.Sample('Cluster Expected Boots', _GetExpectedBoots(),
'', common_metadata))
samples.append(sample.Sample('Cluster Success Boots', vm_count,
'', common_metadata))
samples.append(sample.Sample('Cluster Max Time to Running', time_to_running,
'nanoseconds', common_metadata))
return samples
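# Illustrative sketch of the results file format assumed by the parsing
# above: each line has three colon-separated fields (the middle field, e.g.
# the booted VM's name or address, is discarded), and the last field is an
# absolute system timestamp in nanoseconds. Example lines (made up):
#
#     Pass:booter-pkb-123-1-0:1600000000123456789
#     Running:booter-pkb-123-1-1:1600000000987654321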
def Run(benchmark_spec):
"""Kick off gartner boot script on launcher server vms.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
Returns:
A list of benchmark samples.
"""
launcher_vms = benchmark_spec.vm_groups['servers']
vm_util.RunThreaded(
lambda vm: vm.RemoteCommand('bash {} 2>&1 | tee log'.format(_BOOT_PATH)),
launcher_vms)
try:
_WaitForResponses(launcher_vms)
except InsufficientBootsError:
# On really large-scale boots, some failures are expected.
logging.info('Some VMs failed to boot.')
return _ParseResult(launcher_vms)
def Cleanup(benchmark_spec):
"""Clean up.
Launcher VMs and booter template VM are deleted by pkb resource management.
Boot VMs are self-destructing, but we will make a second attempt at destroying
them anyway for good hygiene.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
launcher_vms = benchmark_spec.vm_groups['servers']
command = 'bash {} 2>&1 | tee clean_up_log'.format(_CLEAN_UP_SCRIPT_PATH)
vm_util.RunThreaded(
lambda vm: vm.RemoteCommand(command),
launcher_vms)
|
import logging
import threading
import time
from typing import Any
from typing import Callable
from kazoo.client import KazooClient
from kazoo.client import KazooState
from kazoo.exceptions import ConnectionClosedError
from kazoo.recipe.election import Election
from paasta_tools.deployd.common import PaastaThread
class PaastaLeaderElection(Election):
def __init__(self, client: KazooClient, *args: Any, **kwargs: Any) -> None:
self.client = client
self.control = kwargs.pop("control")
super().__init__(self.client, *args, **kwargs)
self.client.add_listener(self.connection_listener)
self.waiting_for_reconnect = False
@property
def log(self) -> logging.Logger:
name = ".".join([__name__, type(self).__name__])
return logging.getLogger(name)
def run(self, func: Callable, *args: Any, **kwargs: Any) -> None:
try:
super().run(func, *args, **kwargs)
except ConnectionClosedError:
self.log.error("Zookeeper connection closed so can't tidy up!")
return
def connection_listener(self, state: KazooState) -> None:
self.log.warning(f"Zookeeper connection transitioned to: {state}")
if state == KazooState.SUSPENDED:
self.log.warning(
"Zookeeper connection suspended, waiting to see if it recovers."
)
if not self.waiting_for_reconnect:
self.waiting_for_reconnect = True
reconnection_checker = PaastaThread(target=self.reconnection_listener)
reconnection_checker.daemon = True
reconnection_checker.start()
elif state == KazooState.LOST:
self.log.error("Leadership lost, quitting!")
self._terminate()
def reconnection_listener(self) -> None:
attempts = 0
while attempts < 5:
if self.client.state == KazooState.CONNECTED:
self.log.warning("Zookeeper connection recovered!")
self.waiting_for_reconnect = False
return
self.log.warning("Waiting for zookeeper connection to recover")
time.sleep(5)
attempts += 1
self.log.error("Connection did not recover, abdicating!")
self._terminate()
def _terminate(self) -> None:
thread_info = [
{"alive": t.is_alive(), "daemon": t.daemon, "name": t.__class__.__name__}
for t in threading.enumerate()
]
self.log.info(f"Thread info: {thread_info}")
self.control.put("ABORT")
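# Illustrative usage sketch. The ZooKeeper hosts, election path and the use
# of a plain queue.Queue as the control channel are assumptions for the
# example; run() blocks until leadership is won and then calls the function.
#
#     from queue import Queue
#     client = KazooClient(hosts="zookeeper:2181")
#     client.start()
#     control = Queue()
#     election = PaastaLeaderElection(
#         client, "/paasta-deployd-leader", control=control)
#     election.run(lambda: print("became leader"))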
|
import asyncio
from aiohttp import ClientError
from aiohttp.web_exceptions import HTTPForbidden
import pytest
from homeassistant.components.daikin.const import KEY_IP, KEY_MAC
from homeassistant.config_entries import (
SOURCE_DISCOVERY,
SOURCE_IMPORT,
SOURCE_USER,
SOURCE_ZEROCONF,
)
from homeassistant.const import CONF_HOST
from homeassistant.data_entry_flow import (
RESULT_TYPE_ABORT,
RESULT_TYPE_CREATE_ENTRY,
RESULT_TYPE_FORM,
)
from tests.async_mock import PropertyMock, patch
from tests.common import MockConfigEntry
MAC = "AABBCCDDEEFF"
HOST = "127.0.0.1"
@pytest.fixture
def mock_daikin():
"""Mock pydaikin."""
async def mock_daikin_factory(*args, **kwargs):
"""Mock the init function in pydaikin."""
return Appliance
with patch("homeassistant.components.daikin.config_flow.Appliance") as Appliance:
type(Appliance).mac = PropertyMock(return_value="AABBCCDDEEFF")
Appliance.factory.side_effect = mock_daikin_factory
yield Appliance
@pytest.fixture
def mock_daikin_discovery():
"""Mock pydaikin Discovery."""
with patch("homeassistant.components.daikin.config_flow.Discovery") as Discovery:
Discovery().poll.return_value = {
"127.0.01": {"mac": "AABBCCDDEEFF", "id": "test"}
}.values()
yield Discovery
async def test_user(hass, mock_daikin):
"""Test user config."""
result = await hass.config_entries.flow.async_init(
"daikin",
context={"source": SOURCE_USER},
)
assert result["type"] == RESULT_TYPE_FORM
assert result["step_id"] == "user"
result = await hass.config_entries.flow.async_init(
"daikin",
context={"source": SOURCE_USER},
data={CONF_HOST: HOST},
)
assert result["type"] == RESULT_TYPE_CREATE_ENTRY
assert result["title"] == HOST
assert result["data"][CONF_HOST] == HOST
assert result["data"][KEY_MAC] == MAC
async def test_abort_if_already_setup(hass, mock_daikin):
"""Test we abort if Daikin is already setup."""
MockConfigEntry(domain="daikin", unique_id=MAC).add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
"daikin",
context={"source": SOURCE_USER},
data={CONF_HOST: HOST, KEY_MAC: MAC},
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_import(hass, mock_daikin):
"""Test import step."""
result = await hass.config_entries.flow.async_init(
"daikin",
context={"source": SOURCE_IMPORT},
data={},
)
assert result["type"] == RESULT_TYPE_FORM
assert result["step_id"] == "user"
result = await hass.config_entries.flow.async_init(
"daikin",
context={"source": SOURCE_IMPORT},
data={CONF_HOST: HOST},
)
assert result["type"] == RESULT_TYPE_CREATE_ENTRY
assert result["title"] == HOST
assert result["data"][CONF_HOST] == HOST
assert result["data"][KEY_MAC] == MAC
@pytest.mark.parametrize(
"s_effect,reason",
[
(asyncio.TimeoutError, "cannot_connect"),
(HTTPForbidden, "invalid_auth"),
(ClientError, "unknown"),
(Exception, "unknown"),
],
)
async def test_device_abort(hass, mock_daikin, s_effect, reason):
"""Test device abort."""
mock_daikin.factory.side_effect = s_effect
result = await hass.config_entries.flow.async_init(
"daikin",
context={"source": SOURCE_USER},
data={CONF_HOST: HOST, KEY_MAC: MAC},
)
assert result["type"] == RESULT_TYPE_FORM
assert result["errors"] == {"base": reason}
assert result["step_id"] == "user"
@pytest.mark.parametrize(
"source, data, unique_id",
[
(SOURCE_DISCOVERY, {KEY_IP: HOST, KEY_MAC: MAC}, MAC),
(SOURCE_ZEROCONF, {CONF_HOST: HOST}, MAC),
],
)
async def test_discovery_zeroconf(
hass, mock_daikin, mock_daikin_discovery, source, data, unique_id
):
"""Test discovery/zeroconf step."""
result = await hass.config_entries.flow.async_init(
"daikin",
context={"source": source},
data=data,
)
assert result["type"] == RESULT_TYPE_FORM
assert result["step_id"] == "user"
MockConfigEntry(domain="daikin", unique_id=unique_id).add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
"daikin",
context={"source": SOURCE_USER, "unique_id": unique_id},
data={CONF_HOST: HOST},
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
result = await hass.config_entries.flow.async_init(
"daikin",
context={"source": source},
data=data,
)
assert result["type"] == RESULT_TYPE_ABORT
assert result["reason"] == "already_in_progress"
|
import pytest
from homeassistant.components.NEW_DOMAIN import DOMAIN
import homeassistant.components.automation as automation
from homeassistant.const import STATE_OFF, STATE_ON
from homeassistant.helpers import device_registry
from homeassistant.setup import async_setup_component
from tests.common import (
MockConfigEntry,
assert_lists_same,
async_get_device_automations,
async_mock_service,
mock_device_registry,
mock_registry,
)
@pytest.fixture
def device_reg(hass):
"""Return an empty, loaded, registry."""
return mock_device_registry(hass)
@pytest.fixture
def entity_reg(hass):
"""Return an empty, loaded, registry."""
return mock_registry(hass)
@pytest.fixture
def calls(hass):
"""Track calls to a mock service."""
return async_mock_service(hass, "test", "automation")
async def test_get_conditions(hass, device_reg, entity_reg):
"""Test we get the expected conditions from a NEW_DOMAIN."""
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device_entry.id)
expected_conditions = [
{
"condition": "device",
"domain": DOMAIN,
"type": "is_off",
"device_id": device_entry.id,
"entity_id": f"{DOMAIN}.test_5678",
},
{
"condition": "device",
"domain": DOMAIN,
"type": "is_on",
"device_id": device_entry.id,
"entity_id": f"{DOMAIN}.test_5678",
},
]
conditions = await async_get_device_automations(hass, "condition", device_entry.id)
assert_lists_same(conditions, expected_conditions)
async def test_if_state(hass, calls):
"""Test for turn_on and turn_off conditions."""
hass.states.async_set("NEW_DOMAIN.entity", STATE_ON)
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {"platform": "event", "event_type": "test_event1"},
"condition": [
{
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": "NEW_DOMAIN.entity",
"type": "is_on",
}
],
"action": {
"service": "test.automation",
"data_template": {
"some": "is_on - {{ trigger.platform }} - {{ trigger.event.event_type }}"
},
},
},
{
"trigger": {"platform": "event", "event_type": "test_event2"},
"condition": [
{
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": "NEW_DOMAIN.entity",
"type": "is_off",
}
],
"action": {
"service": "test.automation",
"data_template": {
"some": "is_off - {{ trigger.platform }} - {{ trigger.event.event_type }}"
},
},
},
]
},
)
hass.bus.async_fire("test_event1")
hass.bus.async_fire("test_event2")
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data["some"] == "is_on - event - test_event1"
hass.states.async_set("NEW_DOMAIN.entity", STATE_OFF)
hass.bus.async_fire("test_event1")
hass.bus.async_fire("test_event2")
await hass.async_block_till_done()
assert len(calls) == 2
assert calls[1].data["some"] == "is_off - event - test_event2"
|
import asyncio
from pdunehd import DuneHDPlayer
from homeassistant.const import CONF_HOST
from .const import DOMAIN
PLATFORMS = ["media_player"]
async def async_setup(hass, config):
"""Set up the Dune HD component."""
return True
async def async_setup_entry(hass, config_entry):
"""Set up a config entry."""
host = config_entry.data[CONF_HOST]
player = DuneHDPlayer(host)
hass.data.setdefault(DOMAIN, {})
hass.data[DOMAIN][config_entry.entry_id] = player
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, component)
)
return True
async def async_unload_entry(hass, config_entry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(config_entry, component)
for component in PLATFORMS
]
)
)
if unload_ok:
hass.data[DOMAIN].pop(config_entry.entry_id)
return unload_ok
|
import contextlib
import unittest
from absl import flags
import mock
from perfkitbenchmarker import errors
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.configs import benchmark_config_spec
from perfkitbenchmarker.providers.gcp import gcp_tpu
from perfkitbenchmarker.providers.gcp import util
from tests import pkb_common_test_case
from six.moves import builtins
FLAGS = flags.FLAGS
NAME = 'testname'
PROJECT = 'testproject'
ZONE = 'testzone'
class GcpTpuTestCase(pkb_common_test_case.PkbCommonTestCase):
def CreateTpuSpecDict(self):
return {
'tpu_name': 'pkb-tpu-123',
'tpu_cidr_range': '192.168.0.0/29',
'tpu_accelerator_type': 'tpu-v2',
'tpu_description': 'MyTFNode',
'tpu_network': 'default',
'tpu_tf_version': 'nightly',
'tpu_zone': 'us-central1-a',
'tpu_preemptible': True
}
def setUp(self):
super(GcpTpuTestCase, self).setUp()
FLAGS.run_uri = '123'
FLAGS.project = ''
FLAGS.tpu_cores_per_donut = 8
FLAGS.gcloud_path = 'gcloud'
mock_tpu_spec_attrs = self.CreateTpuSpecDict()
self.mock_tpu_spec = mock.Mock(
spec=benchmark_config_spec._TpuGroupSpec)
self.mock_tpu_spec.configure_mock(**mock_tpu_spec_attrs)
@contextlib.contextmanager
def _PatchCriticalObjects(self, stdout='', stderr='', return_code=0):
"""A context manager that patches a few critical objects with mocks."""
retval = (stdout, stderr, return_code)
with mock.patch(
vm_util.__name__ + '.IssueCommand',
return_value=retval) as issue_command, mock.patch(
builtins.__name__ +
'.open'), mock.patch(vm_util.__name__ +
'.NamedTemporaryFile'), mock.patch(
util.__name__ + '.GetDefaultProject',
return_value='fakeproject'):
yield issue_command
def testCreate(self):
with self._PatchCriticalObjects() as issue_command:
tpu = gcp_tpu.GcpTpu(self.mock_tpu_spec)
tpu._Create()
self.assertEqual(issue_command.call_count, 1)
command_string = ' '.join(issue_command.call_args[0][0])
self.assertTrue(
command_string.startswith(
'gcloud compute tpus create pkb-tpu-123'),
command_string)
self.assertIn('--project fakeproject', command_string)
self.assertIn('--range 192.168.0.0/29', command_string)
self.assertIn('--accelerator-type tpu-v2', command_string)
self.assertIn('--description MyTFNode', command_string)
self.assertIn('--network default', command_string)
self.assertIn('--version nightly', command_string)
self.assertIn('--zone us-central1-a', command_string)
self.assertIn('--preemptible', command_string)
def testStockout(self):
stderr = """Create request issued for: [pkb-tpu-train-9baf32202]
Waiting for operation [projects/artemis-prod/locations/us-central1-b/operations/operation-1567697651843-591d00da740fa-ed64d57f-8a2533cb] to complete failed.
ERROR: (gcloud.compute.tpus.create) {
"code": 8,
"message": "There is no more capacity in the zone \"us-central1-b\"; you can try in another zone where Cloud TPU Nodes are offered (see https://cloud.google.com/tpu/docs/regions) [EID: 0xf3bb52b78a15cd16]"
}"""
with self._PatchCriticalObjects(stderr=stderr, return_code=1):
with self.assertRaises(
errors.Benchmarks.InsufficientCapacityCloudFailure):
tpu = gcp_tpu.GcpTpu(self.mock_tpu_spec)
tpu._Create()
def testDelete(self):
with self._PatchCriticalObjects() as issue_command:
tpu = gcp_tpu.GcpTpu(self.mock_tpu_spec)
tpu._Delete()
self.assertEqual(issue_command.call_count, 1)
command_string = ' '.join(issue_command.call_args[0][0])
self.assertTrue(
command_string.startswith(
'gcloud compute tpus delete pkb-tpu-123'))
self.assertIn('--project fakeproject', command_string)
self.assertIn('--zone us-central1-a', command_string)
def testExists(self):
with self._PatchCriticalObjects() as issue_command:
tpu = gcp_tpu.GcpTpu(self.mock_tpu_spec)
tpu._Exists()
self.assertEqual(issue_command.call_count, 1)
command_string = ' '.join(issue_command.call_args[0][0])
self.assertTrue(
command_string.startswith(
'gcloud compute tpus describe pkb-tpu-123'))
self.assertIn('--project fakeproject', command_string)
self.assertIn('--zone us-central1-a', command_string)
def testGetName(self):
with self._PatchCriticalObjects():
tpu = gcp_tpu.GcpTpu(self.mock_tpu_spec)
name = tpu.GetName()
self.assertEqual(name, 'pkb-tpu-123')
def testGetNumShards(self):
with self._PatchCriticalObjects(stdout='{"networkEndpoints": [{"ipAddress":'
' "10.199.12.2", "port": 8470}]}'):
tpu = gcp_tpu.GcpTpu(self.mock_tpu_spec)
num_shards = tpu.GetNumShards()
self.assertEqual(num_shards, 8)
def testGetMasterGrpcAddress(self):
with self._PatchCriticalObjects(stdout="""{
"networkEndpoints": [{
"ipAddress": "10.199.12.2",
"port": 8470
}]
}
"""):
tpu = gcp_tpu.GcpTpu(self.mock_tpu_spec)
ip_address = tpu.GetMasterGrpcAddress()
self.assertEqual(ip_address, 'grpc://10.199.12.2:8470')
if __name__ == '__main__':
unittest.main()
|
from datetime import timedelta
from bsblan import BSBLan, BSBLanConnectionError
from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, CONF_PORT
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.typing import ConfigType
from .const import CONF_PASSKEY, DATA_BSBLAN_CLIENT, DOMAIN
SCAN_INTERVAL = timedelta(seconds=30)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the BSB-Lan component."""
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up BSB-Lan from a config entry."""
session = async_get_clientsession(hass)
bsblan = BSBLan(
entry.data[CONF_HOST],
passkey=entry.data[CONF_PASSKEY],
port=entry.data[CONF_PORT],
session=session,
)
try:
await bsblan.info()
except BSBLanConnectionError as exception:
raise ConfigEntryNotReady from exception
hass.data.setdefault(DOMAIN, {})
hass.data[DOMAIN][entry.entry_id] = {DATA_BSBLAN_CLIENT: bsblan}
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, CLIMATE_DOMAIN)
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload BSBLan config entry."""
await hass.config_entries.async_forward_entry_unload(entry, CLIMATE_DOMAIN)
# Cleanup
del hass.data[DOMAIN][entry.entry_id]
if not hass.data[DOMAIN]:
del hass.data[DOMAIN]
return True
|
import urwid
def question():
return urwid.Pile([urwid.Edit(('I say', u"What is your name?\n"))])
def answer(name):
return urwid.Text(('I say', u"Nice to meet you, " + name + "\n"))
class ConversationListBox(urwid.ListBox):
def __init__(self):
body = urwid.SimpleFocusListWalker([question()])
super(ConversationListBox, self).__init__(body)
def keypress(self, size, key):
key = super(ConversationListBox, self).keypress(size, key)
if key != 'enter':
return key
name = self.focus[0].edit_text
if not name:
raise urwid.ExitMainLoop()
# replace or add response
self.focus.contents[1:] = [(answer(name), self.focus.options())]
pos = self.focus_position
# add a new question
self.body.insert(pos + 1, question())
self.focus_position = pos + 1
palette = [('I say', 'default,bold', 'default'),]
urwid.MainLoop(ConversationListBox(), palette).run()
|
from __future__ import absolute_import
import logging
import time
from absl import flags
import boto3
# This is the path that we SCP object_storage_interface to.
from providers import object_storage_interface
import six
FLAGS = flags.FLAGS
class S3Service(object_storage_interface.ObjectStorageServiceBase):
"""An interface to AWS S3, using the boto library."""
def __init__(self):
self.client = boto3.client('s3', region_name=FLAGS.region)
def ListObjects(self, bucket, prefix):
return self.client.list_objects_v2(Bucket=bucket, Prefix=prefix)
def DeleteObjects(self,
bucket,
objects_to_delete,
objects_deleted=None,
delay_time=0,
object_sizes=None):
start_times = []
latencies = []
sizes = []
for index, object_name in enumerate(objects_to_delete):
try:
time.sleep(delay_time)
start_time = time.time()
self.client.delete_object(Bucket=bucket, Key=object_name)
latency = time.time() - start_time
start_times.append(start_time)
latencies.append(latency)
if objects_deleted:
objects_deleted.append(object_name)
if object_sizes:
sizes.append(object_sizes[index])
except Exception as e: # pylint: disable=broad-except
logging.exception('Caught exception while deleting object %s: %s',
object_name, e)
return start_times, latencies, sizes
def BulkDeleteObjects(self, bucket, objects_to_delete, delay_time):
objects_to_delete_dict = {}
objects_to_delete_dict['Objects'] = []
for object_name in objects_to_delete:
objects_to_delete_dict['Objects'].append({'Key': object_name})
time.sleep(delay_time)
start_time = time.time()
self.client.delete_objects(Bucket=bucket, Delete=objects_to_delete_dict)
latency = time.time() - start_time
return start_time, latency
def WriteObjectFromBuffer(self, bucket, object_name, stream, size):
start_time = time.time()
stream.seek(0)
obj = six.BytesIO(stream.read(size))
self.client.put_object(Body=obj, Bucket=bucket, Key=object_name)
latency = time.time() - start_time
return start_time, latency
def ReadObject(self, bucket, object_name):
start_time = time.time()
s3_response_object = self.client.get_object(
Bucket=bucket, Key=object_name)
s3_response_object['Body'].read()
latency = time.time() - start_time
return start_time, latency
|
from functional.execution import ExecutionEngine
from functional.transformations import CACHE_T
class Lineage(object):
"""
Class for tracking the lineage of transformations, and applying them to a given sequence.
"""
def __init__(self, prior_lineage=None, engine=None):
"""
        Construct an empty lineage if prior_lineage is None; otherwise inherit its list of
        transformations and its execution engine.
:param prior_lineage: Lineage object to inherit
:return: new Lineage object
"""
self.transformations = (
[] if prior_lineage is None else list(prior_lineage.transformations)
)
self.engine = (
(engine or ExecutionEngine())
if prior_lineage is None
else prior_lineage.engine
)
def __repr__(self):
"""
Returns readable representation of Lineage
:return: readable Lineage
"""
return "Lineage: " + " -> ".join(
["sequence"] + [transform.name for transform in self.transformations]
)
def __len__(self):
"""
Number of transformations in lineage
:return: number of transformations
"""
return len(self.transformations)
def __getitem__(self, item):
"""
Return specific transformation in lineage.
:param item: Transformation to retrieve
:return: Requested transformation
"""
return self.transformations[item]
def apply(self, transform):
"""
Add the transformation to the lineage
:param transform: Transformation to apply
"""
self.transformations.append(transform)
def evaluate(self, sequence):
"""
Compute the lineage on the sequence.
:param sequence: Sequence to compute
:return: Evaluated sequence
"""
last_cache_index = self.cache_scan()
transformations = self.transformations[last_cache_index:]
return self.engine.evaluate(sequence, transformations)
def cache_scan(self):
"""
Scan the lineage for the index of the most recent cache.
:return: Index of most recent cache
"""
try:
return len(self.transformations) - self.transformations[::-1].index(CACHE_T)
except ValueError:
return 0
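# A minimal usage sketch (illustrative only, not part of the library): any
# object exposing a ``name`` attribute can stand in for a transformation as
# far as ``apply``, ``__len__`` and ``__repr__`` are concerned; evaluating a
# real pipeline would still require genuine Transformation objects.
if __name__ == "__main__":
    class _FakeTransform(object):  # hypothetical stand-in for a Transformation
        name = "map(<lambda>)"

    _lineage = Lineage()
    _lineage.apply(_FakeTransform())
    print(repr(_lineage))  # Lineage: sequence -> map(<lambda>)
    print(len(_lineage))   # 1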
|
from pymata_express.pymata_express_serial import serial
from homeassistant import config_entries, setup
from homeassistant.components.firmata.const import CONF_SERIAL_PORT, DOMAIN
from homeassistant.const import CONF_NAME
from homeassistant.core import HomeAssistant
from tests.async_mock import patch
async def test_import_cannot_connect_pymata(hass: HomeAssistant) -> None:
"""Test we fail with an invalid board."""
await setup.async_setup_component(hass, "persistent_notification", {})
with patch(
"homeassistant.components.firmata.board.PymataExpress.start_aio",
side_effect=RuntimeError,
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={CONF_SERIAL_PORT: "/dev/nonExistent"},
)
assert result["type"] == "abort"
assert result["reason"] == "cannot_connect"
async def test_import_cannot_connect_serial(hass: HomeAssistant) -> None:
"""Test we fail with an invalid board."""
await setup.async_setup_component(hass, "persistent_notification", {})
with patch(
"homeassistant.components.firmata.board.PymataExpress.start_aio",
side_effect=serial.serialutil.SerialException,
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={CONF_SERIAL_PORT: "/dev/nonExistent"},
)
assert result["type"] == "abort"
assert result["reason"] == "cannot_connect"
async def test_import_cannot_connect_serial_timeout(hass: HomeAssistant) -> None:
"""Test we fail with an invalid board."""
await setup.async_setup_component(hass, "persistent_notification", {})
with patch(
"homeassistant.components.firmata.board.PymataExpress.start_aio",
side_effect=serial.serialutil.SerialTimeoutException,
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={CONF_SERIAL_PORT: "/dev/nonExistent"},
)
assert result["type"] == "abort"
assert result["reason"] == "cannot_connect"
async def test_import(hass: HomeAssistant) -> None:
"""Test we create an entry from config."""
await setup.async_setup_component(hass, "persistent_notification", {})
with patch(
"homeassistant.components.firmata.board.PymataExpress", autospec=True
), patch(
"homeassistant.components.firmata.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.firmata.async_setup_entry", return_value=True
) as mock_setup_entry:
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={CONF_SERIAL_PORT: "/dev/nonExistent"},
)
assert result["type"] == "create_entry"
assert result["title"] == "serial-/dev/nonExistent"
assert result["data"] == {
CONF_NAME: "serial-/dev/nonExistent",
CONF_SERIAL_PORT: "/dev/nonExistent",
}
await hass.async_block_till_done()
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
|
import os.path as op
import pytest
import numpy as np
from mne.datasets.testing import data_path
from mne.io import read_raw_nirx, BaseRaw, read_raw_fif
from mne.preprocessing.nirs import optical_density, beer_lambert_law
from mne.utils import _validate_type
from mne.datasets import testing
from mne.externals.pymatreader import read_mat
fname_nirx_15_0 = op.join(data_path(download=False),
'NIRx', 'nirscout', 'nirx_15_0_recording')
fname_nirx_15_2 = op.join(data_path(download=False),
'NIRx', 'nirscout', 'nirx_15_2_recording')
fname_nirx_15_2_short = op.join(data_path(download=False),
'NIRx', 'nirscout',
'nirx_15_2_recording_w_short')
@testing.requires_testing_data
@pytest.mark.parametrize('fname', ([fname_nirx_15_2_short, fname_nirx_15_2,
fname_nirx_15_0]))
@pytest.mark.parametrize('fmt', ('nirx', 'fif'))
def test_beer_lambert(fname, fmt, tmpdir):
"""Test converting NIRX files."""
assert fmt in ('nirx', 'fif')
raw = read_raw_nirx(fname)
if fmt == 'fif':
raw.save(tmpdir.join('test_raw.fif'))
raw = read_raw_fif(tmpdir.join('test_raw.fif'))
assert 'fnirs_cw_amplitude' in raw
assert 'fnirs_od' not in raw
raw = optical_density(raw)
_validate_type(raw, BaseRaw, 'raw')
assert 'fnirs_cw_amplitude' not in raw
assert 'fnirs_od' in raw
assert 'hbo' not in raw
raw = beer_lambert_law(raw)
_validate_type(raw, BaseRaw, 'raw')
assert 'fnirs_cw_amplitude' not in raw
assert 'fnirs_od' not in raw
assert 'hbo' in raw
assert 'hbr' in raw
@testing.requires_testing_data
def test_beer_lambert_unordered_errors():
"""NIRS data requires specific ordering and naming of channels."""
raw = read_raw_nirx(fname_nirx_15_0)
raw_od = optical_density(raw)
raw_od.pick([0, 1, 2])
with pytest.raises(ValueError, match='ordered'):
beer_lambert_law(raw_od)
# Test that an error is thrown if channel naming frequency doesn't match
# what is stored in loc[9], which should hold the light frequency too.
raw_od = optical_density(raw)
raw_od.rename_channels({'S2_D2 760': 'S2_D2 770'})
with pytest.raises(ValueError, match='frequency do not match'):
beer_lambert_law(raw_od)
# Test that an error is thrown if inconsistent frequencies used in data
raw_od.info['chs'][2]['loc'][9] = 770.0
with pytest.raises(ValueError, match='pairs with frequencies'):
beer_lambert_law(raw_od)
@testing.requires_testing_data
def test_beer_lambert_v_matlab():
"""Compare MNE results to MATLAB toolbox."""
raw = read_raw_nirx(fname_nirx_15_0)
raw = optical_density(raw)
raw = beer_lambert_law(raw, ppf=0.121)
raw._data *= 1e6 # Scale to uM for comparison to MATLAB
matlab_fname = op.join(data_path(download=False),
'NIRx', 'nirscout', 'validation',
'nirx_15_0_recording_bl.mat')
matlab_data = read_mat(matlab_fname)
for idx in range(raw.get_data().shape[0]):
mean_error = np.mean(matlab_data['data'][:, idx] -
raw._data[idx])
assert mean_error < 0.1
matlab_name = ("S" + str(int(matlab_data['sources'][idx])) +
"_D" + str(int(matlab_data['detectors'][idx])) +
" " + matlab_data['type'][idx])
assert raw.info['ch_names'][idx] == matlab_name
|
import io
import os
import shutil
from mako import exceptions, util, lexer, parsetree
from mako.lookup import TemplateLookup
from mako.template import Template
from markupsafe import Markup # It's ok, Mako requires it
from nikola.plugin_categories import TemplateSystem
from nikola.utils import makedirs, get_logger
LOGGER = get_logger('mako')
class MakoTemplates(TemplateSystem):
"""Support for Mako templates."""
name = "mako"
lookup = None
cache = {}
filters = {}
directories = []
cache_dir = None
def get_string_deps(self, text, filename=None):
"""Find dependencies for a template string."""
lex = lexer.Lexer(text=text, filename=filename, input_encoding='utf-8')
lex.parse()
deps = []
for n in lex.template.nodes:
keyword = getattr(n, 'keyword', None)
if keyword in ["inherit", "namespace"] or isinstance(n, parsetree.IncludeTag):
deps.append(n.attributes['file'])
# Some templates will include "foo.tmpl" and we need paths, so normalize them
# using the template lookup
for i, d in enumerate(deps):
dep = self.get_template_path(d)
if dep:
deps[i] = dep
else:
LOGGER.error("Cannot find template {0} referenced in {1}",
d, filename)
return deps
def get_deps(self, filename):
"""Get paths to dependencies for a template."""
text = util.read_file(filename)
return self.get_string_deps(text, filename)
def set_directories(self, directories, cache_folder):
"""Create a new template lookup with set directories."""
cache_dir = os.path.join(cache_folder, '.mako.tmp')
if os.path.exists(cache_dir):
shutil.rmtree(cache_dir)
self.directories = directories
self.cache_dir = cache_dir
self.create_lookup()
def inject_directory(self, directory):
"""Add a directory to the lookup and recreate it if it's not there yet."""
if directory not in self.directories:
self.directories.append(directory)
self.create_lookup()
def create_lookup(self):
"""Create a template lookup."""
self.lookup = TemplateLookup(
directories=self.directories,
module_directory=self.cache_dir,
input_encoding='utf-8',
output_encoding='utf-8')
def set_site(self, site):
"""Set the Nikola site."""
self.site = site
self.filters.update(self.site.config['TEMPLATE_FILTERS'])
def render_template(self, template_name, output_name, context):
"""Render the template into output_name using context."""
context['striphtml'] = striphtml
template = self.lookup.get_template(template_name)
data = template.render_unicode(**context)
if output_name is not None:
makedirs(os.path.dirname(output_name))
with io.open(output_name, 'w', encoding='utf-8') as output:
output.write(data)
return data
def render_template_to_string(self, template, context):
"""Render template to a string using context."""
context.update(self.filters)
return Template(template, lookup=self.lookup).render(**context)
def template_deps(self, template_name):
"""Generate list of dependencies for a template."""
# We can cache here because dependencies should
# not change between runs
if self.cache.get(template_name, None) is None:
template = self.lookup.get_template(template_name)
dep_filenames = self.get_deps(template.filename)
deps = [template.filename]
for fname in dep_filenames:
# yes, it uses forward slashes on Windows
deps += self.template_deps(fname.split('/')[-1])
self.cache[template_name] = list(set(deps))
return self.cache[template_name]
def get_template_path(self, template_name):
"""Get the path to a template or return None."""
try:
t = self.lookup.get_template(template_name)
return t.filename
except exceptions.TopLevelLookupException:
return None
def striphtml(text):
"""Strip HTML tags from text."""
return Markup(text).striptags()
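# Tiny usage sketch (illustrative): the helper above delegates to MarkupSafe's
# striptags(), so plain text survives while the markup is dropped.
if __name__ == '__main__':
    print(striphtml('<p>Hello <b>world</b></p>'))  # -> Hello world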
|
from datetime import timedelta
import pytest
import homeassistant.components.automation as automation
from homeassistant.components.light import DOMAIN
from homeassistant.const import CONF_PLATFORM, STATE_OFF, STATE_ON
from homeassistant.helpers import device_registry
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.async_mock import patch
from tests.common import (
MockConfigEntry,
async_get_device_automation_capabilities,
async_get_device_automations,
async_mock_service,
mock_device_registry,
mock_registry,
)
@pytest.fixture
def device_reg(hass):
"""Return an empty, loaded, registry."""
return mock_device_registry(hass)
@pytest.fixture
def entity_reg(hass):
"""Return an empty, loaded, registry."""
return mock_registry(hass)
@pytest.fixture
def calls(hass):
"""Track calls to a mock service."""
return async_mock_service(hass, "test", "automation")
async def test_get_conditions(hass, device_reg, entity_reg):
"""Test we get the expected conditions from a light."""
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device_entry.id)
expected_conditions = [
{
"condition": "device",
"domain": DOMAIN,
"type": "is_off",
"device_id": device_entry.id,
"entity_id": f"{DOMAIN}.test_5678",
},
{
"condition": "device",
"domain": DOMAIN,
"type": "is_on",
"device_id": device_entry.id,
"entity_id": f"{DOMAIN}.test_5678",
},
]
conditions = await async_get_device_automations(hass, "condition", device_entry.id)
assert conditions == expected_conditions
async def test_get_condition_capabilities(hass, device_reg, entity_reg):
"""Test we get the expected capabilities from a light condition."""
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device_entry.id)
expected_capabilities = {
"extra_fields": [
{"name": "for", "optional": True, "type": "positive_time_period_dict"}
]
}
conditions = await async_get_device_automations(hass, "condition", device_entry.id)
for condition in conditions:
capabilities = await async_get_device_automation_capabilities(
hass, "condition", condition
)
assert capabilities == expected_capabilities
async def test_if_state(hass, calls):
"""Test for turn_on and turn_off conditions."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
ent1, ent2, ent3 = platform.ENTITIES
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {"platform": "event", "event_type": "test_event1"},
"condition": [
{
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": ent1.entity_id,
"type": "is_on",
}
],
"action": {
"service": "test.automation",
"data_template": {
"some": "is_on {{ trigger.%s }}"
% "}} - {{ trigger.".join(("platform", "event.event_type"))
},
},
},
{
"trigger": {"platform": "event", "event_type": "test_event2"},
"condition": [
{
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": ent1.entity_id,
"type": "is_off",
}
],
"action": {
"service": "test.automation",
"data_template": {
"some": "is_off {{ trigger.%s }}"
% "}} - {{ trigger.".join(("platform", "event.event_type"))
},
},
},
]
},
)
await hass.async_block_till_done()
assert hass.states.get(ent1.entity_id).state == STATE_ON
assert len(calls) == 0
hass.bus.async_fire("test_event1")
hass.bus.async_fire("test_event2")
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data["some"] == "is_on event - test_event1"
hass.states.async_set(ent1.entity_id, STATE_OFF)
hass.bus.async_fire("test_event1")
hass.bus.async_fire("test_event2")
await hass.async_block_till_done()
assert len(calls) == 2
assert calls[1].data["some"] == "is_off event - test_event2"
async def test_if_fires_on_for_condition(hass, calls):
"""Test for firing if condition is on with delay."""
point1 = dt_util.utcnow()
point2 = point1 + timedelta(seconds=10)
point3 = point2 + timedelta(seconds=10)
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
ent1, ent2, ent3 = platform.ENTITIES
with patch("homeassistant.core.dt_util.utcnow") as mock_utcnow:
mock_utcnow.return_value = point1
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {"platform": "event", "event_type": "test_event1"},
"condition": {
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": ent1.entity_id,
"type": "is_off",
"for": {"seconds": 5},
},
"action": {
"service": "test.automation",
"data_template": {
"some": "is_off {{ trigger.%s }}"
% "}} - {{ trigger.".join(
("platform", "event.event_type")
)
},
},
}
]
},
)
await hass.async_block_till_done()
assert hass.states.get(ent1.entity_id).state == STATE_ON
assert len(calls) == 0
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 0
# Time travel 10 secs into the future
mock_utcnow.return_value = point2
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 0
hass.states.async_set(ent1.entity_id, STATE_OFF)
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 0
# Time travel 20 secs into the future
mock_utcnow.return_value = point3
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data["some"] == "is_off event - test_event1"
|
from Handler import Handler
try:
import zmq
except ImportError:
zmq = None
class zmqHandler (Handler):
"""
    Implements the abstract Handler class,
    sending data to a ZeroMQ PUB channel
"""
def __init__(self, config=None):
"""
Create a new instance of zmqHandler class
"""
# Initialize Handler
Handler.__init__(self, config)
if not zmq:
self.log.error('zmq import failed. Handler disabled')
self.enabled = False
return
# Initialize Data
self.context = None
self.socket = None
# Initialize Options
self.port = int(self.config['port'])
# Create ZMQ pub socket and bind
self._bind()
def get_default_config_help(self):
"""
Returns the help text for the configuration options for this handler
"""
config = super(zmqHandler, self).get_default_config_help()
config.update({
'port': '',
})
return config
def get_default_config(self):
"""
Return the default config for the handler
"""
config = super(zmqHandler, self).get_default_config()
config.update({
'port': 1234,
})
return config
def _bind(self):
"""
Create PUB socket and bind
"""
if not zmq:
return
self.context = zmq.Context()
self.socket = self.context.socket(zmq.PUB)
self.socket.bind("tcp://*:%i" % self.port)
def __del__(self):
"""
Destroy instance of the zmqHandler class
"""
pass
def process(self, metric):
"""
Process a metric and send it to zmq pub socket
"""
if not zmq:
return
        # Publish the metric as a plain string on the PUB socket
self.socket.send("%s" % str(metric))
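# Illustrative consumer sketch (not part of this handler): a plain ZeroMQ SUB
# socket connected to the handler's port (1234 by default, see
# get_default_config) receives the published metric strings. The host and
# port below are assumptions made only for this sketch.
if __name__ == "__main__" and zmq:
    _ctx = zmq.Context()
    _sub = _ctx.socket(zmq.SUB)
    _sub.connect("tcp://localhost:1234")
    _sub.setsockopt(zmq.SUBSCRIBE, b"")  # subscribe to every topic
    while True:
        print(_sub.recv())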
|
import os
import json
from types import ModuleType
import flexx
from flexx import ui, app
from urllib.request import urlopen, Request
from uiexample import create_ui_example
THIS_DIR = os.path.dirname(os.path.abspath(__file__))
DOC_DIR = os.path.abspath(os.path.join(THIS_DIR, '..'))
EXAMPLES_DIR = os.path.join(os.path.dirname(DOC_DIR), 'flexxamples')
# Get list of (submodule, dirname) tuples
EXAMPLES_DIRS = []
for dname in os.listdir(EXAMPLES_DIR):
dirname = os.path.join(EXAMPLES_DIR, dname)
if os.path.isfile(os.path.join(dirname, '__init__.py')):
EXAMPLES_DIRS.append((dname, dirname))
created_files = []
# NOTE: not used anymore, but keep in case we want to automate again
def get_notebook_list():
url = 'https://api.github.com/repos/flexxui/flexx-notebooks/contents'
print('downloading %s ... ' % url, end='')
# https://github.com/travis-ci/travis-ci/issues/5649
req = Request(url, headers={'User-Agent': 'flexx/%s' % flexx.__version__})
s = json.loads(urlopen(req, timeout=5.0).read().decode())
print('done')
filenames = []
for file in s:
if file['name'].endswith('ipynb'):
filenames.append(file['name'])
return filenames
def main():
output_dir = os.path.join(DOC_DIR, 'examples')
# Collect examples
examples = {}
for sub, dirname in EXAMPLES_DIRS:
examples[sub] = {}
for fname in os.listdir(dirname):
filename = os.path.join(dirname, fname)
if os.path.isfile(filename) and fname.endswith('.py') and fname[0] != '_':
# Create example content
code = open(filename, 'rt', encoding='utf-8').read()
text = ':orphan:\n\n' # avoid toctree warning
text += '.. _%s:\n\n' % fname
text += '%s\n%s\n\n' % (fname, '=' * len(fname))
if code.startswith('# doc-export:'):
code = code.split('\n', 1)[1].lstrip()
html = create_ui_example(filename, '..', source=filename)
text += '.. raw:: html\n\n ' + html + '\n\n'
text += '.. code-block:: py\n :linenos:\n\n'
text += '\n ' + code.replace('\n', '\n ').rstrip() + '\n'
examples[sub][fname] = text
if not examples[sub]:
del examples[sub]
# Write source for all examples
for name in examples[sub]:
filename = os.path.join(output_dir, name[:-3] + '_src.rst')
created_files.append(filename)
open(filename, 'wt', encoding='utf-8').write(examples[sub][name])
# Create example page
docs = 'Examples'
docs += '\n%s\n\n' % (len(docs) * '=')
docs += "This page provides a list of examples. Some demonstate a specific application, "
docs += "while others show a tool/technique that is generically useful. In the latter case "
docs += "you can import the widget using e.g. ``from flexxamples.howtos.editor_cm import CodeEditor``."
docs += "\n\n"
for sub, _ in EXAMPLES_DIRS:
section = sub.capitalize()
docs += '\n%s\n%s\n\n' % (section, len(section) * '-')
for name in sorted(examples[sub]):
docs += '* :ref:`%s`\n' % name
filename = os.path.join(DOC_DIR, 'examples', 'index.rst')
created_files.append(filename)
open(filename, 'wt', encoding='utf-8').write(docs)
print(' generated %i examples' % sum([len(x) for x in examples.values()]))
def clean():
while created_files:
filename = created_files.pop()
if os.path.isfile(filename):
os.remove(filename)
elif os.path.isdir(filename) and not os.listdir(filename):
os.rmdir(filename)
|
import logging
import pytest
from homeassistant.components.sentry import get_channel, process_before_send
from homeassistant.components.sentry.const import (
CONF_DSN,
CONF_ENVIRONMENT,
CONF_EVENT_CUSTOM_COMPONENTS,
CONF_EVENT_HANDLED,
CONF_EVENT_THIRD_PARTY_PACKAGES,
CONF_TRACING,
CONF_TRACING_SAMPLE_RATE,
DOMAIN,
)
from homeassistant.const import __version__ as current_version
from homeassistant.core import HomeAssistant
from tests.async_mock import MagicMock, Mock, patch
from tests.common import MockConfigEntry
async def test_setup_entry(hass: HomeAssistant) -> None:
"""Test integration setup from entry."""
entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_DSN: "http://[email protected]/1", CONF_ENVIRONMENT: "production"},
)
entry.add_to_hass(hass)
with patch(
"homeassistant.components.sentry.AioHttpIntegration"
) as sentry_aiohttp_mock, patch(
"homeassistant.components.sentry.SqlalchemyIntegration"
) as sentry_sqlalchemy_mock, patch(
"homeassistant.components.sentry.LoggingIntegration"
) as sentry_logging_mock, patch(
"homeassistant.components.sentry.sentry_sdk"
) as sentry_mock:
assert await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
# Test CONF_ENVIRONMENT is migrated to entry options
assert CONF_ENVIRONMENT not in entry.data
assert CONF_ENVIRONMENT in entry.options
assert entry.options[CONF_ENVIRONMENT] == "production"
assert sentry_logging_mock.call_count == 1
assert sentry_logging_mock.called_once_with(
level=logging.WARNING, event_level=logging.WARNING
)
assert sentry_aiohttp_mock.call_count == 1
assert sentry_sqlalchemy_mock.call_count == 1
assert sentry_mock.init.call_count == 1
call_args = sentry_mock.init.call_args[1]
assert set(call_args) == {
"dsn",
"environment",
"integrations",
"release",
"before_send",
}
assert call_args["dsn"] == "http://[email protected]/1"
assert call_args["environment"] == "production"
assert call_args["integrations"] == [
sentry_logging_mock.return_value,
sentry_aiohttp_mock.return_value,
sentry_sqlalchemy_mock.return_value,
]
assert call_args["release"] == current_version
assert call_args["before_send"]
async def test_setup_entry_with_tracing(hass: HomeAssistant) -> None:
"""Test integration setup from entry with tracing enabled."""
entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_DSN: "http://[email protected]/1"},
options={CONF_TRACING: True, CONF_TRACING_SAMPLE_RATE: 0.5},
)
entry.add_to_hass(hass)
with patch("homeassistant.components.sentry.AioHttpIntegration"), patch(
"homeassistant.components.sentry.SqlalchemyIntegration"
), patch("homeassistant.components.sentry.LoggingIntegration"), patch(
"homeassistant.components.sentry.sentry_sdk"
) as sentry_mock:
assert await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
call_args = sentry_mock.init.call_args[1]
assert set(call_args) == {
"dsn",
"environment",
"integrations",
"release",
"before_send",
"traces_sample_rate",
}
assert call_args["traces_sample_rate"] == 0.5
@pytest.mark.parametrize(
"version,channel",
[
("0.115.0.dev20200815", "nightly"),
("0.115.0", "stable"),
("0.115.0b4", "beta"),
("0.115.0dev0", "dev"),
],
)
async def test_get_channel(version, channel) -> None:
"""Test if channel detection works from Home Assistant version number."""
assert get_channel(version) == channel
async def test_process_before_send(hass: HomeAssistant):
"""Test regular use of the Sentry process before sending function."""
hass.config.components.add("puppies")
hass.config.components.add("a_integration")
# These should not show up in the result.
hass.config.components.add("puppies.light")
hass.config.components.add("auth")
result = process_before_send(
hass,
options={},
channel="test",
huuid="12345",
system_info={"installation_type": "pytest"},
custom_components=["ironing_robot", "fridge_opener"],
event={},
hint={},
)
assert result
assert result["tags"]
assert result["contexts"]
assert result["contexts"]
ha_context = result["contexts"]["Home Assistant"]
assert ha_context["channel"] == "test"
assert ha_context["custom_components"] == "fridge_opener\nironing_robot"
assert ha_context["integrations"] == "a_integration\npuppies"
tags = result["tags"]
assert tags["channel"] == "test"
assert tags["uuid"] == "12345"
assert tags["installation_type"] == "pytest"
user = result["user"]
assert user["id"] == "12345"
async def test_event_with_platform_context(hass: HomeAssistant):
"""Test extraction of platform context information during Sentry events."""
current_platform_mock = Mock()
current_platform_mock.get().platform_name = "hue"
current_platform_mock.get().domain = "light"
with patch(
"homeassistant.components.sentry.entity_platform.current_platform",
new=current_platform_mock,
):
result = process_before_send(
hass,
options={},
channel="test",
huuid="12345",
system_info={"installation_type": "pytest"},
custom_components=["ironing_robot"],
event={},
hint={},
)
assert result
assert result["tags"]["integration"] == "hue"
assert result["tags"]["platform"] == "light"
assert result["tags"]["custom_component"] == "no"
current_platform_mock.get().platform_name = "ironing_robot"
current_platform_mock.get().domain = "switch"
with patch(
"homeassistant.components.sentry.entity_platform.current_platform",
new=current_platform_mock,
):
result = process_before_send(
hass,
options={CONF_EVENT_CUSTOM_COMPONENTS: True},
channel="test",
huuid="12345",
system_info={"installation_type": "pytest"},
custom_components=["ironing_robot"],
event={},
hint={},
)
assert result
assert result["tags"]["integration"] == "ironing_robot"
assert result["tags"]["platform"] == "switch"
assert result["tags"]["custom_component"] == "yes"
@pytest.mark.parametrize(
"logger,tags",
[
("adguard", {"package": "adguard"}),
(
"homeassistant.components.hue.coordinator",
{"integration": "hue", "custom_component": "no"},
),
(
"homeassistant.components.hue.light",
{"integration": "hue", "platform": "light", "custom_component": "no"},
),
(
"homeassistant.components.ironing_robot.switch",
{
"integration": "ironing_robot",
"platform": "switch",
"custom_component": "yes",
},
),
(
"homeassistant.components.ironing_robot",
{"integration": "ironing_robot", "custom_component": "yes"},
),
("homeassistant.helpers.network", {"helpers": "network"}),
("tuyapi.test", {"package": "tuyapi"}),
],
)
async def test_logger_event_extraction(hass: HomeAssistant, logger, tags):
"""Test extraction of information from Sentry logger events."""
result = process_before_send(
hass,
options={
CONF_EVENT_CUSTOM_COMPONENTS: True,
CONF_EVENT_THIRD_PARTY_PACKAGES: True,
},
channel="test",
huuid="12345",
system_info={"installation_type": "pytest"},
custom_components=["ironing_robot"],
event={"logger": logger},
hint={},
)
assert result
assert result["tags"] == {
"channel": "test",
"uuid": "12345",
"installation_type": "pytest",
**tags,
}
@pytest.mark.parametrize(
"logger,options,event",
[
("adguard", {CONF_EVENT_THIRD_PARTY_PACKAGES: True}, True),
("adguard", {CONF_EVENT_THIRD_PARTY_PACKAGES: False}, False),
(
"homeassistant.components.ironing_robot.switch",
{CONF_EVENT_CUSTOM_COMPONENTS: True},
True,
),
(
"homeassistant.components.ironing_robot.switch",
{CONF_EVENT_CUSTOM_COMPONENTS: False},
False,
),
],
)
async def test_filter_log_events(hass: HomeAssistant, logger, options, event):
"""Test filtering of events based on configuration options."""
result = process_before_send(
hass,
options=options,
channel="test",
huuid="12345",
system_info={"installation_type": "pytest"},
custom_components=["ironing_robot"],
event={"logger": logger},
hint={},
)
if event:
assert result
else:
assert result is None
@pytest.mark.parametrize(
"handled,options,event",
[
("yes", {CONF_EVENT_HANDLED: True}, True),
("yes", {CONF_EVENT_HANDLED: False}, False),
("no", {CONF_EVENT_HANDLED: False}, True),
("no", {CONF_EVENT_HANDLED: True}, True),
],
)
async def test_filter_handled_events(hass: HomeAssistant, handled, options, event):
"""Tests filtering of handled events based on configuration options."""
event_mock = MagicMock()
event_mock.__iter__ = ["tags"]
event_mock.__contains__ = lambda _, val: val == "tags"
event_mock.tags = {"handled": handled}
result = process_before_send(
hass,
options=options,
channel="test",
huuid="12345",
system_info={"installation_type": "pytest"},
custom_components=[],
event=event_mock,
hint={},
)
if event:
assert result
else:
assert result is None
|
import unittest, sys
from lxml.tests.common_imports import make_doctest, doctest
import lxml.html
from lxml.html import html_parser, XHTML_NAMESPACE
class FrameTest(unittest.TestCase):
def test_parse_fragments_fromstring(self):
parser = lxml.html.HTMLParser(encoding='utf-8', remove_comments=True)
html = """<frameset>
<frame src="main.php" name="srcpg" id="srcpg" frameborder="0" rolling="Auto" marginwidth="" marginheight="0">
</frameset>"""
etree_document = lxml.html.fragments_fromstring(html, parser=parser)
self.assertEqual(len(etree_document), 1)
root = etree_document[0]
self.assertEqual(root.tag, "frameset")
frame_element = root[0]
self.assertEqual(frame_element.tag, 'frame')
def test_parse_fromstring(self):
parser = lxml.html.HTMLParser(encoding='utf-8', remove_comments=True)
html = """<html><frameset>
<frame src="main.php" name="srcpg" id="srcpg" frameborder="0" rolling="Auto" marginwidth="" marginheight="0">
</frameset></html>"""
etree_document = lxml.html.fromstring(html, parser=parser)
self.assertEqual(etree_document.tag, 'html')
self.assertEqual(len(etree_document), 1)
frameset_element = etree_document[0]
self.assertEqual(len(frameset_element), 1)
frame_element = frameset_element[0]
self.assertEqual(frame_element.tag, 'frame')
def test_suite():
loader = unittest.TestLoader()
return loader.loadTestsFromModule(sys.modules[__name__])
|
import pkgutil
import numpy
import copyreg
import types
import dedupe.variables
import dedupe.variables.base as base
from dedupe.variables.base import MissingDataType
from dedupe.variables.interaction import InteractionType
for _, module, _ in pkgutil.iter_modules(dedupe.variables.__path__,
'dedupe.variables.'):
__import__(module)
FIELD_CLASSES = {k: v for k, v in base.allSubclasses(base.FieldType) if k}
class DataModel(object):
def __init__(self, fields):
if not fields:
raise ValueError('The field definitions cannot be empty')
primary_fields, variables = typifyFields(fields)
self.primary_fields = primary_fields
self._derived_start = len(variables)
variables += interactions(fields, primary_fields)
variables += missing(variables)
self._missing_field_indices = missing_field_indices(variables)
self._interaction_indices = interaction_indices(variables)
self._variables = variables
def __len__(self):
return len(self._variables)
# Changing this from a property to just a normal attribute causes
# pickling problems, because we are removing static methods from
# their class context. This could be fixed by defining comparators
# outside of classes in fieldclasses
@property
def _field_comparators(self):
start = 0
stop = 0
comparators = []
for field in self.primary_fields:
stop = start + len(field)
comparators.append((field.field, field.comparator, start, stop))
start = stop
return comparators
def predicates(self, index_predicates=True, canopies=True):
predicates = set()
for definition in self.primary_fields:
for predicate in definition.predicates:
if hasattr(predicate, 'index'):
if index_predicates:
if hasattr(predicate, 'canopy'):
if canopies:
predicates.add(predicate)
else:
if not canopies:
predicates.add(predicate)
else:
predicates.add(predicate)
return predicates
def distances(self, record_pairs):
num_records = len(record_pairs)
distances = numpy.empty((num_records, len(self)), 'f4')
field_comparators = self._field_comparators
for i, (record_1, record_2) in enumerate(record_pairs):
for field, compare, start, stop in field_comparators:
if record_1[field] is not None and record_2[field] is not None:
distances[i, start:stop] = compare(record_1[field],
record_2[field])
elif hasattr(compare, 'missing'):
distances[i, start:stop] = compare(record_1[field],
record_2[field])
else:
distances[i, start:stop] = numpy.nan
distances = self._derivedDistances(distances)
return distances
def _derivedDistances(self, primary_distances):
distances = primary_distances
current_column = self._derived_start
for interaction in self._interaction_indices:
distances[:, current_column] =\
numpy.prod(distances[:, interaction], axis=1)
current_column += 1
missing_data = numpy.isnan(distances[:, :current_column])
distances[:, :current_column][missing_data] = 0
if self._missing_field_indices:
distances[:, current_column:] =\
1 - missing_data[:, self._missing_field_indices]
return distances
def check(self, record):
for field_comparator in self._field_comparators:
field = field_comparator[0]
if field not in record:
raise ValueError("Records do not line up with data model. "
"The field '%s' is in data_model but not "
"in a record" % field)
def typifyFields(fields):
primary_fields = []
data_model = []
only_custom = True
for definition in fields:
try:
field_type = definition['type']
except TypeError:
raise TypeError("Incorrect field specification: field "
"specifications are dictionaries that must "
"include a type definition, ex. "
"{'field' : 'Phone', type: 'String'}")
except KeyError:
raise KeyError("Missing field type: fields "
"specifications are dictionaries that must "
"include a type definition, ex. "
"{'field' : 'Phone', type: 'String'}")
if field_type != 'Custom':
only_custom = False
if field_type == 'Interaction':
continue
if field_type == 'FuzzyCategorical' and 'other fields' not in definition:
definition['other fields'] = [d['field'] for d in fields
if ('field' in d and
d['field'] != definition['field'])]
try:
field_class = FIELD_CLASSES[field_type]
except KeyError:
raise KeyError("Field type %s not valid. Valid types include %s"
% (definition['type'], ', '.join(FIELD_CLASSES)))
field_object = field_class(definition)
primary_fields.append(field_object)
if hasattr(field_object, 'higher_vars'):
data_model.extend(field_object.higher_vars)
else:
data_model.append(field_object)
if only_custom:
raise ValueError("At least one of the field types needs to be a type"
"other than 'Custom'. 'Custom' types have no associated"
"blocking rules")
return primary_fields, data_model
def missing(data_model):
missing_variables = []
for definition in data_model[:]:
if definition.has_missing:
missing_variables.append(MissingDataType(definition.name))
return missing_variables
def interactions(definitions, primary_fields):
field_d = {field.name: field for field in primary_fields}
interaction_class = InteractionType
interactions = []
for definition in definitions:
if definition['type'] == 'Interaction':
field = interaction_class(definition)
field.expandInteractions(field_d)
interactions.extend(field.higher_vars)
return interactions
def missing_field_indices(variables):
return [i for i, definition
in enumerate(variables)
if definition.has_missing]
def interaction_indices(variables):
indices = []
field_names = [field.name for field in variables]
for definition in variables:
if hasattr(definition, 'interaction_fields'):
interaction_indices = []
for interaction_field in definition.interaction_fields:
interaction_indices.append(
field_names.index(interaction_field))
indices.append(interaction_indices)
return indices
def reduce_method(m):
return (getattr, (m.__self__, m.__func__.__name__))
copyreg.pickle(types.MethodType, reduce_method)
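# Illustrative sketch (hypothetical field names): typifyFields above expects a
# list of dicts, each carrying at least a 'field' name and a 'type', so a
# minimal data model can be built like this.
if __name__ == '__main__':
    _example_fields = [{'field': 'Phone', 'type': 'String'},
                       {'field': 'Name', 'type': 'String'}]
    _model = DataModel(_example_fields)
    print('%d variables in the data model' % len(_model))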
|
import numpy as np
import opt_einsum
import pytest
from tensornetwork import Node
from tensornetwork.contractors.opt_einsum_paths import utils
def check_path(calculated_path, correct_path):
if not isinstance(calculated_path, list):
return False
if len(calculated_path) != len(correct_path):
return False
ret = True
for calc, correct in zip(calculated_path, correct_path):
ret &= isinstance(calc, tuple)
ret &= calc == correct
return ret
# We do not use the backend fixture as this file tests only contraction paths
# that `opt_einsum` returns and not the actual contractions performed by
# `TensorNetwork`.
def gemm_network():
"""Creates 'GEMM1' contraction from `opt_einsum` tests."""
x = Node(np.ones([1, 2, 4]))
y = Node(np.ones([1, 3]))
z = Node(np.ones([2, 4, 3]))
# pylint: disable=pointless-statement
x[0] ^ y[0]
x[1] ^ z[0]
x[2] ^ z[1]
y[1] ^ z[2]
return [x, y, z]
def inner_network():
"""Creates a (modified) `Inner1` contraction from `opt_einsum` tests."""
x = Node(np.ones([5, 2, 3, 4]))
y = Node(np.ones([5, 3]))
z = Node(np.ones([2, 4]))
# pylint: disable=pointless-statement
x[0] ^ y[0]
x[1] ^ z[0]
x[2] ^ y[1]
x[3] ^ z[1]
return [x, y, z]
def matrix_chain():
"""Creates a contraction of chain of matrices.
The `greedy` algorithm does not find the optimal path in this case!
"""
d = [10, 8, 6, 4, 2]
nodes = [Node(np.ones([d1, d2])) for d1, d2 in zip(d[:-1], d[1:])]
for a, b in zip(nodes[:-1], nodes[1:]):
# pylint: disable=pointless-statement
a[1] ^ b[0]
return nodes
# Parametrize tests by giving:
# (contraction algorithm, network, correct path that is expected)
test_list = [
("optimal", "gemm_network", [(0, 2), (0, 1)]),
("branch", "gemm_network", [(0, 2), (0, 1)]),
("greedy", "gemm_network", [(0, 2), (0, 1)]),
("optimal", "inner_network", [(0, 1), (0, 1)]),
("branch", "inner_network", [(0, 1), (0, 1)]),
("greedy", "inner_network", [(0, 1), (0, 1)]),
("optimal", "matrix_chain", [(2, 3), (1, 2), (0, 1)]),
("branch", "matrix_chain", [(2, 3), (1, 2), (0, 1)]),
("greedy", "matrix_chain", [(0, 1), (0, 2), (0, 1)]),
]
@pytest.mark.parametrize("params", test_list)
def test_path_optimal(params):
algorithm_name, network_name, correct_path = params
nodes = globals()[network_name]()
path_algorithm = getattr(opt_einsum.paths, algorithm_name)
calculated_path, _ = utils.get_path(nodes, path_algorithm)
assert check_path(calculated_path, correct_path)
|
from datetime import timedelta
import logging
from typing import Any, Dict, List, Optional
import voluptuous as vol
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
SERVICE_TOGGLE,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_ON,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.config_validation import ( # noqa: F401
PLATFORM_SCHEMA,
PLATFORM_SCHEMA_BASE,
)
from homeassistant.helpers.entity import ToggleEntity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from homeassistant.loader import bind_hass
from .const import (
ATTR_AVAILABLE_MODES,
ATTR_HUMIDITY,
ATTR_MAX_HUMIDITY,
ATTR_MIN_HUMIDITY,
ATTR_MODE,
DEFAULT_MAX_HUMIDITY,
DEFAULT_MIN_HUMIDITY,
DEVICE_CLASS_DEHUMIDIFIER,
DEVICE_CLASS_HUMIDIFIER,
DOMAIN,
SERVICE_SET_HUMIDITY,
SERVICE_SET_MODE,
SUPPORT_MODES,
)
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(seconds=60)
DEVICE_CLASSES = [DEVICE_CLASS_HUMIDIFIER, DEVICE_CLASS_DEHUMIDIFIER]
DEVICE_CLASSES_SCHEMA = vol.All(vol.Lower, vol.In(DEVICE_CLASSES))
@bind_hass
def is_on(hass, entity_id):
"""Return if the humidifier is on based on the statemachine.
Async friendly.
"""
return hass.states.is_state(entity_id, STATE_ON)
async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool:
"""Set up humidifier devices."""
component = hass.data[DOMAIN] = EntityComponent(
_LOGGER, DOMAIN, hass, SCAN_INTERVAL
)
await component.async_setup(config)
component.async_register_entity_service(SERVICE_TURN_ON, {}, "async_turn_on")
component.async_register_entity_service(SERVICE_TURN_OFF, {}, "async_turn_off")
component.async_register_entity_service(SERVICE_TOGGLE, {}, "async_toggle")
component.async_register_entity_service(
SERVICE_SET_MODE,
{vol.Required(ATTR_MODE): cv.string},
"async_set_mode",
[SUPPORT_MODES],
)
component.async_register_entity_service(
SERVICE_SET_HUMIDITY,
{
vol.Required(ATTR_HUMIDITY): vol.All(
vol.Coerce(int), vol.Range(min=0, max=100)
)
},
"async_set_humidity",
)
return True
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool:
"""Set up a config entry."""
return await hass.data[DOMAIN].async_setup_entry(entry)
async def async_unload_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.data[DOMAIN].async_unload_entry(entry)
class HumidifierEntity(ToggleEntity):
"""Representation of a humidifier device."""
@property
def capability_attributes(self) -> Dict[str, Any]:
"""Return capability attributes."""
supported_features = self.supported_features or 0
data = {
ATTR_MIN_HUMIDITY: self.min_humidity,
ATTR_MAX_HUMIDITY: self.max_humidity,
}
if supported_features & SUPPORT_MODES:
data[ATTR_AVAILABLE_MODES] = self.available_modes
return data
@property
def state_attributes(self) -> Dict[str, Any]:
"""Return the optional state attributes."""
supported_features = self.supported_features or 0
data = {}
if self.target_humidity is not None:
data[ATTR_HUMIDITY] = self.target_humidity
if supported_features & SUPPORT_MODES:
data[ATTR_MODE] = self.mode
return data
@property
def target_humidity(self) -> Optional[int]:
"""Return the humidity we try to reach."""
return None
@property
def mode(self) -> Optional[str]:
"""Return the current mode, e.g., home, auto, baby.
Requires SUPPORT_MODES.
"""
raise NotImplementedError
@property
def available_modes(self) -> Optional[List[str]]:
"""Return a list of available modes.
Requires SUPPORT_MODES.
"""
raise NotImplementedError
def set_humidity(self, humidity: int) -> None:
"""Set new target humidity."""
raise NotImplementedError()
async def async_set_humidity(self, humidity: int) -> None:
"""Set new target humidity."""
await self.hass.async_add_executor_job(self.set_humidity, humidity)
def set_mode(self, mode: str) -> None:
"""Set new mode."""
raise NotImplementedError()
async def async_set_mode(self, mode: str) -> None:
"""Set new mode."""
await self.hass.async_add_executor_job(self.set_mode, mode)
@property
def min_humidity(self) -> int:
"""Return the minimum humidity."""
return DEFAULT_MIN_HUMIDITY
@property
def max_humidity(self) -> int:
"""Return the maximum humidity."""
return DEFAULT_MAX_HUMIDITY
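# Minimal sketch of a concrete entity (illustrative, not shipped with this
# component): a humidifier that only tracks an on/off flag and a target
# humidity, relying on the defaults above for min/max and exposing no modes.
class _ExampleHumidifier(HumidifierEntity):
    """Hypothetical entity used only to illustrate the API above."""

    def __init__(self) -> None:
        self._is_on = False
        self._target = 50

    @property
    def is_on(self) -> bool:
        """Return True if the humidifier is running."""
        return self._is_on

    @property
    def target_humidity(self) -> Optional[int]:
        """Return the humidity we try to reach."""
        return self._target

    def turn_on(self, **kwargs: Any) -> None:
        """Turn the humidifier on."""
        self._is_on = True

    def turn_off(self, **kwargs: Any) -> None:
        """Turn the humidifier off."""
        self._is_on = False

    def set_humidity(self, humidity: int) -> None:
        """Set new target humidity."""
        self._target = humidity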
|
import re
from datetime import datetime
from functools import wraps
from django.urls import reverse
from django.utils.dateformat import DateFormat
from django.utils.timezone import is_aware
from django.utils.timezone import localtime
from django.utils.translation import gettext as _
class Crumb(object):
"""
Part of the breadcrumbs.
"""
def __init__(self, name, url=None):
self.name = name
self.url = url
def year_crumb(date):
"""
Crumb for a year.
"""
year = date.strftime('%Y')
return Crumb(year, reverse('zinnia:entry_archive_year',
args=[year]))
def month_crumb(date):
"""
Crumb for a month.
"""
year = date.strftime('%Y')
month = date.strftime('%m')
month_text = DateFormat(date).format('F').capitalize()
return Crumb(month_text, reverse('zinnia:entry_archive_month',
args=[year, month]))
def day_crumb(date):
"""
Crumb for a day.
"""
year = date.strftime('%Y')
month = date.strftime('%m')
day = date.strftime('%d')
return Crumb(day, reverse('zinnia:entry_archive_day',
args=[year, month, day]))
def entry_breadcrumbs(entry):
"""
Breadcrumbs for an Entry.
"""
date = entry.publication_date
if is_aware(date):
date = localtime(date)
return [year_crumb(date), month_crumb(date),
day_crumb(date), Crumb(entry.title)]
MODEL_BREADCRUMBS = {'Tag': lambda x: [Crumb(_('Tags'),
reverse('zinnia:tag_list')),
Crumb(x.name)],
'Author': lambda x: [Crumb(_('Authors'),
reverse('zinnia:author_list')),
Crumb(x.__str__())],
'Category': lambda x: [Crumb(
_('Categories'), reverse('zinnia:category_list'))] +
[Crumb(anc.__str__(), anc.get_absolute_url())
for anc in x.get_ancestors()] + [Crumb(x.title)],
'Entry': entry_breadcrumbs}
ARCHIVE_REGEXP = re.compile(
r'.*(?P<year>\d{4})/(?P<month>\d{2})?/(?P<day>\d{2})?.*')
ARCHIVE_WEEK_REGEXP = re.compile(
r'.*(?P<year>\d{4})/week/(?P<week>\d+)?.*')
PAGE_REGEXP = re.compile(r'page/(?P<page>\d+).*$')
def handle_page_crumb(func):
"""
Decorator for handling the current page in the breadcrumbs.
"""
@wraps(func)
def wrapper(path, model, page, root_name):
path = PAGE_REGEXP.sub('', path)
breadcrumbs = func(path, model, root_name)
if page:
if page.number > 1:
breadcrumbs[-1].url = path
page_crumb = Crumb(_('Page %s') % page.number)
breadcrumbs.append(page_crumb)
return breadcrumbs
return wrapper
@handle_page_crumb
def retrieve_breadcrumbs(path, model_instance, root_name=''):
"""
    Build semi-hardcoded breadcrumbs
    based on the model's URL handled by Zinnia.
"""
breadcrumbs = []
zinnia_root_path = reverse('zinnia:entry_archive_index')
if root_name:
breadcrumbs.append(Crumb(root_name, zinnia_root_path))
if model_instance is not None:
key = model_instance.__class__.__name__
if key in MODEL_BREADCRUMBS:
breadcrumbs.extend(MODEL_BREADCRUMBS[key](model_instance))
return breadcrumbs
date_match = ARCHIVE_WEEK_REGEXP.match(path)
if date_match:
year, week = date_match.groups()
year_date = datetime(int(year), 1, 1)
date_breadcrumbs = [year_crumb(year_date),
Crumb(_('Week %s') % week)]
breadcrumbs.extend(date_breadcrumbs)
return breadcrumbs
date_match = ARCHIVE_REGEXP.match(path)
if date_match:
date_dict = date_match.groupdict()
path_date = datetime(
int(date_dict['year']),
date_dict.get('month') is not None and
int(date_dict.get('month')) or 1,
date_dict.get('day') is not None and
int(date_dict.get('day')) or 1)
date_breadcrumbs = [year_crumb(path_date)]
if date_dict['month']:
date_breadcrumbs.append(month_crumb(path_date))
if date_dict['day']:
date_breadcrumbs.append(day_crumb(path_date))
breadcrumbs.extend(date_breadcrumbs)
breadcrumbs[-1].url = None
return breadcrumbs
url_components = [comp for comp in
path.replace(zinnia_root_path, '', 1).split('/')
if comp]
if len(url_components):
breadcrumbs.append(Crumb(_(url_components[-1].capitalize())))
return breadcrumbs
|
from .asserts import assert_equals_with_unidiff
class OutputCollector:
def __init__(self):
from unit_tests.myStringIO import StringIO
self.stream = StringIO()
self.getvalue = self.stream.getvalue
def write(self,data):
self.stream.write(data)
def should_be(self, expected):
assert_equals_with_unidiff(expected, self.output())
def output(self):
return self.stream.getvalue()
|
from datetime import datetime, timezone
import pytest
from dateutil.tz import tzoffset
from twtxt.parser import make_aware, parse_iso8601
from twtxt.parser import parse_tweet, parse_tweets
from twtxt.models import Source
def test_make_aware():
"""Test making unaware datetime objects tzinfo aware."""
aware = datetime.now(timezone.utc)
unaware = aware.replace(tzinfo=None)
assert make_aware(unaware) >= aware
assert make_aware(aware) == aware
def test_parse_iso8601():
"""Test parsing ISO-8601 date/time strings."""
as_string = "2016-02-05T02:52:15.030474+01:00"
as_datetime = datetime(2016, 2, 5, 2, 52, 15, 30474, tzinfo=tzoffset(None, 3600))
assert parse_iso8601(as_string) == as_datetime
as_string = "2016-02-05T02:52:15"
as_datetime = datetime(2016, 2, 5, 2, 52, 15, tzinfo=timezone.utc)
assert parse_iso8601(as_string) == as_datetime
with pytest.raises(ValueError) as e:
parse_iso8601("foobar")
assert "Unknown string format" in str(e.value)
def test_parse_tweet():
"""Test parsing single tweet line."""
source = Source("foo", "bar")
raw_line = "2016-02-08T00:00:00\tHallo"
tweet = parse_tweet(raw_line, source)
assert tweet.text == "Hallo"
assert tweet.created_at == datetime(year=2016, month=2, day=8, tzinfo=timezone.utc)
with pytest.raises(ValueError) as e:
raw_line = "3000-02-08T00:00:00\tHallo"
parse_tweet(raw_line, source)
assert "Tweet is from the future" in str(e.value)
def test_parse_tweets():
"""Test parsing multiple tweet lines"""
source = Source("foo", "bar")
raw_tweets = [
"2016-02-08T00:00:00\tHallo",
"2016-02-08T00:00:00\tBar\n",
"2016-02-08T00:00:00\tFoo\n",
"3000-02-08T00:00:00\tHallo\n",
]
tweets = parse_tweets(raw_tweets, source)
assert len(tweets) == 3
|
import os
from gitless import core
from . import helpers, pprint
def parser(subparsers, repo):
"""Adds the status parser to the given subparsers object."""
desc = 'show status of the repo'
status_parser = subparsers.add_parser(
'status', help=desc, description=desc.capitalize(), aliases=['st'])
status_parser.add_argument(
'paths', nargs='*', help='the specific path(s) to status',
action=helpers.PathProcessor, repo=repo)
status_parser.set_defaults(func=main)
def main(args, repo):
curr_b = repo.current_branch
pprint.msg('On branch {0}, repo-directory {1}'.format(
pprint.green(curr_b.branch_name), pprint.green('//' + repo.cwd)))
if curr_b.merge_in_progress:
pprint.blank()
_print_conflict_exp('merge')
elif curr_b.fuse_in_progress:
pprint.blank()
_print_conflict_exp('fuse')
tracked_mod_list = []
untracked_list = []
paths = frozenset(args.paths)
for f in curr_b.status():
if paths and (f.fp not in paths):
continue
if f.type == core.GL_STATUS_TRACKED and f.modified:
tracked_mod_list.append(f)
elif f.type == core.GL_STATUS_UNTRACKED:
untracked_list.append(f)
relative_paths = True # git seems to default to true
try:
relative_paths = repo.config.get_bool('status.relativePaths')
except KeyError:
pass
pprint.blank()
tracked_mod_list.sort(key=lambda f: f.fp)
_print_tracked_mod_files(tracked_mod_list, relative_paths, repo)
pprint.blank()
pprint.blank()
untracked_list.sort(key=lambda f: f.fp)
_print_untracked_files(untracked_list, relative_paths, repo)
return True
def _print_tracked_mod_files(tracked_mod_list, relative_paths, repo):
pprint.msg('Tracked files with modifications:')
pprint.exp('these will be automatically considered for commit')
pprint.exp(
'use gl untrack f if you don\'t want to track changes to file f')
pprint.exp(
'if file f was committed before, use gl checkout f to discard '
'local changes')
pprint.blank()
if not tracked_mod_list:
pprint.item('There are no tracked files with modifications to list')
return
root = repo.root
for f in tracked_mod_list:
exp = ''
color = pprint.yellow
if not f.exists_at_head:
exp = ' (new file)'
color = pprint.green
elif not f.exists_in_wd:
exp = ' (deleted)'
color = pprint.red
elif f.in_conflict:
exp = ' (with conflicts)'
color = pprint.cyan
fp = os.path.relpath(os.path.join(root, f.fp)) if relative_paths else f.fp
if fp == '.':
continue
pprint.item(color(fp), opt_text=exp)
def _print_untracked_files(untracked_list, relative_paths, repo):
pprint.msg('Untracked files:')
pprint.exp('these won\'t be considered for commit')
pprint.exp('use gl track f if you want to track changes to file f')
pprint.blank()
if not untracked_list:
pprint.item('There are no untracked files to list')
return
root = repo.root
for f in untracked_list:
exp = ''
color = pprint.blue
if f.in_conflict:
exp = ' (with conflicts)'
color = pprint.cyan
elif f.exists_at_head:
color = pprint.magenta
if f.exists_in_wd:
exp = ' (exists at head)'
else:
exp = ' (exists at head but not in working directory)'
fp = os.path.relpath(os.path.join(root, f.fp)) if relative_paths else f.fp
if fp == '.':
continue
pprint.item(color(fp), opt_text=exp)
def _print_conflict_exp(op):
pprint.msg(
'You are in the middle of a {0}; all conflicts must be resolved before '
        'committing'.format(op))
pprint.exp(
'use gl {0} --abort to go back to the state before the {0}'.format(op))
pprint.exp('use gl resolve f to mark file f as resolved')
pprint.exp('once you solved all conflicts do gl commit to continue')
pprint.blank()
|
import pytest
from homeassistant.components import vacuum
from homeassistant.components.demo.vacuum import (
DEMO_VACUUM_BASIC,
DEMO_VACUUM_COMPLETE,
DEMO_VACUUM_MINIMAL,
DEMO_VACUUM_MOST,
DEMO_VACUUM_NONE,
DEMO_VACUUM_STATE,
FAN_SPEEDS,
)
from homeassistant.components.vacuum import (
ATTR_BATTERY_LEVEL,
ATTR_COMMAND,
ATTR_FAN_SPEED,
ATTR_FAN_SPEED_LIST,
ATTR_PARAMS,
ATTR_STATUS,
DOMAIN,
SERVICE_SEND_COMMAND,
SERVICE_SET_FAN_SPEED,
STATE_CLEANING,
STATE_DOCKED,
STATE_IDLE,
STATE_PAUSED,
STATE_RETURNING,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_SUPPORTED_FEATURES,
CONF_PLATFORM,
STATE_OFF,
STATE_ON,
)
from homeassistant.setup import async_setup_component
from tests.common import async_mock_service
from tests.components.vacuum import common
ENTITY_VACUUM_BASIC = f"{DOMAIN}.{DEMO_VACUUM_BASIC}".lower()
ENTITY_VACUUM_COMPLETE = f"{DOMAIN}.{DEMO_VACUUM_COMPLETE}".lower()
ENTITY_VACUUM_MINIMAL = f"{DOMAIN}.{DEMO_VACUUM_MINIMAL}".lower()
ENTITY_VACUUM_MOST = f"{DOMAIN}.{DEMO_VACUUM_MOST}".lower()
ENTITY_VACUUM_NONE = f"{DOMAIN}.{DEMO_VACUUM_NONE}".lower()
ENTITY_VACUUM_STATE = f"{DOMAIN}.{DEMO_VACUUM_STATE}".lower()
@pytest.fixture(autouse=True)
async def setup_demo_vacuum(hass):
"""Initialize setup demo vacuum."""
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "demo"}})
await hass.async_block_till_done()
async def test_supported_features(hass):
"""Test vacuum supported features."""
state = hass.states.get(ENTITY_VACUUM_COMPLETE)
assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 2047
assert state.attributes.get(ATTR_STATUS) == "Charging"
assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100
assert state.attributes.get(ATTR_FAN_SPEED) == "medium"
assert state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS
assert state.state == STATE_OFF
state = hass.states.get(ENTITY_VACUUM_MOST)
assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 219
assert state.attributes.get(ATTR_STATUS) == "Charging"
assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100
assert state.attributes.get(ATTR_FAN_SPEED) is None
assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None
assert state.state == STATE_OFF
state = hass.states.get(ENTITY_VACUUM_BASIC)
assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 195
assert state.attributes.get(ATTR_STATUS) == "Charging"
assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100
assert state.attributes.get(ATTR_FAN_SPEED) is None
assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None
assert state.state == STATE_OFF
state = hass.states.get(ENTITY_VACUUM_MINIMAL)
assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 3
assert state.attributes.get(ATTR_STATUS) is None
assert state.attributes.get(ATTR_BATTERY_LEVEL) is None
assert state.attributes.get(ATTR_FAN_SPEED) is None
assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None
assert state.state == STATE_OFF
state = hass.states.get(ENTITY_VACUUM_NONE)
assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 0
assert state.attributes.get(ATTR_STATUS) is None
assert state.attributes.get(ATTR_BATTERY_LEVEL) is None
assert state.attributes.get(ATTR_FAN_SPEED) is None
assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None
assert state.state == STATE_OFF
state = hass.states.get(ENTITY_VACUUM_STATE)
assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 13436
assert state.state == STATE_DOCKED
assert state.attributes.get(ATTR_BATTERY_LEVEL) == 100
assert state.attributes.get(ATTR_FAN_SPEED) == "medium"
assert state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS
async def test_methods(hass):
"""Test if methods call the services as expected."""
hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_ON)
await hass.async_block_till_done()
assert vacuum.is_on(hass, ENTITY_VACUUM_BASIC)
hass.states.async_set(ENTITY_VACUUM_BASIC, STATE_OFF)
await hass.async_block_till_done()
assert not vacuum.is_on(hass, ENTITY_VACUUM_BASIC)
await common.async_turn_on(hass, ENTITY_VACUUM_COMPLETE)
assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE)
await common.async_turn_off(hass, ENTITY_VACUUM_COMPLETE)
assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE)
await common.async_toggle(hass, ENTITY_VACUUM_COMPLETE)
assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE)
await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE)
assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE)
await common.async_start_pause(hass, ENTITY_VACUUM_COMPLETE)
assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE)
await common.async_stop(hass, ENTITY_VACUUM_COMPLETE)
assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE)
state = hass.states.get(ENTITY_VACUUM_COMPLETE)
assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100
assert state.attributes.get(ATTR_STATUS) != "Charging"
await common.async_locate(hass, ENTITY_VACUUM_COMPLETE)
state = hass.states.get(ENTITY_VACUUM_COMPLETE)
assert "I'm over here" in state.attributes.get(ATTR_STATUS)
await common.async_return_to_base(hass, ENTITY_VACUUM_COMPLETE)
state = hass.states.get(ENTITY_VACUUM_COMPLETE)
assert "Returning home" in state.attributes.get(ATTR_STATUS)
await common.async_set_fan_speed(
hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_COMPLETE
)
state = hass.states.get(ENTITY_VACUUM_COMPLETE)
assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1]
await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_COMPLETE)
state = hass.states.get(ENTITY_VACUUM_COMPLETE)
assert "spot" in state.attributes.get(ATTR_STATUS)
assert state.state == STATE_ON
await common.async_start(hass, ENTITY_VACUUM_STATE)
state = hass.states.get(ENTITY_VACUUM_STATE)
assert state.state == STATE_CLEANING
await common.async_pause(hass, ENTITY_VACUUM_STATE)
state = hass.states.get(ENTITY_VACUUM_STATE)
assert state.state == STATE_PAUSED
await common.async_stop(hass, ENTITY_VACUUM_STATE)
state = hass.states.get(ENTITY_VACUUM_STATE)
assert state.state == STATE_IDLE
state = hass.states.get(ENTITY_VACUUM_STATE)
assert state.attributes.get(ATTR_BATTERY_LEVEL) < 100
assert state.state != STATE_DOCKED
await common.async_return_to_base(hass, ENTITY_VACUUM_STATE)
state = hass.states.get(ENTITY_VACUUM_STATE)
assert state.state == STATE_RETURNING
await common.async_set_fan_speed(
hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_STATE
)
state = hass.states.get(ENTITY_VACUUM_STATE)
assert state.attributes.get(ATTR_FAN_SPEED) == FAN_SPEEDS[-1]
await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_STATE)
state = hass.states.get(ENTITY_VACUUM_STATE)
assert state.state == STATE_CLEANING
async def test_unsupported_methods(hass):
"""Test service calls for unsupported vacuums."""
hass.states.async_set(ENTITY_VACUUM_NONE, STATE_ON)
await hass.async_block_till_done()
assert vacuum.is_on(hass, ENTITY_VACUUM_NONE)
await common.async_turn_off(hass, ENTITY_VACUUM_NONE)
assert vacuum.is_on(hass, ENTITY_VACUUM_NONE)
await common.async_stop(hass, ENTITY_VACUUM_NONE)
assert vacuum.is_on(hass, ENTITY_VACUUM_NONE)
hass.states.async_set(ENTITY_VACUUM_NONE, STATE_OFF)
await hass.async_block_till_done()
assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE)
await common.async_turn_on(hass, ENTITY_VACUUM_NONE)
assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE)
await common.async_toggle(hass, ENTITY_VACUUM_NONE)
assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE)
# Unsupported methods:
await common.async_start_pause(hass, ENTITY_VACUUM_NONE)
assert not vacuum.is_on(hass, ENTITY_VACUUM_NONE)
await common.async_locate(hass, ENTITY_VACUUM_NONE)
state = hass.states.get(ENTITY_VACUUM_NONE)
assert state.attributes.get(ATTR_STATUS) is None
await common.async_return_to_base(hass, ENTITY_VACUUM_NONE)
state = hass.states.get(ENTITY_VACUUM_NONE)
assert state.attributes.get(ATTR_STATUS) is None
await common.async_set_fan_speed(hass, FAN_SPEEDS[-1], entity_id=ENTITY_VACUUM_NONE)
state = hass.states.get(ENTITY_VACUUM_NONE)
assert state.attributes.get(ATTR_FAN_SPEED) != FAN_SPEEDS[-1]
await common.async_clean_spot(hass, entity_id=ENTITY_VACUUM_BASIC)
state = hass.states.get(ENTITY_VACUUM_BASIC)
assert "spot" not in state.attributes.get(ATTR_STATUS)
assert state.state == STATE_OFF
# VacuumEntity should not support start and pause methods.
hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_ON)
await hass.async_block_till_done()
assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE)
await common.async_pause(hass, ENTITY_VACUUM_COMPLETE)
assert vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE)
hass.states.async_set(ENTITY_VACUUM_COMPLETE, STATE_OFF)
await hass.async_block_till_done()
assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE)
await common.async_start(hass, ENTITY_VACUUM_COMPLETE)
assert not vacuum.is_on(hass, ENTITY_VACUUM_COMPLETE)
# StateVacuumEntity does not support on/off
await common.async_turn_on(hass, entity_id=ENTITY_VACUUM_STATE)
state = hass.states.get(ENTITY_VACUUM_STATE)
assert state.state != STATE_CLEANING
await common.async_turn_off(hass, entity_id=ENTITY_VACUUM_STATE)
state = hass.states.get(ENTITY_VACUUM_STATE)
assert state.state != STATE_RETURNING
await common.async_toggle(hass, entity_id=ENTITY_VACUUM_STATE)
state = hass.states.get(ENTITY_VACUUM_STATE)
assert state.state != STATE_CLEANING
async def test_services(hass):
"""Test vacuum services."""
# Test send_command
send_command_calls = async_mock_service(hass, DOMAIN, SERVICE_SEND_COMMAND)
params = {"rotate": 150, "speed": 20}
await common.async_send_command(
hass, "test_command", entity_id=ENTITY_VACUUM_BASIC, params=params
)
assert len(send_command_calls) == 1
call = send_command_calls[-1]
assert call.domain == DOMAIN
assert call.service == SERVICE_SEND_COMMAND
assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_BASIC
assert call.data[ATTR_COMMAND] == "test_command"
assert call.data[ATTR_PARAMS] == params
# Test set fan speed
set_fan_speed_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_FAN_SPEED)
await common.async_set_fan_speed(
hass, FAN_SPEEDS[0], entity_id=ENTITY_VACUUM_COMPLETE
)
assert len(set_fan_speed_calls) == 1
call = set_fan_speed_calls[-1]
assert call.domain == DOMAIN
assert call.service == SERVICE_SET_FAN_SPEED
assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_COMPLETE
assert call.data[ATTR_FAN_SPEED] == FAN_SPEEDS[0]
async def test_set_fan_speed(hass):
"""Test vacuum service to set the fan speed."""
group_vacuums = ",".join(
[ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE, ENTITY_VACUUM_STATE]
)
old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC)
old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE)
old_state_state = hass.states.get(ENTITY_VACUUM_STATE)
await common.async_set_fan_speed(hass, FAN_SPEEDS[0], entity_id=group_vacuums)
new_state_basic = hass.states.get(ENTITY_VACUUM_BASIC)
new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE)
new_state_state = hass.states.get(ENTITY_VACUUM_STATE)
assert old_state_basic == new_state_basic
assert ATTR_FAN_SPEED not in new_state_basic.attributes
assert old_state_complete != new_state_complete
assert old_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1]
assert new_state_complete.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0]
assert old_state_state != new_state_state
assert old_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[1]
assert new_state_state.attributes[ATTR_FAN_SPEED] == FAN_SPEEDS[0]
async def test_send_command(hass):
"""Test vacuum service to send a command."""
group_vacuums = ",".join([ENTITY_VACUUM_BASIC, ENTITY_VACUUM_COMPLETE])
old_state_basic = hass.states.get(ENTITY_VACUUM_BASIC)
old_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE)
await common.async_send_command(
hass, "test_command", params={"p1": 3}, entity_id=group_vacuums
)
new_state_basic = hass.states.get(ENTITY_VACUUM_BASIC)
new_state_complete = hass.states.get(ENTITY_VACUUM_COMPLETE)
assert old_state_basic == new_state_basic
assert old_state_complete != new_state_complete
assert new_state_complete.state == STATE_ON
assert (
new_state_complete.attributes[ATTR_STATUS]
== "Executing test_command({'p1': 3})"
)
|
from aioguardian import Client
from aioguardian.errors import GuardianError
import voluptuous as vol
from homeassistant import config_entries, core
from homeassistant.const import CONF_IP_ADDRESS, CONF_PORT
from homeassistant.core import callback
from .const import CONF_UID, DOMAIN, LOGGER # pylint:disable=unused-import
DATA_SCHEMA = vol.Schema(
{vol.Required(CONF_IP_ADDRESS): str, vol.Required(CONF_PORT, default=7777): int}
)
UNIQUE_ID = "guardian_{0}"
@callback
def async_get_pin_from_discovery_hostname(hostname):
"""Get the device's 4-digit PIN from its zeroconf-discovered hostname."""
return hostname.split(".")[0].split("-")[1]
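# Illustrative example (the hostname format here is an assumption, not taken
# from the Guardian docs): a discovered hostname such as "GVC1-ABCD.local."
# would yield the PIN "ABCD" via hostname.split(".")[0].split("-")[1].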
@callback
def async_get_pin_from_uid(uid):
"""Get the device's 4-digit PIN from its UID."""
return uid[-4:]
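# Illustrative example: for an assumed UID such as "ABCDEF123456", uid[-4:]
# returns "3456" as the 4-digit PIN.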
async def validate_input(hass: core.HomeAssistant, data):
"""Validate the user input allows us to connect.
Data has the keys from DATA_SCHEMA with values provided by the user.
"""
async with Client(data[CONF_IP_ADDRESS]) as client:
ping_data = await client.system.ping()
return {
CONF_UID: ping_data["data"]["uid"],
}
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Elexa Guardian."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL
def __init__(self):
"""Initialize."""
self.discovery_info = {}
async def _async_set_unique_id(self, pin):
"""Set the config entry's unique ID (based on the device's 4-digit PIN)."""
await self.async_set_unique_id(UNIQUE_ID.format(pin))
self._abort_if_unique_id_configured()
async def async_step_user(self, user_input=None):
"""Handle configuration via the UI."""
if user_input is None:
return self.async_show_form(
step_id="user", data_schema=DATA_SCHEMA, errors={}
)
try:
info = await validate_input(self.hass, user_input)
except GuardianError as err:
LOGGER.error("Error while connecting to unit: %s", err)
return self.async_show_form(
step_id="user",
data_schema=DATA_SCHEMA,
errors={CONF_IP_ADDRESS: "cannot_connect"},
)
pin = async_get_pin_from_uid(info[CONF_UID])
await self._async_set_unique_id(pin)
return self.async_create_entry(
title=info[CONF_UID], data={CONF_UID: info[CONF_UID], **user_input}
)
async def async_step_zeroconf(self, discovery_info):
"""Handle the configuration via zeroconf."""
if discovery_info is None:
return self.async_abort(reason="cannot_connect")
pin = async_get_pin_from_discovery_hostname(discovery_info["hostname"])
await self._async_set_unique_id(pin)
# pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167
self.context[CONF_IP_ADDRESS] = discovery_info["host"]
if any(
discovery_info["host"] == flow["context"][CONF_IP_ADDRESS]
for flow in self._async_in_progress()
):
return self.async_abort(reason="already_in_progress")
self.discovery_info = {
CONF_IP_ADDRESS: discovery_info["host"],
CONF_PORT: discovery_info["port"],
}
return await self.async_step_zeroconf_confirm()
async def async_step_zeroconf_confirm(self, user_input=None):
"""Finish the configuration via zeroconf."""
if user_input is None:
return self.async_show_form(step_id="zeroconf_confirm")
return await self.async_step_user(self.discovery_info)
|
from homeassistant.components.geonetnz_volcano import DOMAIN, FEED
from tests.async_mock import AsyncMock, patch
async def test_component_unload_config_entry(hass, config_entry):
"""Test that loading and unloading of a config entry works."""
config_entry.add_to_hass(hass)
with patch(
"aio_geojson_geonetnz_volcano.GeonetnzVolcanoFeedManager.update",
new_callable=AsyncMock,
) as mock_feed_manager_update:
# Load config entry.
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
assert mock_feed_manager_update.call_count == 1
assert hass.data[DOMAIN][FEED][config_entry.entry_id] is not None
# Unload config entry.
assert await hass.config_entries.async_unload(config_entry.entry_id)
await hass.async_block_till_done()
assert hass.data[DOMAIN][FEED].get(config_entry.entry_id) is None
|
from homeassistant.core import callback
from homeassistant.helpers.device_registry import async_get_registry as get_dev_reg
from .const import DOMAIN, LOGGER
@callback
def async_add_acmeda_entities(
hass, entity_class, config_entry, current, async_add_entities
):
"""Add any new entities."""
hub = hass.data[DOMAIN][config_entry.entry_id]
LOGGER.debug("Looking for new %s on: %s", entity_class.__name__, hub.host)
api = hub.api.rollers
new_items = []
for unique_id, roller in api.items():
if unique_id not in current:
LOGGER.debug("New %s %s", entity_class.__name__, unique_id)
new_item = entity_class(roller)
current.add(unique_id)
new_items.append(new_item)
async_add_entities(new_items)
async def update_devices(hass, config_entry, api):
"""Tell hass that device info has been updated."""
dev_registry = await get_dev_reg(hass)
for api_item in api.values():
# Update Device name
device = dev_registry.async_get_device(
identifiers={(DOMAIN, api_item.id)}, connections=set()
)
if device is not None:
dev_registry.async_update_device(
device.id,
name=api_item.name,
)
|
import operator
import hypothesis
from hypothesis import strategies
import pytest
from PyQt5.QtCore import Qt, QEvent, pyqtSignal
from PyQt5.QtGui import QKeyEvent, QKeySequence
from PyQt5.QtWidgets import QWidget
from unit.keyinput import key_data
from qutebrowser.keyinput import keyutils
@pytest.fixture(params=key_data.KEYS, ids=lambda k: k.attribute)
def qt_key(request):
"""Get all existing keys from key_data.py.
Keys which don't exist with this Qt version result in skipped tests.
"""
key = request.param
if key.member is None:
pytest.skip("Did not find key {}".format(key.attribute))
return key
@pytest.fixture(params=key_data.MODIFIERS, ids=lambda m: m.attribute)
def qt_mod(request):
"""Get all existing modifiers from key_data.py."""
mod = request.param
assert mod.member is not None
return mod
@pytest.fixture(params=[key for key in key_data.KEYS if key.qtest],
ids=lambda k: k.attribute)
def qtest_key(request):
"""Get keys from key_data.py which can be used with QTest."""
return request.param
def test_key_data_keys():
"""Make sure all possible keys are in key_data.KEYS."""
key_names = {name[len("Key_"):]
for name, value in sorted(vars(Qt).items())
if isinstance(value, Qt.Key)}
key_data_names = {key.attribute for key in sorted(key_data.KEYS)}
diff = key_names - key_data_names
assert not diff
def test_key_data_modifiers():
"""Make sure all possible modifiers are in key_data.MODIFIERS."""
mod_names = {name[:-len("Modifier")]
for name, value in sorted(vars(Qt).items())
if isinstance(value, Qt.KeyboardModifier) and
value not in [Qt.NoModifier, Qt.KeyboardModifierMask]}
mod_data_names = {mod.attribute for mod in sorted(key_data.MODIFIERS)}
diff = mod_names - mod_data_names
assert not diff
class KeyTesterWidget(QWidget):
"""Widget to get the text of QKeyPressEvents.
This is done so we can check QTest::keyToAscii (qasciikey.cpp) as we can't
call that directly, only via QTest::keyPress.
"""
got_text = pyqtSignal()
def __init__(self, parent=None):
super().__init__(parent)
self.text = None
def keyPressEvent(self, e):
self.text = e.text()
self.got_text.emit()
class TestKeyInfoText:
@pytest.mark.parametrize('upper', [False, True])
def test_text(self, qt_key, upper):
"""Test KeyInfo.text() with all possible keys.
See key_data.py for inputs and expected values.
"""
modifiers = Qt.ShiftModifier if upper else Qt.KeyboardModifiers()
info = keyutils.KeyInfo(qt_key.member, modifiers=modifiers)
expected = qt_key.uppertext if upper else qt_key.text
assert info.text() == expected
@pytest.fixture
def key_tester(self, qtbot):
w = KeyTesterWidget()
qtbot.add_widget(w)
return w
def test_text_qtest(self, qtest_key, qtbot, key_tester):
"""Make sure KeyInfo.text() lines up with QTest::keyToAscii.
See key_data.py for inputs and expected values.
"""
with qtbot.wait_signal(key_tester.got_text):
qtbot.keyPress(key_tester, qtest_key.member)
info = keyutils.KeyInfo(qtest_key.member,
modifiers=Qt.KeyboardModifiers())
assert info.text() == key_tester.text.lower()
class TestKeyToString:
def test_to_string(self, qt_key):
assert keyutils._key_to_string(qt_key.member) == qt_key.name
def test_modifiers_to_string(self, qt_mod):
expected = qt_mod.name + '+'
assert keyutils._modifiers_to_string(qt_mod.member) == expected
def test_missing(self, monkeypatch):
monkeypatch.delattr(keyutils.Qt, 'Key_AltGr')
# We don't want to test the key which is actually missing - we only
# want to know if the mapping still behaves properly.
assert keyutils._key_to_string(Qt.Key_A) == 'A'
@pytest.mark.parametrize('key, modifiers, expected', [
(Qt.Key_A, Qt.NoModifier, 'a'),
(Qt.Key_A, Qt.ShiftModifier, 'A'),
(Qt.Key_Space, Qt.NoModifier, '<Space>'),
(Qt.Key_Space, Qt.ShiftModifier, '<Shift+Space>'),
(Qt.Key_Tab, Qt.ShiftModifier, '<Shift+Tab>'),
(Qt.Key_A, Qt.ControlModifier, '<Ctrl+a>'),
(Qt.Key_A, Qt.ControlModifier | Qt.ShiftModifier, '<Ctrl+Shift+a>'),
(Qt.Key_A,
Qt.ControlModifier | Qt.AltModifier | Qt.MetaModifier | Qt.ShiftModifier,
'<Meta+Ctrl+Alt+Shift+a>'),
(ord('Œ'), Qt.NoModifier, '<Œ>'),
(ord('Œ'), Qt.ShiftModifier, '<Shift+Œ>'),
(ord('Œ'), Qt.GroupSwitchModifier, '<AltGr+Œ>'),
(ord('Œ'), Qt.GroupSwitchModifier | Qt.ShiftModifier, '<AltGr+Shift+Œ>'),
(Qt.Key_Shift, Qt.ShiftModifier, '<Shift>'),
(Qt.Key_Shift, Qt.ShiftModifier | Qt.ControlModifier, '<Ctrl+Shift>'),
(Qt.Key_Alt, Qt.AltModifier, '<Alt>'),
(Qt.Key_Shift, Qt.GroupSwitchModifier | Qt.ShiftModifier, '<AltGr+Shift>'),
(Qt.Key_AltGr, Qt.GroupSwitchModifier, '<AltGr>'),
])
def test_key_info_str(key, modifiers, expected):
assert str(keyutils.KeyInfo(key, modifiers)) == expected
@pytest.mark.parametrize('info1, info2, equal', [
(keyutils.KeyInfo(Qt.Key_A, Qt.NoModifier),
keyutils.KeyInfo(Qt.Key_A, Qt.NoModifier),
True),
(keyutils.KeyInfo(Qt.Key_A, Qt.NoModifier),
keyutils.KeyInfo(Qt.Key_B, Qt.NoModifier),
False),
(keyutils.KeyInfo(Qt.Key_A, Qt.NoModifier),
keyutils.KeyInfo(Qt.Key_B, Qt.ControlModifier),
False),
])
def test_hash(info1, info2, equal):
assert (hash(info1) == hash(info2)) == equal
@pytest.mark.parametrize('key, modifiers, text, expected', [
(0xd83c, Qt.NoModifier, '🏻', '<🏻>'),
(0xd867, Qt.NoModifier, '𩷶', '<𩷶>'),
(0xd867, Qt.ShiftModifier, '𩷶', '<Shift+𩷶>'),
])
def test_surrogates(key, modifiers, text, expected):
evt = QKeyEvent(QKeyEvent.KeyPress, key, modifiers, text)
assert str(keyutils.KeyInfo.from_event(evt)) == expected
@pytest.mark.parametrize('keys, expected', [
([0x1f3fb], '<🏻>'),
([0x29df6], '<𩷶>'),
([Qt.Key_Shift, 0x29df6], '<Shift><𩷶>'),
([0x1f468, 0x200d, 0x1f468, 0x200d, 0x1f466], '<👨><><👨><><👦>'),
])
def test_surrogate_sequences(keys, expected):
seq = keyutils.KeySequence(*keys)
assert str(seq) == expected
# This shouldn't happen, but if it does we should handle it well
def test_surrogate_error():
evt = QKeyEvent(QKeyEvent.KeyPress, 0xd83e, Qt.NoModifier, '🤞🏻')
with pytest.raises(keyutils.KeyParseError):
keyutils.KeyInfo.from_event(evt)
@pytest.mark.parametrize('keystr, expected', [
('foo', "Could not parse 'foo': error"),
(None, "Could not parse keystring: error"),
])
def test_key_parse_error(keystr, expected):
exc = keyutils.KeyParseError(keystr, "error")
assert str(exc) == expected
@pytest.mark.parametrize('keystr, parts', [
('a', ['a']),
('ab', ['a', 'b']),
('a<', ['a', '<']),
('a>', ['a', '>']),
('<a', ['<', 'a']),
('>a', ['>', 'a']),
('aA', ['a', 'Shift+A']),
('a<Ctrl+a>b', ['a', 'ctrl+a', 'b']),
('<Ctrl+a>a', ['ctrl+a', 'a']),
('a<Ctrl+a>', ['a', 'ctrl+a']),
('<Ctrl-a>', ['ctrl+a']),
('<Num-a>', ['num+a']),
])
def test_parse_keystr(keystr, parts):
assert list(keyutils._parse_keystring(keystr)) == parts
class TestKeySequence:
def test_init(self):
seq = keyutils.KeySequence(Qt.Key_A, Qt.Key_B, Qt.Key_C, Qt.Key_D,
Qt.Key_E)
assert len(seq._sequences) == 2
assert len(seq._sequences[0]) == 4
assert len(seq._sequences[1]) == 1
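# (A Qt QKeySequence holds at most four keys, so the five keys above are split
# across two underlying sequences of four and one - hence these assertions.)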
def test_init_empty(self):
seq = keyutils.KeySequence()
assert not seq
@pytest.mark.parametrize('key', [Qt.Key_unknown, -1, 0])
def test_init_unknown(self, key):
with pytest.raises(keyutils.KeyParseError):
keyutils.KeySequence(key)
def test_parse_unknown(self):
with pytest.raises(keyutils.KeyParseError):
keyutils.KeySequence.parse('\x1f')
@pytest.mark.parametrize('orig, normalized', [
('<Control+x>', '<Ctrl+x>'),
('<Windows+x>', '<Meta+x>'),
('<Mod4+x>', '<Meta+x>'),
('<Command+x>', '<Meta+x>'),
('<Cmd+x>', '<Meta+x>'),
('<Mod1+x>', '<Alt+x>'),
('<Control-->', '<Ctrl+->'),
('<Windows++>', '<Meta++>'),
('<ctrl-x>', '<Ctrl+x>'),
('<control+x>', '<Ctrl+x>'),
('<a>b', 'ab'),
])
def test_str_normalization(self, orig, normalized):
assert str(keyutils.KeySequence.parse(orig)) == normalized
def test_iter(self):
seq = keyutils.KeySequence(Qt.Key_A | Qt.ControlModifier,
Qt.Key_B | Qt.ShiftModifier,
Qt.Key_C,
Qt.Key_D,
Qt.Key_E)
expected = [keyutils.KeyInfo(Qt.Key_A, Qt.ControlModifier),
keyutils.KeyInfo(Qt.Key_B, Qt.ShiftModifier),
keyutils.KeyInfo(Qt.Key_C, Qt.NoModifier),
keyutils.KeyInfo(Qt.Key_D, Qt.NoModifier),
keyutils.KeyInfo(Qt.Key_E, Qt.NoModifier)]
assert list(seq) == expected
def test_repr(self):
seq = keyutils.KeySequence(Qt.Key_A | Qt.ControlModifier,
Qt.Key_B | Qt.ShiftModifier)
assert repr(seq) == ("<qutebrowser.keyinput.keyutils.KeySequence "
"keys='<Ctrl+a>B'>")
@pytest.mark.parametrize('sequences, expected', [
(['a', ''], ['', 'a']),
(['abcdf', 'abcd', 'abcde'], ['abcd', 'abcde', 'abcdf']),
])
def test_sorting(self, sequences, expected):
result = sorted(keyutils.KeySequence.parse(seq) for seq in sequences)
expected_result = [keyutils.KeySequence.parse(seq) for seq in expected]
assert result == expected_result
@pytest.mark.parametrize('seq1, seq2, op, result', [
('a', 'a', operator.eq, True),
('a', '<a>', operator.eq, True),
('a', '<Shift-a>', operator.eq, False),
('a', 'b', operator.lt, True),
('a', 'b', operator.le, True),
])
def test_operators(self, seq1, seq2, op, result):
seq1 = keyutils.KeySequence.parse(seq1)
seq2 = keyutils.KeySequence.parse(seq2)
assert op(seq1, seq2) == result
opposite = {
operator.lt: operator.ge,
operator.gt: operator.le,
operator.le: operator.gt,
operator.ge: operator.lt,
operator.eq: operator.ne,
operator.ne: operator.eq,
}
assert opposite[op](seq1, seq2) != result
@pytest.mark.parametrize('op, result', [
(operator.eq, False),
(operator.ne, True),
])
def test_operators_other_type(self, op, result):
seq = keyutils.KeySequence.parse('a')
assert op(seq, 'x') == result
@pytest.mark.parametrize('seq1, seq2, equal', [
('a', 'a', True),
('a', 'A', False),
('a', '<a>', True),
('abcd', 'abcde', False),
])
def test_hash(self, seq1, seq2, equal):
seq1 = keyutils.KeySequence.parse(seq1)
seq2 = keyutils.KeySequence.parse(seq2)
assert (hash(seq1) == hash(seq2)) == equal
@pytest.mark.parametrize('seq, length', [
('', 0),
('a', 1),
('A', 1),
('<Ctrl-a>', 1),
('abcde', 5)
])
def test_len(self, seq, length):
assert len(keyutils.KeySequence.parse(seq)) == length
def test_bool(self):
seq1 = keyutils.KeySequence.parse('abcd')
seq2 = keyutils.KeySequence()
assert seq1
assert not seq2
def test_getitem(self):
seq = keyutils.KeySequence.parse('ab')
expected = keyutils.KeyInfo(Qt.Key_B, Qt.NoModifier)
assert seq[1] == expected
def test_getitem_slice(self):
s1 = 'abcdef'
s2 = 'de'
seq = keyutils.KeySequence.parse(s1)
expected = keyutils.KeySequence.parse(s2)
assert s1[3:5] == s2
assert seq[3:5] == expected
MATCH_TESTS = [
# config: abcd
('abc', 'abcd', QKeySequence.PartialMatch),
('abcd', 'abcd', QKeySequence.ExactMatch),
('ax', 'abcd', QKeySequence.NoMatch),
('abcdef', 'abcd', QKeySequence.NoMatch),
# config: abcd ef
('abc', 'abcdef', QKeySequence.PartialMatch),
('abcde', 'abcdef', QKeySequence.PartialMatch),
('abcd', 'abcdef', QKeySequence.PartialMatch),
('abcdx', 'abcdef', QKeySequence.NoMatch),
('ax', 'abcdef', QKeySequence.NoMatch),
('abcdefg', 'abcdef', QKeySequence.NoMatch),
('abcdef', 'abcdef', QKeySequence.ExactMatch),
# other examples
('ab', 'a', QKeySequence.NoMatch),
# empty strings
('', '', QKeySequence.ExactMatch),
('', 'a', QKeySequence.PartialMatch),
('a', '', QKeySequence.NoMatch)]
@pytest.mark.parametrize('entered, configured, match_type', MATCH_TESTS)
def test_matches(self, entered, configured, match_type):
entered = keyutils.KeySequence.parse(entered)
configured = keyutils.KeySequence.parse(configured)
assert entered.matches(configured) == match_type
@pytest.mark.parametrize('old, key, modifiers, text, expected', [
('a', Qt.Key_B, Qt.NoModifier, 'b', 'ab'),
('a', Qt.Key_B, Qt.ShiftModifier, 'B', 'aB'),
('a', Qt.Key_B, Qt.AltModifier | Qt.ShiftModifier, 'B',
'a<Alt+Shift+b>'),
# Modifier stripping with symbols
('', Qt.Key_Colon, Qt.NoModifier, ':', ':'),
('', Qt.Key_Colon, Qt.ShiftModifier, ':', ':'),
('', Qt.Key_Colon, Qt.AltModifier | Qt.ShiftModifier, ':',
'<Alt+Shift+:>'),
# Modifiers
('', Qt.Key_A, Qt.ControlModifier, '', '<Ctrl+A>'),
('', Qt.Key_A, Qt.ControlModifier | Qt.ShiftModifier, '', '<Ctrl+Shift+A>'),
('', Qt.Key_A, Qt.MetaModifier, '', '<Meta+A>'),
# Handling of Backtab
('', Qt.Key_Backtab, Qt.NoModifier, '', '<Backtab>'),
('', Qt.Key_Backtab, Qt.ShiftModifier, '', '<Shift+Tab>'),
('', Qt.Key_Backtab, Qt.AltModifier | Qt.ShiftModifier, '',
'<Alt+Shift+Tab>'),
# Stripping of Qt.GroupSwitchModifier
('', Qt.Key_A, Qt.GroupSwitchModifier, 'a', 'a'),
])
def test_append_event(self, old, key, modifiers, text, expected):
seq = keyutils.KeySequence.parse(old)
event = QKeyEvent(QKeyEvent.KeyPress, key, modifiers, text)
new = seq.append_event(event)
assert new == keyutils.KeySequence.parse(expected)
@pytest.mark.parametrize('key', [Qt.Key_unknown, 0x0])
def test_append_event_invalid(self, key):
seq = keyutils.KeySequence()
event = QKeyEvent(QKeyEvent.KeyPress, key, Qt.NoModifier, '')
with pytest.raises(keyutils.KeyParseError):
seq.append_event(event)
def test_strip_modifiers(self):
seq = keyutils.KeySequence(Qt.Key_0,
Qt.Key_1 | Qt.KeypadModifier,
Qt.Key_A | Qt.ControlModifier)
expected = keyutils.KeySequence(Qt.Key_0,
Qt.Key_1,
Qt.Key_A | Qt.ControlModifier)
assert seq.strip_modifiers() == expected
def test_with_mappings(self):
seq = keyutils.KeySequence.parse('foobar')
mappings = {
keyutils.KeySequence.parse('b'): keyutils.KeySequence.parse('t')
}
seq2 = seq.with_mappings(mappings)
assert seq2 == keyutils.KeySequence.parse('footar')
@pytest.mark.parametrize('keystr, expected', [
('<Ctrl-Alt-y>',
keyutils.KeySequence(Qt.ControlModifier | Qt.AltModifier | Qt.Key_Y)),
('x', keyutils.KeySequence(Qt.Key_X)),
('X', keyutils.KeySequence(Qt.ShiftModifier | Qt.Key_X)),
('<Escape>', keyutils.KeySequence(Qt.Key_Escape)),
('xyz', keyutils.KeySequence(Qt.Key_X, Qt.Key_Y, Qt.Key_Z)),
('<Control-x><Meta-y>',
keyutils.KeySequence(Qt.ControlModifier | Qt.Key_X,
Qt.MetaModifier | Qt.Key_Y)),
('<Shift-x>', keyutils.KeySequence(Qt.ShiftModifier | Qt.Key_X)),
('<Alt-x>', keyutils.KeySequence(Qt.AltModifier | Qt.Key_X)),
('<Control-x>', keyutils.KeySequence(Qt.ControlModifier | Qt.Key_X)),
('<Meta-x>', keyutils.KeySequence(Qt.MetaModifier | Qt.Key_X)),
('<Num-x>', keyutils.KeySequence(Qt.KeypadModifier | Qt.Key_X)),
('>', keyutils.KeySequence(Qt.Key_Greater)),
('<', keyutils.KeySequence(Qt.Key_Less)),
('a>', keyutils.KeySequence(Qt.Key_A, Qt.Key_Greater)),
('a<', keyutils.KeySequence(Qt.Key_A, Qt.Key_Less)),
('>a', keyutils.KeySequence(Qt.Key_Greater, Qt.Key_A)),
('<a', keyutils.KeySequence(Qt.Key_Less, Qt.Key_A)),
('<alt+greater>',
keyutils.KeySequence(Qt.Key_Greater | Qt.AltModifier)),
('<alt+less>',
keyutils.KeySequence(Qt.Key_Less | Qt.AltModifier)),
('<alt+<>', keyutils.KeyParseError),
('<alt+>>', keyutils.KeyParseError),
('<blub>', keyutils.KeyParseError),
('<>', keyutils.KeyParseError),
('\U00010000', keyutils.KeyParseError),
])
def test_parse(self, keystr, expected):
if expected is keyutils.KeyParseError:
with pytest.raises(keyutils.KeyParseError):
keyutils.KeySequence.parse(keystr)
else:
assert keyutils.KeySequence.parse(keystr) == expected
@hypothesis.given(strategies.text())
def test_parse_hypothesis(self, keystr):
try:
seq = keyutils.KeySequence.parse(keystr)
except keyutils.KeyParseError:
pass
else:
str(seq)
def test_key_info_from_event():
ev = QKeyEvent(QEvent.KeyPress, Qt.Key_A, Qt.ShiftModifier, 'A')
info = keyutils.KeyInfo.from_event(ev)
assert info.key == Qt.Key_A
assert info.modifiers == Qt.ShiftModifier
def test_key_info_to_event():
info = keyutils.KeyInfo(Qt.Key_A, Qt.ShiftModifier)
ev = info.to_event()
assert ev.key() == Qt.Key_A
assert ev.modifiers() == Qt.ShiftModifier
assert ev.text() == 'A'
def test_key_info_to_int():
info = keyutils.KeyInfo(Qt.Key_A, Qt.ShiftModifier)
assert info.to_int() == Qt.Key_A | Qt.ShiftModifier
@pytest.mark.parametrize('key, printable', [
(Qt.Key_Control, False),
(Qt.Key_Escape, False),
(Qt.Key_Tab, False),
(Qt.Key_Backtab, False),
(Qt.Key_Backspace, False),
(Qt.Key_Return, False),
(Qt.Key_Enter, False),
(Qt.Key_Space, False),
(0x0, False), # Used by Qt for unknown keys
(Qt.Key_ydiaeresis, True),
(Qt.Key_X, True),
])
def test_is_printable(key, printable):
assert keyutils._is_printable(key) == printable
assert keyutils.is_special(key, Qt.NoModifier) != printable
@pytest.mark.parametrize('key, modifiers, special', [
(Qt.Key_Escape, Qt.NoModifier, True),
(Qt.Key_Escape, Qt.ShiftModifier, True),
(Qt.Key_Escape, Qt.ControlModifier, True),
(Qt.Key_X, Qt.ControlModifier, True),
(Qt.Key_X, Qt.NoModifier, False),
(Qt.Key_2, Qt.KeypadModifier, True),
(Qt.Key_2, Qt.NoModifier, False),
(Qt.Key_Shift, Qt.ShiftModifier, True),
(Qt.Key_Control, Qt.ControlModifier, True),
(Qt.Key_Alt, Qt.AltModifier, True),
(Qt.Key_Meta, Qt.MetaModifier, True),
(Qt.Key_Mode_switch, Qt.GroupSwitchModifier, True),
])
def test_is_special(key, modifiers, special):
assert keyutils.is_special(key, modifiers) == special
@pytest.mark.parametrize('key, ismodifier', [
(Qt.Key_Control, True),
(Qt.Key_X, False),
(Qt.Key_Super_L, False), # Modifier but not in _MODIFIER_MAP
])
def test_is_modifier_key(key, ismodifier):
assert keyutils.is_modifier_key(key) == ismodifier
@pytest.mark.parametrize('func', [
keyutils._assert_plain_key,
keyutils._assert_plain_modifier,
keyutils._is_printable,
keyutils.is_modifier_key,
keyutils._key_to_string,
keyutils._modifiers_to_string,
])
def test_non_plain(func):
with pytest.raises(AssertionError):
func(Qt.Key_X | Qt.ControlModifier)
|
import os
import signal
import time
from http.client import BadStatusLine
import pytest
import portend
import cherrypy
import cherrypy.process.servers
from cherrypy.test import helper
engine = cherrypy.engine
thisdir = os.path.join(os.getcwd(), os.path.dirname(__file__))
class Dependency:
def __init__(self, bus):
self.bus = bus
self.running = False
self.startcount = 0
self.gracecount = 0
self.threads = {}
def subscribe(self):
self.bus.subscribe('start', self.start)
self.bus.subscribe('stop', self.stop)
self.bus.subscribe('graceful', self.graceful)
self.bus.subscribe('start_thread', self.startthread)
self.bus.subscribe('stop_thread', self.stopthread)
def start(self):
self.running = True
self.startcount += 1
def stop(self):
self.running = False
def graceful(self):
self.gracecount += 1
def startthread(self, thread_id):
self.threads[thread_id] = None
def stopthread(self, thread_id):
del self.threads[thread_id]
db_connection = Dependency(engine)
def setup_server():
class Root:
@cherrypy.expose
def index(self):
return 'Hello World'
@cherrypy.expose
def ctrlc(self):
raise KeyboardInterrupt()
@cherrypy.expose
def graceful(self):
engine.graceful()
return 'app was (gracefully) restarted successfully'
cherrypy.tree.mount(Root())
cherrypy.config.update({
'environment': 'test_suite',
})
db_connection.subscribe()
# ------------ Enough helpers. Time for real live test cases. ------------ #
class ServerStateTests(helper.CPWebCase):
setup_server = staticmethod(setup_server)
def setUp(self):
cherrypy.server.socket_timeout = 0.1
self.do_gc_test = False
def test_0_NormalStateFlow(self):
engine.stop()
# Our db_connection should not be running
self.assertEqual(db_connection.running, False)
self.assertEqual(db_connection.startcount, 1)
self.assertEqual(len(db_connection.threads), 0)
# Test server start
engine.start()
self.assertEqual(engine.state, engine.states.STARTED)
host = cherrypy.server.socket_host
port = cherrypy.server.socket_port
portend.occupied(host, port, timeout=0.1)
# The db_connection should be running now
self.assertEqual(db_connection.running, True)
self.assertEqual(db_connection.startcount, 2)
self.assertEqual(len(db_connection.threads), 0)
self.getPage('/')
self.assertBody('Hello World')
self.assertEqual(len(db_connection.threads), 1)
# Test engine stop. This will also stop the HTTP server.
engine.stop()
self.assertEqual(engine.state, engine.states.STOPPED)
# Verify that our custom stop function was called
self.assertEqual(db_connection.running, False)
self.assertEqual(len(db_connection.threads), 0)
# Block the main thread now and verify that exit() works.
def exittest():
self.getPage('/')
self.assertBody('Hello World')
engine.exit()
cherrypy.server.start()
engine.start_with_callback(exittest)
engine.block()
self.assertEqual(engine.state, engine.states.EXITING)
def test_1_Restart(self):
cherrypy.server.start()
engine.start()
# The db_connection should be running now
self.assertEqual(db_connection.running, True)
grace = db_connection.gracecount
self.getPage('/')
self.assertBody('Hello World')
self.assertEqual(len(db_connection.threads), 1)
# Test server restart from this thread
engine.graceful()
self.assertEqual(engine.state, engine.states.STARTED)
self.getPage('/')
self.assertBody('Hello World')
self.assertEqual(db_connection.running, True)
self.assertEqual(db_connection.gracecount, grace + 1)
self.assertEqual(len(db_connection.threads), 1)
# Test server restart from inside a page handler
self.getPage('/graceful')
self.assertEqual(engine.state, engine.states.STARTED)
self.assertBody('app was (gracefully) restarted successfully')
self.assertEqual(db_connection.running, True)
self.assertEqual(db_connection.gracecount, grace + 2)
# Since we are requesting synchronously, only one thread should be in use.
# Note that the "/graceful" request has already been flushed.
self.assertEqual(len(db_connection.threads), 0)
engine.stop()
self.assertEqual(engine.state, engine.states.STOPPED)
self.assertEqual(db_connection.running, False)
self.assertEqual(len(db_connection.threads), 0)
def test_2_KeyboardInterrupt(self):
# Raise a keyboard interrupt in the HTTP server's main thread.
# We must start the server in this, the main thread
engine.start()
cherrypy.server.start()
self.persistent = True
try:
# Make the first request and assert there's no "Connection: close".
self.getPage('/')
self.assertStatus('200 OK')
self.assertBody('Hello World')
self.assertNoHeader('Connection')
cherrypy.server.httpserver.interrupt = KeyboardInterrupt
engine.block()
self.assertEqual(db_connection.running, False)
self.assertEqual(len(db_connection.threads), 0)
self.assertEqual(engine.state, engine.states.EXITING)
finally:
self.persistent = False
# Raise a keyboard interrupt in a page handler; on multithreaded
# servers, this should occur in one of the worker threads.
# This should raise a BadStatusLine error, since the worker
# thread will just die without writing a response.
engine.start()
cherrypy.server.start()
# From Python 3.5 a new exception is returned when the connection
# ends abruptly:
# http.client.RemoteDisconnected
# RemoteDisconnected is a subclass of:
# (ConnectionResetError, http.client.BadStatusLine)
# and ConnectionResetError is an indirect subclass of:
# OSError
# From Python 3.3 and up, socket.error is an alias of OSError
# following PEP 3151, therefore http.client.RemoteDisconnected
# is considered a socket.error.
#
# raise_subcls specifies the classes that are not going
# to be considered as a socket.error for the retries.
# Given that RemoteDisconnected is a subclass of BadStatusLine,
# we can use the same call for all Python 3 versions without
# side effects. Python < 3.5 raises BadStatusLine directly,
# which is not a subclass of socket.error/OSError.
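# Illustration of that hierarchy (as defined in the CPython stdlib):
#   issubclass(http.client.RemoteDisconnected, http.client.BadStatusLine) -> True
#   issubclass(http.client.RemoteDisconnected, ConnectionResetError) -> True
#   issubclass(ConnectionResetError, OSError) -> True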
try:
self.getPage('/ctrlc', raise_subcls=BadStatusLine)
except BadStatusLine:
pass
else:
print(self.body)
self.fail('AssertionError: BadStatusLine not raised')
engine.block()
self.assertEqual(db_connection.running, False)
self.assertEqual(len(db_connection.threads), 0)
@pytest.mark.xfail(
'sys.platform == "Darwin" '
'and sys.version_info > (3, 7) '
'and os.environ["TRAVIS"]',
reason='https://github.com/cherrypy/cherrypy/issues/1693',
)
def test_4_Autoreload(self):
# If test_3 has not been executed, the server won't be stopped,
# so we'll have to do it.
if engine.state != engine.states.EXITING:
engine.exit()
# Start the demo script in a new process
p = helper.CPProcess(ssl=(self.scheme.lower() == 'https'))
p.write_conf(extra='test_case_name: "test_4_Autoreload"')
p.start(imports='cherrypy.test._test_states_demo')
try:
self.getPage('/start')
start = float(self.body)
# Give the autoreloader time to cache the file time.
time.sleep(2)
# Touch the file
os.utime(os.path.join(thisdir, '_test_states_demo.py'), None)
# Give the autoreloader time to re-exec the process
time.sleep(2)
host = cherrypy.server.socket_host
port = cherrypy.server.socket_port
portend.occupied(host, port, timeout=5)
self.getPage('/start')
if not (float(self.body) > start):
raise AssertionError('start time %s not greater than %s' %
(float(self.body), start))
finally:
# Shut down the spawned process
self.getPage('/exit')
p.join()
def test_5_Start_Error(self):
# If test_3 has not been executed, the server won't be stopped,
# so we'll have to do it.
if engine.state != engine.states.EXITING:
engine.exit()
# If a process errors during start, it should stop the engine
# and exit with a non-zero exit code.
p = helper.CPProcess(ssl=(self.scheme.lower() == 'https'),
wait=True)
p.write_conf(
extra="""starterror: True
test_case_name: "test_5_Start_Error"
"""
)
p.start(imports='cherrypy.test._test_states_demo')
if p.exit_code == 0:
self.fail('Process failed to return nonzero exit code.')
class PluginTests(helper.CPWebCase):
def test_daemonize(self):
if os.name not in ['posix']:
return self.skip('skipped (not on posix) ')
self.HOST = '127.0.0.1'
self.PORT = 8081
# Spawn the process and wait, when this returns, the original process
# is finished. If it daemonized properly, we should still be able
# to access pages.
p = helper.CPProcess(ssl=(self.scheme.lower() == 'https'),
wait=True, daemonize=True,
socket_host='127.0.0.1',
socket_port=8081)
p.write_conf(
extra='test_case_name: "test_daemonize"')
p.start(imports='cherrypy.test._test_states_demo')
try:
# Just get the pid of the daemonization process.
self.getPage('/pid')
self.assertStatus(200)
page_pid = int(self.body)
self.assertEqual(page_pid, p.get_pid())
finally:
# Shut down the spawned process
self.getPage('/exit')
p.join()
# Wait until here to test the exit code because we want to ensure
# that we wait for the daemon to finish running before we fail.
if p.exit_code != 0:
self.fail('Daemonized parent process failed to exit cleanly.')
class SignalHandlingTests(helper.CPWebCase):
def test_SIGHUP_tty(self):
# When not daemonized, SIGHUP should shut down the server.
try:
from signal import SIGHUP
except ImportError:
return self.skip('skipped (no SIGHUP) ')
# Spawn the process.
p = helper.CPProcess(ssl=(self.scheme.lower() == 'https'))
p.write_conf(
extra='test_case_name: "test_SIGHUP_tty"')
p.start(imports='cherrypy.test._test_states_demo')
# Send a SIGHUP
os.kill(p.get_pid(), SIGHUP)
# This might hang if things aren't working right, but meh.
p.join()
def test_SIGHUP_daemonized(self):
# When daemonized, SIGHUP should restart the server.
try:
from signal import SIGHUP
except ImportError:
return self.skip('skipped (no SIGHUP) ')
if os.name not in ['posix']:
return self.skip('skipped (not on posix) ')
# Spawn the process and wait, when this returns, the original process
# is finished. If it daemonized properly, we should still be able
# to access pages.
p = helper.CPProcess(ssl=(self.scheme.lower() == 'https'),
wait=True, daemonize=True)
p.write_conf(
extra='test_case_name: "test_SIGHUP_daemonized"')
p.start(imports='cherrypy.test._test_states_demo')
pid = p.get_pid()
try:
# Send a SIGHUP
os.kill(pid, SIGHUP)
# Give the server some time to restart
time.sleep(2)
self.getPage('/pid')
self.assertStatus(200)
new_pid = int(self.body)
self.assertNotEqual(new_pid, pid)
finally:
# Shut down the spawned process
self.getPage('/exit')
p.join()
def _require_signal_and_kill(self, signal_name):
if not hasattr(signal, signal_name):
self.skip('skipped (no %(signal_name)s)' % vars())
if not hasattr(os, 'kill'):
self.skip('skipped (no os.kill)')
def test_SIGTERM(self):
'SIGTERM should shut down the server whether daemonized or not.'
self._require_signal_and_kill('SIGTERM')
# Spawn a normal, undaemonized process.
p = helper.CPProcess(ssl=(self.scheme.lower() == 'https'))
p.write_conf(
extra='test_case_name: "test_SIGTERM"')
p.start(imports='cherrypy.test._test_states_demo')
# Send a SIGTERM
os.kill(p.get_pid(), signal.SIGTERM)
# This might hang if things aren't working right, but meh.
p.join()
if os.name in ['posix']:
# Spawn a daemonized process and test again.
p = helper.CPProcess(ssl=(self.scheme.lower() == 'https'),
wait=True, daemonize=True)
p.write_conf(
extra='test_case_name: "test_SIGTERM_2"')
p.start(imports='cherrypy.test._test_states_demo')
# Send a SIGTERM
os.kill(p.get_pid(), signal.SIGTERM)
# This might hang if things aren't working right, but meh.
p.join()
def test_signal_handler_unsubscribe(self):
self._require_signal_and_kill('SIGTERM')
# Although Windows has `os.kill` and SIGTERM is defined, the
# platform does not implement signals and sending SIGTERM
# will result in a forced termination of the process.
# Therefore, this test is not suitable for Windows.
if os.name == 'nt':
self.skip('SIGTERM not available')
# Spawn a normal, undaemonized process.
p = helper.CPProcess(ssl=(self.scheme.lower() == 'https'))
p.write_conf(
extra="""unsubsig: True
test_case_name: "test_signal_handler_unsubscribe"
""")
p.start(imports='cherrypy.test._test_states_demo')
# Ask the process to quit
os.kill(p.get_pid(), signal.SIGTERM)
# This might hang if things aren't working right, but meh.
p.join()
# Assert the old handler ran.
log_lines = list(open(p.error_log, 'rb'))
assert any(
line.endswith(b'I am an old SIGTERM handler.\n')
for line in log_lines
)
def test_safe_wait_INADDR_ANY(): # pylint: disable=invalid-name
"""
Wait on INADDR_ANY should not raise IOError
In cases where the loopback interface does not exist, CherryPy cannot
effectively determine if a port binding to INADDR_ANY was effected.
In this situation, CherryPy should assume that it failed to detect
the binding (not that the binding failed) and only warn that it could
not verify it.
"""
# At such a time that CherryPy can reliably determine one or more
# viable IP addresses of the host, this test may be removed.
# Simulate the behavior we observe when no loopback interface is
# present by: finding a port that's not occupied, then wait on it.
free_port = portend.find_available_local_port()
servers = cherrypy.process.servers
inaddr_any = '0.0.0.0'
# Wait on the free port that's unbound
with pytest.warns(
UserWarning,
match='Unable to verify that the server is bound on ',
) as warnings:
# pylint: disable=protected-access
with servers._safe_wait(inaddr_any, free_port):
portend.occupied(inaddr_any, free_port, timeout=1)
assert len(warnings) == 1
# The wait should still raise an IO error if INADDR_ANY was
# not supplied.
with pytest.raises(IOError):
# pylint: disable=protected-access
with servers._safe_wait('127.0.0.1', free_port):
portend.occupied('127.0.0.1', free_port, timeout=1)
|
import typing
import keras
import matchzoo
from matchzoo.engine.base_model import BaseModel
from matchzoo.engine.param import Param
from matchzoo.engine.param_table import ParamTable
from matchzoo.engine import hyper_spaces
class ArcII(BaseModel):
"""
ArcII Model.
Examples:
>>> model = ArcII()
>>> model.params['embedding_output_dim'] = 300
>>> model.params['num_blocks'] = 2
>>> model.params['kernel_1d_count'] = 32
>>> model.params['kernel_1d_size'] = 3
>>> model.params['kernel_2d_count'] = [16, 32]
>>> model.params['kernel_2d_size'] = [[3, 3], [3, 3]]
>>> model.params['pool_2d_size'] = [[2, 2], [2, 2]]
>>> model.guess_and_fill_missing_params(verbose=0)
>>> model.build()
"""
@classmethod
def get_default_params(cls) -> ParamTable:
""":return: model default parameters."""
params = super().get_default_params(with_embedding=True)
params['optimizer'] = 'adam'
opt_space = hyper_spaces.choice(['adam', 'rmsprop', 'adagrad'])
params.get('optimizer').hyper_space = opt_space
params.add(Param(name='num_blocks', value=1,
desc="Number of 2D convolution blocks."))
params.add(Param(name='kernel_1d_count', value=32,
desc="Kernel count of 1D convolution layer."))
params.add(Param(name='kernel_1d_size', value=3,
desc="Kernel size of 1D convolution layer."))
params.add(Param(name='kernel_2d_count', value=[32],
desc="Kernel count of 2D convolution layer in"
"each block"))
params.add(Param(name='kernel_2d_size', value=[[3, 3]],
desc="Kernel size of 2D convolution layer in"
" each block."))
params.add(Param(name='activation', value='relu',
desc="Activation function."))
params.add(Param(name='pool_2d_size', value=[[2, 2]],
desc="Size of pooling layer in each block."))
params.add(Param(
name='padding', value='same',
hyper_space=hyper_spaces.choice(
['same', 'valid']),
desc="The padding mode in the convolution layer. It should be one"
"of `same`, `valid`."
))
params.add(Param(
name='dropout_rate', value=0.0,
hyper_space=hyper_spaces.quniform(low=0.0, high=0.8,
q=0.01),
desc="The dropout rate."
))
return params
def build(self):
"""
Build model structure.
ArcII has the desirable property of letting two sentences meet before
their own high-level representations mature.
"""
input_left, input_right = self._make_inputs()
embedding = self._make_embedding_layer()
embed_left = embedding(input_left)
embed_right = embedding(input_right)
# Phrase level representations
conv_1d_left = keras.layers.Conv1D(
self._params['kernel_1d_count'],
self._params['kernel_1d_size'],
padding=self._params['padding']
)(embed_left)
conv_1d_right = keras.layers.Conv1D(
self._params['kernel_1d_count'],
self._params['kernel_1d_size'],
padding=self._params['padding']
)(embed_right)
# Interaction
matching_layer = matchzoo.layers.MatchingLayer(matching_type='plus')
embed_cross = matching_layer([conv_1d_left, conv_1d_right])
for i in range(self._params['num_blocks']):
embed_cross = self._conv_pool_block(
embed_cross,
self._params['kernel_2d_count'][i],
self._params['kernel_2d_size'][i],
self._params['padding'],
self._params['activation'],
self._params['pool_2d_size'][i]
)
embed_flat = keras.layers.Flatten()(embed_cross)
x = keras.layers.Dropout(rate=self._params['dropout_rate'])(embed_flat)
inputs = [input_left, input_right]
x_out = self._make_output_layer()(x)
self._backend = keras.Model(inputs=inputs, outputs=x_out)
@classmethod
def _conv_pool_block(
cls, x,
kernel_count: int,
kernel_size: int,
padding: str,
activation: str,
pool_size: int
) -> typing.Any:
output = keras.layers.Conv2D(kernel_count,
kernel_size,
padding=padding,
activation=activation)(x)
output = keras.layers.MaxPooling2D(pool_size=pool_size)(output)
return output
|
from homeassistant.helpers import intent
async def test_recent_items_intent(hass, sl_setup):
"""Test recent items."""
await intent.async_handle(
hass, "test", "HassShoppingListAddItem", {"item": {"value": "beer"}}
)
await intent.async_handle(
hass, "test", "HassShoppingListAddItem", {"item": {"value": "wine"}}
)
await intent.async_handle(
hass, "test", "HassShoppingListAddItem", {"item": {"value": "soda"}}
)
response = await intent.async_handle(hass, "test", "HassShoppingListLastItems")
assert (
response.speech["plain"]["speech"]
== "These are the top 3 items on your shopping list: soda, wine, beer"
)
|
import logging
from itertools import cycle
from netort.resource import manager as resource
from . import info
from .module_exceptions import AmmoFileError
class HttpAmmo(object):
'''
Represents HTTP missile
>>> print HttpAmmo('/', []).to_s() # doctest: +NORMALIZE_WHITESPACE
GET / HTTP/1.1
>>> print HttpAmmo('/', ['Connection: Close', 'Content-Type: Application/JSON']).to_s() # doctest: +NORMALIZE_WHITESPACE
GET / HTTP/1.1
Connection: Close
Content-Type: Application/JSON
>>> print HttpAmmo('/', ['Connection: Close'], method='POST', body='hello!').to_s() # doctest: +NORMALIZE_WHITESPACE
POST / HTTP/1.1
Connection: Close
Content-Length: 6
<BLANKLINE>
hello!
'''
def __init__(self, uri, headers, method='GET', http_ver='1.1', body=b''):
self.method = method
self.uri = uri.encode('utf8') if isinstance(uri, str) else uri
self.proto = 'HTTP/%s' % http_ver
self.headers = set(headers)
self.body = body
if len(body):
self.headers.add("Content-Length: %s" % len(body))
def to_s(self):
if self.headers:
headers = b'\r\n'.join(sorted([h.encode('utf8') for h in self.headers])) + b'\r\n'
else:
headers = b''
return b"%s %s %s\r\n%s\r\n%s" % (
self.method.encode('utf8'),
self.uri,
self.proto.encode('utf8'),
headers,
self.body)
class SimpleGenerator(object):
'''
Generates ammo based on a given sample.
'''
def __init__(self, missile_sample):
'''
Missile sample is any object that has to_s method which
returns its string representation.
'''
self.missiles = cycle([(missile_sample.to_s(), None)])
def __iter__(self):
for m in self.missiles:
info.status.inc_loop_count()
yield m
class UriStyleGenerator(object):
'''
Generates GET ammo based on given URI list.
'''
def __init__(self, uris, headers, http_ver='1.1'):
'''
uris - a list of URIs as strings.
'''
self.uri_count = len(uris)
self.missiles = cycle([(
HttpAmmo(
uri, headers, http_ver=http_ver).to_s(), None) for uri in uris])
def __iter__(self):
for m in self.missiles:
yield m
info.status.loop_count = info.status.ammo_count / self.uri_count
class Reader(object):
def __init__(self, filename, use_cache=True, **kwargs):
self.filename = filename
self.use_cache = use_cache
class AmmoFileReader(Reader):
'''Read missiles from ammo file'''
def __init__(self, filename, use_cache=True, **kwargs):
super(AmmoFileReader, self).__init__(filename, use_cache)
self.log = logging.getLogger(__name__)
self.log.info("Loading ammo from '%s'" % filename)
def __iter__(self):
def read_chunk_header(ammo_file):
chunk_header = b''
while chunk_header == b'':
line = ammo_file.readline()
if line == b'':
return line
chunk_header = line.strip(b'\r\n')
return chunk_header
opener = resource.get_opener(self.filename)
with opener(self.use_cache) as ammo_file:
info.status.af_size = opener.data_length
# if we got StopIteration here, the file is empty
chunk_header = read_chunk_header(ammo_file)
while chunk_header:
if chunk_header != b'':
try:
fields = chunk_header.split()
chunk_size = int(fields[0])
if chunk_size == 0:
if info.status.loop_count == 0:
self.log.info(
'Zero-sized chunk in ammo file at %s. Starting over.'
% ammo_file.tell())
ammo_file.seek(0)
info.status.inc_loop_count()
chunk_header = read_chunk_header(ammo_file)
continue
marker = fields[1] if len(fields) > 1 else None
missile = ammo_file.read(chunk_size)
if len(missile) < chunk_size:
raise AmmoFileError(
"Unexpected end of file: read %s bytes instead of %s"
% (len(missile), chunk_size))
yield (missile, marker)
except (IndexError, ValueError) as e:
raise AmmoFileError(
"Error while reading ammo file. Position: %s, header: '%s', original exception: %s"
% (ammo_file.tell(), chunk_header, e))
chunk_header = read_chunk_header(ammo_file)
if chunk_header == b'':
ammo_file.seek(0)
info.status.inc_loop_count()
chunk_header = read_chunk_header(ammo_file)
info.status.af_position = ammo_file.tell()
class SlowLogReader(Reader):
"""Read missiles from SQL slow log. Not usable with Phantom"""
def __iter__(self):
opener = resource.get_opener(self.filename)
with opener(self.use_cache) as ammo_file:
info.status.af_size = opener.data_length
request = ""
while True:
for line in ammo_file:
info.status.af_position = ammo_file.tell()
if isinstance(line, bytes):
line = line.decode('utf-8')
if line.startswith('#'):
if request != "":
yield (request, None)
request = ""
else:
request += line
ammo_file.seek(0)
info.status.af_position = 0
info.status.inc_loop_count()
class LineReader(Reader):
"""One line -- one missile"""
def __iter__(self):
opener = resource.get_opener(self.filename)
with opener(self.use_cache) as ammo_file:
info.status.af_size = opener.data_length
while True:
for line in ammo_file:
info.status.af_position = ammo_file.tell()
yield (line.rstrip(b'\r\n'), None) if isinstance(line, bytes) else (line.rstrip('\r\n').encode('utf8'), None)
ammo_file.seek(0)
info.status.af_position = 0
info.status.inc_loop_count()
class CaseLineReader(Reader):
"""One line -- one missile with case, tab separated"""
def __iter__(self):
opener = resource.get_opener(self.filename)
with opener(self.use_cache) as ammo_file:
info.status.af_size = opener.data_length
while True:
for line in ammo_file:
info.status.af_position = ammo_file.tell()
parts = line.rstrip(b'\r\n').split(b'\t', 1)
if len(parts) == 2:
yield (parts[1], parts[0])
elif len(parts) == 1:
yield (parts[0], None)
else:
raise RuntimeError("Unreachable branch")
ammo_file.seek(0)
info.status.af_position = 0
info.status.inc_loop_count()
class AccessLogReader(Reader):
"""Missiles from access log"""
def __init__(self, filename, headers=None, http_ver='1.1', use_cache=True, **kwargs):
super(AccessLogReader, self).__init__(filename, use_cache)
self.warned = False
self.headers = set(headers) if headers else set()
self.log = logging.getLogger(__name__)
def warn(self, message):
if not self.warned:
self.warned = True
self.log.warning(
"There are some skipped lines. See full log for details.")
self.log.debug(message)
def __iter__(self):
opener = resource.get_opener(self.filename)
with opener(self.use_cache) as ammo_file:
info.status.af_size = opener.data_length
while True:
for line in ammo_file:
info.status.af_position = ammo_file.tell()
if isinstance(line, bytes):
line = line.decode('utf-8')
try:
request = line.split('"')[1]
method, uri, proto = request.split()
http_ver = proto.split('/')[1]
if method == "GET":
yield (
HttpAmmo(
uri,
headers=self.headers,
http_ver=http_ver, ).to_s(), None)
else:
self.warn(
"Skipped line: %s (unsupported method)" % line)
except (ValueError, IndexError) as e:
self.warn("Skipped line: %s (%s)" % (line, e))
ammo_file.seek(0)
info.status.af_position = 0
info.status.inc_loop_count()
def _parse_header(header):
    # Parses a single "Name: value" header (bytes) into a one-entry dict; the
    # generator over the two split parts is consumed by dict() as one key/value pair.
    return dict([(h.strip().decode('utf8') for h in header.split(b':', 1))])
class UriReader(Reader):
def __init__(self, filename, headers=None, http_ver='1.1', use_cache=True, **kwargs):
super(UriReader, self).__init__(filename, use_cache)
self.headers = {pair[0].strip(): pair[1].strip() for pair in [h.split(':', 1) for h in headers]} \
if headers else {}
self.http_ver = http_ver
self.log = logging.getLogger(__name__)
self.log.info("Loading ammo from '%s' using URI format." % filename)
def __iter__(self):
opener = resource.get_opener(self.filename)
with opener(self.use_cache) as ammo_file:
info.status.af_size = opener.data_length
while True:
for line in ammo_file:
info.status.af_position = ammo_file.tell()
# if isinstance(line, bytes):
# line = line.decode('utf-8')
if line.startswith(b'['):
self.headers.update(
_parse_header(line.strip(b'\r\n[]\t ')))
elif len(line.rstrip(b'\r\n')):
fields = line.split()
uri = fields[0]
if len(fields) > 1:
marker = fields[1]
else:
marker = None
yield (
HttpAmmo(
uri,
headers=[
': '.join(header)
for header in self.headers.items()
],
http_ver=self.http_ver, ).to_s(), marker)
if info.status.ammo_count == 0:
self.log.error("No ammo in uri-style file")
raise AmmoFileError("No ammo! Cover me!")
ammo_file.seek(0)
info.status.af_position = 0
info.status.inc_loop_count()
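# Illustration (not part of the original module): the uri-style ammo parsed by
# UriReader looks roughly like this -- bracketed lines add default headers, the
# remaining non-empty lines are "<uri> [marker]":
#
#   [Host: example.org]
#   [User-Agent: tank]
#   /index.html index
#   /search?q=tank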
class UriPostReader(Reader):
"""Read POST missiles from ammo file"""
def __init__(self, filename, headers=None, http_ver='1.1', use_cache=True, **kwargs):
super(UriPostReader, self).__init__(filename, use_cache)
self.headers = {pair[0].strip(): pair[1].strip() for pair in [h.split(':', 1) for h in headers]} \
if headers else {}
self.http_ver = http_ver
self.log = logging.getLogger(__name__)
self.log.info("Loading ammo from '%s' using URI+POST format", filename)
def __iter__(self):
def read_chunk_header(ammo_file):
chunk_header = b''
while chunk_header == b'':
line = ammo_file.readline()
if line.startswith(b'['):
self.headers.update(_parse_header(line.strip(b'\r\n[]\t ')))
elif line == b'':
return line
else:
chunk_header = line.strip(b'\r\n')
return chunk_header
opener = resource.get_opener(self.filename)
with opener(self.use_cache) as ammo_file:
info.status.af_size = opener.data_length
# if we got StopIteration here, the file is empty
chunk_header = read_chunk_header(ammo_file)
while chunk_header:
if chunk_header != b'':
try:
fields = chunk_header.split()
chunk_size = int(fields[0])
uri = fields[1]
marker = fields[2] if len(fields) > 2 else None
if chunk_size == 0:
missile = b""
else:
missile = ammo_file.read(chunk_size)
if len(missile) < chunk_size:
raise AmmoFileError(
"Unexpected end of file: read %s bytes instead of %s"
% (len(missile), chunk_size))
yield (
HttpAmmo(
uri=uri,
headers=[
': '.join(header)
for header in self.headers.items()
],
method='POST',
body=missile,
http_ver=self.http_ver, ).to_s(), marker)
except (IndexError, ValueError) as e:
raise AmmoFileError(
"Error while reading ammo file. Position: %s, header: '%s', original exception: %s"
% (ammo_file.tell(), chunk_header, e))
chunk_header = read_chunk_header(ammo_file)
                if chunk_header == b'':
self.log.debug(
'Reached the end of ammo file. Starting over.')
ammo_file.seek(0)
info.status.inc_loop_count()
chunk_header = read_chunk_header(ammo_file)
info.status.af_position = ammo_file.tell()
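# Illustration (not part of the original module): UriPostReader expects header
# lines of the form "<body_size> <uri> [marker]", each followed by exactly
# <body_size> bytes of POST body; bracketed lines add default headers:
#
#   [Content-Type: application/json]
#   15 /api/v1/points p1
#   {"value": 42.0}
#   0 /ping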
|
from homeassistant.components.switch import DOMAIN, SwitchEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.helpers.typing import HomeAssistantType
from . import AqualinkEntity, refresh_system
from .const import DOMAIN as AQUALINK_DOMAIN
PARALLEL_UPDATES = 0
async def async_setup_entry(
hass: HomeAssistantType, config_entry: ConfigEntry, async_add_entities
) -> None:
"""Set up discovered switches."""
devs = []
for dev in hass.data[AQUALINK_DOMAIN][DOMAIN]:
devs.append(HassAqualinkSwitch(dev))
async_add_entities(devs, True)
class HassAqualinkSwitch(AqualinkEntity, SwitchEntity):
"""Representation of a switch."""
@property
def name(self) -> str:
"""Return the name of the switch."""
return self.dev.label
@property
def icon(self) -> str:
"""Return an icon based on the switch type."""
if self.name == "Cleaner":
return "mdi:robot-vacuum"
if self.name == "Waterfall" or self.name.endswith("Dscnt"):
return "mdi:fountain"
if self.name.endswith("Pump") or self.name.endswith("Blower"):
return "mdi:fan"
if self.name.endswith("Heater"):
return "mdi:radiator"
@property
def is_on(self) -> bool:
"""Return whether the switch is on or not."""
return self.dev.is_on
@refresh_system
async def async_turn_on(self, **kwargs) -> None:
"""Turn on the switch."""
await self.dev.turn_on()
@refresh_system
async def async_turn_off(self, **kwargs) -> None:
"""Turn off the switch."""
await self.dev.turn_off()
|
import sys
import pytest
import pygal
from pygal.etree import etree
from . import get_data
@pytest.fixture
def etreefx(request):
"""Fixture allowing to test with builtin etree and lxml"""
if request.param == 'etree':
etree.to_etree()
if request.param == 'lxml':
etree.to_lxml()
def pytest_generate_tests(metafunc):
"""Generate the tests for etree and lxml"""
if etree._lxml_etree:
metafunc.fixturenames.append('etreefx')
metafunc.parametrize('etreefx', ['lxml', 'etree'], indirect=True)
if not hasattr(sys, 'pypy_version_info'):
etree.to_lxml()
if hasattr(sys, 'pypy_version_info'):
etree.to_etree()
if "Chart" in metafunc.fixturenames:
metafunc.parametrize("Chart", pygal.CHARTS)
if "datas" in metafunc.fixturenames:
metafunc.parametrize(
"datas", [[("Serie %d" % i, get_data(i)) for i in range(s)]
for s in (5, 1, 0)]
)
|
import rumps
class AwesomeStatusBarApp(rumps.App):
@rumps.clicked("Preferences")
def prefs(self, _):
rumps.alert("jk! no preferences available!")
@rumps.clicked("Silly button")
def onoff(self, sender):
sender.state = not sender.state
@rumps.clicked("Say hi")
def sayhi(self, _):
rumps.notification("Awesome title", "amazing subtitle", "hi!!1")
if __name__ == "__main__":
AwesomeStatusBarApp("Awesome App").run()
|
import pywink
import voluptuous as vol
from homeassistant.components.lock import LockEntity
from homeassistant.const import (
ATTR_CODE,
ATTR_ENTITY_ID,
ATTR_MODE,
ATTR_NAME,
STATE_UNKNOWN,
)
import homeassistant.helpers.config_validation as cv
from . import DOMAIN, WinkDevice
SERVICE_SET_VACATION_MODE = "set_lock_vacation_mode"
SERVICE_SET_ALARM_MODE = "set_lock_alarm_mode"
SERVICE_SET_ALARM_SENSITIVITY = "set_lock_alarm_sensitivity"
SERVICE_SET_ALARM_STATE = "set_lock_alarm_state"
SERVICE_SET_BEEPER_STATE = "set_lock_beeper_state"
SERVICE_ADD_KEY = "add_new_lock_key_code"
ATTR_ENABLED = "enabled"
ATTR_SENSITIVITY = "sensitivity"
ALARM_SENSITIVITY_MAP = {
"low": 0.2,
"medium_low": 0.4,
"medium": 0.6,
"medium_high": 0.8,
"high": 1.0,
}
ALARM_MODES_MAP = {
"activity": "alert",
"forced_entry": "forced_entry",
"tamper": "tamper",
}
SET_ENABLED_SCHEMA = vol.Schema(
{vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, vol.Required(ATTR_ENABLED): cv.string}
)
SET_SENSITIVITY_SCHEMA = vol.Schema(
{
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
vol.Required(ATTR_SENSITIVITY): vol.In(ALARM_SENSITIVITY_MAP),
}
)
SET_ALARM_MODES_SCHEMA = vol.Schema(
{
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
vol.Required(ATTR_MODE): vol.In(ALARM_MODES_MAP),
}
)
ADD_KEY_SCHEMA = vol.Schema(
{
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
vol.Required(ATTR_NAME): cv.string,
vol.Required(ATTR_CODE): cv.positive_int,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Wink platform."""
for lock in pywink.get_locks():
_id = lock.object_id() + lock.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
add_entities([WinkLockDevice(lock, hass)])
def service_handle(service):
"""Handle for services."""
entity_ids = service.data.get("entity_id")
all_locks = hass.data[DOMAIN]["entities"]["lock"]
locks_to_set = []
if entity_ids is None:
locks_to_set = all_locks
else:
for lock in all_locks:
if lock.entity_id in entity_ids:
locks_to_set.append(lock)
for lock in locks_to_set:
if service.service == SERVICE_SET_VACATION_MODE:
lock.set_vacation_mode(service.data.get(ATTR_ENABLED))
elif service.service == SERVICE_SET_ALARM_STATE:
lock.set_alarm_state(service.data.get(ATTR_ENABLED))
elif service.service == SERVICE_SET_BEEPER_STATE:
lock.set_beeper_state(service.data.get(ATTR_ENABLED))
elif service.service == SERVICE_SET_ALARM_MODE:
lock.set_alarm_mode(service.data.get(ATTR_MODE))
elif service.service == SERVICE_SET_ALARM_SENSITIVITY:
lock.set_alarm_sensitivity(service.data.get(ATTR_SENSITIVITY))
elif service.service == SERVICE_ADD_KEY:
name = service.data.get(ATTR_NAME)
code = service.data.get(ATTR_CODE)
lock.add_new_key(code, name)
hass.services.register(
DOMAIN, SERVICE_SET_VACATION_MODE, service_handle, schema=SET_ENABLED_SCHEMA
)
hass.services.register(
DOMAIN, SERVICE_SET_ALARM_STATE, service_handle, schema=SET_ENABLED_SCHEMA
)
hass.services.register(
DOMAIN, SERVICE_SET_BEEPER_STATE, service_handle, schema=SET_ENABLED_SCHEMA
)
hass.services.register(
DOMAIN, SERVICE_SET_ALARM_MODE, service_handle, schema=SET_ALARM_MODES_SCHEMA
)
hass.services.register(
DOMAIN,
SERVICE_SET_ALARM_SENSITIVITY,
service_handle,
schema=SET_SENSITIVITY_SCHEMA,
)
hass.services.register(
DOMAIN, SERVICE_ADD_KEY, service_handle, schema=ADD_KEY_SCHEMA
)
class WinkLockDevice(WinkDevice, LockEntity):
"""Representation of a Wink lock."""
async def async_added_to_hass(self):
"""Call when entity is added to hass."""
self.hass.data[DOMAIN]["entities"]["lock"].append(self)
@property
def is_locked(self):
"""Return true if device is locked."""
return self.wink.state()
def lock(self, **kwargs):
"""Lock the device."""
self.wink.set_state(True)
def unlock(self, **kwargs):
"""Unlock the device."""
self.wink.set_state(False)
def set_alarm_state(self, enabled):
"""Set lock's alarm state."""
self.wink.set_alarm_state(enabled)
def set_vacation_mode(self, enabled):
"""Set lock's vacation mode."""
self.wink.set_vacation_mode(enabled)
def set_beeper_state(self, enabled):
"""Set lock's beeper mode."""
self.wink.set_beeper_mode(enabled)
def add_new_key(self, code, name):
"""Add a new user key code."""
self.wink.add_new_key(code, name)
def set_alarm_sensitivity(self, sensitivity):
"""
Set lock's alarm sensitivity.
Valid sensitivities:
0.2, 0.4, 0.6, 0.8, 1.0
"""
self.wink.set_alarm_sensitivity(sensitivity)
def set_alarm_mode(self, mode):
"""
Set lock's alarm mode.
Valid modes:
alert - Beep when lock is locked or unlocked
tamper - 15 sec alarm when lock is disturbed when locked
forced_entry - 3 min alarm when significant force applied
to door when locked.
"""
self.wink.set_alarm_mode(mode)
@property
def device_state_attributes(self):
"""Return the state attributes."""
super_attrs = super().device_state_attributes
sensitivity = dict_value_to_key(
ALARM_SENSITIVITY_MAP, self.wink.alarm_sensitivity()
)
super_attrs["alarm_sensitivity"] = sensitivity
super_attrs["vacation_mode"] = self.wink.vacation_mode_enabled()
super_attrs["beeper_mode"] = self.wink.beeper_enabled()
super_attrs["auto_lock"] = self.wink.auto_lock_enabled()
alarm_mode = dict_value_to_key(ALARM_MODES_MAP, self.wink.alarm_mode())
super_attrs["alarm_mode"] = alarm_mode
super_attrs["alarm_enabled"] = self.wink.alarm_enabled()
return super_attrs
def dict_value_to_key(dict_map, comp_value):
"""Return the key that has the provided value."""
for key, value in dict_map.items():
if value == comp_value:
return key
return STATE_UNKNOWN
|
from Handler import Handler
from graphite import GraphiteHandler
class HostedGraphiteHandler(Handler):
def __init__(self, config=None):
"""
Create a new instance of the HostedGraphiteHandler class
"""
# Initialize Handler
Handler.__init__(self, config)
self.key = self.config['apikey'].lower().strip()
self.graphite = GraphiteHandler(self.config)
def get_default_config_help(self):
"""
Returns the help text for the configuration options for this handler
"""
config = super(HostedGraphiteHandler, self).get_default_config_help()
config.update({
'apikey': 'Api key to use',
'host': 'Hostname',
'port': 'Port',
'proto': 'udp or tcp',
'timeout': '',
'batch': 'How many to store before sending to the graphite server',
'max_backlog_multiplier': 'how many batches to store before trimming', # NOQA
'trim_backlog_multiplier': 'Trim down how many batches',
})
return config
def get_default_config(self):
"""
Return the default config for the handler
"""
config = super(HostedGraphiteHandler, self).get_default_config()
config.update({
'apikey': '',
'host': 'carbon.hostedgraphite.com',
'port': 2003,
'proto': 'tcp',
'timeout': 15,
'batch': 1,
'max_backlog_multiplier': 5,
'trim_backlog_multiplier': 4,
})
return config
def process(self, metric):
"""
Process a metric by sending it to graphite
"""
metric = self.key + '.' + str(metric)
self.graphite.process(metric)
def _process(self, metric):
"""
Process a metric by sending it to graphite
"""
metric = self.key + '.' + str(metric)
self.graphite._process(metric)
def _flush(self):
self.graphite._flush()
def flush(self):
self.graphite.flush()
|
import json
import os
from radicale import item as radicale_item
class CollectionMetaMixin:
def __init__(self):
super().__init__()
self._meta_cache = None
self._props_path = os.path.join(
self._filesystem_path, ".Radicale.props")
def get_meta(self, key=None):
# reuse cached value if the storage is read-only
if self._storage._lock.locked == "w" or self._meta_cache is None:
try:
try:
with open(self._props_path, encoding=self._encoding) as f:
self._meta_cache = json.load(f)
except FileNotFoundError:
self._meta_cache = {}
radicale_item.check_and_sanitize_props(self._meta_cache)
except ValueError as e:
raise RuntimeError("Failed to load properties of collection "
"%r: %s" % (self.path, e)) from e
return self._meta_cache.get(key) if key else self._meta_cache
def set_meta(self, props):
with self._atomic_write(self._props_path, "w") as f:
json.dump(props, f, sort_keys=True)
|
from __future__ import absolute_import, unicode_literals
import logging
from libtmux.exc import TmuxSessionExists
from libtmux.pane import Pane
from libtmux.server import Server
from libtmux.session import Session
from libtmux.window import Window
from . import exc
from .util import run_before_script
logger = logging.getLogger(__name__)
class WorkspaceBuilder(object):
"""
Load workspace from session :py:obj:`dict`.
Build tmux workspace from a configuration. Creates and names windows, sets
options, splits windows into panes.
The normal phase of loading is:
1. :term:`kaptan` imports json/yaml/ini. ``.get()`` returns python
:class:`dict`::
import kaptan
sconf = kaptan.Kaptan(handler='yaml')
        sconf = sconf.import_config(self.yaml_config).get()
or from config file with extension::
import kaptan
sconf = kaptan.Kaptan()
        sconf = sconf.import_config('path/to/config.yaml').get()
kaptan automatically detects the handler from filenames.
2. :meth:`config.expand` sconf inline shorthand::
from tmuxp import config
sconf = config.expand(sconf)
3. :meth:`config.trickle` passes down default values from session
-> window -> pane if applicable::
sconf = config.trickle(sconf)
4. (You are here) We will create a :class:`libtmux.Session` (a real
``tmux(1)`` session) and iterate through the list of windows, and
their panes, returning full :class:`libtmux.Window` and
:class:`libtmux.Pane` objects each step of the way::
workspace = WorkspaceBuilder(sconf=sconf)
It handles the magic of cases where the user may want to start
a session inside tmux (when `$TMUX` is in the env variables).
"""
def __init__(self, sconf, server=None):
"""
Initialize workspace loading.
Parameters
----------
sconf : dict
session config, includes a :py:obj:`list` of ``windows``.
server : :class:`libtmux.Server`
tmux server to build session in
Notes
-----
TODO: Initialize :class:`libtmux.Session` from here, in
``self.session``.
"""
if not sconf:
raise exc.EmptyConfigException('session configuration is empty.')
# config.validate_schema(sconf)
if isinstance(server, Server):
self.server = server
else:
self.server = None
self.sconf = sconf
def session_exists(self, session_name=None):
exists = self.server.has_session(session_name)
if not exists:
return exists
self.session = self.server.find_where({'session_name': session_name})
return True
def build(self, session=None):
"""
Build tmux workspace in session.
Optionally accepts ``session`` to build with only session object.
        Without ``session``, it will use :class:`libtmux.Server` at
``self.server`` passed in on initialization to create a new Session
object.
Parameters
----------
session : :class:`libtmux.Session`
session to build workspace in
"""
if not session:
if not self.server:
raise exc.TmuxpException(
'WorkspaceBuilder.build requires server to be passed '
+ 'on initialization, or pass in session object to here.'
)
if self.server.has_session(self.sconf['session_name']):
self.session = self.server.find_where(
{'session_name': self.sconf['session_name']}
)
raise TmuxSessionExists(
'Session name %s is already running.' % self.sconf['session_name']
)
else:
session = self.server.new_session(
session_name=self.sconf['session_name']
)
assert self.sconf['session_name'] == session.name
assert len(self.sconf['session_name']) > 0
self.session = session
self.server = session.server
self.server._list_sessions()
assert self.server.has_session(session.name)
assert session.id
assert isinstance(session, Session)
focus = None
if 'before_script' in self.sconf:
try:
cwd = None
# we want to run the before_script file cwd'd from the
# session start directory, if it exists.
if 'start_directory' in self.sconf:
cwd = self.sconf['start_directory']
run_before_script(self.sconf['before_script'], cwd=cwd)
except Exception as e:
self.session.kill_session()
raise e
if 'options' in self.sconf:
for option, value in self.sconf['options'].items():
self.session.set_option(option, value)
if 'global_options' in self.sconf:
for option, value in self.sconf['global_options'].items():
self.session.set_option(option, value, _global=True)
if 'environment' in self.sconf:
for option, value in self.sconf['environment'].items():
self.session.set_environment(option, value)
for w, wconf in self.iter_create_windows(session):
assert isinstance(w, Window)
focus_pane = None
for p, pconf in self.iter_create_panes(w, wconf):
assert isinstance(p, Pane)
p = p
if 'layout' in wconf:
w.select_layout(wconf['layout'])
if 'focus' in pconf and pconf['focus']:
focus_pane = p
if 'focus' in wconf and wconf['focus']:
focus = w
self.config_after_window(w, wconf)
if focus_pane:
focus_pane.select_pane()
if focus:
focus.select_window()
def iter_create_windows(self, s):
"""
Return :class:`libtmux.Window` iterating through session config dict.
Generator yielding :class:`libtmux.Window` by iterating through
``sconf['windows']``.
Applies ``window_options`` to window.
Parameters
----------
session : :class:`libtmux.Session`
session to create windows in
Returns
-------
tuple of (:class:`libtmux.Window`, ``wconf``)
Newly created window, and the section from the tmuxp configuration
that was used to create the window.
"""
for i, wconf in enumerate(self.sconf['windows'], start=1):
if 'window_name' not in wconf:
window_name = None
else:
window_name = wconf['window_name']
w1 = None
if i == int(1): # if first window, use window 1
w1 = s.attached_window
w1.move_window(99)
pass
if 'start_directory' in wconf:
sd = wconf['start_directory']
else:
sd = None
if 'window_shell' in wconf:
ws = wconf['window_shell']
else:
ws = None
w = s.new_window(
window_name=window_name,
start_directory=sd,
attach=False, # do not move to the new window
window_index=wconf.get('window_index', ''),
window_shell=ws,
)
if i == int(1) and w1: # if first window, use window 1
w1.kill_window()
assert isinstance(w, Window)
s.server._update_windows()
if 'options' in wconf and isinstance(wconf['options'], dict):
for key, val in wconf['options'].items():
w.set_window_option(key, val)
if 'focus' in wconf and wconf['focus']:
w.select_window()
s.server._update_windows()
yield w, wconf
def iter_create_panes(self, w, wconf):
"""
Return :class:`libtmux.Pane` iterating through window config dict.
Run ``shell_command`` with ``$ tmux send-keys``.
Parameters
----------
w : :class:`libtmux.Window`
window to create panes for
wconf : dict
config section for window
Returns
-------
tuple of (:class:`libtmux.Pane`, ``pconf``)
Newly created pane, and the section from the tmuxp configuration
that was used to create the pane.
"""
assert isinstance(w, Window)
pane_base_index = int(w.show_window_option('pane-base-index', g=True))
p = None
for pindex, pconf in enumerate(wconf['panes'], start=pane_base_index):
if pindex == int(pane_base_index):
p = w.attached_pane
else:
def get_pane_start_directory():
if 'start_directory' in pconf:
return pconf['start_directory']
elif 'start_directory' in wconf:
return wconf['start_directory']
else:
return None
p = w.split_window(
attach=True, start_directory=get_pane_start_directory(), target=p.id
)
assert isinstance(p, Pane)
if 'layout' in wconf:
w.select_layout(wconf['layout'])
if 'suppress_history' in pconf:
suppress = pconf['suppress_history']
elif 'suppress_history' in wconf:
suppress = wconf['suppress_history']
else:
suppress = True
for cmd in pconf['shell_command']:
p.send_keys(cmd, suppress_history=suppress)
if 'focus' in pconf and pconf['focus']:
w.select_pane(p['pane_id'])
w.server._update_panes()
yield p, pconf
def config_after_window(self, w, wconf):
"""Actions to apply to window after window and pane finished.
        When building a tmux session, sometimes it's easier to postpone things
like setting options until after things are already structurally
prepared.
Parameters
----------
w : :class:`libtmux.Window`
window to create panes for
wconf : dict
config section for window
"""
if 'options_after' in wconf and isinstance(wconf['options_after'], dict):
for key, val in wconf['options_after'].items():
w.set_window_option(key, val)
def freeze(session):
"""
    Freeze a live tmux session and return the session config :py:obj:`dict`.
Parameters
----------
session : :class:`libtmux.Session`
session object
Returns
-------
dict
tmuxp compatible workspace config
"""
sconf = {'session_name': session['session_name'], 'windows': []}
for w in session.windows:
wconf = {
'options': w.show_window_options(),
'window_name': w.name,
'layout': w.layout,
'panes': [],
}
if w.get('window_active', '0') == '1':
wconf['focus'] = 'true'
# If all panes have same path, set 'start_directory' instead
# of using 'cd' shell commands.
def pane_has_same_path(p):
return w.panes[0].current_path == p.current_path
if all(pane_has_same_path(p) for p in w.panes):
wconf['start_directory'] = w.panes[0].current_path
for p in w.panes:
pconf = {'shell_command': []}
if 'start_directory' not in wconf:
pconf['shell_command'].append('cd ' + p.current_path)
if p.get('pane_active', '0') == '1':
pconf['focus'] = 'true'
current_cmd = p.current_command
            def filter_interpreters_and_shells():
                return current_cmd.startswith('-') or any(
                    current_cmd.endswith(cmd) for cmd in ['python', 'ruby', 'node']
                )
            if filter_interpreters_and_shells():
current_cmd = None
if current_cmd:
pconf['shell_command'].append(current_cmd)
else:
if not len(pconf['shell_command']):
pconf = 'pane'
wconf['panes'].append(pconf)
sconf['windows'].append(wconf)
return sconf
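# Illustrative usage (not part of the original module): freeze() takes a live
# libtmux Session and returns a tmuxp-style dict that can be dumped to YAML, e.g.
#   session = Server().find_where({'session_name': 'mysession'})
#   sconf = freeze(session)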
|
import logging
import voluptuous as vol
from homeassistant.components.cover import PLATFORM_SCHEMA, CoverEntity
from homeassistant.const import CONF_NAME, CONF_TYPE, STATE_OPEN
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.restore_state import RestoreEntity
from . import (
CONF_ALIASES,
CONF_DEVICE_DEFAULTS,
CONF_DEVICES,
CONF_FIRE_EVENT,
CONF_GROUP,
CONF_GROUP_ALIASES,
CONF_NOGROUP_ALIASES,
CONF_SIGNAL_REPETITIONS,
DEVICE_DEFAULTS_SCHEMA,
RflinkCommand,
)
_LOGGER = logging.getLogger(__name__)
PARALLEL_UPDATES = 0
TYPE_STANDARD = "standard"
TYPE_INVERTED = "inverted"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(
CONF_DEVICE_DEFAULTS, default=DEVICE_DEFAULTS_SCHEMA({})
): DEVICE_DEFAULTS_SCHEMA,
vol.Optional(CONF_DEVICES, default={}): vol.Schema(
{
cv.string: {
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_TYPE): vol.Any(TYPE_STANDARD, TYPE_INVERTED),
vol.Optional(CONF_ALIASES, default=[]): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_GROUP_ALIASES, default=[]): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_NOGROUP_ALIASES, default=[]): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_FIRE_EVENT, default=False): cv.boolean,
vol.Optional(CONF_SIGNAL_REPETITIONS): vol.Coerce(int),
vol.Optional(CONF_GROUP, default=True): cv.boolean,
}
}
),
}
)
def entity_type_for_device_id(device_id):
"""Return entity class for protocol of a given device_id.
Async friendly.
"""
entity_type_mapping = {
# KlikAanKlikUit cover have the controls inverted
"newkaku": TYPE_INVERTED
}
protocol = device_id.split("_")[0]
return entity_type_mapping.get(protocol, TYPE_STANDARD)
def entity_class_for_type(entity_type):
"""Translate entity type to entity class.
Async friendly.
"""
entity_device_mapping = {
# default cover implementation
TYPE_STANDARD: RflinkCover,
# cover with open/close commands inverted
# like KAKU/COCO ASUN-650
TYPE_INVERTED: InvertedRflinkCover,
}
return entity_device_mapping.get(entity_type, RflinkCover)
def devices_from_config(domain_config):
"""Parse configuration and add Rflink cover devices."""
devices = []
for device_id, config in domain_config[CONF_DEVICES].items():
# Determine what kind of entity to create, RflinkCover
# or InvertedRflinkCover
if CONF_TYPE in config:
            # Remove type from config to not pass it as an argument
# to entity instantiation
entity_type = config.pop(CONF_TYPE)
else:
entity_type = entity_type_for_device_id(device_id)
entity_class = entity_class_for_type(entity_type)
device_config = dict(domain_config[CONF_DEVICE_DEFAULTS], **config)
device = entity_class(device_id, **device_config)
devices.append(device)
return devices
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Rflink cover platform."""
async_add_entities(devices_from_config(config))
class RflinkCover(RflinkCommand, CoverEntity, RestoreEntity):
"""Rflink entity which can switch on/stop/off (eg: cover)."""
async def async_added_to_hass(self):
"""Restore RFLink cover state (OPEN/CLOSE)."""
await super().async_added_to_hass()
old_state = await self.async_get_last_state()
if old_state is not None:
self._state = old_state.state == STATE_OPEN
def _handle_event(self, event):
"""Adjust state if Rflink picks up a remote command for this device."""
self.cancel_queued_send_commands()
command = event["command"]
if command in ["on", "allon", "up"]:
self._state = True
elif command in ["off", "alloff", "down"]:
self._state = False
@property
def should_poll(self):
"""No polling available in RFlink cover."""
return False
@property
def is_closed(self):
"""Return if the cover is closed."""
return not self._state
@property
def assumed_state(self):
"""Return True because covers can be stopped midway."""
return True
async def async_close_cover(self, **kwargs):
"""Turn the device close."""
await self._async_handle_command("close_cover")
async def async_open_cover(self, **kwargs):
"""Turn the device open."""
await self._async_handle_command("open_cover")
async def async_stop_cover(self, **kwargs):
"""Turn the device stop."""
await self._async_handle_command("stop_cover")
class InvertedRflinkCover(RflinkCover):
"""Rflink cover that has inverted open/close commands."""
async def _async_send_command(self, cmd, repetitions):
"""Will invert only the UP/DOWN commands."""
_LOGGER.debug("Getting command: %s for Rflink device: %s", cmd, self._device_id)
cmd_inv = {"UP": "DOWN", "DOWN": "UP"}
await super()._async_send_command(cmd_inv.get(cmd, cmd), repetitions)
|
from unittest import TestCase
import numpy as np
from scipy.sparse import csr_matrix
import scattertext
from scattertext.CSRMatrixTools import CSRMatrixFactory
class TestCSRMatrixFactory(TestCase):
def test_main(self):
mat_factory = CSRMatrixFactory()
mat_factory[0, 0] = 4
mat_factory[1, 5] = 3
mat = mat_factory.get_csr_matrix()
self.assertEqual(type(mat), csr_matrix)
np.testing.assert_array_almost_equal(
np.array([[4, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 3]]),
mat.todense())
def test_delete_row(self):
a = csr_matrix(np.array([[0, 1, 3, 0, 1, 0],
[0, 0, 1, 0, 1, 1],
[0, 5, 1, 0, 5, 5]]))
b = scattertext.CSRMatrixTools.delete_columns(a, [0, 3])
desired_array = np.array([[1, 3, 1, 0],
[0, 1, 1, 1],
[5, 1, 5, 5]])
self.assertEqual(type(b), csr_matrix)
np.testing.assert_array_almost_equal(b.todense(), desired_array)
self.assertEqual(a.shape, (3,6))
def test_typing(self):
mat_factory = CSRMatrixFactory()
mat_factory[0, 0] = 4
mat_factory[1, 5] = 3.1
mat = mat_factory.get_csr_matrix()
self.assertEqual(type(mat), csr_matrix)
np.testing.assert_array_almost_equal(
np.array([[4, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 3.1]]),
mat.todense())
mat = mat_factory.get_csr_matrix(dtype=np.bool)
self.assertEqual(type(mat), csr_matrix)
np.testing.assert_array_almost_equal(
np.array([[1, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 1]]),
mat.todense())
mat = mat_factory.get_csr_matrix(dtype=np.int32)
self.assertEqual(type(mat), csr_matrix)
np.testing.assert_array_almost_equal(
np.array([[4, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 3]]),
mat.todense())
|
import collections
import functools
import json
import logging
import re
from absl import flags
from perfkitbenchmarker import context
from perfkitbenchmarker import errors
from perfkitbenchmarker import virtual_machine
from perfkitbenchmarker import vm_util
import six
FLAGS = flags.FLAGS
RATE_LIMITED_MESSAGE = 'Rate Limit Exceeded'
RATE_LIMITED_MAX_RETRIES = 10
# 200s is chosen because 1) quota is measured in 100s intervals and 2) fuzzing
# causes a random number between 100 and this to be chosen.
RATE_LIMITED_MAX_POLLING_INTERVAL = 200
# This must be set. Otherwise, calling Issue() will fail in util_test.py.
RATE_LIMITED_FUZZ = 0.5
RATE_LIMITED_TIMEOUT = 1200
STOCKOUT_MESSAGE = ('Creation failed due to insufficient capacity indicating a '
'potential stockout scenario.')
@functools.lru_cache()
def GetDefaultProject():
"""Get the default project."""
cmd = [FLAGS.gcloud_path, 'config', 'list', '--format=json']
stdout, _, _ = vm_util.IssueCommand(cmd)
result = json.loads(stdout)
return result['core']['project']
@functools.lru_cache()
def GetDefaultUser():
"""Get the default project."""
cmd = [FLAGS.gcloud_path, 'config', 'list', '--format=json']
stdout, _, _ = vm_util.IssueCommand(cmd)
result = json.loads(stdout)
return result['core']['account']
def GetRegionFromZone(zone):
"""Returns the region name from a fully-qualified zone name.
Each fully-qualified GCP zone name is formatted as <region>-<zone> where, for
example, each region looks like us-central1, europe-west1, or asia-east1.
  Therefore, we pull the first two parts of the fully-qualified zone name delimited
by a dash and assume the rest is the name of the zone. See
https://cloud.google.com/compute/docs/regions-zones for more information.
Args:
zone: The fully-qualified name of a GCP zone.
"""
parts = zone.split('-')
return '-'.join(parts[:2])
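# For example (illustrative, not part of the original module):
# GetRegionFromZone('us-central1-a') returns 'us-central1'.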
def GetMultiRegionFromRegion(region):
"""Gets the closest multi-region location to the region."""
if (region.startswith('us') or
region.startswith('northamerica') or
region.startswith('southamerica')):
return 'us'
elif region.startswith('europe'):
return 'eu'
elif region.startswith('asia') or region.startswith('australia'):
return 'asia'
else:
raise Exception('Unknown region "%s".' % region)
def IssueCommandFunction(cmd, **kwargs):
"""Use vm_util to issue the given command.
Args:
cmd: the gcloud command to run
**kwargs: additional arguments for the gcloud command
Returns:
stdout, stderr, retcode tuple from running the command
"""
return vm_util.IssueCommand(cmd.GetCommand(), **kwargs)
def IssueRetryableCommandFunction(cmd, **kwargs):
"""Use vm_util to issue the given retryable command.
Args:
cmd: the gcloud command to run
**kwargs: additional arguments for the gcloud command
Returns:
    (stdout, stderr) pair of strings from running the command
"""
return vm_util.IssueRetryableCommand(cmd.GetCommand(), **kwargs)
# The function that is used to issue a command, when given a GcloudCommand
# object and additional arguments. Can be overridden.
_issue_command_function = IssueCommandFunction
# The function that is used to issue a retryable command, when given a
# GcloudCommand object and additional arguments. Can be overridden.
_issue_retryable_command_function = IssueRetryableCommandFunction
def SetIssueCommandFunction(func):
"""Set the issue command function to be the given function.
Args:
func: the function to run when issuing a GcloudCommand.
"""
global _issue_command_function
_issue_command_function = func
def SetIssueRetryableCommandFunction(func):
"""Set the issue retryable command function to be the given function.
Args:
func: the function to run when issuing a GcloudCommand.
"""
global _issue_retryable_command_function
_issue_retryable_command_function = func
class GcloudCommand(object):
"""A gcloud command.
Attributes:
args: list of strings. Non-flag args to pass to gcloud, typically
specifying an operation to perform (e.g. ['compute', 'images', 'list']
to list available images).
flags: OrderedDict mapping flag name string to flag value. Flags to pass to
gcloud (e.g. {'project': 'my-project-id'}). If a provided value is
True, the flag is passed to gcloud without a value. If a provided value
is a list, the flag is passed to gcloud multiple times, once with each
value in the list.
additional_flags: list of strings. Additional flags to append unmodified to
the end of the gcloud command (e.g. ['--metadata', 'color=red']).
rate_limited: boolean. True if rate limited, False otherwise.
"""
def __init__(self, resource, *args):
"""Initializes a GcloudCommand with the provided args and common flags.
Args:
resource: A GCE resource of type BaseResource.
*args: sequence of strings. Non-flag args to pass to gcloud, typically
specifying an operation to perform (e.g. ['compute', 'images', 'list']
to list available images).
"""
self.args = list(args)
self.flags = collections.OrderedDict()
self.additional_flags = []
self._AddCommonFlags(resource)
self.rate_limited = False
def GetCommand(self):
"""Generates the gcloud command.
Returns:
list of strings. When joined by spaces, forms the gcloud shell command.
"""
cmd = [FLAGS.gcloud_path]
cmd.extend(self.args)
for flag_name, values in sorted(self.flags.items()):
flag_name_str = '--{0}'.format(flag_name)
if values is True:
cmd.append(flag_name_str)
else:
values_iterable = values if isinstance(values, list) else [values]
for value in values_iterable:
cmd.append(flag_name_str)
cmd.append(str(value))
cmd.extend(self.additional_flags)
return cmd
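  # Illustrative sketch (not from the original module): for a command built as
  # GcloudCommand(None, 'compute', 'images', 'list'), the common flags added in
  # __init__ make GetCommand() return
  # [FLAGS.gcloud_path, 'compute', 'images', 'list', '--format', 'json', '--quiet'].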
def __repr__(self):
return '{0}({1})'.format(type(self).__name__, ' '.join(self.GetCommand()))
@vm_util.Retry(
poll_interval=RATE_LIMITED_MAX_POLLING_INTERVAL,
max_retries=RATE_LIMITED_MAX_RETRIES,
fuzz=RATE_LIMITED_FUZZ,
timeout=RATE_LIMITED_TIMEOUT,
retryable_exceptions=(
errors.Benchmarks.QuotaFailure.RateLimitExceededError,))
def Issue(self, **kwargs):
"""Tries to run the gcloud command once, retrying if Rate Limited.
Args:
**kwargs: Keyword arguments to forward to vm_util.IssueCommand when
issuing the gcloud command.
Returns:
A tuple of stdout, stderr, and retcode from running the gcloud command.
Raises:
RateLimitExceededError: if command fails with Rate Limit Exceeded.
IssueCommandError: if command fails without Rate Limit Exceeded.
"""
if FLAGS.retry_on_rate_limited:
try:
stdout, stderr, retcode = _issue_command_function(self, **kwargs)
except errors.VmUtil.IssueCommandError as error:
if RATE_LIMITED_MESSAGE in str(error):
self.rate_limited = True
raise errors.Benchmarks.QuotaFailure.RateLimitExceededError(
str(error))
else:
raise error
if retcode and RATE_LIMITED_MESSAGE in stderr:
self.rate_limited = True
raise errors.Benchmarks.QuotaFailure.RateLimitExceededError(stderr)
return stdout, stderr, retcode
else:
return _issue_command_function(self, **kwargs)
def IssueRetryable(self, **kwargs):
"""Tries running the gcloud command until it succeeds or times out.
Args:
**kwargs: Keyword arguments to forward to vm_util.IssueRetryableCommand
when issuing the gcloud command.
Returns:
(stdout, stderr) pair of strings from running the gcloud command.
"""
return _issue_retryable_command_function(self, **kwargs)
def _AddCommonFlags(self, resource):
"""Adds common flags to the command.
Adds common gcloud flags derived from the PKB flags and provided resource.
Args:
resource: A GCE resource of type BaseResource.
"""
self.flags['format'] = 'json'
self.flags['quiet'] = True
if resource:
if resource.project is not None:
self.flags['project'] = resource.project
if hasattr(resource, 'zone') and resource.zone:
self.flags['zone'] = resource.zone
self.additional_flags.extend(FLAGS.additional_gcloud_flags or ())
_QUOTA_EXCEEDED_REGEX = re.compile(
r"(Quota '.*' exceeded|Insufficient \w+ quota)")
_NOT_ENOUGH_RESOURCES_STDERR = ('does not have enough resources available to '
'fulfill the request.')
_NOT_ENOUGH_RESOURCES_MESSAGE = 'Creation failed due to not enough resources: '
def CheckGcloudResponseKnownFailures(stderr, retcode):
"""Checks gcloud responses for quota exceeded errors.
Args:
stderr: The stderr from a gcloud command.
retcode: The return code from a gcloud command.
"""
if retcode:
if _QUOTA_EXCEEDED_REGEX.search(stderr):
message = virtual_machine.QUOTA_EXCEEDED_MESSAGE + stderr
logging.error(message)
raise errors.Benchmarks.QuotaFailure(message)
if _NOT_ENOUGH_RESOURCES_STDERR in stderr:
message = _NOT_ENOUGH_RESOURCES_MESSAGE + stderr
logging.error(message)
raise errors.Benchmarks.InsufficientCapacityCloudFailure(message)
def AuthenticateServiceAccount(vm, vm_gcloud_path='gcloud', benchmark=None):
"""Authorize gcloud to access Google Cloud Platform with a service account.
If you want gcloud (and other tools in the Cloud SDK) to use service account
credentials to make requests, use this method to authenticate.
Account name is provided by FLAGS.gcp_service_account
Credentials are fetched from a file whose local path is provided by
  FLAGS.gcp_service_account_key_file, which contains the private authorization
  key. In the absence of a locally supplied credential file, the file is
  retrieved from the pre-provisioned data bucket.
Args:
vm: vm on which the gcloud library needs to be authenticated.
vm_gcloud_path: Optional path to the gcloud binary on the vm.
benchmark: The module for retrieving the associated service account file.
"""
if not FLAGS.gcp_service_account:
raise errors.Setup.InvalidFlagConfigurationError(
'Authentication requires the service account name to be '
'specified via --gcp_service_account.')
if not FLAGS.gcp_service_account_key_file:
raise errors.Setup.InvalidFlagConfigurationError(
'Authentication requires the service account credential json to be '
'specified via --gcp_service_account_key_file.')
if '/' in FLAGS.gcp_service_account_key_file:
vm.PushFile(FLAGS.gcp_service_account_key_file, vm_util.VM_TMP_DIR)
key_file_name = FLAGS.gcp_service_account_key_file.split('/')[-1]
else:
vm.InstallPreprovisionedBenchmarkData(benchmark,
[FLAGS.gcp_service_account_key_file],
vm_util.VM_TMP_DIR)
key_file_name = FLAGS.gcp_service_account_key_file
activate_cmd = ('{} auth activate-service-account {} --key-file={}/{}'
.format(vm_gcloud_path, FLAGS.gcp_service_account,
vm_util.VM_TMP_DIR, key_file_name))
vm.RemoteCommand(activate_cmd)
def InstallGcloudComponents(vm, vm_gcloud_path='gcloud', component='alpha'):
"""Install gcloud components on the target vm.
Args:
vm: vm on which the gcloud's alpha components need to be installed.
vm_gcloud_path: Optional path to the gcloud binary on the vm.
component: Gcloud component to install.
"""
install_cmd = '{} components install {} --quiet'.format(vm_gcloud_path,
component)
vm.RemoteCommand(install_cmd)
def FormatTags(tags_dict):
"""Format a dict of tags into arguments.
Args:
tags_dict: Tags to be formatted.
Returns:
    A string containing the formatted tags.
"""
return ','.join(
'{0}={1}'.format(k, v) for k, v in sorted(six.iteritems(tags_dict)))
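# For example (illustrative): FormatTags({'owner': 'alice', 'timeout_utc': '1200'})
# returns 'owner=alice,timeout_utc=1200' (keys are sorted alphabetically).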
def GetDefaultTags(timeout_minutes=None):
"""Get the default tags in a dictionary.
Args:
timeout_minutes: Timeout used for setting the timeout_utc tag.
Returns:
A dict of tags, contributed from the benchmark spec.
"""
benchmark_spec = context.GetThreadBenchmarkSpec()
if not benchmark_spec:
return {}
return benchmark_spec.GetResourceTags(timeout_minutes)
def MakeFormattedDefaultTags(timeout_minutes=None):
"""Get the default tags formatted.
Args:
timeout_minutes: Timeout used for setting the timeout_utc tag.
Returns:
    A string containing tags contributed from the benchmark spec.
"""
return FormatTags(GetDefaultTags(timeout_minutes))
|
import asyncio
from datetime import timedelta
from functools import partial
import logging
from pathlib import Path
from typing import Optional
from oauthlib.oauth2 import AccessDeniedError
import requests
from ring_doorbell import Auth, Ring
from homeassistant.const import __version__
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.util.async_ import run_callback_threadsafe
_LOGGER = logging.getLogger(__name__)
ATTRIBUTION = "Data provided by Ring.com"
NOTIFICATION_ID = "ring_notification"
NOTIFICATION_TITLE = "Ring Setup"
DOMAIN = "ring"
DEFAULT_ENTITY_NAMESPACE = "ring"
PLATFORMS = ("binary_sensor", "light", "sensor", "switch", "camera")
async def async_setup(hass, config):
"""Set up the Ring component."""
if DOMAIN not in config:
return True
def legacy_cleanup():
"""Clean up old tokens."""
old_cache = Path(hass.config.path(".ring_cache.pickle"))
if old_cache.is_file():
old_cache.unlink()
await hass.async_add_executor_job(legacy_cleanup)
return True
async def async_setup_entry(hass, entry):
"""Set up a config entry."""
def token_updater(token):
"""Handle from sync context when token is updated."""
run_callback_threadsafe(
hass.loop,
partial(
hass.config_entries.async_update_entry,
entry,
data={**entry.data, "token": token},
),
).result()
auth = Auth(f"HomeAssistant/{__version__}", entry.data["token"], token_updater)
ring = Ring(auth)
try:
await hass.async_add_executor_job(ring.update_data)
except AccessDeniedError:
_LOGGER.error("Access token is no longer valid. Please set up Ring again")
return False
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = {
"api": ring,
"devices": ring.devices(),
"device_data": GlobalDataUpdater(
hass, "device", entry.entry_id, ring, "update_devices", timedelta(minutes=1)
),
"dings_data": GlobalDataUpdater(
hass,
"active dings",
entry.entry_id,
ring,
"update_dings",
timedelta(seconds=5),
),
"history_data": DeviceDataUpdater(
hass,
"history",
entry.entry_id,
ring,
lambda device: device.history(limit=10),
timedelta(minutes=1),
),
"health_data": DeviceDataUpdater(
hass,
"health",
entry.entry_id,
ring,
lambda device: device.update_health_data(),
timedelta(minutes=1),
),
}
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
if hass.services.has_service(DOMAIN, "update"):
return True
async def async_refresh_all(_):
"""Refresh all ring data."""
for info in hass.data[DOMAIN].values():
await info["device_data"].async_refresh_all()
await info["dings_data"].async_refresh_all()
await hass.async_add_executor_job(info["history_data"].refresh_all)
await hass.async_add_executor_job(info["health_data"].refresh_all)
# register service
hass.services.async_register(DOMAIN, "update", async_refresh_all)
return True
async def async_unload_entry(hass, entry):
"""Unload Ring entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
if not unload_ok:
return False
hass.data[DOMAIN].pop(entry.entry_id)
if len(hass.data[DOMAIN]) != 0:
return True
# Last entry unloaded, clean up service
hass.services.async_remove(DOMAIN, "update")
return True
class GlobalDataUpdater:
"""Data storage for single API endpoint."""
def __init__(
self,
hass: HomeAssistant,
data_type: str,
config_entry_id: str,
ring: Ring,
update_method: str,
update_interval: timedelta,
):
"""Initialize global data updater."""
self.hass = hass
self.data_type = data_type
self.config_entry_id = config_entry_id
self.ring = ring
self.update_method = update_method
self.update_interval = update_interval
self.listeners = []
self._unsub_interval = None
@callback
def async_add_listener(self, update_callback):
"""Listen for data updates."""
# This is the first listener, set up interval.
if not self.listeners:
self._unsub_interval = async_track_time_interval(
self.hass, self.async_refresh_all, self.update_interval
)
self.listeners.append(update_callback)
@callback
def async_remove_listener(self, update_callback):
"""Remove data update."""
self.listeners.remove(update_callback)
if not self.listeners:
self._unsub_interval()
self._unsub_interval = None
async def async_refresh_all(self, _now: Optional[int] = None) -> None:
"""Time to update."""
if not self.listeners:
return
try:
await self.hass.async_add_executor_job(
getattr(self.ring, self.update_method)
)
except AccessDeniedError:
_LOGGER.error("Ring access token is no longer valid. Set up Ring again")
await self.hass.config_entries.async_unload(self.config_entry_id)
return
except requests.Timeout:
_LOGGER.warning(
"Time out fetching Ring %s data",
self.data_type,
)
return
except requests.RequestException as err:
_LOGGER.warning(
"Error fetching Ring %s data: %s",
self.data_type,
err,
)
return
for update_callback in self.listeners:
update_callback()
class DeviceDataUpdater:
"""Data storage for device data."""
def __init__(
self,
hass: HomeAssistant,
data_type: str,
config_entry_id: str,
ring: Ring,
update_method: str,
update_interval: timedelta,
):
"""Initialize device data updater."""
self.data_type = data_type
self.hass = hass
self.config_entry_id = config_entry_id
self.ring = ring
self.update_method = update_method
self.update_interval = update_interval
self.devices = {}
self._unsub_interval = None
async def async_track_device(self, device, update_callback):
"""Track a device."""
if not self.devices:
self._unsub_interval = async_track_time_interval(
self.hass, self.refresh_all, self.update_interval
)
if device.device_id not in self.devices:
self.devices[device.device_id] = {
"device": device,
"update_callbacks": [update_callback],
"data": None,
}
# Store task so that other concurrent requests can wait for us to finish and
# data be available.
self.devices[device.device_id]["task"] = asyncio.current_task()
self.devices[device.device_id][
"data"
] = await self.hass.async_add_executor_job(self.update_method, device)
self.devices[device.device_id].pop("task")
else:
self.devices[device.device_id]["update_callbacks"].append(update_callback)
# If someone is currently fetching data as part of the initialization, wait for them
if "task" in self.devices[device.device_id]:
await self.devices[device.device_id]["task"]
update_callback(self.devices[device.device_id]["data"])
@callback
def async_untrack_device(self, device, update_callback):
"""Untrack a device."""
self.devices[device.device_id]["update_callbacks"].remove(update_callback)
if not self.devices[device.device_id]["update_callbacks"]:
self.devices.pop(device.device_id)
if not self.devices:
self._unsub_interval()
self._unsub_interval = None
def refresh_all(self, _=None):
"""Refresh all registered devices."""
for device_id, info in self.devices.items():
try:
data = info["data"] = self.update_method(info["device"])
except AccessDeniedError:
_LOGGER.error("Ring access token is no longer valid. Set up Ring again")
self.hass.add_job(
self.hass.config_entries.async_unload(self.config_entry_id)
)
return
except requests.Timeout:
_LOGGER.warning(
"Time out fetching Ring %s data for device %s",
self.data_type,
device_id,
)
continue
except requests.RequestException as err:
_LOGGER.warning(
"Error fetching Ring %s data for device %s: %s",
self.data_type,
device_id,
err,
)
continue
for update_callback in info["update_callbacks"]:
self.hass.loop.call_soon_threadsafe(update_callback, data)
|
from django.conf import settings
from django.contrib import admin
from django.contrib.admin import AdminSite
from django.contrib.auth.views import LogoutView
from django.shortcuts import render
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
from django.views.decorators.cache import never_cache
from rest_framework.authtoken.admin import TokenAdmin
from rest_framework.authtoken.models import Token
from social_django.admin import AssociationOption, NonceOption, UserSocialAuthOption
from social_django.models import Association, Nonce, UserSocialAuth
from weblate.accounts.admin import AuditLogAdmin, ProfileAdmin, VerifiedEmailAdmin
from weblate.accounts.forms import AdminLoginForm
from weblate.accounts.models import AuditLog, Profile, VerifiedEmail
from weblate.auth.admin import RoleAdmin, WeblateGroupAdmin, WeblateUserAdmin
from weblate.auth.models import Group, Role, User
from weblate.checks.admin import CheckAdmin
from weblate.checks.models import Check
from weblate.configuration.admin import SettingAdmin
from weblate.configuration.models import Setting
from weblate.fonts.admin import FontAdmin, FontGroupAdmin
from weblate.fonts.models import Font, FontGroup
from weblate.glossary.admin import GlossaryAdmin, TermAdmin
from weblate.glossary.models import Glossary, Term
from weblate.lang.admin import LanguageAdmin
from weblate.lang.models import Language
from weblate.memory.admin import MemoryAdmin
from weblate.memory.models import Memory
from weblate.screenshots.admin import ScreenshotAdmin
from weblate.screenshots.models import Screenshot
from weblate.trans.admin import (
AnnouncementAdmin,
ChangeAdmin,
CommentAdmin,
ComponentAdmin,
ComponentListAdmin,
ContributorAgreementAdmin,
ProjectAdmin,
SuggestionAdmin,
TranslationAdmin,
UnitAdmin,
)
from weblate.trans.models import (
Announcement,
Change,
Comment,
Component,
ComponentList,
ContributorAgreement,
Project,
Suggestion,
Translation,
Unit,
)
from weblate.utils import messages
from weblate.wladmin.models import ConfigurationError
class WeblateAdminSite(AdminSite):
login_form = AdminLoginForm
site_header = _("Weblate administration")
site_title = _("Weblate administration")
index_template = "admin/weblate-index.html"
enable_nav_sidebar = False
@property
def site_url(self):
if settings.URL_PREFIX:
return settings.URL_PREFIX
return "/"
def discover(self):
"""Manual discovery."""
# Accounts
self.register(User, WeblateUserAdmin)
self.register(Role, RoleAdmin)
self.register(Group, WeblateGroupAdmin)
self.register(AuditLog, AuditLogAdmin)
self.register(Profile, ProfileAdmin)
self.register(VerifiedEmail, VerifiedEmailAdmin)
# Languages
self.register(Language, LanguageAdmin)
# Memory
self.register(Memory, MemoryAdmin)
# Screenshots
self.register(Screenshot, ScreenshotAdmin)
# Fonts
self.register(Font, FontAdmin)
self.register(FontGroup, FontGroupAdmin)
# Translations
self.register(Project, ProjectAdmin)
self.register(Component, ComponentAdmin)
self.register(Announcement, AnnouncementAdmin)
self.register(ComponentList, ComponentListAdmin)
self.register(ContributorAgreement, ContributorAgreementAdmin)
self.register(Glossary, GlossaryAdmin)
# Settings
self.register(Setting, SettingAdmin)
# Show some controls only in debug mode
if settings.DEBUG:
self.register(Translation, TranslationAdmin)
self.register(Unit, UnitAdmin)
self.register(Suggestion, SuggestionAdmin)
self.register(Comment, CommentAdmin)
self.register(Check, CheckAdmin)
self.register(Term, TermAdmin)
self.register(Change, ChangeAdmin)
# Billing
if "weblate.billing" in settings.INSTALLED_APPS:
# pylint: disable=wrong-import-position
from weblate.billing.admin import BillingAdmin, InvoiceAdmin, PlanAdmin
from weblate.billing.models import Billing, Invoice, Plan
self.register(Plan, PlanAdmin)
self.register(Billing, BillingAdmin)
self.register(Invoice, InvoiceAdmin)
# Hosted
if "wlhosted.integrations" in settings.INSTALLED_APPS:
# pylint: disable=wrong-import-position
from wlhosted.payments.admin import CustomerAdmin, PaymentAdmin
from wlhosted.payments.models import Customer, Payment
self.register(Customer, CustomerAdmin)
self.register(Payment, PaymentAdmin)
# Legal
if "weblate.legal" in settings.INSTALLED_APPS:
# pylint: disable=wrong-import-position
from weblate.legal.admin import AgreementAdmin
from weblate.legal.models import Agreement
self.register(Agreement, AgreementAdmin)
# SAML identity provider
if "djangosaml2idp" in settings.INSTALLED_APPS:
# pylint: disable=wrong-import-position
from djangosaml2idp.admin import PersistentIdAdmin, ServiceProviderAdmin
from djangosaml2idp.models import PersistentId, ServiceProvider
self.register(PersistentId, PersistentIdAdmin)
self.register(ServiceProvider, ServiceProviderAdmin)
# Python Social Auth
self.register(UserSocialAuth, UserSocialAuthOption)
self.register(Nonce, NonceOption)
self.register(Association, AssociationOption)
# Django REST Framework
self.register(Token, TokenAdmin)
# Simple SSO
if "simple_sso.sso_server" in settings.INSTALLED_APPS:
from simple_sso.sso_server.models import Consumer
from simple_sso.sso_server.server import ConsumerAdmin
self.register(Consumer, ConsumerAdmin)
@never_cache
def logout(self, request, extra_context=None):
if request.method == "POST":
messages.info(request, _("Thank you for using Weblate."))
request.current_app = self.name
return LogoutView.as_view(next_page=reverse("admin:login"))(request)
context = self.each_context(request)
context["title"] = _("Sign out")
return render(request, "admin/logout-confirm.html", context)
def each_context(self, request):
result = super().each_context(request)
empty = [_("Object listing turned off")]
result["empty_selectable_objects_list"] = [empty]
result["empty_objects_list"] = empty
result["configuration_errors"] = ConfigurationError.objects.filter(
ignored=False
)
return result
@property
def urls(self):
return self.get_urls()
SITE = WeblateAdminSite()
SITE.discover()
admin.site = SITE
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
from compare_gan.metrics import eval_task
import numpy as np
import tensorflow as tf
class KIDScoreTask(eval_task.EvalTask):
"""Evaluation task for the KID score."""
_LABEL = "kid_score"
def run_after_session(self, fake_dset, real_dset):
score = kid(fake_dset.activations, real_dset.activations)
return {self._LABEL: score}
def kid(fake_activations,
real_activations,
max_batch_size=1024,
dtype=None,
return_stderr=False):
"""Unbiased estimator of the Kernel Inception Distance.
As defined by https://arxiv.org/abs/1801.01401.
If return_stderr, also returns an estimate of the standard error, i.e. the
standard deviation of the KID estimator. Returns nan if the number
of batches is too small (< 5); for more reliable estimates, one could use
the asymptotic variance estimate given in https://arxiv.org/abs/1611.04488.
Uses a block estimator, as in https://arxiv.org/abs/1307.1954, with blocks
  no larger than max_batch_size. This is slightly different from the authors'
  provided code, but is also unbiased (and provides a more valid variance
  estimate).
NOTE: the blocking code assumes that real_activations and
fake_activations are in random order. If real_activations is sorted
in a meaningful order, the estimator will be biased.
Args:
fake_activations: [batch, num_features] tensor with inception features.
real_activations: [batch, num_features] tensor with inception features.
    max_batch_size: Maximum block size used when splitting the activations into
      batches for the KID computation.
dtype: Type used by the computations.
return_stderr: If true, also returns the std_error from the KID computation.
Returns:
KID score (and optionally std error).
"""
real_activations.get_shape().assert_has_rank(2)
fake_activations.get_shape().assert_has_rank(2)
# need to know dimension for the kernel, and batch size to split things
real_activations.get_shape().assert_is_fully_defined()
fake_activations.get_shape().assert_is_fully_defined()
n_real, dim = real_activations.get_shape().as_list()
n_gen, dim2 = fake_activations.get_shape().as_list()
assert dim2 == dim
# tensorflow_gan forces doubles for FID, but I don't think we need that here
if dtype is None:
dtype = real_activations.dtype
assert fake_activations.dtype == dtype
else:
real_activations = tf.cast(real_activations, dtype)
fake_activations = tf.cast(fake_activations, dtype)
# split into largest approximately-equally-sized blocks
n_bins = int(math.ceil(max(n_real, n_gen) / max_batch_size))
bins_r = np.full(n_bins, int(math.ceil(n_real / n_bins)))
bins_g = np.full(n_bins, int(math.ceil(n_gen / n_bins)))
bins_r[:(n_bins * bins_r[0]) - n_real] -= 1
  bins_g[:(n_bins * bins_g[0]) - n_gen] -= 1
assert bins_r.min() >= 2
assert bins_g.min() >= 2
inds_r = tf.constant(np.r_[0, np.cumsum(bins_r)])
inds_g = tf.constant(np.r_[0, np.cumsum(bins_g)])
dim_ = tf.cast(dim, dtype)
def get_kid_batch(i):
"""Computes KID on a given batch of features.
Takes real_activations[ind_r[i] : ind_r[i+1]] and
fake_activations[ind_g[i] : ind_g[i+1]].
Args:
      i: Index of the batch.
Returns:
KID for the given batch.
"""
r_s = inds_r[i]
r_e = inds_r[i + 1]
r = real_activations[r_s:r_e]
m = tf.cast(r_e - r_s, dtype)
g_s = inds_g[i]
g_e = inds_g[i + 1]
g = fake_activations[g_s:g_e]
    n = tf.cast(g_e - g_s, dtype)
# Could probably do this a bit faster...
k_rr = (tf.matmul(r, r, transpose_b=True) / dim_ + 1)**3
k_rg = (tf.matmul(r, g, transpose_b=True) / dim_ + 1)**3
k_gg = (tf.matmul(g, g, transpose_b=True) / dim_ + 1)**3
return (
-2 * tf.reduce_mean(k_rg) + (tf.reduce_sum(k_rr) - tf.trace(k_rr)) /
(m * (m - 1)) + (tf.reduce_sum(k_gg) - tf.trace(k_gg)) / (n * (n - 1)))
ests = tf.map_fn(
get_kid_batch, np.arange(n_bins), dtype=dtype, back_prop=False)
if return_stderr:
if n_bins < 5:
return tf.reduce_mean(ests), np.nan
mn, var = tf.nn.moments(ests, [0])
return mn, tf.sqrt(var / n_bins)
else:
return tf.reduce_mean(ests)
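# Illustrative reference only (an assumption, not part of the original module):
# the same unbiased block estimate written in NumPy for a single pair of blocks.
# It mirrors get_kid_batch above -- cubic polynomial kernel k(x, y) = (x.y/dim + 1)^3,
# with the diagonals of k_rr and k_gg excluded so the estimate stays unbiased.
def _kid_block_reference(r, g):
  """NumPy sketch of the unbiased MMD^2 estimate for one block (r: real, g: fake)."""
  m, dim = r.shape
  n = g.shape[0]
  k_rr = (r.dot(r.T) / dim + 1) ** 3
  k_rg = (r.dot(g.T) / dim + 1) ** 3
  k_gg = (g.dot(g.T) / dim + 1) ** 3
  return (-2 * k_rg.mean()
          + (k_rr.sum() - np.trace(k_rr)) / (m * (m - 1))
          + (k_gg.sum() - np.trace(k_gg)) / (n * (n - 1)))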
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
from compare_gan import utils
import gin
import six
import tensorflow as tf
@six.add_metaclass(abc.ABCMeta)
class _Module(object):
"""Base class for architectures.
Long term this will be replaced by `tf.Module` in TF 2.0.
"""
def __init__(self, name):
self._name = name
@property
def name(self):
return self._name
@property
def trainable_variables(self):
return [var for var in tf.trainable_variables() if self._name in var.name]
@gin.configurable("G", blacklist=["name", "image_shape"])
class AbstractGenerator(_Module):
"""Interface for generator architectures."""
def __init__(self,
name="generator",
image_shape=None,
batch_norm_fn=None,
spectral_norm=False):
"""Constructor for all generator architectures.
Args:
name: Scope name of the generator.
image_shape: Image shape to be generated, [height, width, colors].
batch_norm_fn: Function for batch normalization or None.
spectral_norm: If True use spectral normalization for all weights.
"""
super(AbstractGenerator, self).__init__(name=name)
self._name = name
self._image_shape = image_shape
self._batch_norm_fn = batch_norm_fn
self._spectral_norm = spectral_norm
def __call__(self, z, y, is_training, reuse=tf.AUTO_REUSE):
with tf.variable_scope(self.name, values=[z, y], reuse=reuse):
outputs = self.apply(z=z, y=y, is_training=is_training)
return outputs
def batch_norm(self, inputs, **kwargs):
if self._batch_norm_fn is None:
return inputs
args = kwargs.copy()
args["inputs"] = inputs
if "use_sn" not in args:
args["use_sn"] = self._spectral_norm
return utils.call_with_accepted_args(self._batch_norm_fn, **args)
@abc.abstractmethod
def apply(self, z, y, is_training):
"""Apply the generator on a input.
Args:
z: `Tensor` of shape [batch_size, z_dim] with latent code.
y: `Tensor` of shape [batch_size, num_classes] with one hot encoded
labels.
is_training: Boolean, whether the architecture should be constructed for
training or inference.
Returns:
Generated images of shape [batch_size] + self.image_shape.
"""
@gin.configurable("D", blacklist=["name"])
class AbstractDiscriminator(_Module):
"""Interface for discriminator architectures."""
def __init__(self,
name="discriminator",
batch_norm_fn=None,
layer_norm=False,
spectral_norm=False):
super(AbstractDiscriminator, self).__init__(name=name)
self._name = name
self._batch_norm_fn = batch_norm_fn
self._layer_norm = layer_norm
self._spectral_norm = spectral_norm
def __call__(self, x, y, is_training, reuse=tf.AUTO_REUSE):
with tf.variable_scope(self.name, values=[x, y], reuse=reuse):
outputs = self.apply(x=x, y=y, is_training=is_training)
return outputs
def batch_norm(self, inputs, **kwargs):
if self._batch_norm_fn is None:
return inputs
args = kwargs.copy()
args["inputs"] = inputs
if "use_sn" not in args:
args["use_sn"] = self._spectral_norm
return utils.call_with_accepted_args(self._batch_norm_fn, **args)
@abc.abstractmethod
def apply(self, x, y, is_training):
"""Apply the discriminator on a input.
Args:
x: `Tensor` of shape [batch_size, ?, ?, ?] with real or fake images.
y: `Tensor` of shape [batch_size, num_classes] with one hot encoded
labels.
is_training: Boolean, whether the architecture should be constructed for
training or inference.
Returns:
      Tuple of 3 Tensors, the final prediction of the discriminator, the logits
      before the final output activation function, and the logits from the
      second-to-last layer.
"""
|
import logging
import bthomehub5_devicelist
import voluptuous as vol
from homeassistant.components.device_tracker import (
DOMAIN,
PLATFORM_SCHEMA,
DeviceScanner,
)
from homeassistant.const import CONF_HOST
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
CONF_DEFAULT_IP = "192.168.1.254"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Optional(CONF_HOST, default=CONF_DEFAULT_IP): cv.string}
)
def get_scanner(hass, config):
"""Return a BT Home Hub 5 scanner if successful."""
scanner = BTHomeHub5DeviceScanner(config[DOMAIN])
return scanner if scanner.success_init else None
class BTHomeHub5DeviceScanner(DeviceScanner):
"""This class queries a BT Home Hub 5."""
def __init__(self, config):
"""Initialise the scanner."""
_LOGGER.info("Initialising BT Home Hub 5")
self.host = config[CONF_HOST]
self.last_results = {}
# Test the router is accessible
data = bthomehub5_devicelist.get_devicelist(self.host)
self.success_init = data is not None
def scan_devices(self):
"""Scan for new devices and return a list with found device IDs."""
self.update_info()
return (device for device in self.last_results)
def get_device_name(self, device):
"""Return the name of the given device or None if we don't know."""
# If not initialised and not already scanned and not found.
if device not in self.last_results:
self.update_info()
if not self.last_results:
return None
return self.last_results.get(device)
def update_info(self):
"""Ensure the information from the BT Home Hub 5 is up to date."""
_LOGGER.info("Scanning")
data = bthomehub5_devicelist.get_devicelist(self.host)
if not data:
_LOGGER.warning("Error scanning devices")
return
self.last_results = data
|
import string
class InvalidInterpolation(Exception):
def __init__(self, string, place):
self.string = string
self.place = place
class Interpolator(object):
"""
Configuration options may contain environment variables. For example,
suppose the shell contains ``VERIFIER_NAME=testinfra`` and the following
molecule.yml is supplied.
.. code-block:: yaml
verifier:
- name: ${VERIFIER_NAME}
Molecule will substitute ``$VERIFIER_NAME`` with the value of the
``VERIFIER_NAME`` environment variable.
.. warning::
If an environment variable is not set, Molecule substitutes with an
empty string.
    Both ``$VARIABLE`` and ``${VARIABLE}`` syntaxes are supported. Extended
    shell-style features, such as ``${VARIABLE-default}`` and
    ``${VARIABLE:-default}``, are also supported.
If a literal dollar sign is needed in a configuration, use a double dollar
sign (`$$`).
Molecule will substitute special ``MOLECULE_`` environment variables
defined in `molecule.yml`.
.. important::
Remember, the ``MOLECULE_`` namespace is reserved for Molecule. Do not
prefix your own variables with `MOLECULE_`.
A file may be placed in the root of the project as `env.yml`, and Molecule
will read variables when rendering `molecule.yml`. See command usage.
"""
def __init__(self, templater, mapping):
self.templater = templater
self.mapping = mapping
def interpolate(self, string, keep_string=None):
try:
return self.templater(string).substitute(self.mapping, keep_string)
except ValueError as e:
raise InvalidInterpolation(string, e)
class TemplateWithDefaults(string.Template):
idpattern = r'[_a-z][_a-z0-9]*(?::?-[^}]+)?'
# Modified from python2.7/string.py
def substitute(self, mapping, keep_string):
# Helper function for .sub()
def convert(mo):
# Check the most common path first.
named = mo.group('named') or mo.group('braced')
if named is not None:
# TODO(retr0h): This needs to be better handled.
if keep_string and named.startswith(keep_string):
return '$%s' % named
if ':-' in named:
var, _, default = named.partition(':-')
return mapping.get(var) or default
if '-' in named:
var, _, default = named.partition('-')
return mapping.get(var, default)
val = mapping.get(named, '')
return '%s' % (val, )
if mo.group('escaped') is not None:
return self.delimiter
if mo.group('invalid') is not None:
self._invalid(mo)
return self.pattern.sub(convert, self.template)
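# Illustrative usage (an assumption, not part of this module): exercising the
# behaviour documented in the Interpolator docstring directly.
#
#   interpolator = Interpolator(TemplateWithDefaults,
#                               {'VERIFIER_NAME': 'testinfra'})
#   interpolator.interpolate('name: ${VERIFIER_NAME}')          # 'name: testinfra'
#   interpolator.interpolate('driver: ${DRIVER_NAME:-docker}')  # 'driver: docker'
#   interpolator.interpolate('cost: $$100')                     # 'cost: $100'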
|
from random import randint
from pylatex import Document, LongTabu, Tabu, Center
from pylatex.utils import bold
def generate_tabus():
geometry_options = {
"landscape": True,
"margin": "1.5in",
"headheight": "20pt",
"headsep": "10pt",
"includeheadfoot": True
}
doc = Document(page_numbers=True, geometry_options=geometry_options)
# Generate data table with 'tight' columns
fmt = "X[r] X[r] X[r] X[r] X[r] X[r]"
with doc.create(LongTabu(fmt, spread="0pt")) as data_table:
header_row1 = ["Prov", "Num", "CurBal", "IntPay", "Total", "IntR"]
data_table.add_row(header_row1, mapper=[bold])
data_table.add_hline()
data_table.add_empty_row()
data_table.end_table_header()
data_table.add_row(["Prov", "Num", "CurBal", "IntPay", "Total",
"IntR"])
row = ["PA", "9", "$100", "%10", "$1000", "Test"]
for i in range(40):
data_table.add_row(row)
with doc.create(Center()) as centered:
with centered.create(Tabu("X[r] X[r]", spread="1in")) as data_table:
header_row1 = ["X", "Y"]
data_table.add_row(header_row1, mapper=[bold])
data_table.add_hline()
row = [randint(0, 1000), randint(0, 1000)]
for i in range(4):
data_table.add_row(row)
with doc.create(Center()) as centered:
with centered.create(Tabu("X[r] X[r]", to="4in")) as data_table:
header_row1 = ["X", "Y"]
data_table.add_row(header_row1, mapper=[bold])
data_table.add_hline()
row = [randint(0, 1000), randint(0, 1000)]
for i in range(4):
data_table.add_row(row)
doc.generate_pdf("tabus", clean_tex=False)
generate_tabus()
|
import sys
from pscript.functions import py2js, evaljs
from pscript.stdlib import get_std_info, get_partial_std_lib
from ._loop import loop, this_is_js # noqa - import from here by tests
from ._component import Component
from ._property import Property
from ._js import create_js_component_class, JS_EVENT
class StdoutMismatchError(Exception):
""" Raised when the stdout mismatches.
"""
pass
class FakeStream:
""" To capture Pythons stdout and stderr during the both-tests.
"""
def __init__(self):
self._parts = []
def write(self, msg):
# Keep single messages together, so that errors are compared as one "line"
msg2 = msg.replace('\n', '\r')
if msg.endswith('\n'):
self._parts.append(msg2[:-1] + '\n')
else:
self._parts.append(msg2)
def flush(self):
pass
def getvalue(self):
return ''.join(self._parts)
def call_func_in_py(func):
""" Call a function and capture it's stdout.
"""
loop.integrate(reset=True)
orig_stdout = sys.stdout
orig_stderr = sys.stderr
fake_stdout = FakeStream()
sys.stdout = sys.stderr = fake_stdout
try:
func()
finally:
sys.stdout = orig_stdout
sys.stderr = orig_stderr
loop.reset()
return fake_stdout.getvalue().rstrip()
def call_func_in_js(func, classes, extra_nodejs_args=None):
# Collect base classes
all_classes = []
for cls in classes:
for c in cls.mro():
if c is Component or c is Property or c in all_classes:
break
all_classes.append(c)
# Generate JS code
code = JS_EVENT
for c in reversed(all_classes):
code += create_js_component_class(c, c.__name__,
c.__bases__[0].__name__+'.prototype')
code += py2js(func, 'test', inline_stdlib=False, docstrings=False)
code += 'test();loop.reset();'
nargs, function_deps, method_deps = get_std_info(code)
code = get_partial_std_lib(function_deps, method_deps, []) + code
# Call (allow using file)
return evaljs(code, print_result=False, extra_nodejs_args=extra_nodejs_args)
def smart_compare(func, *comparations):
""" Compare multiple text-pairs, raising an error that shows where
the texts differ for each of the mismatching pairs.
Each comparison should be (name, text, reference).
"""
err_msgs = []
has_errors = False
for comp in comparations:
err_msg = validate_text(*comp)
if err_msg:
has_errors = True
err_msgs.append(err_msg)
else:
err_msgs.append(' ' * 8 + comp[0] + ' matches the reference\n')
if has_errors:
j = '_' * 79 + '\n'
err_msgs = [''] + err_msgs + ['']
t = 'Text mismatch in\nFile "%s", line %i, in %s:\n%s'
raise StdoutMismatchError(t % (func.__code__.co_filename,
func.__code__.co_firstlineno,
func.__name__,
j.join(err_msgs)))
def validate_text(name, text, reference):
""" Compare text with a reference. Returns None if they match, and otherwise
an error message that outlines where they differ.
"""
lines1 = text.split('\n')
lines2 = reference.split('\n')
n = max(len(lines1), len(lines2))
for i in range(len(lines1)):
if lines1[i].startswith(('[E ', '[W ', '[I ')):
lines1[i] = lines1[i].split(']', 1)[-1].lstrip() # remove log prefix
while len(lines1) < n:
lines1.append('')
while len(lines2) < n: # pragma: no cover
lines2.append('')
nchars = 35 # 2*35 + 8 for prefix and 1 spacing = 79
for i in range(n):
line1, line2 = lines1[i], lines2[i]
line1 = line1.lower()
line2 = line2.lower()
if line2.startswith('?'):
equal_enough = line2[1:].strip() in line1
else:
equal_enough = line1 == line2
if not equal_enough:
i1 = max(0, i - 16)
i2 = min(n, i + 16)
msg = ' '*8 + name.ljust(nchars) + ' ' + 'Reference'.ljust(nchars) + '\n'
for j in range(i1, i2):
linenr = str(j + 1).rjust(3, '0')
prefix = ' >> ' if j == i else ' '
msg += '{}{} '.format(prefix, linenr)
msg += _zip(_wrap(lines1[j], nchars, 3), _wrap(lines2[j], nchars, 3), 8)
# line1 = lines1[j].ljust(nchars, '\xb7')
# line2 = lines2[j].ljust(nchars, '\xb7')
# line1 = line1 if len(line1) <= nchars else line1[:nchars-1] + '…'
# line2 = line2 if len(line2) <= nchars else line2[:nchars-1] + '…'
# msg += '{}{} {} {}\n'.format(prefix, linenr, line1, line2)
return msg
def _wrap(line, nchars, maxlines):
line = line.replace('\n', '\\n').replace('\r', '\\r')
lines = []
while line:
lines.append(line[:nchars])
line = line[nchars:].lstrip()
if not lines:
lines.append('\xb7' * nchars)
elif len(lines) == 1:
lines[-1] = lines[-1].ljust(nchars, '\xb7')
elif len(lines) <= maxlines:
lines[-1] = lines[-1].ljust(nchars, ' ')
else:
lines = lines[:maxlines]
lines[-1] = lines[-1][:-1] + '…'
return lines
def _zip(lines1, lines2, offset):
n = max(len(lines1), len(lines2))
nchars = len(lines1[0])
while len(lines1) < n:
lines1.append(' ' * nchars)
while len(lines2) < n: # pragma: no cover
lines2.append(' ' * nchars)
text = ''
i = 0
for line1, line2 in zip(lines1, lines2):
if i > 0:
text += ' ' * offset
i += 1
text += line1 + ' ' + line2 + '\n'
return text
def run_in_both(*classes, js=True, py=True, extra_nodejs_args=None):
""" Decorator to run a test in both Python and JS.
The decorator should be provided with any Component classes that
you want to use in the test.
The function docstring should match the stdout + stderr of the test (case
insensitive). To provide separate reference outputs for Python and
JavaScript, use a delimiter of at least 10 '-' characters. Use "? xx"
to test that "xx" is present on a line (useful for logged exceptions).
"""
def wrapper(func):
reference = '\n'.join(line[4:] for line in func.__doc__.splitlines())
parts = reference.split('-'*10)
pyref = parts[0].strip(' \n')
jsref = parts[-1].strip(' \n-')
def runner1():
# One level of indirection to make cleaner error reporting by pytest
err = None
try:
return runner2()
except Exception as e:
err = e
if isinstance(err, StdoutMismatchError):
raise StdoutMismatchError(err)
elif isinstance(err, RuntimeError):
raise RuntimeError(err)
else:
raise err
def runner2():
# Run in Python
if py:
pyresult = call_func_in_py(func)
pyresult = pyresult.replace('"', "'").replace("\\'", "'")
pyresult = pyresult.split('!!!!')[-1]
#print('Py:\n' + pyresult)
# Run in JS
if js:
jsresult = call_func_in_js(func, classes, extra_nodejs_args)
jsresult = jsresult.replace('[ ', '[').replace(' ]', ']')
jsresult = jsresult.replace('\n ', ' ')
jsresult = jsresult.replace('"', "'").split('!!!!')[-1]
jsresult = jsresult.replace('null', 'None')
#print('JS:\n' + jsresult)
args = [func]
if py:
args.append(('Python', pyresult, pyref))
if js:
args.append(('JavaScript', jsresult, jsref))
smart_compare(*args)
print(func.__name__, 'ok')
return True
return runner1
return wrapper
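# Hypothetical example (not part of this module) of how run_in_both is meant to
# be used: the docstring is the expected output, a run of at least ten '-'
# characters separates the Python reference from the JavaScript reference, and
# a line starting with '?' only requires its text to be present on that line.
#
#   @run_in_both(MyComponent)   # MyComponent is an assumed Component subclass
#   def test_example():
#       """
#       hello 3
#       ----------
#       hello 3
#       """
#       print('hello', 3)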
|
from ReText import globalSettings
from ReText.syncscroll import SyncScroll
from ReText.preview import ReTextWebPreview
from PyQt5.QtCore import QStandardPaths
from PyQt5.QtGui import QDesktopServices
from PyQt5.QtNetwork import QNetworkDiskCache
from PyQt5.QtWebKit import QWebSettings
from PyQt5.QtWebKitWidgets import QWebPage, QWebView
class ReTextWebKitPreview(ReTextWebPreview, QWebView):
def __init__(self, tab,
editorPositionToSourceLineFunc,
sourceLineToEditorPositionFunc):
QWebView.__init__(self)
self.tab = tab
self.syncscroll = SyncScroll(self.page().mainFrame(),
editorPositionToSourceLineFunc,
sourceLineToEditorPositionFunc)
ReTextWebPreview.__init__(self, tab.editBox)
self.page().setLinkDelegationPolicy(QWebPage.DelegateAllLinks)
self.page().linkClicked.connect(self._handleLinkClicked)
self.settings().setAttribute(QWebSettings.LocalContentCanAccessFileUrls, False)
# Avoid caching of CSS
self.settings().setObjectCacheCapacities(0,0,0)
self.cache = QNetworkDiskCache()
cacheDirectory = QStandardPaths.writableLocation(QStandardPaths.CacheLocation)
self.cache.setCacheDirectory(cacheDirectory)
self.page().networkAccessManager().setCache(self.cache)
def updateFontSettings(self):
settings = self.settings()
settings.setFontFamily(QWebSettings.StandardFont,
globalSettings.font.family())
settings.setFontSize(QWebSettings.DefaultFontSize,
globalSettings.font.pointSize())
def _handleWheelEvent(self, event):
# Only pass wheelEvents on to the preview if syncscroll is
# controlling the position of the preview
if self.syncscroll.isActive():
self.wheelEvent(event)
def _handleLinkClicked(self, url):
if url.isLocalFile():
localFile = url.toLocalFile()
if localFile == self.tab.fileName and url.hasFragment():
self.page().mainFrame().scrollToAnchor(url.fragment())
return
if self.tab.openSourceFile(localFile):
return
if globalSettings.handleWebLinks:
self.load(url)
else:
QDesktopServices.openUrl(url)
|
import json
import pytest
from marshmallow import ValidationError
from lemur.pending_certificates.views import * # noqa
from .vectors import (
CSR_STR,
INTERMEDIATE_CERT_STR,
VALID_ADMIN_API_TOKEN,
VALID_ADMIN_HEADER_TOKEN,
VALID_USER_HEADER_TOKEN,
WILDCARD_CERT_STR,
)
def test_increment_attempt(pending_certificate):
from lemur.pending_certificates.service import increment_attempt
initial_attempt = pending_certificate.number_attempts
attempts = increment_attempt(pending_certificate)
assert attempts == initial_attempt + 1
def test_create_pending_certificate(async_issuer_plugin, async_authority, user):
from lemur.certificates.service import create
pending_cert = create(
authority=async_authority,
csr=CSR_STR,
owner="[email protected]",
creator=user["user"],
common_name="ACommonName",
)
assert pending_cert.external_id == "12345"
def test_create_pending(pending_certificate, user, session):
import copy
from lemur.pending_certificates.service import create_certificate, get
cert = {
"body": WILDCARD_CERT_STR,
"chain": INTERMEDIATE_CERT_STR,
"external_id": "54321",
}
    # Weird copy because of the session behavior. pending_certificate is a valid object, but the
    # return of vars(pending_certificate) is a session object, so nothing from the pending_cert
    # is used to create the certificate. Maybe a bug due to using vars(); it should copy every
    # field explicitly.
pending_certificate = copy.copy(get(pending_certificate.id))
real_cert = create_certificate(pending_certificate, cert, user["user"])
assert real_cert.owner == pending_certificate.owner
assert real_cert.notify == pending_certificate.notify
assert real_cert.private_key == pending_certificate.private_key
assert real_cert.external_id == "54321"
assert real_cert.key_type == "RSA2048"
@pytest.mark.parametrize(
"token,status",
[
(VALID_USER_HEADER_TOKEN, 403),
(VALID_ADMIN_HEADER_TOKEN, 204),
(VALID_ADMIN_API_TOKEN, 204),
("", 401),
],
)
def test_pending_cancel(client, pending_certificate, token, status):
assert (
client.delete(
api.url_for(
PendingCertificates, pending_certificate_id=pending_certificate.id
),
data=json.dumps({"note": "unit test", "send_email": False}),
headers=token,
).status_code
== status
)
def test_pending_upload(pending_certificate_from_full_chain_ca):
from lemur.pending_certificates.service import upload
from lemur.certificates.service import get
cert = {"body": WILDCARD_CERT_STR, "chain": None, "external_id": None}
pending_cert = upload(pending_certificate_from_full_chain_ca.id, **cert)
assert pending_cert.resolved
assert get(pending_cert.resolved_cert_id)
def test_pending_upload_with_chain(pending_certificate_from_partial_chain_ca):
from lemur.pending_certificates.service import upload
from lemur.certificates.service import get
cert = {
"body": WILDCARD_CERT_STR,
"chain": INTERMEDIATE_CERT_STR,
"external_id": None,
}
pending_cert = upload(pending_certificate_from_partial_chain_ca.id, **cert)
assert pending_cert.resolved
assert get(pending_cert.resolved_cert_id)
def test_invalid_pending_upload_with_chain(pending_certificate_from_partial_chain_ca):
from lemur.pending_certificates.service import upload
cert = {"body": WILDCARD_CERT_STR, "chain": None, "external_id": None}
with pytest.raises(ValidationError) as err:
upload(pending_certificate_from_partial_chain_ca.id, **cert)
assert str(err.value).startswith(
"Incorrect chain certificate(s) provided: '*.wild.example.org' is not signed by 'LemurTrust Unittests Root CA 2018"
)
|
from django.core.management import call_command
from django.core.management.base import CommandError
from django.test import TestCase
from weblate.auth.models import Group, User
from weblate.trans.tests.utils import TempDirMixin, get_test_file
class CommandTest(TestCase, TempDirMixin):
"""Test for management commands."""
def test_createadmin(self):
call_command("createadmin")
user = User.objects.get(username="admin")
self.assertEqual(user.full_name, "Weblate Admin")
self.assertFalse(user.check_password("admin"))
def test_createadmin_password(self):
call_command("createadmin", password="admin")
user = User.objects.get(username="admin")
self.assertEqual(user.full_name, "Weblate Admin")
self.assertTrue(user.check_password("admin"))
def test_createadmin_reuse_password(self):
call_command("createadmin", password="admin")
user = User.objects.get(username="admin")
self.assertEqual(user.full_name, "Weblate Admin")
self.assertTrue(user.check_password("admin"))
        # Ensure the password is not changed when not needed
old = user.password
call_command("createadmin", password="admin", update=True)
user = User.objects.get(username="admin")
self.assertEqual(old, user.password)
def test_createadmin_username(self):
call_command("createadmin", username="admin2")
user = User.objects.get(username="admin2")
self.assertEqual(user.full_name, "Weblate Admin")
def test_createadmin_email(self):
call_command("createadmin", email="[email protected]")
user = User.objects.get(username="admin")
self.assertEqual(user.email, "[email protected]")
def test_createadmin_twice(self):
call_command("createadmin")
with self.assertRaises(CommandError):
call_command("createadmin")
def test_createadmin_update(self):
call_command("createadmin", update=True)
call_command("createadmin", update=True, password="123456")
user = User.objects.get(username="admin")
self.assertTrue(user.check_password("123456"))
def test_createadmin_update_duplicate(self):
email = "[email protected]"
User.objects.create(username="another", email=email)
call_command("createadmin", update=True)
with self.assertRaises(CommandError):
call_command("createadmin", update=True, password="123456", email=email)
user = User.objects.get(username="another")
self.assertFalse(user.check_password("123456"))
def test_createadmin_update_email(self):
email = "[email protected]"
User.objects.create(username="another", email=email)
call_command("createadmin", update=True, password="123456", email=email)
user = User.objects.get(username="another")
self.assertTrue(user.check_password("123456"))
def test_importusers(self):
# First import
call_command("importusers", get_test_file("users.json"))
# Test that second import does not change anything
user = User.objects.get(username="weblate")
user.full_name = "Weblate test user"
user.save()
call_command("importusers", get_test_file("users.json"))
user2 = User.objects.get(username="weblate")
self.assertEqual(user.full_name, user2.full_name)
def test_importdjangousers(self):
# First import
call_command("importusers", get_test_file("users-django.json"))
self.assertEqual(User.objects.count(), 2)
def test_import_empty_users(self):
"""Test importing empty file."""
call_command("importusers", get_test_file("users-empty.json"))
# Only anonymous user
self.assertEqual(User.objects.count(), 1)
def test_import_invalid_users(self):
"""Test error handling in user import."""
call_command("importusers", get_test_file("users-invalid.json"))
# Only anonymous user
self.assertEqual(User.objects.count(), 1)
def test_setupgroups(self):
call_command("setupgroups")
group = Group.objects.get(name="Users")
self.assertTrue(group.roles.filter(name="Power user").exists())
|
import pytest
import os
from yandextank.common.util import get_test_path
from yandextank.plugins.DataUploader.cli import from_tank_config, get_logger
@pytest.mark.parametrize('test_dir, expected', [
(os.path.join(get_test_path(), 'yandextank/plugins/DataUploader/tests/test_postloader/test_empty'), (None, {})),
(os.path.join(get_test_path(), 'yandextank/plugins/DataUploader/tests/test_postloader/test_full'),
('uploader',
{'api_address': 'https://lunapark.yandex-team.ru/',
'api_attempts': 2,
'api_timeout': 5,
'enabled': True,
'job_dsc': 'hell of a kitty',
'job_name': 'Hello kitty',
'jobno_file': 'jobno.txt',
'lock_targets': 'foo.bar',
'maintenance_timeout': 5,
'network_attempts': 2,
'operator': 'fomars',
'package': 'yandextank.plugins.DataUploader',
'task': 'LOAD-204'})
),
(os.path.join(get_test_path(), 'yandextank/plugins/DataUploader/tests/test_postloader/test_disabled'),
('uploader', {'enabled': False, 'package': 'yandextank.plugins.DataUploader'})),
])
def test_from_tank_config(test_dir, expected):
get_logger()
assert from_tank_config(test_dir) == expected
|
import os
from perfkitbenchmarker import vm_util
LAPACK_VERSION = '3.6.1'
LAPACK_FOLDER = 'lapack-%s' % LAPACK_VERSION
LAPACK_TAR = '%s.tgz' % LAPACK_FOLDER
LAPACK_URL = 'https://www.netlib.org/lapack/%s' % LAPACK_TAR
LAPACK_DIR = os.path.join(vm_util.VM_TMP_DIR, LAPACK_FOLDER)
PACKAGE_NAME = 'lapack'
PREPROVISIONED_DATA = {
LAPACK_TAR:
'888a50d787a9d828074db581c80b2d22bdb91435a673b1bf6cd6eb51aa50d1de'
}
PACKAGE_DATA_URL = {
LAPACK_TAR: LAPACK_URL
}
def _Install(vm):
"""Install LAPACK lib."""
vm.Install('fortran')
vm.Install('cmake')
vm.InstallPreprovisionedPackageData(
PACKAGE_NAME, PREPROVISIONED_DATA.keys(), vm_util.VM_TMP_DIR)
vm.RemoteCommand(
'cd %s; tar xf %s' % (
vm_util.VM_TMP_DIR, LAPACK_TAR))
vm.RemoteCommand(
'cd %s; mv make.inc.example make.inc; cmake .; make -j %s' % (
          LAPACK_DIR, vm.NumCpusForBenchmark()))
vm.RemoteCommand(
'cd %s; make -j %s' % (os.path.join(LAPACK_DIR, 'BLAS'),
vm.NumCpusForBenchmark()))
def YumInstall(vm):
"""Installs the lapack package on the VM."""
_Install(vm)
def AptInstall(vm):
"""Installs the lapack package on the VM."""
_Install(vm)
|
from datetime import datetime
import json
from aiokafka import AIOKafkaProducer
import voluptuous as vol
from homeassistant.const import (
CONF_IP_ADDRESS,
CONF_PASSWORD,
CONF_PORT,
CONF_USERNAME,
EVENT_HOMEASSISTANT_STOP,
EVENT_STATE_CHANGED,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entityfilter import FILTER_SCHEMA
from homeassistant.util import ssl as ssl_util
DOMAIN = "apache_kafka"
CONF_FILTER = "filter"
CONF_TOPIC = "topic"
CONF_SECURITY_PROTOCOL = "security_protocol"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_IP_ADDRESS): cv.string,
vol.Required(CONF_PORT): cv.port,
vol.Required(CONF_TOPIC): cv.string,
vol.Optional(CONF_FILTER, default={}): FILTER_SCHEMA,
vol.Optional(CONF_SECURITY_PROTOCOL, default="PLAINTEXT"): vol.In(
["PLAINTEXT", "SASL_SSL"]
),
vol.Optional(CONF_USERNAME): cv.string,
vol.Optional(CONF_PASSWORD): cv.string,
}
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, config):
"""Activate the Apache Kafka integration."""
conf = config[DOMAIN]
kafka = hass.data[DOMAIN] = KafkaManager(
hass,
conf[CONF_IP_ADDRESS],
conf[CONF_PORT],
conf[CONF_TOPIC],
conf[CONF_FILTER],
conf[CONF_SECURITY_PROTOCOL],
conf.get(CONF_USERNAME),
conf.get(CONF_PASSWORD),
)
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, kafka.shutdown)
await kafka.start()
return True
class DateTimeJSONEncoder(json.JSONEncoder):
"""Encode python objects.
Additionally add encoding for datetime objects as isoformat.
"""
def default(self, o):
"""Implement encoding logic."""
if isinstance(o, datetime):
return o.isoformat()
return super().default(o)
class KafkaManager:
"""Define a manager to buffer events to Kafka."""
def __init__(
self,
hass,
ip_address,
port,
topic,
entities_filter,
security_protocol,
username,
password,
):
"""Initialize."""
self._encoder = DateTimeJSONEncoder()
self._entities_filter = entities_filter
self._hass = hass
ssl_context = ssl_util.client_context()
self._producer = AIOKafkaProducer(
loop=hass.loop,
bootstrap_servers=f"{ip_address}:{port}",
compression_type="gzip",
security_protocol=security_protocol,
ssl_context=ssl_context,
sasl_mechanism="PLAIN",
sasl_plain_username=username,
sasl_plain_password=password,
)
self._topic = topic
def _encode_event(self, event):
"""Translate events into a binary JSON payload."""
state = event.data.get("new_state")
if (
state is None
or state.state in (STATE_UNKNOWN, "", STATE_UNAVAILABLE)
or not self._entities_filter(state.entity_id)
):
return
return json.dumps(obj=state.as_dict(), default=self._encoder.encode).encode(
"utf-8"
)
async def start(self):
"""Start the Kafka manager."""
self._hass.bus.async_listen(EVENT_STATE_CHANGED, self.write)
await self._producer.start()
async def shutdown(self, _):
"""Shut the manager down."""
await self._producer.stop()
async def write(self, event):
"""Write a binary payload to Kafka."""
payload = self._encode_event(event)
if payload:
await self._producer.send_and_wait(self._topic, payload)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import logging
from compare_gan.metrics import eval_task
from compare_gan.metrics import image_similarity
import numpy as np
from six.moves import range
import tensorflow as tf
class MultiscaleSSIMTask(eval_task.EvalTask):
"""Task that computes MSSIMScore for generated images."""
_LABEL = "ms_ssim"
def run_after_session(self, options, eval_data_fake, eval_data_real=None):
del options, eval_data_real
score = _compute_multiscale_ssim_score(eval_data_fake.images)
return {self._LABEL: score}
def _compute_multiscale_ssim_score(fake_images):
"""Compute ms-ssim score ."""
batch_size = 64
with tf.Graph().as_default():
fake_images_batch = tf.train.shuffle_batch(
[tf.convert_to_tensor(fake_images, dtype=tf.float32)],
capacity=16*batch_size,
min_after_dequeue=8*batch_size,
num_threads=4,
enqueue_many=True,
batch_size=batch_size)
# Following section 5.3 of https://arxiv.org/pdf/1710.08446.pdf, we only
# evaluate 5 batches of the generated images.
eval_fn = compute_msssim(
generated_images=fake_images_batch, num_batches=5)
with tf.train.MonitoredTrainingSession() as sess:
score = eval_fn(sess)
return score
def compute_msssim(generated_images, num_batches):
"""Get a fn returning the ms ssim score for generated images.
Args:
generated_images: TF Tensor of shape [batch_size, dim, dim, 3] which
evaluates to a batch of generated images. Should be in range [0..255].
num_batches: Number of batches to consider.
Returns:
eval_fn: a function which takes a session as an argument and returns the
average ms ssim score among all the possible image pairs from
generated_images.
"""
batch_size = int(generated_images.get_shape()[0])
assert batch_size > 1
# Generate all possible image pairs from input set of imgs.
pair1 = tf.tile(generated_images, [batch_size, 1, 1, 1])
pair2 = tf.reshape(
tf.tile(generated_images, [1, batch_size, 1, 1]), [
batch_size * batch_size, generated_images.shape[1],
generated_images.shape[2], generated_images.shape[3]
])
# Compute the mean of the scores (but ignore the 'identical' images - which
# should get 1.0 from the MultiscaleSSIM)
score = tf.reduce_sum(image_similarity.multiscale_ssim(pair1, pair2))
score -= batch_size
score = tf.div(score, batch_size * batch_size - batch_size)
# Define a function which wraps some session.run calls to generate a large
# number of images and compute multiscale ssim metric on them.
def _eval_fn(session):
"""Function which wraps session.run calls to compute given metric."""
logging.info("Computing MS-SSIM score...")
scores = []
for _ in range(num_batches):
scores.append(session.run(score))
result = np.mean(scores)
return result
return _eval_fn
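# Worked example of the normalization above (illustrative only): with
# batch_size = 64 there are 64 * 64 = 4096 ordered image pairs, of which 64 are
# an image paired with itself and contribute exactly 1.0 each. Subtracting
# batch_size and dividing by 4096 - 64 = 4032 therefore averages the MS-SSIM
# over the distinct ordered pairs only.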
|
import datetime
import logging
import requests
from tank_utility import auth, device as tank_monitor
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_DEVICES, CONF_EMAIL, CONF_PASSWORD, PERCENTAGE
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = datetime.timedelta(hours=1)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_EMAIL): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_DEVICES): vol.All(cv.ensure_list, vol.Length(min=1)),
}
)
SENSOR_TYPE = "tank"
SENSOR_ROUNDING_PRECISION = 1
SENSOR_ATTRS = [
"name",
"address",
"capacity",
"fuelType",
"orientation",
"status",
"time",
"time_iso",
]
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Tank Utility sensor."""
email = config.get(CONF_EMAIL)
password = config.get(CONF_PASSWORD)
devices = config.get(CONF_DEVICES)
try:
token = auth.get_token(email, password)
except requests.exceptions.HTTPError as http_error:
if (
http_error.response.status_code
== requests.codes.unauthorized # pylint: disable=no-member
):
_LOGGER.error("Invalid credentials")
return
all_sensors = []
for device in devices:
sensor = TankUtilitySensor(email, password, token, device)
all_sensors.append(sensor)
add_entities(all_sensors, True)
class TankUtilitySensor(Entity):
"""Representation of a Tank Utility sensor."""
def __init__(self, email, password, token, device):
"""Initialize the sensor."""
self._email = email
self._password = password
self._token = token
self._device = device
self._state = None
self._name = f"Tank Utility {self.device}"
self._unit_of_measurement = PERCENTAGE
self._attributes = {}
@property
def device(self):
"""Return the device identifier."""
return self._device
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def unit_of_measurement(self):
"""Return the unit of measurement of the device."""
return self._unit_of_measurement
@property
def device_state_attributes(self):
"""Return the attributes of the device."""
return self._attributes
def get_data(self):
"""Get data from the device.
Flatten dictionary to map device to map of device data.
"""
data = {}
try:
data = tank_monitor.get_device_data(self._token, self.device)
except requests.exceptions.HTTPError as http_error:
if (
http_error.response.status_code
== requests.codes.unauthorized # pylint: disable=no-member
or http_error.response.status_code
== requests.codes.bad_request # pylint: disable=no-member
):
_LOGGER.info("Getting new token")
self._token = auth.get_token(self._email, self._password, force=True)
data = tank_monitor.get_device_data(self._token, self.device)
else:
raise http_error
data.update(data.pop("device", {}))
data.update(data.pop("lastReading", {}))
return data
def update(self):
"""Set the device state and attributes."""
data = self.get_data()
self._state = round(data[SENSOR_TYPE], SENSOR_ROUNDING_PRECISION)
self._attributes = {k: v for k, v in data.items() if k in SENSOR_ATTRS}
|
from homeassistant.components.lock import DOMAIN, LockEntity
from . import FIBARO_DEVICES, FibaroDevice
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Fibaro locks."""
if discovery_info is None:
return
add_entities(
[FibaroLock(device) for device in hass.data[FIBARO_DEVICES]["lock"]], True
)
class FibaroLock(FibaroDevice, LockEntity):
"""Representation of a Fibaro Lock."""
def __init__(self, fibaro_device):
"""Initialize the Fibaro device."""
self._state = False
super().__init__(fibaro_device)
self.entity_id = f"{DOMAIN}.{self.ha_id}"
def lock(self, **kwargs):
"""Lock the device."""
self.action("secure")
self._state = True
def unlock(self, **kwargs):
"""Unlock the device."""
self.action("unsecure")
self._state = False
@property
def is_locked(self):
"""Return true if device is locked."""
return self._state
def update(self):
"""Update device state."""
self._state = self.current_binary_state
|
import datetime
import decimal
import logging
import sqlalchemy
from sqlalchemy.orm import scoped_session, sessionmaker
import voluptuous as vol
from homeassistant.components.recorder import CONF_DB_URL, DEFAULT_DB_FILE, DEFAULT_URL
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_NAME, CONF_UNIT_OF_MEASUREMENT, CONF_VALUE_TEMPLATE
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
CONF_COLUMN_NAME = "column"
CONF_QUERIES = "queries"
CONF_QUERY = "query"
def validate_sql_select(value):
"""Validate that value is a SQL SELECT query."""
if not value.lstrip().lower().startswith("select"):
raise vol.Invalid("Only SELECT queries allowed")
return value
_QUERY_SCHEME = vol.Schema(
{
vol.Required(CONF_COLUMN_NAME): cv.string,
vol.Required(CONF_NAME): cv.string,
vol.Required(CONF_QUERY): vol.All(cv.string, validate_sql_select),
vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
}
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_QUERIES): [_QUERY_SCHEME], vol.Optional(CONF_DB_URL): cv.string}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the SQL sensor platform."""
db_url = config.get(CONF_DB_URL)
if not db_url:
db_url = DEFAULT_URL.format(hass_config_path=hass.config.path(DEFAULT_DB_FILE))
try:
engine = sqlalchemy.create_engine(db_url)
sessmaker = scoped_session(sessionmaker(bind=engine))
# Run a dummy query just to test the db_url
sess = sessmaker()
sess.execute("SELECT 1;")
except sqlalchemy.exc.SQLAlchemyError as err:
_LOGGER.error("Couldn't connect using %s DB_URL: %s", db_url, err)
return
finally:
sess.close()
queries = []
for query in config.get(CONF_QUERIES):
name = query.get(CONF_NAME)
query_str = query.get(CONF_QUERY)
unit = query.get(CONF_UNIT_OF_MEASUREMENT)
value_template = query.get(CONF_VALUE_TEMPLATE)
column_name = query.get(CONF_COLUMN_NAME)
if value_template is not None:
value_template.hass = hass
sensor = SQLSensor(
name, sessmaker, query_str, column_name, unit, value_template
)
queries.append(sensor)
add_entities(queries, True)
class SQLSensor(Entity):
"""Representation of an SQL sensor."""
def __init__(self, name, sessmaker, query, column, unit, value_template):
"""Initialize the SQL sensor."""
self._name = name
if "LIMIT" in query:
self._query = query
else:
self._query = query.replace(";", " LIMIT 1;")
self._unit_of_measurement = unit
self._template = value_template
self._column_name = column
self.sessionmaker = sessmaker
self._state = None
self._attributes = None
@property
def name(self):
"""Return the name of the query."""
return self._name
@property
def state(self):
"""Return the query's current state."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return self._unit_of_measurement
@property
def device_state_attributes(self):
"""Return the state attributes."""
return self._attributes
def update(self):
"""Retrieve sensor data from the query."""
data = None
try:
sess = self.sessionmaker()
result = sess.execute(self._query)
self._attributes = {}
if not result.returns_rows or result.rowcount == 0:
_LOGGER.warning("%s returned no results", self._query)
self._state = None
return
for res in result:
_LOGGER.debug("result = %s", res.items())
data = res[self._column_name]
for key, value in res.items():
if isinstance(value, decimal.Decimal):
value = float(value)
if isinstance(value, datetime.date):
value = str(value)
self._attributes[key] = value
except sqlalchemy.exc.SQLAlchemyError as err:
_LOGGER.error("Error executing query %s: %s", self._query, err)
return
finally:
sess.close()
if data is not None and self._template is not None:
self._state = self._template.async_render_with_possible_json_value(
data, None
)
else:
self._state = data
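# Illustrative configuration sketch (values are assumptions, not from this
# module) matching PLATFORM_SCHEMA above. Each query must be a SELECT; unless
# it already contains "LIMIT", " LIMIT 1" is inserted at the trailing semicolon
# by SQLSensor.__init__:
#
#   sensor:
#     - platform: sql
#       db_url: "sqlite:////path/to/home-assistant_v2.db"   # optional
#       queries:
#         - name: last_state
#           column: state
#           query: "SELECT state FROM states ORDER BY state_id DESC;"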
|
from flexx import flx
class Redirect(flx.Widget):
def init(self):
self.but1 = flx.Button(text='Redirect')
self.but2 = flx.Button(text='Open new page')
@flx.reaction('but1.pointer_click')
def on_redirect(self, *events):
global window
window.location.href = 'http://python.org' # allow going back
# window.location.replace('http://python.org') # hard redirect
@flx.reaction('but2.pointer_click')
def on_opennew(self, *events):
global window
window.open('http://python.org', '_blank')
if __name__ == '__main__':
m = flx.launch(Redirect, 'browser')
flx.start()
|
import os
import re
import hvac
from flask import current_app
from lemur.common.defaults import common_name, country, state, location, organizational_unit, organization
from lemur.common.utils import parse_certificate
from lemur.plugins.bases import DestinationPlugin
from lemur.plugins.bases import SourcePlugin
from cryptography import x509
from cryptography.hazmat.backends import default_backend
class VaultSourcePlugin(SourcePlugin):
""" Class for importing certificates from Hashicorp Vault"""
title = "Vault"
slug = "vault-source"
description = "Discovers all certificates in a given path"
author = "Christopher Jolley"
author_url = "https://github.com/alwaysjolley/lemur"
options = [
{
"name": "vaultUrl",
"type": "str",
"required": True,
"validation": "^https?://[a-zA-Z0-9.:-]+$",
"helpMessage": "Valid URL to Hashi Vault instance",
},
{
"name": "vaultKvApiVersion",
"type": "select",
"value": "2",
"available": ["1", "2"],
"required": True,
"helpMessage": "Version of the Vault KV API to use",
},
{
"name": "authenticationMethod",
"type": "select",
"value": "token",
"available": ["token", "kubernetes"],
"required": True,
"helpMessage": "Authentication method to use",
},
{
"name": "tokenFileOrVaultRole",
"type": "str",
"required": True,
"validation": "^([a-zA-Z0-9/._-]+/?)+$",
"helpMessage": "Must be vaild file path for token based auth and valid role if k8s based auth",
},
{
"name": "vaultMount",
"type": "str",
"required": True,
"validation": r"^\S+$",
"helpMessage": "Must be a valid Vault secrets mount name!",
},
{
"name": "vaultPath",
"type": "str",
"required": True,
"validation": "^([a-zA-Z0-9._-]+/?)+$",
"helpMessage": "Must be a valid Vault secrets path",
},
{
"name": "objectName",
"type": "str",
"required": True,
"validation": "[0-9a-zA-Z.:_-]+",
"helpMessage": "Object Name to search",
},
]
def get_certificates(self, options, **kwargs):
"""Pull certificates from objects in Hashicorp Vault"""
data = []
cert = []
body = ""
url = self.get_option("vaultUrl", options)
auth_method = self.get_option("authenticationMethod", options)
auth_key = self.get_option("tokenFileOrVaultRole", options)
mount = self.get_option("vaultMount", options)
path = self.get_option("vaultPath", options)
obj_name = self.get_option("objectName", options)
api_version = self.get_option("vaultKvApiVersion", options)
cert_filter = "-----BEGIN CERTIFICATE-----"
cert_delimiter = "-----END CERTIFICATE-----"
client = hvac.Client(url=url)
if auth_method == 'token':
with open(auth_key, "r") as tfile:
token = tfile.readline().rstrip("\n")
client.token = token
if auth_method == 'kubernetes':
token_path = '/var/run/secrets/kubernetes.io/serviceaccount/token'
with open(token_path, 'r') as f:
jwt = f.read()
client.auth_kubernetes(auth_key, jwt)
client.secrets.kv.default_kv_version = api_version
path = "{0}/{1}".format(path, obj_name)
secret = get_secret(client, mount, path)
for cname in secret["data"]:
if "crt" in secret["data"][cname]:
cert = secret["data"][cname]["crt"].split(cert_delimiter + "\n")
elif "pem" in secret["data"][cname]:
cert = secret["data"][cname]["pem"].split(cert_delimiter + "\n")
else:
for key in secret["data"][cname]:
if secret["data"][cname][key].startswith(cert_filter):
cert = secret["data"][cname][key].split(cert_delimiter + "\n")
break
body = cert[0] + cert_delimiter
if "chain" in secret["data"][cname]:
chain = secret["data"][cname]["chain"]
elif len(cert) > 1:
if cert[1].startswith(cert_filter):
chain = cert[1] + cert_delimiter
else:
chain = None
else:
chain = None
data.append({"body": body, "chain": chain, "name": cname})
return [
dict(body=c["body"], chain=c.get("chain"), name=c["name"]) for c in data
]
def get_endpoints(self, options, **kwargs):
""" Not implemented yet """
endpoints = []
return endpoints
class VaultDestinationPlugin(DestinationPlugin):
"""Hashicorp Vault Destination plugin for Lemur"""
title = "Vault"
slug = "hashi-vault-destination"
description = "Allow the uploading of certificates to Hashi Vault as secret"
author = "Christopher Jolley"
author_url = "https://github.com/alwaysjolley/lemur"
options = [
{
"name": "vaultUrl",
"type": "str",
"required": True,
"validation": "^https?://[a-zA-Z0-9.:-]+$",
"helpMessage": "Valid URL to Hashi Vault instance",
},
{
"name": "vaultKvApiVersion",
"type": "select",
"value": "2",
"available": ["1", "2"],
"required": True,
"helpMessage": "Version of the Vault KV API to use",
},
{
"name": "authenticationMethod",
"type": "select",
"value": "token",
"available": ["token", "kubernetes"],
"required": True,
"helpMessage": "Authentication method to use",
},
{
"name": "tokenFileOrVaultRole",
"type": "str",
"required": True,
"validation": "^([a-zA-Z0-9/._-]+/?)+$",
"helpMessage": "Must be vaild file path for token based auth and valid role if k8s based auth",
},
{
"name": "vaultMount",
"type": "str",
"required": True,
"validation": r"^\S+$",
"helpMessage": "Must be a valid Vault secrets mount name!",
},
{
"name": "vaultPath",
"type": "str",
"required": True,
"validation": "^(([a-zA-Z0-9._-]+|{(CN|OU|O|L|S|C)})+/?)+$",
"helpMessage": "Must be a valid Vault secrets path. Support vars: {CN|OU|O|L|S|C}",
},
{
"name": "objectName",
"type": "str",
"required": False,
"validation": "^([0-9a-zA-Z.:_-]+|{(CN|OU|O|L|S|C)})+$",
"helpMessage": "Name to bundle certs under, if blank use {CN}. Support vars: {CN|OU|O|L|S|C}",
},
{
"name": "bundleChain",
"type": "select",
"value": "cert only",
"available": ["Nginx", "Apache", "PEM", "no chain"],
"required": True,
"helpMessage": "Bundle the chain into the certificate",
},
{
"name": "sanFilter",
"type": "str",
"value": ".*",
"required": False,
"validation": ".*",
"helpMessage": "Valid regex filter",
},
]
def __init__(self, *args, **kwargs):
super(VaultDestinationPlugin, self).__init__(*args, **kwargs)
def upload(self, name, body, private_key, cert_chain, options, **kwargs):
"""
Upload certificate and private key
:param private_key:
:param cert_chain:
:return:
"""
cert = parse_certificate(body)
cname = common_name(cert)
url = self.get_option("vaultUrl", options)
auth_method = self.get_option("authenticationMethod", options)
auth_key = self.get_option("tokenFileOrVaultRole", options)
mount = self.get_option("vaultMount", options)
path = self.get_option("vaultPath", options)
bundle = self.get_option("bundleChain", options)
obj_name = self.get_option("objectName", options)
api_version = self.get_option("vaultKvApiVersion", options)
san_filter = self.get_option("sanFilter", options)
san_list = get_san_list(body)
if san_filter:
for san in san_list:
try:
if not re.match(san_filter, san, flags=re.IGNORECASE):
current_app.logger.exception(
"Exception uploading secret to vault: invalid SAN: {}".format(
san
),
exc_info=True,
)
os._exit(1)
except re.error:
current_app.logger.exception(
"Exception compiling regex filter: invalid filter",
exc_info=True,
)
client = hvac.Client(url=url)
if auth_method == 'token':
with open(auth_key, "r") as tfile:
token = tfile.readline().rstrip("\n")
client.token = token
if auth_method == 'kubernetes':
token_path = '/var/run/secrets/kubernetes.io/serviceaccount/token'
with open(token_path, 'r') as f:
jwt = f.read()
client.auth_kubernetes(auth_key, jwt)
client.secrets.kv.default_kv_version = api_version
t_path = path.format(
CN=cname,
OU=organizational_unit(cert),
O=organization(cert), # noqa: E741
L=location(cert),
S=state(cert),
C=country(cert)
)
if not obj_name:
obj_name = '{CN}'
f_obj_name = obj_name.format(
CN=cname,
OU=organizational_unit(cert),
O=organization(cert), # noqa: E741
L=location(cert),
S=state(cert),
C=country(cert)
)
path = "{0}/{1}".format(t_path, f_obj_name)
secret = get_secret(client, mount, path)
secret["data"][cname] = {}
if not cert_chain:
chain = ''
else:
chain = cert_chain
if bundle == "Nginx":
secret["data"][cname]["crt"] = "{0}\n{1}".format(body, chain)
secret["data"][cname]["key"] = private_key
elif bundle == "Apache":
secret["data"][cname]["crt"] = body
secret["data"][cname]["chain"] = chain
secret["data"][cname]["key"] = private_key
elif bundle == "PEM":
secret["data"][cname]["pem"] = "{0}\n{1}\n{2}".format(
body, chain, private_key
)
else:
secret["data"][cname]["crt"] = body
secret["data"][cname]["key"] = private_key
if isinstance(san_list, list):
secret["data"][cname]["san"] = san_list
try:
client.secrets.kv.create_or_update_secret(
path=path, mount_point=mount, secret=secret["data"]
)
except ConnectionError as err:
current_app.logger.exception(
"Exception uploading secret to vault: {0}".format(err), exc_info=True
)
def get_san_list(body):
""" parse certificate for SAN names and return list, return empty list on error """
san_list = []
try:
byte_body = body.encode("utf-8")
cert = x509.load_pem_x509_certificate(byte_body, default_backend())
ext = cert.extensions.get_extension_for_oid(
x509.oid.ExtensionOID.SUBJECT_ALTERNATIVE_NAME
)
san_list = ext.value.get_values_for_type(x509.DNSName)
except x509.extensions.ExtensionNotFound:
pass
finally:
return san_list
def get_secret(client, mount, path):
""" retreive existing data from mount path and return dictionary """
result = {"data": {}}
try:
if client.secrets.kv.default_kv_version == "1":
result = client.secrets.kv.v1.read_secret(path=path, mount_point=mount)
else:
result = client.secrets.kv.v2.read_secret_version(
path=path, mount_point=mount
)
result = result['data']
except ConnectionError:
pass
finally:
return result
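# Illustrative example (values are assumptions) of how upload() expands the
# configured path templates above for a certificate with O="Example Corp" and
# CN="www.example.com", given a vaultPath of "certs/{O}/{CN}" and the default
# objectName "{CN}":
#
#   t_path     -> "certs/Example Corp/www.example.com"
#   f_obj_name -> "www.example.com"
#   path       -> "certs/Example Corp/www.example.com/www.example.com"
#
# The secret written at that path then holds the crt/key (plus chain, pem or
# san entries depending on bundleChain) keyed by the common name.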
|
import logging
from requests.exceptions import ConnectTimeout, HTTPError
from skybellpy import Skybell
import voluptuous as vol
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_PASSWORD,
CONF_USERNAME,
__version__,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
ATTRIBUTION = "Data provided by Skybell.com"
NOTIFICATION_ID = "skybell_notification"
NOTIFICATION_TITLE = "Skybell Sensor Setup"
DOMAIN = "skybell"
DEFAULT_CACHEDB = "./skybell_cache.pickle"
DEFAULT_ENTITY_NAMESPACE = "skybell"
AGENT_IDENTIFIER = f"HomeAssistant/{__version__}"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
}
)
},
extra=vol.ALLOW_EXTRA,
)
def setup(hass, config):
"""Set up the Skybell component."""
conf = config[DOMAIN]
username = conf.get(CONF_USERNAME)
password = conf.get(CONF_PASSWORD)
try:
cache = hass.config.path(DEFAULT_CACHEDB)
skybell = Skybell(
username=username,
password=password,
get_devices=True,
cache_path=cache,
agent_identifier=AGENT_IDENTIFIER,
)
hass.data[DOMAIN] = skybell
except (ConnectTimeout, HTTPError) as ex:
_LOGGER.error("Unable to connect to Skybell service: %s", str(ex))
hass.components.persistent_notification.create(
"Error: {}<br />"
"You will need to restart hass after fixing."
"".format(ex),
title=NOTIFICATION_TITLE,
notification_id=NOTIFICATION_ID,
)
return False
return True
class SkybellDevice(Entity):
"""A HA implementation for Skybell devices."""
def __init__(self, device):
"""Initialize a sensor for Skybell device."""
self._device = device
def update(self):
"""Update automation state."""
self._device.refresh()
@property
def device_state_attributes(self):
"""Return the state attributes."""
return {
ATTR_ATTRIBUTION: ATTRIBUTION,
"device_id": self._device.device_id,
"status": self._device.status,
"location": self._device.location,
"wifi_ssid": self._device.wifi_ssid,
"wifi_status": self._device.wifi_status,
"last_check_in": self._device.last_check_in,
"motion_threshold": self._device.motion_threshold,
"video_profile": self._device.video_profile,
}
|
import asyncio
from datetime import timedelta
import ssl
from aiohttp import CookieJar
import aiounifi
from aiounifi.controller import (
DATA_CLIENT_REMOVED,
DATA_EVENT,
SIGNAL_CONNECTION_STATE,
SIGNAL_DATA,
)
from aiounifi.events import (
ACCESS_POINT_CONNECTED,
GATEWAY_CONNECTED,
SWITCH_CONNECTED,
WIRED_CLIENT_CONNECTED,
WIRELESS_CLIENT_CONNECTED,
WIRELESS_GUEST_CONNECTED,
)
from aiounifi.websocket import STATE_DISCONNECTED, STATE_RUNNING
import async_timeout
from homeassistant.components.device_tracker import DOMAIN as TRACKER_DOMAIN
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
from homeassistant.const import CONF_HOST
from homeassistant.core import callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import aiohttp_client
from homeassistant.helpers.dispatcher import async_dispatcher_send
from .const import (
CONF_ALLOW_BANDWIDTH_SENSORS,
CONF_ALLOW_UPTIME_SENSORS,
CONF_BLOCK_CLIENT,
CONF_CONTROLLER,
CONF_DETECTION_TIME,
CONF_IGNORE_WIRED_BUG,
CONF_POE_CLIENTS,
CONF_SITE_ID,
CONF_SSID_FILTER,
CONF_TRACK_CLIENTS,
CONF_TRACK_DEVICES,
CONF_TRACK_WIRED_CLIENTS,
CONTROLLER_ID,
DEFAULT_ALLOW_BANDWIDTH_SENSORS,
DEFAULT_ALLOW_UPTIME_SENSORS,
DEFAULT_DETECTION_TIME,
DEFAULT_IGNORE_WIRED_BUG,
DEFAULT_POE_CLIENTS,
DEFAULT_TRACK_CLIENTS,
DEFAULT_TRACK_DEVICES,
DEFAULT_TRACK_WIRED_CLIENTS,
DOMAIN as UNIFI_DOMAIN,
LOGGER,
UNIFI_WIRELESS_CLIENTS,
)
from .errors import AuthenticationRequired, CannotConnect
RETRY_TIMER = 15
SUPPORTED_PLATFORMS = [TRACKER_DOMAIN, SENSOR_DOMAIN, SWITCH_DOMAIN]
CLIENT_CONNECTED = (
WIRED_CLIENT_CONNECTED,
WIRELESS_CLIENT_CONNECTED,
WIRELESS_GUEST_CONNECTED,
)
DEVICE_CONNECTED = (
ACCESS_POINT_CONNECTED,
GATEWAY_CONNECTED,
SWITCH_CONNECTED,
)
class UniFiController:
"""Manages a single UniFi Controller."""
def __init__(self, hass, config_entry):
"""Initialize the system."""
self.hass = hass
self.config_entry = config_entry
self.available = True
self.api = None
self.progress = None
self.wireless_clients = None
self.listeners = []
self._site_name = None
self._site_role = None
self.entities = {}
@property
def controller_id(self):
"""Return the controller ID."""
return CONTROLLER_ID.format(host=self.host, site=self.site)
@property
def host(self):
"""Return the host of this controller."""
return self.config_entry.data[CONF_CONTROLLER][CONF_HOST]
@property
def site(self):
"""Return the site of this config entry."""
return self.config_entry.data[CONF_CONTROLLER][CONF_SITE_ID]
@property
def site_name(self):
"""Return the nice name of site."""
return self._site_name
@property
def site_role(self):
"""Return the site user role of this controller."""
return self._site_role
@property
def mac(self):
"""Return the mac address of this controller."""
for client in self.api.clients.values():
if self.host == client.ip:
return client.mac
return None
# Device tracker options
@property
def option_track_clients(self):
"""Config entry option to not track clients."""
return self.config_entry.options.get(CONF_TRACK_CLIENTS, DEFAULT_TRACK_CLIENTS)
@property
def option_track_wired_clients(self):
"""Config entry option to not track wired clients."""
return self.config_entry.options.get(
CONF_TRACK_WIRED_CLIENTS, DEFAULT_TRACK_WIRED_CLIENTS
)
@property
def option_track_devices(self):
"""Config entry option to not track devices."""
return self.config_entry.options.get(CONF_TRACK_DEVICES, DEFAULT_TRACK_DEVICES)
@property
def option_ssid_filter(self):
"""Config entry option listing what SSIDs are being used to track clients."""
return self.config_entry.options.get(CONF_SSID_FILTER, [])
@property
def option_detection_time(self):
"""Config entry option defining number of seconds from last seen to away."""
return timedelta(
seconds=self.config_entry.options.get(
CONF_DETECTION_TIME, DEFAULT_DETECTION_TIME
)
)
@property
def option_ignore_wired_bug(self):
"""Config entry option to ignore wired bug."""
return self.config_entry.options.get(
CONF_IGNORE_WIRED_BUG, DEFAULT_IGNORE_WIRED_BUG
)
# Client control options
@property
def option_poe_clients(self):
"""Config entry option to control poe clients."""
return self.config_entry.options.get(CONF_POE_CLIENTS, DEFAULT_POE_CLIENTS)
@property
def option_block_clients(self):
"""Config entry option with list of clients to control network access."""
return self.config_entry.options.get(CONF_BLOCK_CLIENT, [])
# Statistics sensor options
@property
def option_allow_bandwidth_sensors(self):
"""Config entry option to allow bandwidth sensors."""
return self.config_entry.options.get(
CONF_ALLOW_BANDWIDTH_SENSORS, DEFAULT_ALLOW_BANDWIDTH_SENSORS
)
@property
def option_allow_uptime_sensors(self):
"""Config entry option to allow uptime sensors."""
return self.config_entry.options.get(
CONF_ALLOW_UPTIME_SENSORS, DEFAULT_ALLOW_UPTIME_SENSORS
)
@callback
def async_unifi_signalling_callback(self, signal, data):
"""Handle messages back from UniFi library."""
if signal == SIGNAL_CONNECTION_STATE:
if data == STATE_DISCONNECTED and self.available:
LOGGER.warning("Lost connection to UniFi controller")
if (data == STATE_RUNNING and not self.available) or (
data == STATE_DISCONNECTED and self.available
):
self.available = data == STATE_RUNNING
async_dispatcher_send(self.hass, self.signal_reachable)
if not self.available:
self.hass.loop.call_later(RETRY_TIMER, self.reconnect, True)
else:
LOGGER.info("Connected to UniFi controller")
elif signal == SIGNAL_DATA and data:
if DATA_EVENT in data:
clients_connected = set()
devices_connected = set()
wireless_clients_connected = False
for event in data[DATA_EVENT]:
if event.event in CLIENT_CONNECTED:
clients_connected.add(event.mac)
if not wireless_clients_connected and event.event in (
WIRELESS_CLIENT_CONNECTED,
WIRELESS_GUEST_CONNECTED,
):
wireless_clients_connected = True
elif event.event in DEVICE_CONNECTED:
devices_connected.add(event.mac)
if wireless_clients_connected:
self.update_wireless_clients()
if clients_connected or devices_connected:
async_dispatcher_send(
self.hass,
self.signal_update,
clients_connected,
devices_connected,
)
elif DATA_CLIENT_REMOVED in data:
async_dispatcher_send(
self.hass, self.signal_remove, data[DATA_CLIENT_REMOVED]
)
@property
def signal_reachable(self) -> str:
"""Integration specific event to signal a change in connection status."""
return f"unifi-reachable-{self.controller_id}"
@property
def signal_update(self):
"""Event specific per UniFi entry to signal new data."""
return f"unifi-update-{self.controller_id}"
@property
def signal_remove(self):
"""Event specific per UniFi entry to signal removal of entities."""
return f"unifi-remove-{self.controller_id}"
@property
def signal_options_update(self):
"""Event specific per UniFi entry to signal new options."""
return f"unifi-options-{self.controller_id}"
def update_wireless_clients(self):
"""Update set of known to be wireless clients."""
new_wireless_clients = set()
for client_id in self.api.clients:
if (
client_id not in self.wireless_clients
and not self.api.clients[client_id].is_wired
):
new_wireless_clients.add(client_id)
if new_wireless_clients:
self.wireless_clients |= new_wireless_clients
unifi_wireless_clients = self.hass.data[UNIFI_WIRELESS_CLIENTS]
unifi_wireless_clients.update_data(self.wireless_clients, self.config_entry)
async def async_setup(self):
"""Set up a UniFi controller."""
try:
self.api = await get_controller(
self.hass,
**self.config_entry.data[CONF_CONTROLLER],
async_callback=self.async_unifi_signalling_callback,
)
await self.api.initialize()
sites = await self.api.sites()
for site in sites.values():
if self.site == site["name"]:
self._site_name = site["desc"]
break
description = await self.api.site_description()
self._site_role = description[0]["site_role"]
except CannotConnect as err:
raise ConfigEntryNotReady from err
except Exception as err: # pylint: disable=broad-except
LOGGER.error("Unknown error connecting with UniFi controller: %s", err)
return False
        # Restore clients that are not a part of the active clients list.
entity_registry = await self.hass.helpers.entity_registry.async_get_registry()
for entity in entity_registry.entities.values():
if (
entity.config_entry_id != self.config_entry.entry_id
or "-" not in entity.unique_id
):
continue
mac = ""
if entity.domain == TRACKER_DOMAIN:
mac, _ = entity.unique_id.split("-", 1)
elif entity.domain == SWITCH_DOMAIN:
_, mac = entity.unique_id.split("-", 1)
if mac in self.api.clients or mac not in self.api.clients_all:
continue
client = self.api.clients_all[mac]
self.api.clients.process_raw([client.raw])
LOGGER.debug(
"Restore disconnected client %s (%s)",
entity.entity_id,
client.mac,
)
wireless_clients = self.hass.data[UNIFI_WIRELESS_CLIENTS]
self.wireless_clients = wireless_clients.get_data(self.config_entry)
self.update_wireless_clients()
for platform in SUPPORTED_PLATFORMS:
self.hass.async_create_task(
self.hass.config_entries.async_forward_entry_setup(
self.config_entry, platform
)
)
self.api.start_websocket()
self.config_entry.add_update_listener(self.async_config_entry_updated)
return True
@staticmethod
async def async_config_entry_updated(hass, config_entry) -> None:
"""Handle signals of config entry being updated."""
controller = hass.data[UNIFI_DOMAIN][config_entry.entry_id]
async_dispatcher_send(hass, controller.signal_options_update)
@callback
def reconnect(self, log=False) -> None:
"""Prepare to reconnect UniFi session."""
if log:
LOGGER.info("Will try to reconnect to UniFi controller")
self.hass.loop.create_task(self.async_reconnect())
async def async_reconnect(self) -> None:
"""Try to reconnect UniFi session."""
try:
with async_timeout.timeout(5):
await self.api.login()
self.api.start_websocket()
except (asyncio.TimeoutError, aiounifi.AiounifiException):
self.hass.loop.call_later(RETRY_TIMER, self.reconnect)
@callback
def shutdown(self, event) -> None:
"""Wrap the call to unifi.close.
Used as an argument to EventBus.async_listen_once.
"""
self.api.stop_websocket()
async def async_reset(self):
"""Reset this controller to default state.
Will cancel any scheduled setup retry and will unload
the config entry.
"""
self.api.stop_websocket()
for platform in SUPPORTED_PLATFORMS:
await self.hass.config_entries.async_forward_entry_unload(
self.config_entry, platform
)
for unsub_dispatcher in self.listeners:
unsub_dispatcher()
self.listeners = []
return True
async def get_controller(
hass, host, username, password, port, site, verify_ssl, async_callback=None
):
"""Create a controller object and verify authentication."""
sslcontext = None
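    # verify_ssl may be True, False or a path to a CA bundle. Any truthy value
    # reuses Home Assistant's shared client session, and a string value builds
    # an SSL context from that bundle. A falsy value gets a dedicated session
    # with verification disabled and an unsafe cookie jar, which is needed when
    # the controller is reached by bare IP address.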
if verify_ssl:
session = aiohttp_client.async_get_clientsession(hass)
if isinstance(verify_ssl, str):
sslcontext = ssl.create_default_context(cafile=verify_ssl)
else:
session = aiohttp_client.async_create_clientsession(
hass, verify_ssl=verify_ssl, cookie_jar=CookieJar(unsafe=True)
)
controller = aiounifi.Controller(
host,
username=username,
password=password,
port=port,
site=site,
websession=session,
sslcontext=sslcontext,
callback=async_callback,
)
try:
with async_timeout.timeout(10):
await controller.check_unifi_os()
await controller.login()
return controller
except aiounifi.Unauthorized as err:
LOGGER.warning("Connected to UniFi at %s but not registered.", host)
raise AuthenticationRequired from err
except (asyncio.TimeoutError, aiounifi.RequestError) as err:
LOGGER.error("Error connecting to the UniFi controller at %s", host)
raise CannotConnect from err
except aiounifi.AiounifiException as err:
LOGGER.exception("Unknown UniFi communication error occurred")
raise AuthenticationRequired from err
|
import socket
import mock
import pytest
from paasta_tools import generate_services_file
MOCK_NAMESPACES = [
("foo.main", {"proxy_port": 1024}),
("bar.canary", {"proxy_port": 1025}),
]
@pytest.yield_fixture
def mock_namespaces():
with mock.patch(
"paasta_tools.generate_services_file.get_all_namespaces",
autospec=True,
return_value=MOCK_NAMESPACES,
):
yield
def test_generate_configuration(mock_namespaces):
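    # Every namespace is advertised on the same fixed link-local address; only
    # the proxy port differs per service instance.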
expected = {
"foo.main": {"host": "169.254.255.254", "port": 1024},
"bar.canary": {"host": "169.254.255.254", "port": 1025},
}
assert expected == generate_services_file.generate_configuration()
@mock.patch("paasta_tools.generate_services_file.parse_args", autospec=True)
@mock.patch.object(socket, "getfqdn", return_value="somehost.yelp", autospec=True)
def test_main_yaml(mock_getfqdn, mock_parse_args, tmpdir, mock_namespaces):
services_file = tmpdir.join("services.yaml")
fake_args = mock.Mock()
fake_args.output_format = "yaml"
fake_args.output_filename = services_file.strpath
mock_parse_args.return_value = fake_args
expected_value = (
"# This file is automatically generated by paasta_tools.\n"
"# It was automatically generated at $TIME on somehost.yelp.\n"
"---\n"
"bar.canary:\n"
" host: 169.254.255.254\n"
" port: 1025\n"
"foo.main:\n"
" host: 169.254.255.254\n"
" port: 1024\n"
)
with mock.patch.object(generate_services_file, "datetime") as m:
m.now().isoformat.return_value = "$TIME"
generate_services_file.main()
assert services_file.read() == expected_value
# If the only difference is the timestamp, the file should not be regenerated.
with mock.patch.object(generate_services_file, "datetime") as m:
m.now().isoformat.return_value = "$TIME+1"
generate_services_file.main()
assert services_file.read() == expected_value
@mock.patch("paasta_tools.generate_services_file.parse_args", autospec=True)
def test_main_json(mock_parse_args, tmpdir, mock_namespaces):
services_file = tmpdir.join("services.json")
fake_args = mock.Mock()
fake_args.output_format = "json"
fake_args.output_filename = services_file.strpath
mock_parse_args.return_value = fake_args
generate_services_file.main()
assert (
services_file.read()
== """{
"bar.canary": {
"host": "169.254.255.254",
"port": 1025
},
"foo.main": {
"host": "169.254.255.254",
"port": 1024
}
}"""
)
|
import argparse
import matplotlib.pyplot as plt
import chainer
from chainercv.datasets import coco_bbox_label_names
from chainercv.datasets import coco_instance_segmentation_label_names
from chainercv.links import FasterRCNNFPNResNet101
from chainercv.links import FasterRCNNFPNResNet50
from chainercv.links import MaskRCNNFPNResNet101
from chainercv.links import MaskRCNNFPNResNet50
from chainercv import utils
from chainercv.visualizations import vis_bbox
from chainercv.visualizations import vis_instance_segmentation
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
'--model',
choices=('faster_rcnn_fpn_resnet50', 'faster_rcnn_fpn_resnet101',
'mask_rcnn_fpn_resnet50', 'mask_rcnn_fpn_resnet101'),
default='faster_rcnn_fpn_resnet50')
parser.add_argument('--gpu', type=int, default=-1)
parser.add_argument('--pretrained-model')
parser.add_argument(
'--dataset', choices=('coco',), default='coco')
parser.add_argument('image')
args = parser.parse_args()
if args.model == 'faster_rcnn_fpn_resnet50':
mode = 'bbox'
cls = FasterRCNNFPNResNet50
elif args.model == 'faster_rcnn_fpn_resnet101':
mode = 'bbox'
cls = FasterRCNNFPNResNet101
elif args.model == 'mask_rcnn_fpn_resnet50':
mode = 'instance_segmentation'
cls = MaskRCNNFPNResNet50
elif args.model == 'mask_rcnn_fpn_resnet101':
mode = 'instance_segmentation'
cls = MaskRCNNFPNResNet101
if args.dataset == 'coco':
if args.pretrained_model is None:
args.pretrained_model = 'coco'
if mode == 'bbox':
label_names = coco_bbox_label_names
elif mode == 'instance_segmentation':
label_names = coco_instance_segmentation_label_names
model = cls(n_fg_class=len(label_names),
pretrained_model=args.pretrained_model)
if args.gpu >= 0:
chainer.cuda.get_device_from_id(args.gpu).use()
model.to_gpu()
img = utils.read_image(args.image)
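    # predict() takes an iterable of images and returns per-image lists, so the
    # results for our single image are unpacked at index 0 below.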
if mode == 'bbox':
bboxes, labels, scores = model.predict([img])
bbox = bboxes[0]
label = labels[0]
score = scores[0]
vis_bbox(
img, bbox, label, score, label_names=label_names)
elif mode == 'instance_segmentation':
masks, labels, scores = model.predict([img])
mask = masks[0]
label = labels[0]
score = scores[0]
vis_instance_segmentation(
img, mask, label, score, label_names=label_names)
plt.show()
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import subprocess
from absl import logging
from absl.testing import _bazelize_command
from absl.testing import absltest
from absl.testing import parameterized
@parameterized.named_parameters(
('as_env_variable', True),
('as_commandline_args', False),
)
class TestFilteringTest(absltest.TestCase):
"""Integration tests: Runs a test binary with filtering.
This is done by either setting the filtering environment variable, or passing
the filters as command line arguments.
"""
def setUp(self):
self._test_name = 'absl/testing/tests/absltest_filtering_test_helper'
def _run_filtered(self, test_filter, use_env_variable):
"""Runs the py_test binary in a subprocess.
Args:
test_filter: string, the filter argument to use.
use_env_variable: bool, pass the test filter as environment variable if
True, otherwise pass as command line arguments.
Returns:
(stdout, exit_code) tuple of (string, int).
"""
env = {}
if 'SYSTEMROOT' in os.environ:
# This is used by the random module on Windows to locate crypto
# libraries.
env['SYSTEMROOT'] = os.environ['SYSTEMROOT']
additional_args = []
if test_filter is not None:
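      # TESTBRIDGE_TEST_ONLY is the environment variable Bazel uses to hand
      # --test_filter values to the test binary.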
if use_env_variable:
env['TESTBRIDGE_TEST_ONLY'] = test_filter
elif test_filter:
additional_args.extend(test_filter.split(' '))
proc = subprocess.Popen(
args=([_bazelize_command.get_executable_path(self._test_name)]
+ additional_args),
env=env,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
universal_newlines=True)
stdout = proc.communicate()[0]
logging.info('output: %s', stdout)
return stdout, proc.wait()
def test_no_filter(self, use_env_variable):
out, exit_code = self._run_filtered(None, use_env_variable)
self.assertEqual(1, exit_code)
self.assertIn('class B test E', out)
def test_empty_filter(self, use_env_variable):
out, exit_code = self._run_filtered('', use_env_variable)
self.assertEqual(1, exit_code)
self.assertIn('class B test E', out)
def test_class_filter(self, use_env_variable):
out, exit_code = self._run_filtered('ClassA', use_env_variable)
self.assertEqual(0, exit_code)
self.assertNotIn('class B', out)
def test_method_filter(self, use_env_variable):
out, exit_code = self._run_filtered('ClassB.testA', use_env_variable)
self.assertEqual(0, exit_code)
self.assertNotIn('class A', out)
self.assertNotIn('class B test B', out)
out, exit_code = self._run_filtered('ClassB.testE', use_env_variable)
self.assertEqual(1, exit_code)
self.assertNotIn('class A', out)
def test_multiple_class_and_method_filter(self, use_env_variable):
out, exit_code = self._run_filtered(
'ClassA.testA ClassA.testB ClassB.testC', use_env_variable)
self.assertEqual(0, exit_code)
self.assertIn('class A test A', out)
self.assertIn('class A test B', out)
self.assertNotIn('class A test C', out)
self.assertIn('class B test C', out)
self.assertNotIn('class B test A', out)
def test_not_found_filters(self, use_env_variable):
out, exit_code = self._run_filtered(
'NotExistedClass.not_existed_method', use_env_variable)
self.assertEqual(1, exit_code)
self.assertIn("has no attribute 'NotExistedClass'", out)
if __name__ == '__main__':
absltest.main()
|
import contextlib
import unittest
from absl import flags
from absl.testing import flagsaver
import mock
from perfkitbenchmarker import benchmark_spec
from perfkitbenchmarker import configs
from perfkitbenchmarker import errors
from perfkitbenchmarker import linux_benchmarks
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.configs import benchmark_config_spec
from perfkitbenchmarker.providers.gcp import gce_network
from tests import pkb_common_test_case
from six.moves import builtins
FLAGS = flags.FLAGS
_PROJECT = 'project'
_CLOUD = 'GCP'
_BENCHMARK_NAME = 'iperf'
_URI = 'uri45678'
_COMPONENT = 'test_component'
_CFG_DEFAULT_DEFAULT = """
iperf:
vm_groups:
vm_1:
cloud: GCP
vm_spec:
GCP:
zone: us-west1-b
machine_type: n1-standard-4
vm_2:
cloud: GCP
vm_spec:
GCP:
zone: us-central1-c
machine_type: n1-standard-4
"""
_CFG_MULTI_MULTI = """
iperf:
vm_groups:
vm_1:
cloud: GCP
cidr: 10.0.1.0/24
vm_spec:
GCP:
zone: us-west1-b
machine_type: n1-standard-4
vm_2:
cloud: GCP
cidr: 192.168.1.0/24
vm_spec:
GCP:
zone: us-central1-c
machine_type: n1-standard-4
"""
_CFG_DEFAULT_MULTI = """
iperf:
vm_groups:
vm_1:
cloud: GCP
vm_spec:
GCP:
zone: us-west1-b
machine_type: n1-standard-4
vm_2:
cloud: GCP
cidr: 192.168.1.0/24
vm_spec:
GCP:
zone: us-central1-c
machine_type: n1-standard-4
"""
_CFG_SAME_ZONE_AND_CIDR = """
iperf:
vm_groups:
vm_1:
cloud: GCP
cidr: 10.0.1.0/24
vm_spec:
GCP:
zone: us-west1-b
machine_type: n1-standard-4
vm_2:
cloud: GCP
cidr: 10.0.1.0/24
vm_spec:
GCP:
zone: us-west1-b
machine_type: n1-standard-4
"""
_CFG_SAME_ZONE_DIFF_CIDR = """
iperf:
vm_groups:
vm_1:
cloud: GCP
cidr: 10.0.1.0/24
vm_spec:
GCP:
zone: us-west1-b
machine_type: n1-standard-4
vm_2:
cloud: GCP
cidr: 10.0.2.0/24
vm_spec:
GCP:
zone: us-west1-b
machine_type: n1-standard-4
"""
_REGEX_GCE_NET_NAMES = r'(?:[a-z](?:[-a-z0-9]{0,61}[a-z0-9])?)'
_REGEX_GCE_FW_NAMES = r'(?:[a-z](?:[-a-z0-9]{0,61}[a-z0-9])?)'
class BaseGceNetworkTest(pkb_common_test_case.PkbCommonTestCase):
def _CreateBenchmarkSpecFromYaml(self, yaml_string,
benchmark_name=_BENCHMARK_NAME):
config = configs.LoadConfig(yaml_string, {}, benchmark_name)
return self._CreateBenchmarkSpecFromConfigDict(config, benchmark_name)
def _CreateBenchmarkSpecFromConfigDict(self, config_dict, benchmark_name):
config_spec = benchmark_config_spec.BenchmarkConfigSpec(
benchmark_name,
flag_values=FLAGS,
**config_dict)
benchmark_module = next((b for b in linux_benchmarks.BENCHMARKS
if b.BENCHMARK_NAME == benchmark_name))
return benchmark_spec.BenchmarkSpec(benchmark_module, config_spec, _URI)
class TestGceNetworkConfig(BaseGceNetworkTest):
def testLoadDefaultConfig(self):
spec = self._CreateBenchmarkSpecFromYaml(_CFG_DEFAULT_DEFAULT)
with PatchCriticalObjects([('', '', 0)]):
spec.ConstructVirtualMachines()
self.assertDictContainsSubset({'cidr': None}, spec.custom_subnets['vm_1'])
self.assertDictContainsSubset({'cidr': None}, spec.custom_subnets['vm_2'])
self.assertLen(spec.networks, 1)
for k in spec.networks.keys():
self.assertItemsEqual(['10.0.0.0/8'], spec.networks[k].all_nets)
def testLoadDefaultConfigWithFlags(self):
FLAGS.gce_subnet_region = 'us-north1-b'
FLAGS.gce_subnet_addr = '1.2.3.4/33'
spec = self._CreateBenchmarkSpecFromYaml(_CFG_DEFAULT_DEFAULT)
with PatchCriticalObjects([('', '', 0)]):
spec.ConstructVirtualMachines()
self.assertDictContainsSubset({'cidr': None}, spec.custom_subnets['vm_1'])
self.assertDictContainsSubset({'cidr': None}, spec.custom_subnets['vm_2'])
self.assertLen(spec.networks, 1)
for k in spec.networks.keys():
self.assertItemsEqual(['1.2.3.4/33'], spec.networks[k].all_nets)
def testLoadCustomConfig(self):
spec = self._CreateBenchmarkSpecFromYaml(_CFG_MULTI_MULTI)
with PatchCriticalObjects([('', '', 0)]):
spec.ConstructVirtualMachines()
self.assertDictContainsSubset({'cidr': '10.0.1.0/24'},
spec.custom_subnets['vm_1'])
self.assertDictContainsSubset({'cidr': '192.168.1.0/24'},
spec.custom_subnets['vm_2'])
self.assertLen(spec.networks, 2)
for k in spec.networks.keys():
self.assertItemsEqual(['192.168.1.0/24', '10.0.1.0/24'],
spec.networks[k].all_nets)
def testLoadCustomConfigWithFlags(self):
FLAGS.gce_subnet_region = 'us-north1-b'
FLAGS.gce_subnet_addr = '1.2.3.4/33'
spec = self._CreateBenchmarkSpecFromYaml(_CFG_MULTI_MULTI)
with PatchCriticalObjects([('', '', 0)]):
spec.ConstructVirtualMachines()
self.assertDictContainsSubset({'cidr': '10.0.1.0/24'},
spec.custom_subnets['vm_1'])
self.assertDictContainsSubset({'cidr': '192.168.1.0/24'},
spec.custom_subnets['vm_2'])
self.assertLen(spec.networks, 2)
for k in spec.networks.keys():
self.assertItemsEqual(['192.168.1.0/24', '10.0.1.0/24'],
spec.networks[k].all_nets)
def testLoadMixedConfig(self):
spec = self._CreateBenchmarkSpecFromYaml(_CFG_DEFAULT_MULTI)
with PatchCriticalObjects([('', '', 0)]):
spec.ConstructVirtualMachines()
self.assertDictContainsSubset({'cidr': None}, spec.custom_subnets['vm_1'])
self.assertDictContainsSubset({'cidr': '192.168.1.0/24'},
spec.custom_subnets['vm_2'])
self.assertLen(spec.networks, 2)
for k in spec.networks.keys():
self.assertItemsEqual(['10.0.0.0/8', '192.168.1.0/24'],
spec.networks[k].all_nets)
def testLoadMixedConfigWithFlags(self):
FLAGS.gce_subnet_region = 'us-north1-b'
FLAGS.gce_subnet_addr = '1.2.3.4/33'
spec = self._CreateBenchmarkSpecFromYaml(_CFG_DEFAULT_MULTI)
with PatchCriticalObjects([('', '', 0)]):
spec.ConstructVirtualMachines()
self.assertDictContainsSubset({'cidr': None}, spec.custom_subnets['vm_1'])
self.assertDictContainsSubset({'cidr': '192.168.1.0/24'},
spec.custom_subnets['vm_2'])
self.assertLen(spec.networks, 2)
for k in spec.networks.keys():
self.assertItemsEqual(['1.2.3.4/33', '192.168.1.0/24'],
spec.networks[k].all_nets)
def testLoadSameZoneCidrConfig(self):
spec = self._CreateBenchmarkSpecFromYaml(_CFG_SAME_ZONE_AND_CIDR)
with PatchCriticalObjects([('', '', 0)]):
spec.ConstructVirtualMachines()
self.assertDictContainsSubset({'cidr': '10.0.1.0/24'},
spec.custom_subnets['vm_1'])
self.assertDictContainsSubset({'cidr': '10.0.1.0/24'},
spec.custom_subnets['vm_2'])
self.assertLen(spec.networks, 1)
for k in spec.networks.keys():
self.assertItemsEqual(['10.0.1.0/24'], spec.networks[k].all_nets)
def testLoadSameZoneDiffCidrConfig(self):
spec = self._CreateBenchmarkSpecFromYaml(_CFG_SAME_ZONE_DIFF_CIDR)
with PatchCriticalObjects([('', '', 0)]):
spec.ConstructVirtualMachines()
self.assertDictContainsSubset({'cidr': '10.0.1.0/24'},
spec.custom_subnets['vm_1'])
self.assertDictContainsSubset({'cidr': '10.0.2.0/24'},
spec.custom_subnets['vm_2'])
self.assertLen(spec.networks, 2)
for k in spec.networks.keys():
self.assertItemsEqual(['10.0.1.0/24', '10.0.2.0/24'],
spec.networks[k].all_nets)
class TestGceNetworkNames(BaseGceNetworkTest):
def setUp(self):
super(TestGceNetworkNames, self).setUp()
# need a benchmarkspec in the context to run
FLAGS.run_uri = _URI
config_spec = benchmark_config_spec.BenchmarkConfigSpec(
'cluster_boot', flag_values=FLAGS)
benchmark_spec.BenchmarkSpec(mock.Mock(), config_spec, 'uid')
########
# Network Names
########
def testGetDefaultNetworkName(self):
project = _PROJECT
zone = 'us-north1-b'
cidr = None
# long_cidr = '123.567.901/13' # @TODO net_utils for address sanity checks
vm = mock.Mock(zone=zone, project=project, cidr=cidr)
net = gce_network.GceNetwork.GetNetwork(vm)
net_name = net._MakeGceNetworkName()
net_type = 'default'
cidr_string = None
uri = _URI
expected_netname = '-'.join(
i for i in ('pkb-network', net_type, cidr_string, uri) if
i and i not in 'default')
self.assertEqual(expected_netname,
net_name) # pkb-network-uri45678 (default)
self.assertRegexpMatches(net_name, _REGEX_GCE_NET_NAMES)
def testGetSingleNetworkName(self):
FLAGS.gce_subnet_region = 'us-south1-c'
FLAGS.gce_subnet_addr = '2.2.3.4/33'
project = _PROJECT
zone = 'us-north1-b'
cidr = None
vm = mock.Mock(zone=zone, project=project, cidr=cidr)
net = gce_network.GceNetwork.GetNetwork(vm)
net_name = net._MakeGceNetworkName()
net_type = 'single'
cidr_string = '2-2-3-4-33'
uri = _URI
expected_netname = '-'.join(
i for i in ('pkb-network', net_type, cidr_string, uri) if
i and i not in 'default')
self.assertEqual(
expected_netname,
net_name) # pkb-network-single-2-2-3-4-33-uri45678 (single)
self.assertRegexpMatches(net_name, _REGEX_GCE_NET_NAMES)
def testGetMultiNetworkName(self):
project = _PROJECT
zone = 'us-north1-b'
cidr = '1.2.3.4/56'
vm = mock.Mock(zone=zone, project=project, cidr=cidr)
net = gce_network.GceNetwork.GetNetwork(vm)
net_name = net._MakeGceNetworkName()
net_type = 'multi'
cidr_string = '1-2-3-4-56'
uri = _URI
expected_netname = '-'.join(
i for i in ('pkb-network', net_type, cidr_string, uri) if
i and i not in 'default')
self.assertEqual(expected_netname,
net_name) # pkb-network-multi-1-2-3-4-56-uri45678 (multi)
self.assertRegexpMatches(net_name, _REGEX_GCE_NET_NAMES)
@flagsaver.flagsaver(
gce_network_name='my-network', gce_subnet_name='my-subnet')
def testSpecifyNetworkName(self):
vm = mock.Mock(zone='us-north1-b', project=_PROJECT, cidr='1.2.3.4/56')
net = gce_network.GceNetwork.GetNetwork(vm)
self.assertEqual('my-network', net.network_resource.name)
self.assertEqual('my-subnet', net.subnet_resource.name)
########
# FireWall Names
########
def testGetDefaultFWName(self):
project = _PROJECT
zone = 'us-north1-b'
cidr = None
vm = mock.Mock(zone=zone, project=project, cidr=cidr)
net = gce_network.GceNetwork.GetNetwork(vm)
fw_name = net._MakeGceFWRuleName()
net_type = 'default'
src_cidr_string = 'internal'
dst_cidr_string = '10-0-0-0-8'
src_port = None
dst_port = None
uri = _URI
expected_name = '-'.join(
i for i in (
net_type, src_cidr_string, dst_cidr_string, src_port,
dst_port, uri) if i)
self.assertEqual(expected_name, fw_name)
self.assertRegexpMatches(fw_name, _REGEX_GCE_FW_NAMES)
def testGetSingleFWName(self):
FLAGS.gce_subnet_region = 'us-south1-c'
FLAGS.gce_subnet_addr = '2.2.3.4/33'
project = _PROJECT
zone = 'us-north1-b'
cidr = None
lo_port = None
hi_port = None
vm = mock.Mock(zone=zone, project=project, cidr=cidr)
net = gce_network.GceNetwork.GetNetwork(vm)
fw_name = net._MakeGceFWRuleName(
net_type=None, src_cidr=None, dst_cidr=None, port_range_lo=lo_port,
port_range_hi=hi_port, uri=None)
net_type = 'single'
src_cidr_string = 'internal'
dst_cidr_string = '2-2-3-4-33'
src_port = None
dst_port = None
uri = _URI
expected_name = '-'.join(
i for i in (net_type, src_cidr_string, dst_cidr_string, src_port,
dst_port, uri)
if i)
self.assertEqual(expected_name,
fw_name) # single-internal-2-2-3-4-33-uri45678
self.assertRegexpMatches(fw_name, _REGEX_GCE_FW_NAMES)
def testGetMultiFWNameWithPorts(self):
project = _PROJECT
zone = 'us-north1-b'
cidr = '1.2.3.4/56'
# cidr = None
vm = mock.Mock(zone=zone, project=project, cidr=cidr)
net = gce_network.GceNetwork.GetNetwork(vm)
dst_cidr = None
lo_port = 49152
hi_port = 65535
fw_name = net._MakeGceFWRuleName(
net_type=None, src_cidr=None, dst_cidr=dst_cidr,
port_range_lo=lo_port,
port_range_hi=hi_port, uri=None)
prefix = None
net_type = 'multi'
src_cidr_string = 'internal'
dst_cidr_string = '1-2-3-4-56'
src_port = '49152'
dst_port = '65535'
uri = _URI
expected_name = '-'.join(
i for i in (prefix, net_type, src_cidr_string, dst_cidr_string,
src_port, dst_port, uri) if i)
self.assertEqual(expected_name,
fw_name) # multi-internal-1-2-3-4-56-49152-65535-uri45678
self.assertRegexpMatches(fw_name, _REGEX_GCE_FW_NAMES)
def testGetMultiFWNameWithPortsDst(self):
project = _PROJECT
zone = 'us-north1-b'
cidr = '1.2.3.4/56'
vm = mock.Mock(zone=zone, project=project, cidr=cidr)
net = gce_network.GceNetwork.GetNetwork(vm)
dst_cidr = '123.567.9.1/13'
lo_port = 49152
hi_port = 65535
fw_name = net._MakeGceFWRuleName(
net_type=None, src_cidr=None, dst_cidr=dst_cidr,
port_range_lo=lo_port,
port_range_hi=hi_port, uri=None)
prefix = 'perfkit-firewall'
net_type = 'multi'
src_cidr_string = '1-2-3-4-56'
dst_cidr_string = '123-567-9-1-13'
src_port = '49152'
dst_port = '65535'
uri = _URI
expected_name = '-'.join(
i for i in (prefix, net_type, src_cidr_string, dst_cidr_string,
src_port, dst_port, uri) if i)
    # perfkit-firewall-multi-1-2-3-4-56-123-567-9-1-13-49152-65535-uri45678
self.assertEqual(expected_name, fw_name)
self.assertRegexpMatches(fw_name, _REGEX_GCE_FW_NAMES)
# found in tests/gce_virtual_machine_test.py
@contextlib.contextmanager
def PatchCriticalObjects(retvals=None):
"""A context manager that patches a few critical objects with mocks."""
def ReturnVal(*unused_arg, **unused_kwargs):
del unused_arg
del unused_kwargs
return ('', '', 0) if retvals is None else retvals.pop(0)
with mock.patch(
vm_util.__name__ + '.IssueCommand',
side_effect=ReturnVal) as issue_command, mock.patch(
builtins.__name__ + '.open'), mock.patch(vm_util.__name__ +
'.NamedTemporaryFile'):
yield issue_command
class TestGceNetwork(BaseGceNetworkTest):
def setUp(self):
super(TestGceNetwork, self).setUp()
# need a benchmarkspec in the context to run
config_spec = benchmark_config_spec.BenchmarkConfigSpec(
'cluster_boot', flag_values=FLAGS)
benchmark_spec.BenchmarkSpec(mock.Mock(), config_spec, 'uid')
def testGetNetwork(self):
project = 'myproject'
zone = 'us-east1-a'
vm = mock.Mock(zone=zone, project=project, cidr=None)
net = gce_network.GceNetwork.GetNetwork(vm)
self.assertEqual(project, net.project)
self.assertEqual(zone, net.zone)
class GceFirewallRuleTest(pkb_common_test_case.PkbCommonTestCase):
@mock.patch('time.sleep', side_effect=lambda _: None)
def testGceFirewallRuleSuccessfulAfterRateLimited(self, mock_cmd):
fake_rets = [('stdout', 'Rate Limit Exceeded', 1),
('stdout', 'some warning perhaps', 0)]
with PatchCriticalObjects(fake_rets) as issue_command:
fr = gce_network.GceFirewallRule('name', 'project', 'allow',
'network_name')
fr._Create()
self.assertEqual(issue_command.call_count, 2)
@mock.patch('time.sleep', side_effect=lambda _: None)
def testGceFirewallRuleGenericErrorAfterRateLimited(self, mock_cmd):
fake_rets = [('stdout', 'Rate Limit Exceeded', 1),
('stdout', 'Rate Limit Exceeded', 1),
('stdout', 'some random firewall error', 1)]
with PatchCriticalObjects(fake_rets) as issue_command:
with self.assertRaises(errors.VmUtil.IssueCommandError):
fr = gce_network.GceFirewallRule('name', 'project', 'allow',
'network_name')
fr._Create()
self.assertEqual(issue_command.call_count, 3)
@mock.patch('time.sleep', side_effect=lambda _: None)
def testGceFirewallRuleAlreadyExistsAfterRateLimited(self, mock_cmd):
fake_rets = [('stdout', 'Rate Limit Exceeded', 1),
('stdout', 'Rate Limit Exceeded', 1),
('stdout', 'firewall already exists', 1)]
with PatchCriticalObjects(fake_rets) as issue_command:
fr = gce_network.GceFirewallRule('name', 'project', 'allow',
'network_name')
fr._Create()
self.assertEqual(issue_command.call_count, 3)
@mock.patch('time.sleep', side_effect=lambda _: None)
def testGceFirewallRuleGenericError(self, mock_cmd):
fake_rets = [('stdout', 'some random firewall error', 1)]
with PatchCriticalObjects(fake_rets) as issue_command:
with self.assertRaises(errors.VmUtil.IssueCommandError):
fr = gce_network.GceFirewallRule('name', 'project', 'allow',
'network_name')
fr._Create()
self.assertEqual(issue_command.call_count, 1)
if __name__ == '__main__':
unittest.main()
|
import numpy as np
from ._logging import logger
from ..defaults import _handle_default
def _set_pandas_dtype(df, columns, dtype):
"""Try to set the right columns to dtype."""
for column in columns:
df[column] = df[column].astype(dtype)
logger.info('Converting "%s" to "%s"...' % (column, dtype))
def _scale_dataframe_data(inst, data, picks, scalings):
ch_types = inst.get_channel_types()
ch_types_used = list()
scalings = _handle_default('scalings', scalings)
for tt in scalings.keys():
if tt in ch_types:
ch_types_used.append(tt)
for tt in ch_types_used:
scaling = scalings[tt]
idx = [ii for ii in range(len(picks)) if ch_types[ii] == tt]
if len(idx):
data[:, idx] *= scaling
return data
def _convert_times(inst, times, time_format):
"""Convert vector of time in seconds to ms, datetime, or timedelta."""
# private function; pandas already checked in calling function
from pandas import to_timedelta
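    # 'ms' keeps a plain integer column, 'timedelta' keeps relative offsets,
    # and 'datetime' produces absolute times by adding the recording start and
    # inst.info['meas_date'] (which must therefore be set).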
if time_format == 'ms':
times = np.round(times * 1e3).astype(np.int64)
elif time_format == 'timedelta':
times = to_timedelta(times, unit='s')
elif time_format == 'datetime':
times = (to_timedelta(times + inst.first_time, unit='s') +
inst.info['meas_date'])
return times
def _build_data_frame(inst, data, picks, long_format, mindex, index,
default_index, col_names=None, col_kind='channel'):
"""Build DataFrame from MNE-object-derived data array."""
# private function; pandas already checked in calling function
from pandas import DataFrame
from ..source_estimate import _BaseSourceEstimate
# build DataFrame
if col_names is None:
col_names = [inst.ch_names[p] for p in picks]
df = DataFrame(data, columns=col_names)
for i, (k, v) in enumerate(mindex):
df.insert(i, k, v)
# build Index
if long_format:
df.set_index(default_index, inplace=True)
df.columns.name = col_kind
elif index is not None:
df.set_index(index, inplace=True)
if set(index) == set(default_index):
df.columns.name = col_kind
# long format
if long_format:
df = df.stack().reset_index()
df.rename(columns={0: 'value'}, inplace=True)
# add column for channel types (as appropriate)
ch_map = (None if isinstance(inst, _BaseSourceEstimate) else
dict(zip(np.array(inst.ch_names)[picks],
np.array(inst.get_channel_types())[picks])))
if ch_map is not None:
col_index = len(df.columns) - 1
ch_type = df['channel'].map(ch_map)
df.insert(col_index, 'ch_type', ch_type)
# restore index
if index is not None:
df.set_index(index, inplace=True)
# convert channel/vertex/ch_type columns to factors
to_factor = [c for c in df.columns.tolist()
if c not in ('time', 'value')]
_set_pandas_dtype(df, to_factor, 'category')
return df
|
import os
from slacker import Slacker
def post_slack():
"""Post slack message."""
try:
token = os.environ['SLACK_TOKEN']
slack = Slacker(token)
obj = slack.chat.post_message('#general', 'Hello fellow slackers!')
print(obj.successful, obj.__dict__['body']['channel'], obj.__dict__[
'body']['ts'])
except KeyError as ex:
print('Environment variable %s not set.' % str(ex))
if __name__ == '__main__':
post_slack()
|
from homeassistant.const import DEVICE_CLASS_TIMESTAMP
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
import homeassistant.util.dt as dt_util
from .const import DATA_UPDATED, DOMAIN, PRAYER_TIMES_ICON, SENSOR_TYPES
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Islamic prayer times sensor platform."""
client = hass.data[DOMAIN]
entities = []
for sensor_type in SENSOR_TYPES:
entities.append(IslamicPrayerTimeSensor(sensor_type, client))
async_add_entities(entities, True)
class IslamicPrayerTimeSensor(Entity):
"""Representation of an Islamic prayer time sensor."""
def __init__(self, sensor_type, client):
"""Initialize the Islamic prayer time sensor."""
self.sensor_type = sensor_type
self.client = client
@property
def name(self):
"""Return the name of the sensor."""
return f"{self.sensor_type} {SENSOR_TYPES[self.sensor_type]}"
@property
def unique_id(self):
"""Return the unique id of the entity."""
return self.sensor_type
@property
def icon(self):
"""Icon to display in the front end."""
return PRAYER_TIMES_ICON
@property
def state(self):
"""Return the state of the sensor."""
return (
self.client.prayer_times_info.get(self.sensor_type)
.astimezone(dt_util.UTC)
.isoformat()
)
@property
def should_poll(self):
"""Disable polling."""
return False
@property
def device_class(self):
"""Return the device class."""
return DEVICE_CLASS_TIMESTAMP
async def async_added_to_hass(self):
"""Handle entity which will be added."""
self.async_on_remove(
async_dispatcher_connect(self.hass, DATA_UPDATED, self.async_write_ha_state)
)
|
import urwid
class MultiPudding(urwid.Widget):
_sizing = frozenset(['flow', 'box'])
def rows(self, size, focus=False):
return 1
def render(self, size, focus=False):
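        # Flow widgets are rendered with a one-tuple (maxcol,), box widgets
        # with (maxcol, maxrow); handling both is what the combined _sizing
        # set advertises.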
if len(size) == 1:
(maxcol,) = size
maxrow = 1
else:
(maxcol, maxrow) = size
num_pudding = maxcol / len("Pudding")
return urwid.TextCanvas(["Pudding" * num_pudding] * maxrow,
maxcol=maxcol)
|
import distutils.cmd
import distutils.command.build
import distutils.command.build_py
import distutils.command.install
import distutils.command.install_data
import distutils.dir_util
import distutils.dist
import glob
import os.path
import platform
import sys
from distutils.log import info
try:
import distro
except ImportError:
python_version = tuple(int(x) for x in platform.python_version_tuple()[:2])
if python_version >= (3, 8):
print(
'Missing build requirement "distro" Python module; '
'install paths may be incorrect', file=sys.stderr)
windows_build = os.name == 'nt'
def has_help(self):
return "build_help" in self.distribution.cmdclass and not windows_build
def has_icons(self):
return "build_icons" in self.distribution.cmdclass
def has_i18n(self):
return "build_i18n" in self.distribution.cmdclass and not windows_build
def has_data(self):
return "build_data" in self.distribution.cmdclass
distutils.command.build.build.sub_commands.extend([
("build_i18n", has_i18n),
("build_icons", has_icons),
("build_help", has_help),
("build_data", has_data),
])
class MeldDistribution(distutils.dist.Distribution):
global_options = distutils.dist.Distribution.global_options + [
("no-update-icon-cache", None, "Don't run gtk-update-icon-cache"),
("no-compile-schemas", None, "Don't compile gsettings schemas"),
]
def __init__(self, *args, **kwargs):
self.no_update_icon_cache = False
self.no_compile_schemas = False
super().__init__(*args, **kwargs)
class build_data(distutils.cmd.Command):
gschemas = [
('share/glib-2.0/schemas', ['data/org.gnome.meld.gschema.xml'])
]
frozen_gschemas = [
('share/meld', ['data/gschemas.compiled']),
]
style_source = "data/styles/*.style-scheme.xml.in"
style_target_dir = 'share/meld/styles'
# FIXME: This is way too much hard coding, but I really hope
# it also doesn't last that long.
resource_source = "meld/resources/meld.gresource.xml"
resource_target = "org.gnome.Meld.gresource"
def initialize_options(self):
pass
def finalize_options(self):
pass
def get_data_files(self):
data_files = []
build_path = os.path.join('build', 'data')
if not os.path.exists(build_path):
os.makedirs(build_path)
info("compiling gresources")
resource_dir = os.path.dirname(self.resource_source)
target = os.path.join(build_path, self.resource_target)
self.spawn([
"glib-compile-resources",
"--target={}".format(target),
"--sourcedir={}".format(resource_dir),
self.resource_source,
])
data_files.append(('share/meld', [target]))
if windows_build:
gschemas = self.frozen_gschemas
else:
gschemas = self.gschemas
data_files.extend(gschemas)
if windows_build:
# These should get moved/installed by i18n, but until that
# runs on Windows we need this hack.
styles = glob.glob(self.style_source)
import shutil
targets = []
for style in styles:
assert style.endswith('.in')
target = style[:-len('.in')]
shutil.copyfile(style, target)
targets.append(target)
data_files.append((self.style_target_dir, targets))
return data_files
def run(self):
data_files = self.distribution.data_files
data_files.extend(self.get_data_files())
class build_help(distutils.cmd.Command):
help_dir = 'help'
def initialize_options(self):
pass
def finalize_options(self):
pass
def get_data_files(self):
data_files = []
name = self.distribution.metadata.name
if "LINGUAS" in os.environ:
self.selected_languages = os.environ["LINGUAS"].split()
else:
self.selected_languages = [
d for d in os.listdir(self.help_dir) if os.path.isdir(d)
]
if 'C' not in self.selected_languages:
self.selected_languages.append('C')
self.C_PAGES = glob.glob(os.path.join(self.help_dir, 'C', '*.page'))
self.C_EXTRA = glob.glob(os.path.join(self.help_dir, 'C', '*.xml'))
for lang in self.selected_languages:
source_path = os.path.join(self.help_dir, lang)
if not os.path.exists(source_path):
continue
build_path = os.path.join('build', self.help_dir, lang)
if not os.path.exists(build_path):
os.makedirs(build_path)
if lang != 'C':
po_file = os.path.join(source_path, lang + '.po')
mo_file = os.path.join(build_path, lang + '.mo')
msgfmt = ['msgfmt', po_file, '-o', mo_file]
self.spawn(msgfmt)
for page in self.C_PAGES:
itstool = [
'itstool', '-m', mo_file, '-o', build_path, page]
self.spawn(itstool)
for extra in self.C_EXTRA:
extra_path = os.path.join(
build_path, os.path.basename(extra))
if os.path.exists(extra_path):
os.unlink(extra_path)
os.symlink(os.path.relpath(extra, source_path), extra_path)
else:
distutils.dir_util.copy_tree(source_path, build_path)
xml_files = glob.glob('%s/*.xml' % build_path)
mallard_files = glob.glob('%s/*.page' % build_path)
path_help = os.path.join('share', 'help', lang, name)
path_figures = os.path.join(path_help, 'figures')
data_files.append((path_help, xml_files + mallard_files))
figures = glob.glob('%s/figures/*.png' % build_path)
if figures:
data_files.append((path_figures, figures))
return data_files
def run(self):
data_files = self.distribution.data_files
data_files.extend(self.get_data_files())
self.check_help()
def check_help(self):
for lang in self.selected_languages:
build_path = os.path.join('build', self.help_dir, lang)
if not os.path.exists(build_path):
continue
pages = [os.path.basename(p) for p in self.C_PAGES]
for page in pages:
page_path = os.path.join(build_path, page)
if not os.path.exists(page_path):
info("skipping missing file %s", page_path)
continue
lint = ['xmllint', '--noout', '--noent', '--path', build_path,
'--xinclude', page_path]
self.spawn(lint)
class build_icons(distutils.cmd.Command):
icon_dir = os.path.join("data", "icons")
target = "share/icons"
frozen_target = "share/meld/icons"
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
target_dir = self.frozen_target if windows_build else self.target
data_files = self.distribution.data_files
for theme in glob.glob(os.path.join(self.icon_dir, "*")):
for size in glob.glob(os.path.join(theme, "*")):
for category in glob.glob(os.path.join(size, "*")):
icons = (glob.glob(os.path.join(category, "*.png")) +
glob.glob(os.path.join(category, "*.svg")))
icons = [
icon for icon in icons if not os.path.islink(icon)]
if not icons:
continue
data_files.append(("%s/%s/%s/%s" %
(target_dir,
os.path.basename(theme),
os.path.basename(size),
os.path.basename(category)),
icons))
class build_i18n(distutils.cmd.Command):
bug_contact = None
domain = "meld"
po_dir = "po"
merge_po = False
# FIXME: It's ridiculous to specify these here, but I know of no other
# way except magically extracting them from self.distribution.data_files
desktop_files = [('share/applications', glob.glob("data/*.desktop.in"))]
xml_files = [
('share/meld/styles', glob.glob("data/styles/*.style-scheme.xml.in")),
('share/metainfo', glob.glob("data/*.appdata.xml.in")),
('share/mime/packages', glob.glob("data/mime/*.xml.in"))
]
schemas_files = []
key_files = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def _rebuild_po(self):
# If there is a po/LINGUAS file, or the LINGUAS environment variable
# is set, only compile the languages listed there.
selected_languages = None
linguas_file = os.path.join(self.po_dir, "LINGUAS")
if "LINGUAS" in os.environ:
selected_languages = os.environ["LINGUAS"].split()
elif os.path.isfile(linguas_file):
selected_languages = open(linguas_file).read().split()
# If we're on Windows, assume we're building frozen and make a bunch
# of insane assumptions.
if windows_build:
msgfmt = "C:\\Python27\\Tools\\i18n\\msgfmt"
else:
msgfmt = "msgfmt"
# Update po(t) files and print a report
# We have to change the working dir to the po dir for intltool
cmd = [
"intltool-update",
(self.merge_po and "-r" or "-p"), "-g", self.domain
]
wd = os.getcwd()
os.chdir(self.po_dir)
self.spawn(cmd)
os.chdir(wd)
max_po_mtime = 0
for po_file in glob.glob("%s/*.po" % self.po_dir):
lang = os.path.basename(po_file[:-3])
if selected_languages and lang not in selected_languages:
continue
mo_dir = os.path.join("build", "mo", lang, "LC_MESSAGES")
mo_file = os.path.join(mo_dir, "%s.mo" % self.domain)
if not os.path.exists(mo_dir):
os.makedirs(mo_dir)
cmd = [msgfmt, po_file, "-o", mo_file]
po_mtime = os.path.getmtime(po_file)
mo_mtime = (
os.path.exists(mo_file) and os.path.getmtime(mo_file) or 0)
if po_mtime > max_po_mtime:
max_po_mtime = po_mtime
if po_mtime > mo_mtime:
self.spawn(cmd)
targetpath = os.path.join("share/locale", lang, "LC_MESSAGES")
self.distribution.data_files.append((targetpath, (mo_file,)))
self.max_po_mtime = max_po_mtime
def run(self):
if self.bug_contact is not None:
os.environ["XGETTEXT_ARGS"] = "--msgid-bugs-address=%s " % \
self.bug_contact
# These copies are pure hacks to work around not having the
# Meson-based initial variable templating in distutils.
import shutil
shutil.copyfile(
'data/org.gnome.meld.desktop.in.in',
'data/org.gnome.meld.desktop.in',
)
shutil.copyfile(
'data/org.gnome.meld.appdata.xml.in.in',
'data/org.gnome.meld.appdata.xml.in',
)
self._rebuild_po()
intltool_switches = [
(self.xml_files, "-x"),
(self.desktop_files, "-d"),
(self.schemas_files, "-s"),
(self.key_files, "-k"),
]
for file_set, switch in intltool_switches:
for target, files in file_set:
build_target = os.path.join("build", target)
if not os.path.exists(build_target):
os.makedirs(build_target)
files_merged = []
for file in files:
file_merged = os.path.basename(file)
if file_merged.endswith(".in"):
file_merged = file_merged[:-3]
file_merged = os.path.join(build_target, file_merged)
cmd = ["intltool-merge", switch, self.po_dir, file,
file_merged]
mtime_merged = (os.path.exists(file_merged) and
os.path.getmtime(file_merged) or 0)
mtime_file = os.path.getmtime(file)
if (mtime_merged < self.max_po_mtime or
mtime_merged < mtime_file):
# Only build if output is older than input (.po,.in)
self.spawn(cmd)
files_merged.append(file_merged)
self.distribution.data_files.append((target, files_merged))
class build_py(distutils.command.build_py.build_py):
"""Insert real package installation locations into conf module
Adapted from gottengeography
"""
data_line = 'DATADIR = "%s"'
locale_line = 'LOCALEDIR = "%s"'
def build_module(self, module, module_file, package):
if module_file == 'meld/conf.py':
with open(module_file) as f:
contents = f.read()
try:
options = self.distribution.get_option_dict('install')
prefix = options['prefix'][1]
except KeyError as e:
print(e)
prefix = sys.prefix
datadir = os.path.join(prefix, 'share', 'meld')
localedir = os.path.join(prefix, 'share', 'locale')
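            # meld/conf.py brackets the lines to patch with '# START' and
            # '# END' sentinels; those lines and everything between them are
            # replaced with the real data and locale directories for this
            # install prefix.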
start, end = 0, 0
lines = contents.splitlines()
for i, line in enumerate(lines):
if line.startswith('# START'):
start = i
elif line.startswith('# END'):
end = i
if start and end:
lines[start:end + 1] = [
self.data_line % datadir,
self.locale_line % localedir,
]
module_file = module_file + "-installed"
contents = "\n".join(lines)
with open(module_file, 'w') as f:
f.write(contents)
distutils.command.build_py.build_py.build_module(
self, module, module_file, package)
class install(distutils.command.install.install):
def finalize_options(self):
special_cases = ('debian', 'ubuntu', 'linuxmint')
if platform.system() == 'Linux':
# linux_distribution has been removed in Python 3.8; we require
# the third-party distro package for future handling.
try:
distribution = platform.linux_distribution()[0].lower()
except AttributeError:
try:
distribution = distro.id()
except NameError:
distribution = 'unknown'
if distribution in special_cases:
# Maintain an explicit install-layout, but use deb by default
specified_layout = getattr(self, 'install_layout', None)
self.install_layout = specified_layout or 'deb'
distutils.command.install.install.finalize_options(self)
class install_data(distutils.command.install_data.install_data):
def run(self):
distutils.command.install_data.install_data.run(self)
if not self.distribution.no_update_icon_cache:
# TODO: Generalise to non-hicolor icon themes
info("running gtk-update-icon-cache")
icon_path = os.path.join(self.install_dir, "share/icons/hicolor")
self.spawn(["gtk-update-icon-cache", "-q", "-t", icon_path])
if not self.distribution.no_compile_schemas:
info("compiling gsettings schemas")
gschema_path = build_data.gschemas[0][0]
gschema_install = os.path.join(self.install_dir, gschema_path)
self.spawn(["glib-compile-schemas", gschema_install])
|
import pyvera as pv
from homeassistant.core import HomeAssistant
from .common import ComponentFactory, new_simple_controller_config
from tests.async_mock import MagicMock
async def test_switch(
hass: HomeAssistant, vera_component_factory: ComponentFactory
) -> None:
"""Test function."""
vera_device = MagicMock(spec=pv.VeraSwitch) # type: pv.VeraSwitch
vera_device.device_id = 1
vera_device.vera_device_id = vera_device.device_id
vera_device.name = "dev1"
vera_device.category = pv.CATEGORY_SWITCH
vera_device.is_switched_on = MagicMock(return_value=False)
entity_id = "switch.dev1_1"
component_data = await vera_component_factory.configure_component(
hass=hass,
controller_config=new_simple_controller_config(
devices=(vera_device,), legacy_entity_unique_id=False
),
)
update_callback = component_data.controller_data[0].update_callback
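    # update_callback mimics the Vera controller pushing a state change back
    # into the integration after each service call.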
assert hass.states.get(entity_id).state == "off"
await hass.services.async_call(
"switch",
"turn_on",
{"entity_id": entity_id},
)
await hass.async_block_till_done()
vera_device.switch_on.assert_called()
vera_device.is_switched_on.return_value = True
update_callback(vera_device)
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == "on"
await hass.services.async_call(
"switch",
"turn_off",
{"entity_id": entity_id},
)
await hass.async_block_till_done()
vera_device.switch_off.assert_called()
vera_device.is_switched_on.return_value = False
update_callback(vera_device)
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == "off"
|
from datetime import timedelta
import logging
from async_timeout import timeout
from pynzbgetapi import NZBGetAPI, NZBGetAPIException
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_PORT,
CONF_SCAN_INTERVAL,
CONF_SSL,
CONF_USERNAME,
CONF_VERIFY_SSL,
)
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
class NZBGetDataUpdateCoordinator(DataUpdateCoordinator):
"""Class to manage fetching NZBGet data."""
def __init__(self, hass: HomeAssistantType, *, config: dict, options: dict):
"""Initialize global NZBGet data updater."""
self.nzbget = NZBGetAPI(
config[CONF_HOST],
config.get(CONF_USERNAME),
config.get(CONF_PASSWORD),
config[CONF_SSL],
config[CONF_VERIFY_SSL],
config[CONF_PORT],
)
self._completed_downloads_init = False
self._completed_downloads = {}
update_interval = timedelta(seconds=options[CONF_SCAN_INTERVAL])
super().__init__(
hass,
_LOGGER,
name=DOMAIN,
update_interval=update_interval,
)
def _check_completed_downloads(self, history):
"""Check history for newly completed downloads."""
actual_completed_downloads = {
(x["Name"], x["Category"], x["Status"]) for x in history
}
if self._completed_downloads_init:
tmp_completed_downloads = list(
actual_completed_downloads.difference(self._completed_downloads)
)
for download in tmp_completed_downloads:
self.hass.bus.fire(
"nzbget_download_complete",
{
"name": download[0],
"category": download[1],
"status": download[2],
},
)
self._completed_downloads = actual_completed_downloads
self._completed_downloads_init = True
async def _async_update_data(self) -> dict:
"""Fetch data from NZBGet."""
def _update_data() -> dict:
"""Fetch data from NZBGet via sync functions."""
status = self.nzbget.status()
history = self.nzbget.history()
self._check_completed_downloads(history)
return {
"status": status,
"downloads": history,
}
try:
async with timeout(4):
return await self.hass.async_add_executor_job(_update_data)
except NZBGetAPIException as error:
raise UpdateFailed(f"Invalid response from API: {error}") from error
|
import pkgutil
import types
import sys
import pathlib
import importlib
import argparse
from typing import Callable, Iterator, List, Optional, Set, Tuple
import attr
from PyQt5.QtCore import pyqtSlot
from qutebrowser import components
from qutebrowser.config import config
from qutebrowser.utils import log, standarddir
from qutebrowser.misc import objects
# ModuleInfo objects for all loaded plugins
_module_infos = []
@attr.s
class InitContext:
"""Context an extension gets in its init hook."""
data_dir: pathlib.Path = attr.ib()
config_dir: pathlib.Path = attr.ib()
args: argparse.Namespace = attr.ib()
@attr.s
class ModuleInfo:
"""Information attached to an extension module.
This gets used by qutebrowser.api.hook.
"""
_ConfigChangedHooksType = List[Tuple[Optional[str], Callable]]
skip_hooks: bool = attr.ib(False)
init_hook: Optional[Callable] = attr.ib(None)
config_changed_hooks: _ConfigChangedHooksType = attr.ib(attr.Factory(list))
@attr.s
class ExtensionInfo:
"""Information about a qutebrowser extension."""
name: str = attr.ib()
def add_module_info(module: types.ModuleType) -> ModuleInfo:
"""Add ModuleInfo to a module (if not added yet)."""
# pylint: disable=protected-access
if not hasattr(module, '__qute_module_info'):
module.__qute_module_info = ModuleInfo() # type: ignore[attr-defined]
return module.__qute_module_info # type: ignore[attr-defined]
def load_components(*, skip_hooks: bool = False) -> None:
"""Load everything from qutebrowser.components."""
for info in walk_components():
_load_component(info, skip_hooks=skip_hooks)
def walk_components() -> Iterator[ExtensionInfo]:
"""Yield ExtensionInfo objects for all modules."""
if hasattr(sys, 'frozen'):
yield from _walk_pyinstaller()
else:
yield from _walk_normal()
def _on_walk_error(name: str) -> None:
raise ImportError("Failed to import {}".format(name))
def _walk_normal() -> Iterator[ExtensionInfo]:
"""Walk extensions when not using PyInstaller."""
for _finder, name, ispkg in pkgutil.walk_packages(
# Only packages have a __path__ attribute,
# but we're sure this is one.
path=components.__path__, # type: ignore[attr-defined]
prefix=components.__name__ + '.',
onerror=_on_walk_error):
if ispkg:
continue
yield ExtensionInfo(name=name)
def _walk_pyinstaller() -> Iterator[ExtensionInfo]:
"""Walk extensions when using PyInstaller.
See https://github.com/pyinstaller/pyinstaller/issues/1905
Inspired by:
https://github.com/webcomics/dosage/blob/master/dosagelib/loader.py
"""
toc: Set[str] = set()
for importer in pkgutil.iter_importers('qutebrowser'):
if hasattr(importer, 'toc'):
toc |= importer.toc
for name in toc:
if name.startswith(components.__name__ + '.'):
yield ExtensionInfo(name=name)
def _get_init_context() -> InitContext:
"""Get an InitContext object."""
return InitContext(data_dir=pathlib.Path(standarddir.data()),
config_dir=pathlib.Path(standarddir.config()),
args=objects.args)
def _load_component(info: ExtensionInfo, *,
skip_hooks: bool = False) -> types.ModuleType:
"""Load the given extension and run its init hook (if any).
Args:
skip_hooks: Whether to skip all hooks for this module.
This is used to only run @cmdutils.register decorators.
"""
log.extensions.debug("Importing {}".format(info.name))
mod = importlib.import_module(info.name)
mod_info = add_module_info(mod)
if skip_hooks:
mod_info.skip_hooks = True
if mod_info.init_hook is not None and not skip_hooks:
log.extensions.debug("Running init hook {!r}"
.format(mod_info.init_hook.__name__))
mod_info.init_hook(_get_init_context())
_module_infos.append(mod_info)
return mod
@pyqtSlot(str)
def _on_config_changed(changed_name: str) -> None:
"""Call config_changed hooks if the config changed."""
for mod_info in _module_infos:
if mod_info.skip_hooks:
continue
for option, hook in mod_info.config_changed_hooks:
if option is None:
hook()
else:
cfilter = config.change_filter(option)
cfilter.validate()
if cfilter.check_match(changed_name):
hook()
def init() -> None:
config.instance.changed.connect(_on_config_changed)
|
from flexx import flx
from flexxamples.demos.splines import Splines
from flexxamples.demos.twente import Twente
from flexxamples.demos.drawing import Drawing
class Demo(flx.Widget):
def init(self):
with flx.TabLayout():
Splines(title='Spline demo')
Twente(title='Temperature vis')
Drawing(title='Drawing app')
flx.YoutubeWidget(title='Video')
if __name__ == '__main__':
a = flx.App(Demo, title='Flexx demo')
m = a.launch()
flx.run()
|
import numpy as np
import unittest
import chainer
from chainer.datasets import TupleDataset
from chainer.iterators import SerialIterator
from chainer import testing
from chainercv.extensions import InstanceSegmentationVOCEvaluator
from chainercv.utils.testing import attr
from chainermn import create_communicator
class _InstanceSegmentationStubLink(chainer.Link):
def __init__(self, masks, labels, initial_count=0):
super(_InstanceSegmentationStubLink, self).__init__()
self.count = initial_count
self.masks = masks
self.labels = labels
def predict(self, imgs):
n_img = len(imgs)
masks = self.masks[self.count:self.count + n_img]
labels = self.labels[self.count:self.count + n_img]
scores = [np.ones_like(l) for l in labels]
self.count += n_img
return masks, labels, scores
class TestInstanceSegmentationVOCEvaluator(unittest.TestCase):
def setUp(self):
masks = np.random.uniform(size=(10, 5, 32, 48)) > 0.5
labels = np.ones((10, 5), dtype=np.int32)
self.dataset = TupleDataset(
np.random.uniform(size=(10, 3, 32, 48)),
masks, labels)
self.link = _InstanceSegmentationStubLink(masks, labels)
self.iterator = SerialIterator(
self.dataset, 1, repeat=False, shuffle=False)
self.evaluator = InstanceSegmentationVOCEvaluator(
self.iterator, self.link, label_names=('cls0', 'cls1', 'cls2'))
self.expected_ap = 1
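        # Every ground-truth and predicted label is 1, so the AP for cls1 is
        # exactly 1, cls0/cls2 have no instances and come out as NaN, and the
        # reported mAP (which ignores the NaN classes) therefore also equals 1.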
def test_evaluate(self):
reporter = chainer.Reporter()
reporter.add_observer('target', self.link)
with reporter:
mean = self.evaluator.evaluate()
        # No observation is reported to the current reporter. Instead the
        # evaluator collects results in order to calculate their mean.
self.assertEqual(len(reporter.observation), 0)
np.testing.assert_equal(mean['target/map'], self.expected_ap)
np.testing.assert_equal(mean['target/ap/cls0'], np.nan)
np.testing.assert_equal(mean['target/ap/cls1'], self.expected_ap)
np.testing.assert_equal(mean['target/ap/cls2'], np.nan)
def test_call(self):
mean = self.evaluator()
# main is used as default
np.testing.assert_equal(mean['main/map'], self.expected_ap)
np.testing.assert_equal(mean['main/ap/cls0'], np.nan)
np.testing.assert_equal(mean['main/ap/cls1'], self.expected_ap)
np.testing.assert_equal(mean['main/ap/cls2'], np.nan)
def test_evaluator_name(self):
self.evaluator.name = 'eval'
mean = self.evaluator()
# name is used as a prefix
np.testing.assert_equal(mean['eval/main/map'], self.expected_ap)
np.testing.assert_equal(mean['eval/main/ap/cls0'], np.nan)
np.testing.assert_equal(mean['eval/main/ap/cls1'], self.expected_ap)
np.testing.assert_equal(mean['eval/main/ap/cls2'], np.nan)
def test_current_report(self):
reporter = chainer.Reporter()
with reporter:
mean = self.evaluator()
# The result is reported to the current reporter.
self.assertEqual(reporter.observation, mean)
@attr.mpi
class TestInstanceSegmentationVOCEvaluatorMPI(unittest.TestCase):
def setUp(self):
self.comm = create_communicator('naive')
batchsize_per_process = 5
batchsize = batchsize_per_process * self.comm.size
if self.comm.rank == 0:
masks = [np.random.uniform(size=(5, 32, 48)) > 0.5
for _ in range(10)]
labels = [np.random.choice(np.arange(3, dtype=np.int32), size=(5,))
for _ in range(10)]
else:
masks = None
labels = None
initial_count = self.comm.rank * batchsize_per_process
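        # Offset each rank's stub by its share of the batch so that, taken
        # together, the per-process predictions are expected to line up with
        # the examples each rank receives from the scattered batches.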
masks = self.comm.bcast_obj(masks)
labels = self.comm.bcast_obj(labels)
self.masks = masks
self.labels = labels
self.dataset = TupleDataset(
np.random.uniform(size=(10, 3, 32, 48)),
masks, labels)
self.initial_count = initial_count
self.batchsize = batchsize
def test_consistency(self):
reporter = chainer.Reporter()
if self.comm.rank == 0:
multi_iterator = SerialIterator(
self.dataset, self.batchsize, repeat=False, shuffle=False)
else:
multi_iterator = None
multi_link = _InstanceSegmentationStubLink(
self.masks, self.labels, self.initial_count)
multi_evaluator = InstanceSegmentationVOCEvaluator(
multi_iterator, multi_link,
label_names=('cls0', 'cls1', 'cls2'),
comm=self.comm)
reporter.add_observer('target', multi_link)
with reporter:
multi_mean = multi_evaluator.evaluate()
if self.comm.rank != 0:
self.assertEqual(multi_mean, {})
return
single_iterator = SerialIterator(
self.dataset, self.batchsize, repeat=False, shuffle=False)
single_link = _InstanceSegmentationStubLink(
self.masks, self.labels)
single_evaluator = InstanceSegmentationVOCEvaluator(
single_iterator, single_link,
label_names=('cls0', 'cls1', 'cls2'))
reporter.add_observer('target', single_link)
with reporter:
single_mean = single_evaluator.evaluate()
self.assertEqual(set(multi_mean.keys()), set(single_mean.keys()))
for key in multi_mean.keys():
np.testing.assert_equal(single_mean[key], multi_mean[key])
testing.run_module(__name__, __file__)
|
import pandas as pd
import pytz
import pytest
from qstrader.broker.portfolio.portfolio import Portfolio
from qstrader.broker.portfolio.portfolio_event import PortfolioEvent
from qstrader.broker.transaction.transaction import Transaction
def test_initial_settings_for_default_portfolio():
"""
Test that the initial settings are as they should be
for two specified portfolios.
"""
start_dt = pd.Timestamp('2017-10-05 08:00:00', tz=pytz.UTC)
# Test a default Portfolio
port1 = Portfolio(start_dt)
assert port1.start_dt == start_dt
assert port1.current_dt == start_dt
assert port1.currency == "USD"
assert port1.starting_cash == 0.0
assert port1.portfolio_id is None
assert port1.name is None
assert port1.total_market_value == 0.0
assert port1.cash == 0.0
assert port1.total_equity == 0.0
# Test a Portfolio with keyword arguments
port2 = Portfolio(
start_dt, starting_cash=1234567.56, currency="USD",
portfolio_id=12345, name="My Second Test Portfolio"
)
assert port2.start_dt == start_dt
assert port2.current_dt == start_dt
assert port2.currency == "USD"
assert port2.starting_cash == 1234567.56
assert port2.portfolio_id == 12345
assert port2.name == "My Second Test Portfolio"
assert port2.total_equity == 1234567.56
assert port2.total_market_value == 0.0
assert port2.cash == 1234567.56
def test_portfolio_currency_settings():
"""
Test that USD and GBP currencies are correctly set with
some currency keyword arguments and that the currency
formatter produces the correct strings.
"""
start_dt = pd.Timestamp('2017-10-05 08:00:00', tz=pytz.UTC)
# Test a US portfolio produces correct values
cur1 = "USD"
port1 = Portfolio(start_dt, currency=cur1)
assert port1.currency == "USD"
# Test a UK portfolio produces correct values
cur2 = "GBP"
port2 = Portfolio(start_dt, currency=cur2)
assert port2.currency == "GBP"
def test_subscribe_funds_behaviour():
"""
Test subscribe_funds raises for incorrect datetime
Test subscribe_funds raises for negative amount
Test subscribe_funds correctly adds positive
amount, generates correct event and modifies time
"""
start_dt = pd.Timestamp('2017-10-05 08:00:00', tz=pytz.UTC)
earlier_dt = pd.Timestamp('2017-10-04 08:00:00', tz=pytz.UTC)
later_dt = pd.Timestamp('2017-10-06 08:00:00', tz=pytz.UTC)
pos_cash = 1000.0
neg_cash = -1000.0
port = Portfolio(start_dt, starting_cash=2000.0)
# Test subscribe_funds raises for incorrect datetime
with pytest.raises(ValueError):
port.subscribe_funds(earlier_dt, pos_cash)
# Test subscribe_funds raises for negative amount
with pytest.raises(ValueError):
port.subscribe_funds(start_dt, neg_cash)
# Test subscribe_funds correctly adds positive
# amount, generates correct event and modifies time
port.subscribe_funds(later_dt, pos_cash)
assert port.cash == 3000.0
assert port.total_market_value == 0.0
assert port.total_equity == 3000.0
pe1 = PortfolioEvent(
dt=start_dt, type='subscription',
description="SUBSCRIPTION", debit=0.0,
credit=2000.0, balance=2000.0
)
pe2 = PortfolioEvent(
dt=later_dt, type='subscription',
description="SUBSCRIPTION", debit=0.0,
credit=1000.0, balance=3000.0
)
assert port.history == [pe1, pe2]
assert port.current_dt == later_dt
def test_withdraw_funds_behaviour():
"""
Test withdraw_funds raises for incorrect datetime
Test withdraw_funds raises for negative amount
Test withdraw_funds raises for lack of cash
Test withdraw_funds correctly subtracts positive
amount, generates correct event and modifies time
"""
start_dt = pd.Timestamp('2017-10-05 08:00:00', tz=pytz.UTC)
earlier_dt = pd.Timestamp('2017-10-04 08:00:00', tz=pytz.UTC)
later_dt = pd.Timestamp('2017-10-06 08:00:00', tz=pytz.UTC)
even_later_dt = pd.Timestamp('2017-10-07 08:00:00', tz=pytz.UTC)
pos_cash = 1000.0
neg_cash = -1000.0
port_raise = Portfolio(start_dt)
# Test withdraw_funds raises for incorrect datetime
with pytest.raises(ValueError):
port_raise.withdraw_funds(earlier_dt, pos_cash)
# Test withdraw_funds raises for negative amount
with pytest.raises(ValueError):
port_raise.withdraw_funds(start_dt, neg_cash)
# Test withdraw_funds raises for not enough cash
port_broke = Portfolio(start_dt)
port_broke.subscribe_funds(later_dt, 1000.0)
with pytest.raises(ValueError):
port_broke.withdraw_funds(later_dt, 2000.0)
# Test withdraw_funds correctly subtracts positive
# amount, generates correct event and modifies time
# Initial subscribe
port_cor = Portfolio(start_dt)
port_cor.subscribe_funds(later_dt, pos_cash)
pe_sub = PortfolioEvent(
dt=later_dt, type='subscription',
description="SUBSCRIPTION", debit=0.0,
credit=1000.0, balance=1000.0
)
assert port_cor.cash == 1000.0
assert port_cor.total_market_value == 0.0
assert port_cor.total_equity == 1000.0
assert port_cor.history == [pe_sub]
assert port_cor.current_dt == later_dt
# Now withdraw
port_cor.withdraw_funds(even_later_dt, 468.0)
pe_wdr = PortfolioEvent(
dt=even_later_dt, type='withdrawal',
description="WITHDRAWAL", debit=468.0,
credit=0.0, balance=532.0
)
assert port_cor.cash == 532.0
assert port_cor.total_market_value == 0.0
assert port_cor.total_equity == 532.0
assert port_cor.history == [pe_sub, pe_wdr]
assert port_cor.current_dt == even_later_dt
def test_transact_asset_behaviour():
"""
Test transact_asset raises for incorrect time
Test correct total_cash and total_securities_value
for correct transaction (commission etc), correct
portfolio event and correct time update
"""
start_dt = pd.Timestamp('2017-10-05 08:00:00', tz=pytz.UTC)
earlier_dt = pd.Timestamp('2017-10-04 08:00:00', tz=pytz.UTC)
later_dt = pd.Timestamp('2017-10-06 08:00:00', tz=pytz.UTC)
even_later_dt = pd.Timestamp('2017-10-07 08:00:00', tz=pytz.UTC)
port = Portfolio(start_dt)
asset = 'EQ:AAA'
# Test transact_asset raises for incorrect time
tn_early = Transaction(
asset=asset,
quantity=100,
dt=earlier_dt,
price=567.0,
order_id=1,
commission=0.0
)
with pytest.raises(ValueError):
port.transact_asset(tn_early)
    # Subscribe an initial tranche of funds (on its own this would not cover
    # the transaction made further below)
port.subscribe_funds(later_dt, 1000.0)
assert port.cash == 1000.0
assert port.total_market_value == 0.0
assert port.total_equity == 1000.0
pe_sub1 = PortfolioEvent(
dt=later_dt, type='subscription',
description="SUBSCRIPTION", debit=0.0,
credit=1000.0, balance=1000.0
)
# Test correct total_cash and total_securities_value
# for correct transaction (commission etc), correct
# portfolio event and correct time update
port.subscribe_funds(even_later_dt, 99000.0)
assert port.cash == 100000.0
assert port.total_market_value == 0.0
assert port.total_equity == 100000.0
pe_sub2 = PortfolioEvent(
dt=even_later_dt, type='subscription',
description="SUBSCRIPTION", debit=0.0,
credit=99000.0, balance=100000.0
)
tn_even_later = Transaction(
asset=asset,
quantity=100,
dt=even_later_dt,
price=567.0,
order_id=1,
commission=15.78
)
port.transact_asset(tn_even_later)
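    # The trade costs 100 * 567.00 + 15.78 commission = 56715.78, so cash
    # falls to 100000.00 - 56715.78 = 43284.22 while the position is marked
    # at 100 * 567.00 = 56700.00, leaving total equity of 99984.22.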
assert port.cash == 43284.22
assert port.total_market_value == 56700.00
assert port.total_equity == 99984.22
description = "LONG 100 EQ:AAA 567.00 07/10/2017"
pe_tn = PortfolioEvent(
dt=even_later_dt, type="asset_transaction",
description=description, debit=56715.78,
credit=0.0, balance=43284.22
)
assert port.history == [pe_sub1, pe_sub2, pe_tn]
assert port.current_dt == even_later_dt
def test_portfolio_to_dict_empty_portfolio():
"""
Test 'portfolio_to_dict' method for an empty Portfolio.
"""
start_dt = pd.Timestamp('2017-10-05 08:00:00', tz=pytz.UTC)
port = Portfolio(start_dt)
port.subscribe_funds(start_dt, 100000.0)
port_dict = port.portfolio_to_dict()
assert port_dict == {}
def test_portfolio_to_dict_for_two_holdings():
"""
Test portfolio_to_dict for two holdings.
"""
start_dt = pd.Timestamp('2017-10-05 08:00:00', tz=pytz.UTC)
asset1_dt = pd.Timestamp('2017-10-06 08:00:00', tz=pytz.UTC)
asset2_dt = pd.Timestamp('2017-10-07 08:00:00', tz=pytz.UTC)
update_dt = pd.Timestamp('2017-10-08 08:00:00', tz=pytz.UTC)
asset1 = 'EQ:AAA'
asset2 = 'EQ:BBB'
port = Portfolio(start_dt, portfolio_id='1234')
port.subscribe_funds(start_dt, 100000.0)
tn_asset1 = Transaction(
asset=asset1, quantity=100, dt=asset1_dt,
price=567.0, order_id=1, commission=15.78
)
port.transact_asset(tn_asset1)
tn_asset2 = Transaction(
asset=asset2, quantity=100, dt=asset2_dt,
price=123.0, order_id=2, commission=7.64
)
port.transact_asset(tn_asset2)
port.update_market_value_of_asset(asset2, 134.0, update_dt)
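    # Expected holdings: asset1 is still marked at its trade price, so its
    # unrealised PnL is just the -15.78 commission; asset2 has been marked up
    # from 123.0 to 134.0, so 100 * (134.0 - 123.0) - 7.64 = 1092.36 (modulo
    # floating-point representation).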
test_holdings = {
asset1: {
"quantity": 100,
"market_value": 56700.0,
"unrealised_pnl": -15.78,
"realised_pnl": 0.0,
"total_pnl": -15.78
},
asset2: {
"quantity": 100,
"market_value": 13400.0,
"unrealised_pnl": 1092.3600000000006,
"realised_pnl": 0.0,
"total_pnl": 1092.3600000000006
}
}
port_holdings = port.portfolio_to_dict()
# This is needed because we're not using Decimal
# datatypes and have to compare slightly differing
# floating point representations
for asset in (asset1, asset2):
for key, val in test_holdings[asset].items():
assert port_holdings[asset][key] == pytest.approx(
test_holdings[asset][key]
)
def test_update_market_value_of_asset_not_in_list():
"""
Test update_market_value_of_asset for asset not in list.
"""
start_dt = pd.Timestamp('2017-10-05 08:00:00', tz=pytz.UTC)
later_dt = pd.Timestamp('2017-10-06 08:00:00', tz=pytz.UTC)
port = Portfolio(start_dt)
asset = 'EQ:AAA'
update = port.update_market_value_of_asset(
asset, 54.34, later_dt
)
assert update is None
def test_update_market_value_of_asset_negative_price():
"""
Test update_market_value_of_asset for
asset with negative price.
"""
start_dt = pd.Timestamp('2017-10-05 08:00:00', tz=pytz.UTC)
later_dt = pd.Timestamp('2017-10-06 08:00:00', tz=pytz.UTC)
port = Portfolio(start_dt)
asset = 'EQ:AAA'
port.subscribe_funds(later_dt, 100000.0)
tn_asset = Transaction(
asset=asset,
quantity=100,
dt=later_dt,
price=567.0,
order_id=1,
commission=15.78
)
port.transact_asset(tn_asset)
with pytest.raises(ValueError):
port.update_market_value_of_asset(
asset, -54.34, later_dt
)
def test_update_market_value_of_asset_earlier_date():
"""
Test update_market_value_of_asset for asset
with current_trade_date in past
"""
start_dt = pd.Timestamp('2017-10-05 08:00:00', tz=pytz.UTC)
earlier_dt = pd.Timestamp('2017-10-04 08:00:00', tz=pytz.UTC)
later_dt = pd.Timestamp('2017-10-06 08:00:00', tz=pytz.UTC)
port = Portfolio(start_dt, portfolio_id='1234')
asset = 'EQ:AAA'
port.subscribe_funds(later_dt, 100000.0)
tn_asset = Transaction(
asset=asset,
quantity=100,
dt=later_dt,
price=567.0,
order_id=1,
commission=15.78
)
port.transact_asset(tn_asset)
with pytest.raises(ValueError):
port.update_market_value_of_asset(
asset, 50.23, earlier_dt
)
def test_history_to_df_empty():
"""
Test 'history_to_df' with no events.
"""
start_dt = pd.Timestamp('2017-10-05 08:00:00', tz=pytz.UTC)
port = Portfolio(start_dt)
hist_df = port.history_to_df()
test_df = pd.DataFrame(
[], columns=[
"date", "type", "description",
"debit", "credit", "balance"
]
)
test_df.set_index(keys=["date"], inplace=True)
assert sorted(test_df.columns) == sorted(hist_df.columns)
assert len(test_df) == len(hist_df)
assert len(hist_df) == 0
|
import numpy as np
from ..core import indexing
from ..core.utils import Frozen, FrozenDict, close_on_error
from ..core.variable import Variable
from .common import AbstractDataStore, BackendArray, BackendEntrypoint
from .file_manager import CachingFileManager
from .locks import HDF5_LOCK, NETCDFC_LOCK, combine_locks, ensure_lock
from .store import open_backend_dataset_store
# pseudonetcdf can invoke netCDF libraries internally
PNETCDF_LOCK = combine_locks([HDF5_LOCK, NETCDFC_LOCK])
class PncArrayWrapper(BackendArray):
def __init__(self, variable_name, datastore):
self.datastore = datastore
self.variable_name = variable_name
array = self.get_array()
self.shape = array.shape
self.dtype = np.dtype(array.dtype)
def get_array(self, needs_lock=True):
ds = self.datastore._manager.acquire(needs_lock)
return ds.variables[self.variable_name]
def __getitem__(self, key):
return indexing.explicit_indexing_adapter(
key, self.shape, indexing.IndexingSupport.OUTER_1VECTOR, self._getitem
)
def _getitem(self, key):
with self.datastore.lock:
array = self.get_array(needs_lock=False)
return array[key]
class PseudoNetCDFDataStore(AbstractDataStore):
"""Store for accessing datasets via PseudoNetCDF"""
@classmethod
def open(cls, filename, lock=None, mode=None, **format_kwargs):
from PseudoNetCDF import pncopen
keywords = {"kwargs": format_kwargs}
# only include mode if explicitly passed
if mode is not None:
keywords["mode"] = mode
if lock is None:
lock = PNETCDF_LOCK
manager = CachingFileManager(pncopen, filename, lock=lock, **keywords)
return cls(manager, lock)
def __init__(self, manager, lock=None):
self._manager = manager
self.lock = ensure_lock(lock)
@property
def ds(self):
return self._manager.acquire()
def open_store_variable(self, name, var):
data = indexing.LazilyOuterIndexedArray(PncArrayWrapper(name, self))
attrs = {k: getattr(var, k) for k in var.ncattrs()}
return Variable(var.dimensions, data, attrs)
def get_variables(self):
return FrozenDict(
(k, self.open_store_variable(k, v)) for k, v in self.ds.variables.items()
)
def get_attrs(self):
return Frozen({k: getattr(self.ds, k) for k in self.ds.ncattrs()})
def get_dimensions(self):
return Frozen(self.ds.dimensions)
def get_encoding(self):
return {
"unlimited_dims": {
k for k in self.ds.dimensions if self.ds.dimensions[k].isunlimited()
}
}
def close(self):
self._manager.close()
def open_backend_dataset_pseudonetcdf(
filename_or_obj,
mask_and_scale=False,
decode_times=None,
concat_characters=None,
decode_coords=None,
drop_variables=None,
use_cftime=None,
decode_timedelta=None,
mode=None,
lock=None,
**format_kwargs,
):
store = PseudoNetCDFDataStore.open(
filename_or_obj, lock=lock, mode=mode, **format_kwargs
)
with close_on_error(store):
ds = open_backend_dataset_store(
store,
mask_and_scale=mask_and_scale,
decode_times=decode_times,
concat_characters=concat_characters,
decode_coords=decode_coords,
drop_variables=drop_variables,
use_cftime=use_cftime,
decode_timedelta=decode_timedelta,
)
return ds
# *args and **kwargs are not allowed in open_backend_dataset_ kwargs,
# unless the open_dataset_parameters are explicitly defined like this:
open_dataset_parameters = (
"filename_or_obj",
"mask_and_scale",
"decode_times",
"concat_characters",
"decode_coords",
"drop_variables",
"use_cftime",
"decode_timedelta",
"mode",
"lock",
)
pseudonetcdf_backend = BackendEntrypoint(
open_dataset=open_backend_dataset_pseudonetcdf,
open_dataset_parameters=open_dataset_parameters,
)
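# Hypothetical usage sketch (the file name and ``format`` keyword are
# illustrative assumptions, not taken from this module): with this entrypoint
# registered, opening a PseudoNetCDF-readable file is expected to look
# roughly like
#
#     import xarray as xr
#     ds = xr.open_dataset("example.uamiv", engine="pseudonetcdf",
#                          format="uamiv")
#
# where ``format`` would end up in ``format_kwargs`` and be handed to
# ``PseudoNetCDF.pncopen`` by the store above.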
|
import logging
from pyqwikswitch.qwikswitch import SENSORS
from homeassistant.components.binary_sensor import BinarySensorEntity
from homeassistant.core import callback
from . import DOMAIN as QWIKSWITCH, QSEntity
_LOGGER = logging.getLogger(__name__)
async def async_setup_platform(hass, _, add_entities, discovery_info=None):
"""Add binary sensor from the main Qwikswitch component."""
if discovery_info is None:
return
qsusb = hass.data[QWIKSWITCH]
_LOGGER.debug("Setup qwikswitch.binary_sensor %s, %s", qsusb, discovery_info)
devs = [QSBinarySensor(sensor) for sensor in discovery_info[QWIKSWITCH]]
add_entities(devs)
class QSBinarySensor(QSEntity, BinarySensorEntity):
"""Sensor based on a Qwikswitch relay/dimmer module."""
_val = False
def __init__(self, sensor):
"""Initialize the sensor."""
super().__init__(sensor["id"], sensor["name"])
self.channel = sensor["channel"]
sensor_type = sensor["type"]
self._decode, _ = SENSORS[sensor_type]
self._invert = not sensor.get("invert", False)
self._class = sensor.get("class", "door")
@callback
def update_packet(self, packet):
"""Receive update packet from QSUSB."""
val = self._decode(packet, channel=self.channel)
_LOGGER.debug(
"Update %s (%s:%s) decoded as %s: %s",
self.entity_id,
self.qsid,
self.channel,
val,
packet,
)
if val is not None:
self._val = bool(val)
self.async_write_ha_state()
@property
def is_on(self):
"""Check if device is on (non-zero)."""
return self._val == self._invert
@property
def unique_id(self):
"""Return a unique identifier for this sensor."""
return f"qs{self.qsid}:{self.channel}"
@property
def device_class(self):
"""Return the class of this sensor."""
return self._class
|
import logging
import math
import voluptuous as vol
from homeassistant import util
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_UNIT_OF_MEASUREMENT,
CONF_NAME,
EVENT_HOMEASSISTANT_START,
PERCENTAGE,
STATE_UNKNOWN,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import async_track_state_change_event
_LOGGER = logging.getLogger(__name__)
ATTR_CRITICAL_TEMP = "estimated_critical_temp"
ATTR_DEWPOINT = "dewpoint"
CONF_CALIBRATION_FACTOR = "calibration_factor"
CONF_INDOOR_HUMIDITY = "indoor_humidity_sensor"
CONF_INDOOR_TEMP = "indoor_temp_sensor"
CONF_OUTDOOR_TEMP = "outdoor_temp_sensor"
DEFAULT_NAME = "Mold Indicator"
MAGNUS_K2 = 17.62
MAGNUS_K3 = 243.12
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_INDOOR_TEMP): cv.entity_id,
vol.Required(CONF_OUTDOOR_TEMP): cv.entity_id,
vol.Required(CONF_INDOOR_HUMIDITY): cv.entity_id,
vol.Optional(CONF_CALIBRATION_FACTOR): vol.Coerce(float),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up MoldIndicator sensor."""
name = config.get(CONF_NAME, DEFAULT_NAME)
indoor_temp_sensor = config.get(CONF_INDOOR_TEMP)
outdoor_temp_sensor = config.get(CONF_OUTDOOR_TEMP)
indoor_humidity_sensor = config.get(CONF_INDOOR_HUMIDITY)
calib_factor = config.get(CONF_CALIBRATION_FACTOR)
async_add_entities(
[
MoldIndicator(
name,
hass.config.units.is_metric,
indoor_temp_sensor,
outdoor_temp_sensor,
indoor_humidity_sensor,
calib_factor,
)
],
False,
)
class MoldIndicator(Entity):
"""Represents a MoldIndication sensor."""
def __init__(
self,
name,
is_metric,
indoor_temp_sensor,
outdoor_temp_sensor,
indoor_humidity_sensor,
calib_factor,
):
"""Initialize the sensor."""
self._state = None
self._name = name
self._indoor_temp_sensor = indoor_temp_sensor
self._indoor_humidity_sensor = indoor_humidity_sensor
self._outdoor_temp_sensor = outdoor_temp_sensor
self._calib_factor = calib_factor
self._is_metric = is_metric
self._available = False
self._entities = {
self._indoor_temp_sensor,
self._indoor_humidity_sensor,
self._outdoor_temp_sensor,
}
self._dewpoint = None
self._indoor_temp = None
self._outdoor_temp = None
self._indoor_hum = None
self._crit_temp = None
async def async_added_to_hass(self):
"""Register callbacks."""
@callback
def mold_indicator_sensors_state_listener(event):
"""Handle for state changes for dependent sensors."""
new_state = event.data.get("new_state")
old_state = event.data.get("old_state")
entity = event.data.get("entity_id")
_LOGGER.debug(
"Sensor state change for %s that had old state %s and new state %s",
entity,
old_state,
new_state,
)
if self._update_sensor(entity, old_state, new_state):
self.async_schedule_update_ha_state(True)
@callback
def mold_indicator_startup(event):
"""Add listeners and get 1st state."""
_LOGGER.debug("Startup for %s", self.entity_id)
async_track_state_change_event(
self.hass, list(self._entities), mold_indicator_sensors_state_listener
)
# Read initial state
indoor_temp = self.hass.states.get(self._indoor_temp_sensor)
outdoor_temp = self.hass.states.get(self._outdoor_temp_sensor)
indoor_hum = self.hass.states.get(self._indoor_humidity_sensor)
schedule_update = self._update_sensor(
self._indoor_temp_sensor, None, indoor_temp
)
schedule_update = (
False
if not self._update_sensor(
self._outdoor_temp_sensor, None, outdoor_temp
)
else schedule_update
)
schedule_update = (
False
if not self._update_sensor(
self._indoor_humidity_sensor, None, indoor_hum
)
else schedule_update
)
if schedule_update:
self.async_schedule_update_ha_state(True)
self.hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_START, mold_indicator_startup
)
def _update_sensor(self, entity, old_state, new_state):
"""Update information based on new sensor states."""
_LOGGER.debug("Sensor update for %s", entity)
if new_state is None:
return False
# If old_state is not set and new state is unknown then it means
# that the sensor just started up
if old_state is None and new_state.state == STATE_UNKNOWN:
return False
if entity == self._indoor_temp_sensor:
self._indoor_temp = MoldIndicator._update_temp_sensor(new_state)
elif entity == self._outdoor_temp_sensor:
self._outdoor_temp = MoldIndicator._update_temp_sensor(new_state)
elif entity == self._indoor_humidity_sensor:
self._indoor_hum = MoldIndicator._update_hum_sensor(new_state)
return True
@staticmethod
def _update_temp_sensor(state):
"""Parse temperature sensor value."""
_LOGGER.debug("Updating temp sensor with value %s", state.state)
        # Log an error and return None if the sensor state is unknown.
if state.state == STATE_UNKNOWN:
_LOGGER.error(
"Unable to parse temperature sensor %s with state: %s",
state.entity_id,
state.state,
)
return None
unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
temp = util.convert(state.state, float)
if temp is None:
_LOGGER.error(
"Unable to parse temperature sensor %s with state: %s",
state.entity_id,
state.state,
)
return None
# convert to celsius if necessary
if unit == TEMP_FAHRENHEIT:
return util.temperature.fahrenheit_to_celsius(temp)
if unit == TEMP_CELSIUS:
return temp
_LOGGER.error(
"Temp sensor %s has unsupported unit: %s (allowed: %s, %s)",
state.entity_id,
unit,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
)
return None
@staticmethod
def _update_hum_sensor(state):
"""Parse humidity sensor value."""
_LOGGER.debug("Updating humidity sensor with value %s", state.state)
        # Log an error and return None if the sensor state is unknown.
if state.state == STATE_UNKNOWN:
_LOGGER.error(
"Unable to parse humidity sensor %s, state: %s",
state.entity_id,
state.state,
)
return None
unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
hum = util.convert(state.state, float)
if hum is None:
_LOGGER.error(
"Unable to parse humidity sensor %s, state: %s",
state.entity_id,
state.state,
)
return None
if unit != PERCENTAGE:
_LOGGER.error(
"Humidity sensor %s has unsupported unit: %s %s",
state.entity_id,
unit,
" (allowed: %)",
)
return None
if hum > 100 or hum < 0:
_LOGGER.error(
"Humidity sensor %s is out of range: %s %s",
state.entity_id,
hum,
"(allowed: 0-100%)",
)
return None
return hum
async def async_update(self):
"""Calculate latest state."""
_LOGGER.debug("Update state for %s", self.entity_id)
# check all sensors
if None in (self._indoor_temp, self._indoor_hum, self._outdoor_temp):
self._available = False
self._dewpoint = None
self._crit_temp = None
return
# re-calculate dewpoint and mold indicator
self._calc_dewpoint()
self._calc_moldindicator()
if self._state is None:
self._available = False
self._dewpoint = None
self._crit_temp = None
else:
self._available = True
def _calc_dewpoint(self):
"""Calculate the dewpoint for the indoor air."""
        # Use the Magnus approximation to calculate the dew point
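        # Magnus formula with the module-level constants MAGNUS_K2/MAGNUS_K3,
        # temperature T in Celsius and relative humidity RH in percent:
        #   alpha    = K2 * T / (K3 + T)
        #   beta     = K2 * K3 / (K3 + T)
        #   dewpoint = K3 * (alpha + ln(RH / 100)) / (beta - ln(RH / 100))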
alpha = MAGNUS_K2 * self._indoor_temp / (MAGNUS_K3 + self._indoor_temp)
beta = MAGNUS_K2 * MAGNUS_K3 / (MAGNUS_K3 + self._indoor_temp)
if self._indoor_hum == 0:
self._dewpoint = -50 # not defined, assume very low value
else:
self._dewpoint = (
MAGNUS_K3
* (alpha + math.log(self._indoor_hum / 100.0))
/ (beta - math.log(self._indoor_hum / 100.0))
)
_LOGGER.debug("Dewpoint: %f %s", self._dewpoint, TEMP_CELSIUS)
def _calc_moldindicator(self):
"""Calculate the humidity at the (cold) calibration point."""
if None in (self._dewpoint, self._calib_factor) or self._calib_factor == 0:
_LOGGER.debug(
"Invalid inputs - dewpoint: %s, calibration-factor: %s",
self._dewpoint,
self._calib_factor,
)
self._state = None
self._available = False
self._crit_temp = None
return
# first calculate the approximate temperature at the calibration point
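        # This is a linear interpolation between outdoor and indoor
        # temperature: a calibration factor of 1 places the critical point at
        # the indoor temperature, larger factors move it towards the outdoor
        # temperature.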
self._crit_temp = (
self._outdoor_temp
+ (self._indoor_temp - self._outdoor_temp) / self._calib_factor
)
_LOGGER.debug(
"Estimated Critical Temperature: %f %s", self._crit_temp, TEMP_CELSIUS
)
# Then calculate the humidity at this point
alpha = MAGNUS_K2 * self._crit_temp / (MAGNUS_K3 + self._crit_temp)
beta = MAGNUS_K2 * MAGNUS_K3 / (MAGNUS_K3 + self._crit_temp)
crit_humidity = (
math.exp(
(self._dewpoint * beta - MAGNUS_K3 * alpha)
/ (self._dewpoint + MAGNUS_K3)
)
* 100.0
)
# check bounds and format
if crit_humidity > 100:
self._state = "100"
elif crit_humidity < 0:
self._state = "0"
else:
self._state = f"{int(crit_humidity):d}"
_LOGGER.debug("Mold indicator humidity: %s", self._state)
@property
def should_poll(self):
"""Return the polling state."""
return False
@property
def name(self):
"""Return the name."""
return self._name
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return PERCENTAGE
@property
def state(self):
"""Return the state of the entity."""
return self._state
@property
def available(self):
"""Return the availability of this sensor."""
return self._available
@property
def device_state_attributes(self):
"""Return the state attributes."""
if self._is_metric:
return {ATTR_DEWPOINT: self._dewpoint, ATTR_CRITICAL_TEMP: self._crit_temp}
dewpoint = (
util.temperature.celsius_to_fahrenheit(self._dewpoint)
if self._dewpoint is not None
else None
)
crit_temp = (
util.temperature.celsius_to_fahrenheit(self._crit_temp)
if self._crit_temp is not None
else None
)
return {ATTR_DEWPOINT: dewpoint, ATTR_CRITICAL_TEMP: crit_temp}
|
import numpy as np
import pandas as pd
from conftest import MAX_TS, random_split
from yandextank.aggregator.chopper import TimeChopper
class TestChopper(object):
def test_one_chunk(self, data):
chopper = TimeChopper([data], 5)
result = list(chopper)
assert len(result) == MAX_TS
        concatenated = pd.concat(r[1] for r in result)
        assert len(data) == len(concatenated), "We did not lose anything"
def test_multiple_chunks(self, data):
chunks = random_split(data)
chopper = TimeChopper(chunks, 5)
result = list(chopper)
assert len(result) == MAX_TS
        concatenated = pd.concat(r[1] for r in result)
        assert len(data) == len(concatenated), "We did not lose anything"
def test_partially_reversed_data(self, data):
chunks = list(random_split(data))
chunks[5], chunks[6] = chunks[6], chunks[5]
chopper = TimeChopper(chunks, 5)
result = list(chopper)
        assert len(
            result
        ) == MAX_TS, "DataFrame is split into the proper number of chunks"
        concatenated = pd.concat(r[1] for r in result)
        assert len(data) == len(concatenated), "We did not lose anything"
        assert np.allclose(
            concatenated.values, data.values), "We did not corrupt the data"
|